file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
query-result-stores.js | /* eslint-disable no-await-in-loop */
const assert = require('assert');
const TestUtils = require('../utils');
const query1 = `SELECT 1 AS id, 'blue' AS color`;
function | (ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
async function testBatchToCompletion(config) {
const utils = new TestUtils(config);
await utils.init(true);
const connection = await utils.post('admin', '/api/connections', {
name: 'test connection',
driver: 'sqlite',
data: {
filename: './test/fixtures/sales.sqlite',
},
});
let batch = await utils.post('admin', `/api/batches`, {
connectionId: connection.id,
batchText: query1,
});
while (batch.status !== 'finished' && batch.status !== 'error') {
await wait(25);
batch = await utils.get('admin', `/api/batches/${batch.id}`);
}
const statements = await utils.get(
'admin',
`/api/batches/${batch.id}/statements`
);
const statement1 = statements[0];
let result1 = await utils.get(
'admin',
`/api/statements/${statement1.id}/results`
);
assert.deepEqual(result1, [[1, 'blue']], 'results as expected');
// remove should succeed
await utils.models.statements.removeById(statement1.id);
await utils.get('admin', `/api/statements/${statement1.id}/results`, 404);
}
describe('api/query-result-stores', function () {
it('file', async function () {
return testBatchToCompletion({ queryResultStore: 'file' });
});
it('redis', async function () {
const available = await TestUtils.redisAvailable('redis://localhost:6379');
if (!available || process.env.SKIP_INTEGRATION === 'true') {
return this.skip();
}
return testBatchToCompletion({
queryResultStore: 'redis',
redisUri: 'redis://localhost:6379',
});
});
it('database', async function () {
return testBatchToCompletion({
queryResultStore: 'database',
});
});
it('memory', async function () {
return testBatchToCompletion({
queryResultStore: 'memory',
});
});
});
| wait | identifier_name |
test_flag_collection.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# TEST_UNICODE_LITERALS
from __future__ import (absolute_import, division, print_function,
unicode_literals) |
import numpy as np
from ...tests.helper import pytest
from .. import FlagCollection
def test_init():
FlagCollection(shape=(1, 2, 3))
def test_init_noshape():
with pytest.raises(Exception) as exc:
FlagCollection()
assert exc.value.args[0] == ('FlagCollection should be initialized with '
'the shape of the data')
def test_init_notiterable():
with pytest.raises(Exception) as exc:
FlagCollection(shape=1.)
assert exc.value.args[0] == ('FlagCollection shape should be '
'an iterable object')
def test_setitem():
f = FlagCollection(shape=(1, 2, 3))
f['a'] = np.ones((1, 2, 3)).astype(float)
f['b'] = np.ones((1, 2, 3)).astype(int)
f['c'] = np.ones((1, 2, 3)).astype(bool)
f['d'] = np.ones((1, 2, 3)).astype(str)
@pytest.mark.parametrize(('value'), [1, 1., 'spam', [1, 2, 3], (1., 2., 3.)])
def test_setitem_invalid_type(value):
f = FlagCollection(shape=(1, 2, 3))
with pytest.raises(Exception) as exc:
f['a'] = value
assert exc.value.args[0] == 'flags should be given as a Numpy array'
def test_setitem_invalid_shape():
f = FlagCollection(shape=(1, 2, 3))
with pytest.raises(ValueError) as exc:
f['a'] = np.ones((3, 2, 1))
assert exc.value.args[0].startswith('flags array shape')
assert exc.value.args[0].endswith('does not match data shape (1, 2, 3)') | random_line_split |
|
test_flag_collection.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# TEST_UNICODE_LITERALS
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
from ...tests.helper import pytest
from .. import FlagCollection
def test_init():
FlagCollection(shape=(1, 2, 3))
def test_init_noshape():
with pytest.raises(Exception) as exc:
FlagCollection()
assert exc.value.args[0] == ('FlagCollection should be initialized with '
'the shape of the data')
def test_init_notiterable():
with pytest.raises(Exception) as exc:
FlagCollection(shape=1.)
assert exc.value.args[0] == ('FlagCollection shape should be '
'an iterable object')
def test_setitem():
f = FlagCollection(shape=(1, 2, 3))
f['a'] = np.ones((1, 2, 3)).astype(float)
f['b'] = np.ones((1, 2, 3)).astype(int)
f['c'] = np.ones((1, 2, 3)).astype(bool)
f['d'] = np.ones((1, 2, 3)).astype(str)
@pytest.mark.parametrize(('value'), [1, 1., 'spam', [1, 2, 3], (1., 2., 3.)])
def | (value):
f = FlagCollection(shape=(1, 2, 3))
with pytest.raises(Exception) as exc:
f['a'] = value
assert exc.value.args[0] == 'flags should be given as a Numpy array'
def test_setitem_invalid_shape():
f = FlagCollection(shape=(1, 2, 3))
with pytest.raises(ValueError) as exc:
f['a'] = np.ones((3, 2, 1))
assert exc.value.args[0].startswith('flags array shape')
assert exc.value.args[0].endswith('does not match data shape (1, 2, 3)')
| test_setitem_invalid_type | identifier_name |
test_flag_collection.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# TEST_UNICODE_LITERALS
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
from ...tests.helper import pytest
from .. import FlagCollection
def test_init():
FlagCollection(shape=(1, 2, 3))
def test_init_noshape():
with pytest.raises(Exception) as exc:
FlagCollection()
assert exc.value.args[0] == ('FlagCollection should be initialized with '
'the shape of the data')
def test_init_notiterable():
with pytest.raises(Exception) as exc:
FlagCollection(shape=1.)
assert exc.value.args[0] == ('FlagCollection shape should be '
'an iterable object')
def test_setitem():
f = FlagCollection(shape=(1, 2, 3))
f['a'] = np.ones((1, 2, 3)).astype(float)
f['b'] = np.ones((1, 2, 3)).astype(int)
f['c'] = np.ones((1, 2, 3)).astype(bool)
f['d'] = np.ones((1, 2, 3)).astype(str)
@pytest.mark.parametrize(('value'), [1, 1., 'spam', [1, 2, 3], (1., 2., 3.)])
def test_setitem_invalid_type(value):
|
def test_setitem_invalid_shape():
f = FlagCollection(shape=(1, 2, 3))
with pytest.raises(ValueError) as exc:
f['a'] = np.ones((3, 2, 1))
assert exc.value.args[0].startswith('flags array shape')
assert exc.value.args[0].endswith('does not match data shape (1, 2, 3)')
| f = FlagCollection(shape=(1, 2, 3))
with pytest.raises(Exception) as exc:
f['a'] = value
assert exc.value.args[0] == 'flags should be given as a Numpy array' | identifier_body |
extern-call-scrub.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This time we're testing repeatedly going up and down both stacks to
// make sure the stack pointers are maintained properly in both
// directions
extern crate libc;
use std::thread::Thread;
mod rustrt {
extern crate libc;
#[link(name = "rust_test_helpers")]
extern {
pub fn rust_dbg_call(cb: extern "C" fn(libc::uintptr_t) -> libc::uintptr_t,
data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1 {
data
} else {
count(data - 1) + count(data - 1)
}
}
fn count(n: libc::uintptr_t) -> libc::uintptr_t {
unsafe {
println!("n = {}", n);
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() {
// Make sure we're on a task with small Rust stacks (main currently
// has a large stack)
let _t = Thread::spawn(move|| {
let result = count(12);
println!("result = {}", result);
assert_eq!(result, 2048); | } | }); | random_line_split |
extern-call-scrub.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This time we're testing repeatedly going up and down both stacks to
// make sure the stack pointers are maintained properly in both
// directions
extern crate libc;
use std::thread::Thread;
mod rustrt {
extern crate libc;
#[link(name = "rust_test_helpers")]
extern {
pub fn rust_dbg_call(cb: extern "C" fn(libc::uintptr_t) -> libc::uintptr_t,
data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1 | else {
count(data - 1) + count(data - 1)
}
}
fn count(n: libc::uintptr_t) -> libc::uintptr_t {
unsafe {
println!("n = {}", n);
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() {
// Make sure we're on a task with small Rust stacks (main currently
// has a large stack)
let _t = Thread::spawn(move|| {
let result = count(12);
println!("result = {}", result);
assert_eq!(result, 2048);
});
}
| {
data
} | conditional_block |
extern-call-scrub.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This time we're testing repeatedly going up and down both stacks to
// make sure the stack pointers are maintained properly in both
// directions
extern crate libc;
use std::thread::Thread;
mod rustrt {
extern crate libc;
#[link(name = "rust_test_helpers")]
extern {
pub fn rust_dbg_call(cb: extern "C" fn(libc::uintptr_t) -> libc::uintptr_t,
data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1 {
data
} else {
count(data - 1) + count(data - 1)
}
}
fn | (n: libc::uintptr_t) -> libc::uintptr_t {
unsafe {
println!("n = {}", n);
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() {
// Make sure we're on a task with small Rust stacks (main currently
// has a large stack)
let _t = Thread::spawn(move|| {
let result = count(12);
println!("result = {}", result);
assert_eq!(result, 2048);
});
}
| count | identifier_name |
extern-call-scrub.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This time we're testing repeatedly going up and down both stacks to
// make sure the stack pointers are maintained properly in both
// directions
extern crate libc;
use std::thread::Thread;
mod rustrt {
extern crate libc;
#[link(name = "rust_test_helpers")]
extern {
pub fn rust_dbg_call(cb: extern "C" fn(libc::uintptr_t) -> libc::uintptr_t,
data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t |
fn count(n: libc::uintptr_t) -> libc::uintptr_t {
unsafe {
println!("n = {}", n);
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() {
// Make sure we're on a task with small Rust stacks (main currently
// has a large stack)
let _t = Thread::spawn(move|| {
let result = count(12);
println!("result = {}", result);
assert_eq!(result, 2048);
});
}
| {
if data == 1 {
data
} else {
count(data - 1) + count(data - 1)
}
} | identifier_body |
settings.py | """
Django settings for testproject project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'n$(okl9n*#au0%^wxgu$c#x(f%lby3v_j)wuti&6q-nx_35uj6'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'testproject.urls'
WSGI_APPLICATION = 'testproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
} |
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/' | } | random_line_split |
IsClassMethods.py | """Subclass of IsClassMethods, which is generated by wxFormBuilder."""
from beatle import model
from beatle.lib import wxx
from beatle.activity.models.ui import ui as ui
from beatle.app.utils import cached_type
# Implementing IsClassMethods
class | (ui.IsClassMethods):
"""
This dialog allows to add/remove is_class methods.
"""
@wxx.SetInfo(__doc__)
def __init__(self, parent, container):
"""Dialog initialization"""
super(IsClassMethods, self).__init__(parent)
# container es la clase base
self.container = container
# create a map of feasible casts
self._classes = []
for k in container._deriv:
self.visit(k)
# get current methods
self._is_class_methods = container(model.cc.IsClassMethod)
# create map from names to implementations
self._is_class_method_names = dict([(x._name, x) for x in self._is_class_methods])
# create map from feasible is_class to current impl
self._map = {}
for k in self._classes:
name = k.scoped
name = "is_" + name.replace('::', '_')
if name in self._is_class_method_names:
self._map[name] = (k, self._is_class_method_names[name])
else:
self._map[name] = (k, None)
# do a label insertion remembering state
pos = 0
for k in self._map:
v = self._map[k]
self.m_checkList2.Insert(k, pos, v)
if v[1]:
self.m_checkList2.Check(pos)
pos = pos + 1
def visit(self, k):
"""Add inheritance branch"""
for l in k._deriv:
self.visit(l)
self._classes.append(k)
def get_kwargs(self):
"""Returns kwargs dictionary suitable for objects creation"""
kwargs_list = []
tbool = cached_type(self.container.project, 'bool')
for item in range(0, self.m_checkList2.GetCount()):
v = self.m_checkList2.GetClientData(item)
c = self.m_checkList2.IsChecked(item)
if (c and v[1]) or (not c and not v[1]):
continue
if c:
kwargs = {}
derivative = v[0]
kwargs['parent'] = self.container
kwargs['name'] = 'is_' + derivative.scoped.replace('::', '_')
kwargs['type'] = model.cc.typeinst(
type=tbool, const=True)
kwargs['constmethod'] = True
kwargs['note'] = 'This method checks if the instance is specialized as {0}'.format(derivative.GetFullLabel())
kwargs['declare'] = True
kwargs['implement'] = True
kwargs['content'] = '\treturn ( dynamic_cast<const {0}*>(this) != nullptr );'.format(derivative.scoped)
kwargs_list.append(kwargs)
else:
v[1].Delete()
return kwargs_list
| IsClassMethods | identifier_name |
IsClassMethods.py | """Subclass of IsClassMethods, which is generated by wxFormBuilder."""
from beatle import model
from beatle.lib import wxx
from beatle.activity.models.ui import ui as ui
from beatle.app.utils import cached_type
# Implementing IsClassMethods
class IsClassMethods(ui.IsClassMethods):
"""
This dialog allows to add/remove is_class methods.
"""
@wxx.SetInfo(__doc__)
def __init__(self, parent, container):
"""Dialog initialization"""
super(IsClassMethods, self).__init__(parent)
# container es la clase base
self.container = container
# create a map of feasible casts
self._classes = []
for k in container._deriv:
self.visit(k)
# get current methods
self._is_class_methods = container(model.cc.IsClassMethod)
# create map from names to implementations
self._is_class_method_names = dict([(x._name, x) for x in self._is_class_methods])
# create map from feasible is_class to current impl
self._map = {}
for k in self._classes:
|
# do a label insertion remembering state
pos = 0
for k in self._map:
v = self._map[k]
self.m_checkList2.Insert(k, pos, v)
if v[1]:
self.m_checkList2.Check(pos)
pos = pos + 1
def visit(self, k):
"""Add inheritance branch"""
for l in k._deriv:
self.visit(l)
self._classes.append(k)
def get_kwargs(self):
"""Returns kwargs dictionary suitable for objects creation"""
kwargs_list = []
tbool = cached_type(self.container.project, 'bool')
for item in range(0, self.m_checkList2.GetCount()):
v = self.m_checkList2.GetClientData(item)
c = self.m_checkList2.IsChecked(item)
if (c and v[1]) or (not c and not v[1]):
continue
if c:
kwargs = {}
derivative = v[0]
kwargs['parent'] = self.container
kwargs['name'] = 'is_' + derivative.scoped.replace('::', '_')
kwargs['type'] = model.cc.typeinst(
type=tbool, const=True)
kwargs['constmethod'] = True
kwargs['note'] = 'This method checks if the instance is specialized as {0}'.format(derivative.GetFullLabel())
kwargs['declare'] = True
kwargs['implement'] = True
kwargs['content'] = '\treturn ( dynamic_cast<const {0}*>(this) != nullptr );'.format(derivative.scoped)
kwargs_list.append(kwargs)
else:
v[1].Delete()
return kwargs_list
| name = k.scoped
name = "is_" + name.replace('::', '_')
if name in self._is_class_method_names:
self._map[name] = (k, self._is_class_method_names[name])
else:
self._map[name] = (k, None) | conditional_block |
IsClassMethods.py | """Subclass of IsClassMethods, which is generated by wxFormBuilder."""
from beatle import model
from beatle.lib import wxx
from beatle.activity.models.ui import ui as ui
from beatle.app.utils import cached_type
# Implementing IsClassMethods
class IsClassMethods(ui.IsClassMethods):
"""
This dialog allows to add/remove is_class methods.
"""
@wxx.SetInfo(__doc__)
def __init__(self, parent, container):
"""Dialog initialization"""
super(IsClassMethods, self).__init__(parent)
# container es la clase base
self.container = container
# create a map of feasible casts
self._classes = []
for k in container._deriv:
self.visit(k)
# get current methods
self._is_class_methods = container(model.cc.IsClassMethod)
# create map from names to implementations
self._is_class_method_names = dict([(x._name, x) for x in self._is_class_methods])
# create map from feasible is_class to current impl
self._map = {}
for k in self._classes:
name = k.scoped
name = "is_" + name.replace('::', '_')
if name in self._is_class_method_names:
self._map[name] = (k, self._is_class_method_names[name])
else:
self._map[name] = (k, None)
# do a label insertion remembering state
pos = 0
for k in self._map:
v = self._map[k]
self.m_checkList2.Insert(k, pos, v)
if v[1]:
self.m_checkList2.Check(pos)
pos = pos + 1
def visit(self, k):
"""Add inheritance branch"""
for l in k._deriv:
self.visit(l)
self._classes.append(k)
def get_kwargs(self):
"""Returns kwargs dictionary suitable for objects creation"""
kwargs_list = []
tbool = cached_type(self.container.project, 'bool')
for item in range(0, self.m_checkList2.GetCount()):
v = self.m_checkList2.GetClientData(item) | kwargs = {}
derivative = v[0]
kwargs['parent'] = self.container
kwargs['name'] = 'is_' + derivative.scoped.replace('::', '_')
kwargs['type'] = model.cc.typeinst(
type=tbool, const=True)
kwargs['constmethod'] = True
kwargs['note'] = 'This method checks if the instance is specialized as {0}'.format(derivative.GetFullLabel())
kwargs['declare'] = True
kwargs['implement'] = True
kwargs['content'] = '\treturn ( dynamic_cast<const {0}*>(this) != nullptr );'.format(derivative.scoped)
kwargs_list.append(kwargs)
else:
v[1].Delete()
return kwargs_list | c = self.m_checkList2.IsChecked(item)
if (c and v[1]) or (not c and not v[1]):
continue
if c: | random_line_split |
IsClassMethods.py | """Subclass of IsClassMethods, which is generated by wxFormBuilder."""
from beatle import model
from beatle.lib import wxx
from beatle.activity.models.ui import ui as ui
from beatle.app.utils import cached_type
# Implementing IsClassMethods
class IsClassMethods(ui.IsClassMethods):
"""
This dialog allows to add/remove is_class methods.
"""
@wxx.SetInfo(__doc__)
def __init__(self, parent, container):
|
def visit(self, k):
"""Add inheritance branch"""
for l in k._deriv:
self.visit(l)
self._classes.append(k)
def get_kwargs(self):
"""Returns kwargs dictionary suitable for objects creation"""
kwargs_list = []
tbool = cached_type(self.container.project, 'bool')
for item in range(0, self.m_checkList2.GetCount()):
v = self.m_checkList2.GetClientData(item)
c = self.m_checkList2.IsChecked(item)
if (c and v[1]) or (not c and not v[1]):
continue
if c:
kwargs = {}
derivative = v[0]
kwargs['parent'] = self.container
kwargs['name'] = 'is_' + derivative.scoped.replace('::', '_')
kwargs['type'] = model.cc.typeinst(
type=tbool, const=True)
kwargs['constmethod'] = True
kwargs['note'] = 'This method checks if the instance is specialized as {0}'.format(derivative.GetFullLabel())
kwargs['declare'] = True
kwargs['implement'] = True
kwargs['content'] = '\treturn ( dynamic_cast<const {0}*>(this) != nullptr );'.format(derivative.scoped)
kwargs_list.append(kwargs)
else:
v[1].Delete()
return kwargs_list
| """Dialog initialization"""
super(IsClassMethods, self).__init__(parent)
# container es la clase base
self.container = container
# create a map of feasible casts
self._classes = []
for k in container._deriv:
self.visit(k)
# get current methods
self._is_class_methods = container(model.cc.IsClassMethod)
# create map from names to implementations
self._is_class_method_names = dict([(x._name, x) for x in self._is_class_methods])
# create map from feasible is_class to current impl
self._map = {}
for k in self._classes:
name = k.scoped
name = "is_" + name.replace('::', '_')
if name in self._is_class_method_names:
self._map[name] = (k, self._is_class_method_names[name])
else:
self._map[name] = (k, None)
# do a label insertion remembering state
pos = 0
for k in self._map:
v = self._map[k]
self.m_checkList2.Insert(k, pos, v)
if v[1]:
self.m_checkList2.Check(pos)
pos = pos + 1 | identifier_body |
app.module.ts | import { BrowserModule } from '@angular/platform-browser';
import { NgModule, LOCALE_ID } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { HttpModule } from '@angular/http';
import { MaterializeModule } from 'angular2-materialize';
import { AppComponent } from './app.component';
import { HeaderComponent } from './pages/themes/header/header.component';
import { MenuComponent } from './pages/themes/menu/menu.component';
import { FooterComponent } from './pages/themes/footer/footer.component';
import { EnderecoComponent } from './pages/endereco/endereco.component';
import { PermicaoComponent } from './pages/permicao/permicao.component';
import { PerfilComponent } from './pages/perfil/perfil.component';
import { UsuarioComponent } from './pages/usuario/usuario.component';
import { EmpresaComponent } from './pages/empresa/empresa.component';
import { HomeComponent } from './pages/home/home.component';
import { LoginComponent } from './pages/login/login.component';
import { routing } from './app.routing';
import { AuthService } from './pages/login/auth.service';
import { AuthGuard } from './pages/login/guards/auth.guard';
import { EnderecoService } from './pages/endereco/endereco.service';
import { MessageService } from './message.service';
@NgModule({
declarations: [
AppComponent,
HeaderComponent,
MenuComponent,
FooterComponent,
EnderecoComponent,
PermicaoComponent,
PerfilComponent,
UsuarioComponent,
EmpresaComponent,
HomeComponent,
LoginComponent,
UsuarioComponent
],
imports: [
BrowserModule,
FormsModule,
HttpModule,
routing,
MaterializeModule
],
providers: [{
provide: LOCALE_ID,
useValue: 'pt-BR'
},
AuthService,
AuthGuard,
EnderecoService,
MessageService
],
bootstrap: [AppComponent]
})
export class | { }
| AppModule | identifier_name |
app.module.ts | import { BrowserModule } from '@angular/platform-browser';
import { NgModule, LOCALE_ID } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { HttpModule } from '@angular/http';
import { MaterializeModule } from 'angular2-materialize';
import { AppComponent } from './app.component';
import { HeaderComponent } from './pages/themes/header/header.component';
import { MenuComponent } from './pages/themes/menu/menu.component';
import { FooterComponent } from './pages/themes/footer/footer.component';
import { EnderecoComponent } from './pages/endereco/endereco.component';
import { PermicaoComponent } from './pages/permicao/permicao.component';
import { PerfilComponent } from './pages/perfil/perfil.component';
import { UsuarioComponent } from './pages/usuario/usuario.component'; | import { LoginComponent } from './pages/login/login.component';
import { routing } from './app.routing';
import { AuthService } from './pages/login/auth.service';
import { AuthGuard } from './pages/login/guards/auth.guard';
import { EnderecoService } from './pages/endereco/endereco.service';
import { MessageService } from './message.service';
@NgModule({
declarations: [
AppComponent,
HeaderComponent,
MenuComponent,
FooterComponent,
EnderecoComponent,
PermicaoComponent,
PerfilComponent,
UsuarioComponent,
EmpresaComponent,
HomeComponent,
LoginComponent,
UsuarioComponent
],
imports: [
BrowserModule,
FormsModule,
HttpModule,
routing,
MaterializeModule
],
providers: [{
provide: LOCALE_ID,
useValue: 'pt-BR'
},
AuthService,
AuthGuard,
EnderecoService,
MessageService
],
bootstrap: [AppComponent]
})
export class AppModule { } | import { EmpresaComponent } from './pages/empresa/empresa.component';
import { HomeComponent } from './pages/home/home.component'; | random_line_split |
test_classes.py | from __future__ import absolute_import
from __future__ import print_function
from contextlib import contextmanager
from typing import (cast, Any, Callable, Dict, Iterable, Iterator, List, Mapping, Optional,
Sized, Tuple, Union, Text)
from django.core.urlresolvers import resolve
from django.conf import settings
from django.test import TestCase
from django.test.client import (
BOUNDARY, MULTIPART_CONTENT, encode_multipart,
)
from django.template import loader
from django.test.testcases import SerializeMixin
from django.http import HttpResponse
from django.db.utils import IntegrityError
from zerver.lib.initial_password import initial_password
from zerver.lib.db import TimeTrackingCursor
from zerver.lib.str_utils import force_text
from zerver.lib.utils import is_remote_server
from zerver.lib import cache
from zerver.tornado.handlers import allocate_handler_id
from zerver.worker import queue_processors
from zerver.lib.actions import (
check_send_message, create_stream_if_needed, bulk_add_subscriptions,
get_display_recipient, bulk_remove_subscriptions
)
from zerver.lib.test_helpers import (
instrument_url, find_key_by_email,
)
from zerver.models import (
get_stream,
get_user,
get_user_profile_by_email,
get_realm,
get_realm_by_email_domain,
Client,
Message,
Realm,
Recipient,
Stream,
Subscription,
UserMessage,
UserProfile,
)
from zerver.lib.request import JsonableError
from zilencer.models import get_remote_server_by_uuid
import base64
import mock
import os
import re
import time
import ujson
import unittest
from six.moves import urllib
from six import binary_type
from zerver.lib.str_utils import NonBinaryStr
from contextlib import contextmanager
import six
API_KEYS = {} # type: Dict[Text, Text]
def flush_caches_for_testing():
# type: () -> None
global API_KEYS
API_KEYS = {}
class UploadSerializeMixin(SerializeMixin):
"""
We cannot use override_settings to change upload directory because
because settings.LOCAL_UPLOADS_DIR is used in url pattern and urls
are compiled only once. Otherwise using a different upload directory
for conflicting test cases would have provided better performance
while providing the required isolation.
"""
lockfile = 'var/upload_lock'
@classmethod
def setUpClass(cls, *args, **kwargs):
# type: (*Any, **Any) -> None
if not os.path.exists(cls.lockfile):
with open(cls.lockfile, 'w'): # nocoverage - rare locking case
pass
super(UploadSerializeMixin, cls).setUpClass(*args, **kwargs)
class ZulipTestCase(TestCase):
# Ensure that the test system just shows us diffs
maxDiff = None # type: Optional[int]
'''
WRAPPER_COMMENT:
We wrap calls to self.client.{patch,put,get,post,delete} for various
reasons. Some of this has to do with fixing encodings before calling
into the Django code. Some of this has to do with providing a future
path for instrumentation. Some of it's just consistency.
The linter will prevent direct calls to self.client.foo, so the wrapper
functions have to fake out the linter by using a local variable called
django_client to fool the regext.
'''
def __init__(self, *args, **kwargs):
# type: (*Any, **Any) -> None
# This method should be removed when we migrate to version 3 of Python
import six
if six.PY2:
|
super(ZulipTestCase, self).__init__(*args, **kwargs)
DEFAULT_REALM = Realm.objects.get(string_id='zulip')
@instrument_url
def client_patch(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
"""
We need to urlencode, since Django's function won't do it for us.
"""
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.patch(url, encoded, **kwargs)
@instrument_url
def client_patch_multipart(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
"""
Use this for patch requests that have file uploads or
that need some sort of multi-part content. In the future
Django's test client may become a bit more flexible,
so we can hopefully eliminate this. (When you post
with the Django test client, it deals with MULTIPART_CONTENT
automatically, but not patch.)
"""
encoded = encode_multipart(BOUNDARY, info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.patch(
url,
encoded,
content_type=MULTIPART_CONTENT,
**kwargs)
@instrument_url
def client_put(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.put(url, encoded, **kwargs)
@instrument_url
def client_delete(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.delete(url, encoded, **kwargs)
@instrument_url
def client_options(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.options(url, encoded, **kwargs)
@instrument_url
def client_post(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
django_client = self.client # see WRAPPER_COMMENT
return django_client.post(url, info, **kwargs)
@instrument_url
def client_post_request(self, url, req):
# type: (Text, Any) -> HttpResponse
"""
We simulate hitting an endpoint here, although we
actually resolve the URL manually and hit the view
directly. We have this helper method to allow our
instrumentation to work for /notify_tornado and
future similar methods that require doing funny
things to a request object.
"""
match = resolve(url)
return match.func(req)
@instrument_url
def client_get(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
django_client = self.client # see WRAPPER_COMMENT
return django_client.get(url, info, **kwargs)
example_user_map = dict(
hamlet=u'[email protected]',
cordelia=u'[email protected]',
iago=u'[email protected]',
prospero=u'[email protected]',
othello=u'[email protected]',
AARON=u'[email protected]',
aaron=u'[email protected]',
ZOE=u'[email protected]',
)
mit_user_map = dict(
sipbtest=u"[email protected]",
starnine=u"[email protected]",
espuser=u"[email protected]",
)
# Non-registered test users
nonreg_user_map = dict(
test=u'[email protected]',
test1=u'[email protected]',
alice=u'[email protected]',
newuser=u'[email protected]',
bob=u'[email protected]',
cordelia=u'[email protected]',
newguy=u'[email protected]',
me=u'[email protected]',
)
def nonreg_user(self, name):
# type: (str) -> UserProfile
email = self.nonreg_user_map[name]
return get_user(email, get_realm_by_email_domain(email))
def example_user(self, name):
# type: (str) -> UserProfile
email = self.example_user_map[name]
return get_user(email, get_realm('zulip'))
def mit_user(self, name):
# type: (str) -> UserProfile
email = self.mit_user_map[name]
return get_user(email, get_realm('zephyr'))
def nonreg_email(self, name):
# type: (str) -> Text
return self.nonreg_user_map[name]
def example_email(self, name):
# type: (str) -> Text
return self.example_user_map[name]
def mit_email(self, name):
# type: (str) -> Text
return self.mit_user_map[name]
def notification_bot(self):
# type: () -> UserProfile
return get_user('[email protected]', get_realm('zulip'))
def login_with_return(self, email, password=None):
# type: (Text, Optional[Text]) -> HttpResponse
if password is None:
password = initial_password(email)
return self.client_post('/accounts/login/',
{'username': email, 'password': password})
def login(self, email, password=None, fails=False):
# type: (Text, Optional[Text], bool) -> HttpResponse
if password is None:
password = initial_password(email)
if not fails:
self.assertTrue(self.client.login(username=email, password=password))
else:
self.assertFalse(self.client.login(username=email, password=password))
def logout(self):
# type: () -> None
self.client.logout()
def register(self, email, password):
# type: (Text, Text) -> HttpResponse
self.client_post('/accounts/home/', {'email': email})
return self.submit_reg_form_for_user(email, password)
def submit_reg_form_for_user(self, email, password, realm_name="Zulip Test",
realm_subdomain="zuliptest",
from_confirmation='', full_name=None, timezone=u'', **kwargs):
# type: (Text, Text, Optional[Text], Optional[Text], Optional[Text], Optional[Text], Optional[Text], **Any) -> HttpResponse
"""
Stage two of the two-step registration process.
If things are working correctly the account should be fully
registered after this call.
You can pass the HTTP_HOST variable for subdomains via kwargs.
"""
if full_name is None:
full_name = email.replace("@", "_")
return self.client_post('/accounts/register/',
{'full_name': full_name,
'password': password,
'realm_name': realm_name,
'realm_subdomain': realm_subdomain,
'key': find_key_by_email(email),
'timezone': timezone,
'terms': True,
'from_confirmation': from_confirmation},
**kwargs)
def get_confirmation_url_from_outbox(self, email_address, path_pattern="(\S+)>"):
# type: (Text, Text) -> Text
from django.core.mail import outbox
for message in reversed(outbox):
if email_address in message.to:
return re.search(settings.EXTERNAL_HOST + path_pattern,
message.body).groups()[0]
else:
raise AssertionError("Couldn't find a confirmation email.")
def get_api_key(self, email):
# type: (Text) -> Text
if email not in API_KEYS:
API_KEYS[email] = get_user_profile_by_email(email).api_key
return API_KEYS[email]
def get_server_api_key(self, server_uuid):
# type: (Text) -> Text
if server_uuid not in API_KEYS:
API_KEYS[server_uuid] = get_remote_server_by_uuid(server_uuid).api_key
return API_KEYS[server_uuid]
def api_auth(self, identifier):
# type: (Text) -> Dict[str, Text]
"""
identifier: Can be an email or a remote server uuid.
"""
if is_remote_server(identifier):
api_key = self.get_server_api_key(identifier)
else:
api_key = self.get_api_key(identifier)
credentials = u"%s:%s" % (identifier, api_key)
return {
'HTTP_AUTHORIZATION': u'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
}
def get_streams(self, email, realm):
# type: (Text, Realm) -> List[Text]
"""
Helper function to get the stream names for a user
"""
user_profile = get_user(email, realm)
subs = Subscription.objects.filter(
user_profile=user_profile,
active=True,
recipient__type=Recipient.STREAM)
return [cast(Text, get_display_recipient(sub.recipient)) for sub in subs]
def send_message(self, sender_name, raw_recipients, message_type,
content=u"test content", subject=u"test", **kwargs):
# type: (Text, Union[Text, List[Text]], int, Text, Text, **Any) -> int
sender = get_user_profile_by_email(sender_name)
if message_type in [Recipient.PERSONAL, Recipient.HUDDLE]:
message_type_name = "private"
else:
message_type_name = "stream"
if isinstance(raw_recipients, six.string_types):
recipient_list = [raw_recipients]
else:
recipient_list = raw_recipients
(sending_client, _) = Client.objects.get_or_create(name="test suite")
return check_send_message(
sender, sending_client, message_type_name, recipient_list, subject,
content, forged=False, forged_timestamp=None,
forwarder_user_profile=sender, realm=sender.realm, **kwargs)
def get_messages(self, anchor=1, num_before=100, num_after=100,
use_first_unread_anchor=False):
# type: (int, int, int, bool) -> List[Dict[str, Any]]
post_params = {"anchor": anchor, "num_before": num_before,
"num_after": num_after,
"use_first_unread_anchor": ujson.dumps(use_first_unread_anchor)}
result = self.client_get("/json/messages", dict(post_params))
data = ujson.loads(result.content)
return data['messages']
def users_subscribed_to_stream(self, stream_name, realm):
# type: (Text, Realm) -> List[UserProfile]
stream = Stream.objects.get(name=stream_name, realm=realm)
recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
subscriptions = Subscription.objects.filter(recipient=recipient, active=True)
return [subscription.user_profile for subscription in subscriptions]
def assert_url_serves_contents_of_file(self, url, result):
# type: (str, bytes) -> None
response = self.client_get(url)
data = b"".join(response.streaming_content)
self.assertEqual(result, data)
def assert_json_success(self, result):
# type: (HttpResponse) -> Dict[str, Any]
"""
Successful POSTs return a 200 and JSON of the form {"result": "success",
"msg": ""}.
"""
self.assertEqual(result.status_code, 200, result)
json = ujson.loads(result.content)
self.assertEqual(json.get("result"), "success")
# We have a msg key for consistency with errors, but it typically has an
# empty value.
self.assertIn("msg", json)
return json
def get_json_error(self, result, status_code=400):
# type: (HttpResponse, int) -> Dict[str, Any]
self.assertEqual(result.status_code, status_code)
json = ujson.loads(result.content)
self.assertEqual(json.get("result"), "error")
return json['msg']
def assert_json_error(self, result, msg, status_code=400):
# type: (HttpResponse, Text, int) -> None
"""
Invalid POSTs return an error status code and JSON of the form
{"result": "error", "msg": "reason"}.
"""
self.assertEqual(self.get_json_error(result, status_code=status_code), msg)
def assert_length(self, queries, count):
# type: (Sized, int) -> None
actual_count = len(queries)
return self.assertTrue(actual_count == count,
"len(%s) == %s, != %s" % (queries, actual_count, count))
def assert_json_error_contains(self, result, msg_substring, status_code=400):
# type: (HttpResponse, Text, int) -> None
self.assertIn(msg_substring, self.get_json_error(result, status_code=status_code))
def assert_in_response(self, substring, response):
# type: (Text, HttpResponse) -> None
self.assertIn(substring, response.content.decode('utf-8'))
def assert_in_success_response(self, substrings, response):
# type: (List[Text], HttpResponse) -> None
self.assertEqual(response.status_code, 200)
decoded = response.content.decode('utf-8')
for substring in substrings:
self.assertIn(substring, decoded)
def assert_not_in_success_response(self, substrings, response):
# type: (List[Text], HttpResponse) -> None
self.assertEqual(response.status_code, 200)
decoded = response.content.decode('utf-8')
for substring in substrings:
self.assertNotIn(substring, decoded)
def fixture_data(self, type, action, file_type='json'):
# type: (Text, Text, Text) -> Text
return force_text(open(os.path.join(os.path.dirname(__file__),
"../webhooks/%s/fixtures/%s.%s" % (type, action, file_type))).read())
def make_stream(self, stream_name, realm=None, invite_only=False):
# type: (Text, Optional[Realm], Optional[bool]) -> Stream
if realm is None:
realm = self.DEFAULT_REALM
try:
stream = Stream.objects.create(
realm=realm,
name=stream_name,
invite_only=invite_only,
)
except IntegrityError: # nocoverage -- this is for bugs in the tests
raise Exception('''
%s already exists
Please call make_stream with a stream name
that is not already in use.''' % (stream_name,))
Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
return stream
# Subscribe to a stream directly
def subscribe_to_stream(self, email, stream_name, realm=None):
# type: (Text, Text, Optional[Realm]) -> Stream
if realm is None:
realm = get_realm_by_email_domain(email)
try:
stream = get_stream(stream_name, realm)
from_stream_creation = False
except Stream.DoesNotExist:
stream, from_stream_creation = create_stream_if_needed(realm, stream_name)
user_profile = get_user_profile_by_email(email)
bulk_add_subscriptions([stream], [user_profile], from_stream_creation=from_stream_creation)
return stream
def unsubscribe_from_stream(self, email, stream_name, realm):
# type: (Text, Text, Realm) -> None
user_profile = get_user(email, realm)
stream = get_stream(stream_name, realm)
bulk_remove_subscriptions([user_profile], [stream])
# Subscribe to a stream by making an API request
def common_subscribe_to_streams(self, email, streams, extra_post_data={}, invite_only=False):
# type: (Text, Iterable[Text], Dict[str, Any], bool) -> HttpResponse
post_data = {'subscriptions': ujson.dumps([{"name": stream} for stream in streams]),
'invite_only': ujson.dumps(invite_only)}
post_data.update(extra_post_data)
result = self.client_post("/api/v1/users/me/subscriptions", post_data, **self.api_auth(email))
return result
def send_json_payload(self, email, url, payload, stream_name=None, **post_params):
# type: (Text, Text, Union[Text, Dict[str, Any]], Optional[Text], **Any) -> Message
if stream_name is not None:
self.subscribe_to_stream(email, stream_name)
result = self.client_post(url, payload, **post_params)
self.assert_json_success(result)
# Check the correct message was sent
msg = self.get_last_message()
self.assertEqual(msg.sender.email, email)
if stream_name is not None:
self.assertEqual(get_display_recipient(msg.recipient), stream_name)
# TODO: should also validate recipient for private messages
return msg
def get_last_message(self):
# type: () -> Message
return Message.objects.latest('id')
def get_second_to_last_message(self):
# type: () -> Message
return Message.objects.all().order_by('-id')[1]
@contextmanager
def simulated_markdown_failure(self):
# type: () -> Iterator[None]
'''
This raises a failure inside of the try/except block of
bugdown.__init__.do_convert.
'''
with \
self.settings(ERROR_BOT=None), \
mock.patch('zerver.lib.bugdown.timeout', side_effect=KeyError('foo')), \
mock.patch('zerver.lib.bugdown.log_bugdown_error'):
yield
class WebhookTestCase(ZulipTestCase):
    """
    Common base class for all webhook tests.

    Override the class attributes below and call send_and_test_stream_message
    or send_and_test_private_message. If your URL is built in an uncommon
    way, override build_webhook_url; if you need to modify the request body
    or build it without a fixture, override get_body.
    """
    STREAM_NAME = None  # type: Optional[Text]
    TEST_USER_EMAIL = '[email protected]'
    URL_TEMPLATE = None  # type: Optional[Text]
    FIXTURE_DIR_NAME = None  # type: Optional[Text]

    def setUp(self):
        # type: () -> None
        self.url = self.build_webhook_url()

    def send_and_test_stream_message(self, fixture_name, expected_subject=None,
                                     expected_message=None, content_type="application/json", **kwargs):
        # type: (Text, Optional[Text], Optional[Text], Optional[Text], **Any) -> Message
        """POST the named fixture to the webhook URL and verify the
        resulting stream message's subject and content (None skips a check)."""
        payload = self.get_body(fixture_name)
        if content_type is not None:
            kwargs['content_type'] = content_type
        msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
                                     self.STREAM_NAME, **kwargs)
        self.do_test_subject(msg, expected_subject)
        self.do_test_message(msg, expected_message)
        return msg

    def send_and_test_private_message(self, fixture_name, expected_subject=None,
                                      expected_message=None, content_type="application/json", **kwargs):
        # type: (Text, Optional[Text], Optional[Text], Optional[Text], **Any) -> Message
        """Like send_and_test_stream_message, but for private messages
        (no stream subscription; the subject is not checked)."""
        payload = self.get_body(fixture_name)
        if content_type is not None:
            kwargs['content_type'] = content_type
        msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
                                     stream_name=None, **kwargs)
        self.do_test_message(msg, expected_message)
        return msg

    def build_webhook_url(self, *args, **kwargs):
        # type: (*Any, **Any) -> Text
        """Build the webhook URL from URL_TEMPLATE, appending any extra
        query parameters passed as positional or keyword arguments.

        Fixes a bug where a query-string separator was appended even
        when no extra arguments were supplied, producing URLs with a
        spurious trailing '&'; the separator is now only added when
        there are arguments to append.
        """
        if self.URL_TEMPLATE.find("api_key") >= 0:
            api_key = self.get_api_key(self.TEST_USER_EMAIL)
            url = self.URL_TEMPLATE.format(api_key=api_key,
                                           stream=self.STREAM_NAME)
        else:
            url = self.URL_TEMPLATE.format(stream=self.STREAM_NAME)
        has_arguments = kwargs or args
        if not has_arguments:
            return url
        # Start the query string unless the template already has one.
        url = "{}{}".format(url, '&' if '?' in url else '?')
        for key, value in kwargs.items():
            url = "{}{}={}&".format(url, key, value)
        for arg in args:
            url = "{}{}&".format(url, arg)
        # Drop the trailing '&' left by the loops above.
        return url[:-1]

    def get_body(self, fixture_name):
        # type: (Text) -> Union[Text, Dict[str, Text]]
        """Can be implemented either as returning a dictionary containing the
        post parameters or as string containing the body of the request."""
        return ujson.dumps(ujson.loads(self.fixture_data(self.FIXTURE_DIR_NAME, fixture_name)))

    def do_test_subject(self, msg, expected_subject):
        # type: (Message, Optional[Text]) -> None
        # None means "don't check the subject".
        if expected_subject is not None:
            self.assertEqual(msg.topic_name(), expected_subject)

    def do_test_message(self, msg, expected_message):
        # type: (Message, Optional[Text]) -> None
        # None means "don't check the content".
        if expected_message is not None:
            self.assertEqual(msg.content, expected_message)
from __future__ import absolute_import
from __future__ import print_function
from contextlib import contextmanager
from typing import (cast, Any, Callable, Dict, Iterable, Iterator, List, Mapping, Optional,
Sized, Tuple, Union, Text)
from django.core.urlresolvers import resolve
from django.conf import settings
from django.test import TestCase
from django.test.client import (
BOUNDARY, MULTIPART_CONTENT, encode_multipart,
)
from django.template import loader
from django.test.testcases import SerializeMixin
from django.http import HttpResponse
from django.db.utils import IntegrityError
from zerver.lib.initial_password import initial_password
from zerver.lib.db import TimeTrackingCursor
from zerver.lib.str_utils import force_text
from zerver.lib.utils import is_remote_server
from zerver.lib import cache
from zerver.tornado.handlers import allocate_handler_id
from zerver.worker import queue_processors
from zerver.lib.actions import (
check_send_message, create_stream_if_needed, bulk_add_subscriptions,
get_display_recipient, bulk_remove_subscriptions
)
from zerver.lib.test_helpers import (
instrument_url, find_key_by_email,
)
from zerver.models import (
get_stream,
get_user,
get_user_profile_by_email,
get_realm,
get_realm_by_email_domain,
Client,
Message,
Realm,
Recipient,
Stream,
Subscription,
UserMessage,
UserProfile,
)
from zerver.lib.request import JsonableError
from zilencer.models import get_remote_server_by_uuid
import base64
import mock
import os
import re
import time
import ujson
import unittest
from six.moves import urllib
from six import binary_type
from zerver.lib.str_utils import NonBinaryStr
from contextlib import contextmanager
import six
API_KEYS = {} # type: Dict[Text, Text]
def flush_caches_for_testing():
    # type: () -> None
    """Reset the module-level API key cache (rebinds API_KEYS to a
    fresh dict so stale keys from a previous test run are dropped)."""
    global API_KEYS
    API_KEYS = {}
class UploadSerializeMixin(SerializeMixin):
    """
    We cannot use override_settings to change the upload directory
    because settings.LOCAL_UPLOADS_DIR is used in a url pattern and urls
    are compiled only once. Otherwise using a different upload directory
    for conflicting test cases would have provided better performance
    while providing the required isolation.
    """
    # Lock file SerializeMixin uses to serialize upload-touching tests.
    # NOTE(review): assumes the 'var/' directory already exists — TODO confirm.
    lockfile = 'var/upload_lock'
    @classmethod
    def setUpClass(cls, *args, **kwargs):
        # type: (*Any, **Any) -> None
        # Create the lock file if it does not exist yet, then defer to
        # SerializeMixin's class setup, which locks on it.
        if not os.path.exists(cls.lockfile):
            with open(cls.lockfile, 'w'):  # nocoverage - rare locking case
                pass
        super(UploadSerializeMixin, cls).setUpClass(*args, **kwargs)
class ZulipTestCase(TestCase):
# Ensure that the test system just shows us diffs
maxDiff = None # type: Optional[int]
'''
WRAPPER_COMMENT:
We wrap calls to self.client.{patch,put,get,post,delete} for various
reasons. Some of this has to do with fixing encodings before calling
into the Django code. Some of this has to do with providing a future
path for instrumentation. Some of it's just consistency.
The linter will prevent direct calls to self.client.foo, so the wrapper
functions have to fake out the linter by using a local variable called
django_client to fool the regext.
'''
    def __init__(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        # This method should be removed when we migrate to version 3 of Python
        import six
        if six.PY2:
            # Python 2's unittest only has assertRaisesRegexp; alias the
            # Python 3 spelling so tests can use assertRaisesRegex everywhere.
            self.assertRaisesRegex = self.assertRaisesRegexp
        super(ZulipTestCase, self).__init__(*args, **kwargs)
    # Evaluated once at class-definition (import) time; presumably the
    # test database already contains the 'zulip' realm — verify against
    # the test-fixture bootstrap.
    DEFAULT_REALM = Realm.objects.get(string_id='zulip')
@instrument_url
def client_patch(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
"""
We need to urlencode, since Django's function won't do it for us.
"""
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.patch(url, encoded, **kwargs)
@instrument_url
def client_patch_multipart(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
"""
Use this for patch requests that have file uploads or
that need some sort of multi-part content. In the future
Django's test client may become a bit more flexible,
so we can hopefully eliminate this. (When you post
with the Django test client, it deals with MULTIPART_CONTENT
automatically, but not patch.)
"""
encoded = encode_multipart(BOUNDARY, info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.patch(
url,
encoded,
content_type=MULTIPART_CONTENT,
**kwargs)
@instrument_url
def client_put(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.put(url, encoded, **kwargs)
@instrument_url
def client_delete(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.delete(url, encoded, **kwargs)
@instrument_url
def client_options(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.options(url, encoded, **kwargs)
@instrument_url
def client_post(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
django_client = self.client # see WRAPPER_COMMENT
return django_client.post(url, info, **kwargs)
@instrument_url
def client_post_request(self, url, req):
# type: (Text, Any) -> HttpResponse
"""
We simulate hitting an endpoint here, although we
actually resolve the URL manually and hit the view
directly. We have this helper method to allow our
instrumentation to work for /notify_tornado and
future similar methods that require doing funny
things to a request object.
"""
match = resolve(url)
return match.func(req)
@instrument_url
def client_get(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
django_client = self.client # see WRAPPER_COMMENT
return django_client.get(url, info, **kwargs)
example_user_map = dict(
hamlet=u'[email protected]',
cordelia=u'[email protected]',
iago=u'[email protected]',
prospero=u'[email protected]',
othello=u'[email protected]',
AARON=u'[email protected]',
aaron=u'[email protected]',
ZOE=u'[email protected]',
)
mit_user_map = dict(
sipbtest=u"[email protected]",
starnine=u"[email protected]",
espuser=u"[email protected]",
)
# Non-registered test users
nonreg_user_map = dict(
test=u'[email protected]',
test1=u'[email protected]',
alice=u'[email protected]',
newuser=u'[email protected]',
bob=u'[email protected]',
cordelia=u'[email protected]',
newguy=u'[email protected]',
me=u'[email protected]',
)
def nonreg_user(self, name):
# type: (str) -> UserProfile
|
def example_user(self, name):
# type: (str) -> UserProfile
email = self.example_user_map[name]
return get_user(email, get_realm('zulip'))
def mit_user(self, name):
# type: (str) -> UserProfile
email = self.mit_user_map[name]
return get_user(email, get_realm('zephyr'))
def nonreg_email(self, name):
# type: (str) -> Text
return self.nonreg_user_map[name]
def example_email(self, name):
# type: (str) -> Text
return self.example_user_map[name]
def mit_email(self, name):
# type: (str) -> Text
return self.mit_user_map[name]
def notification_bot(self):
# type: () -> UserProfile
return get_user('[email protected]', get_realm('zulip'))
def login_with_return(self, email, password=None):
# type: (Text, Optional[Text]) -> HttpResponse
if password is None:
password = initial_password(email)
return self.client_post('/accounts/login/',
{'username': email, 'password': password})
def login(self, email, password=None, fails=False):
# type: (Text, Optional[Text], bool) -> HttpResponse
if password is None:
password = initial_password(email)
if not fails:
self.assertTrue(self.client.login(username=email, password=password))
else:
self.assertFalse(self.client.login(username=email, password=password))
def logout(self):
# type: () -> None
self.client.logout()
def register(self, email, password):
# type: (Text, Text) -> HttpResponse
self.client_post('/accounts/home/', {'email': email})
return self.submit_reg_form_for_user(email, password)
def submit_reg_form_for_user(self, email, password, realm_name="Zulip Test",
realm_subdomain="zuliptest",
from_confirmation='', full_name=None, timezone=u'', **kwargs):
# type: (Text, Text, Optional[Text], Optional[Text], Optional[Text], Optional[Text], Optional[Text], **Any) -> HttpResponse
"""
Stage two of the two-step registration process.
If things are working correctly the account should be fully
registered after this call.
You can pass the HTTP_HOST variable for subdomains via kwargs.
"""
if full_name is None:
full_name = email.replace("@", "_")
return self.client_post('/accounts/register/',
{'full_name': full_name,
'password': password,
'realm_name': realm_name,
'realm_subdomain': realm_subdomain,
'key': find_key_by_email(email),
'timezone': timezone,
'terms': True,
'from_confirmation': from_confirmation},
**kwargs)
def get_confirmation_url_from_outbox(self, email_address, path_pattern="(\S+)>"):
# type: (Text, Text) -> Text
from django.core.mail import outbox
for message in reversed(outbox):
if email_address in message.to:
return re.search(settings.EXTERNAL_HOST + path_pattern,
message.body).groups()[0]
else:
raise AssertionError("Couldn't find a confirmation email.")
def get_api_key(self, email):
# type: (Text) -> Text
if email not in API_KEYS:
API_KEYS[email] = get_user_profile_by_email(email).api_key
return API_KEYS[email]
def get_server_api_key(self, server_uuid):
# type: (Text) -> Text
if server_uuid not in API_KEYS:
API_KEYS[server_uuid] = get_remote_server_by_uuid(server_uuid).api_key
return API_KEYS[server_uuid]
def api_auth(self, identifier):
# type: (Text) -> Dict[str, Text]
"""
identifier: Can be an email or a remote server uuid.
"""
if is_remote_server(identifier):
api_key = self.get_server_api_key(identifier)
else:
api_key = self.get_api_key(identifier)
credentials = u"%s:%s" % (identifier, api_key)
return {
'HTTP_AUTHORIZATION': u'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
}
def get_streams(self, email, realm):
# type: (Text, Realm) -> List[Text]
"""
Helper function to get the stream names for a user
"""
user_profile = get_user(email, realm)
subs = Subscription.objects.filter(
user_profile=user_profile,
active=True,
recipient__type=Recipient.STREAM)
return [cast(Text, get_display_recipient(sub.recipient)) for sub in subs]
def send_message(self, sender_name, raw_recipients, message_type,
content=u"test content", subject=u"test", **kwargs):
# type: (Text, Union[Text, List[Text]], int, Text, Text, **Any) -> int
sender = get_user_profile_by_email(sender_name)
if message_type in [Recipient.PERSONAL, Recipient.HUDDLE]:
message_type_name = "private"
else:
message_type_name = "stream"
if isinstance(raw_recipients, six.string_types):
recipient_list = [raw_recipients]
else:
recipient_list = raw_recipients
(sending_client, _) = Client.objects.get_or_create(name="test suite")
return check_send_message(
sender, sending_client, message_type_name, recipient_list, subject,
content, forged=False, forged_timestamp=None,
forwarder_user_profile=sender, realm=sender.realm, **kwargs)
def get_messages(self, anchor=1, num_before=100, num_after=100,
use_first_unread_anchor=False):
# type: (int, int, int, bool) -> List[Dict[str, Any]]
post_params = {"anchor": anchor, "num_before": num_before,
"num_after": num_after,
"use_first_unread_anchor": ujson.dumps(use_first_unread_anchor)}
result = self.client_get("/json/messages", dict(post_params))
data = ujson.loads(result.content)
return data['messages']
def users_subscribed_to_stream(self, stream_name, realm):
# type: (Text, Realm) -> List[UserProfile]
stream = Stream.objects.get(name=stream_name, realm=realm)
recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
subscriptions = Subscription.objects.filter(recipient=recipient, active=True)
return [subscription.user_profile for subscription in subscriptions]
def assert_url_serves_contents_of_file(self, url, result):
# type: (str, bytes) -> None
response = self.client_get(url)
data = b"".join(response.streaming_content)
self.assertEqual(result, data)
def assert_json_success(self, result):
# type: (HttpResponse) -> Dict[str, Any]
"""
Successful POSTs return a 200 and JSON of the form {"result": "success",
"msg": ""}.
"""
self.assertEqual(result.status_code, 200, result)
json = ujson.loads(result.content)
self.assertEqual(json.get("result"), "success")
# We have a msg key for consistency with errors, but it typically has an
# empty value.
self.assertIn("msg", json)
return json
    def get_json_error(self, result, status_code=400):
        # type: (HttpResponse, int) -> Text
        # (Return type corrected: this returns the "msg" text, not the
        # full decoded dict.)
        """Assert that `result` is an error response with the given
        status code, and return its error message."""
        self.assertEqual(result.status_code, status_code)
        json = ujson.loads(result.content)
        self.assertEqual(json.get("result"), "error")
        return json['msg']
def assert_json_error(self, result, msg, status_code=400):
# type: (HttpResponse, Text, int) -> None
"""
Invalid POSTs return an error status code and JSON of the form
{"result": "error", "msg": "reason"}.
"""
self.assertEqual(self.get_json_error(result, status_code=status_code), msg)
def assert_length(self, queries, count):
# type: (Sized, int) -> None
actual_count = len(queries)
return self.assertTrue(actual_count == count,
"len(%s) == %s, != %s" % (queries, actual_count, count))
def assert_json_error_contains(self, result, msg_substring, status_code=400):
# type: (HttpResponse, Text, int) -> None
self.assertIn(msg_substring, self.get_json_error(result, status_code=status_code))
def assert_in_response(self, substring, response):
# type: (Text, HttpResponse) -> None
self.assertIn(substring, response.content.decode('utf-8'))
def assert_in_success_response(self, substrings, response):
# type: (List[Text], HttpResponse) -> None
self.assertEqual(response.status_code, 200)
decoded = response.content.decode('utf-8')
for substring in substrings:
self.assertIn(substring, decoded)
def assert_not_in_success_response(self, substrings, response):
# type: (List[Text], HttpResponse) -> None
self.assertEqual(response.status_code, 200)
decoded = response.content.decode('utf-8')
for substring in substrings:
self.assertNotIn(substring, decoded)
def fixture_data(self, type, action, file_type='json'):
# type: (Text, Text, Text) -> Text
return force_text(open(os.path.join(os.path.dirname(__file__),
"../webhooks/%s/fixtures/%s.%s" % (type, action, file_type))).read())
def make_stream(self, stream_name, realm=None, invite_only=False):
# type: (Text, Optional[Realm], Optional[bool]) -> Stream
if realm is None:
realm = self.DEFAULT_REALM
try:
stream = Stream.objects.create(
realm=realm,
name=stream_name,
invite_only=invite_only,
)
except IntegrityError: # nocoverage -- this is for bugs in the tests
raise Exception('''
%s already exists
Please call make_stream with a stream name
that is not already in use.''' % (stream_name,))
Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
return stream
# Subscribe to a stream directly
def subscribe_to_stream(self, email, stream_name, realm=None):
# type: (Text, Text, Optional[Realm]) -> Stream
if realm is None:
realm = get_realm_by_email_domain(email)
try:
stream = get_stream(stream_name, realm)
from_stream_creation = False
except Stream.DoesNotExist:
stream, from_stream_creation = create_stream_if_needed(realm, stream_name)
user_profile = get_user_profile_by_email(email)
bulk_add_subscriptions([stream], [user_profile], from_stream_creation=from_stream_creation)
return stream
def unsubscribe_from_stream(self, email, stream_name, realm):
# type: (Text, Text, Realm) -> None
user_profile = get_user(email, realm)
stream = get_stream(stream_name, realm)
bulk_remove_subscriptions([user_profile], [stream])
# Subscribe to a stream by making an API request
def common_subscribe_to_streams(self, email, streams, extra_post_data={}, invite_only=False):
# type: (Text, Iterable[Text], Dict[str, Any], bool) -> HttpResponse
post_data = {'subscriptions': ujson.dumps([{"name": stream} for stream in streams]),
'invite_only': ujson.dumps(invite_only)}
post_data.update(extra_post_data)
result = self.client_post("/api/v1/users/me/subscriptions", post_data, **self.api_auth(email))
return result
def send_json_payload(self, email, url, payload, stream_name=None, **post_params):
# type: (Text, Text, Union[Text, Dict[str, Any]], Optional[Text], **Any) -> Message
if stream_name is not None:
self.subscribe_to_stream(email, stream_name)
result = self.client_post(url, payload, **post_params)
self.assert_json_success(result)
# Check the correct message was sent
msg = self.get_last_message()
self.assertEqual(msg.sender.email, email)
if stream_name is not None:
self.assertEqual(get_display_recipient(msg.recipient), stream_name)
# TODO: should also validate recipient for private messages
return msg
def get_last_message(self):
# type: () -> Message
return Message.objects.latest('id')
def get_second_to_last_message(self):
# type: () -> Message
return Message.objects.all().order_by('-id')[1]
    @contextmanager
    def simulated_markdown_failure(self):
        # type: () -> Iterator[None]
        '''
        Context manager that forces markdown rendering to fail: it
        raises a failure inside of the try/except block of
        bugdown.__init__.do_convert (by making the bugdown timeout
        helper raise KeyError), while silencing the error logger and
        disabling the error bot so no error report is sent.
        '''
        with \
        self.settings(ERROR_BOT=None), \
        mock.patch('zerver.lib.bugdown.timeout', side_effect=KeyError('foo')), \
        mock.patch('zerver.lib.bugdown.log_bugdown_error'):
            yield
class WebhookTestCase(ZulipTestCase):
"""
Common for all webhooks tests
Override below class attributes and run send_and_test_message
If you create your url in uncommon way you can override build_webhook_url method
In case that you need modify body or create it without using fixture you can also override get_body method
"""
STREAM_NAME = None # type: Optional[Text]
TEST_USER_EMAIL = '[email protected]'
URL_TEMPLATE = None # type: Optional[Text]
FIXTURE_DIR_NAME = None # type: Optional[Text]
def setUp(self):
# type: () -> None
self.url = self.build_webhook_url()
def send_and_test_stream_message(self, fixture_name, expected_subject=None,
expected_message=None, content_type="application/json", **kwargs):
# type: (Text, Optional[Text], Optional[Text], Optional[Text], **Any) -> Message
payload = self.get_body(fixture_name)
if content_type is not None:
kwargs['content_type'] = content_type
msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
self.STREAM_NAME, **kwargs)
self.do_test_subject(msg, expected_subject)
self.do_test_message(msg, expected_message)
return msg
def send_and_test_private_message(self, fixture_name, expected_subject=None,
expected_message=None, content_type="application/json", **kwargs):
# type: (Text, Text, Text, str, **Any) -> Message
payload = self.get_body(fixture_name)
if content_type is not None:
kwargs['content_type'] = content_type
msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
stream_name=None, **kwargs)
self.do_test_message(msg, expected_message)
return msg
def build_webhook_url(self, *args, **kwargs):
# type: (*Any, **Any) -> Text
url = self.URL_TEMPLATE
if url.find("api_key") >= 0:
api_key = self.get_api_key(self.TEST_USER_EMAIL)
url = self.URL_TEMPLATE.format(api_key=api_key,
stream=self.STREAM_NAME)
else:
url = self.URL_TEMPLATE.format(stream=self.STREAM_NAME)
has_arguments = kwargs or args
if has_arguments and url.find('?') == -1:
url = "{}?".format(url)
else:
url = "{}&".format(url)
for key, value in kwargs.items():
url = "{}{}={}&".format(url, key, value)
for arg in args:
url = "{}{}&".format(url, arg)
return url[:-1] if has_arguments else url
def get_body(self, fixture_name):
# type: (Text) -> Union[Text, Dict[str, Text]]
"""Can be implemented either as returning a dictionary containing the
post parameters or as string containing the body of the request."""
return ujson.dumps(ujson.loads(self.fixture_data(self.FIXTURE_DIR_NAME, fixture_name)))
def do_test_subject(self, msg, expected_subject):
# type: (Message, Optional[Text]) -> None
if expected_subject is not None:
self.assertEqual(msg.topic_name(), expected_subject)
def do_test_message(self, msg, expected_message):
# type: (Message, Optional[Text]) -> None
if expected_message is not None:
self.assertEqual(msg.content, expected_message)
| email = self.nonreg_user_map[name]
return get_user(email, get_realm_by_email_domain(email)) | identifier_body |
test_classes.py | from __future__ import absolute_import
from __future__ import print_function
from contextlib import contextmanager
from typing import (cast, Any, Callable, Dict, Iterable, Iterator, List, Mapping, Optional,
Sized, Tuple, Union, Text)
from django.core.urlresolvers import resolve
from django.conf import settings
from django.test import TestCase
from django.test.client import (
BOUNDARY, MULTIPART_CONTENT, encode_multipart,
)
from django.template import loader
from django.test.testcases import SerializeMixin
from django.http import HttpResponse
from django.db.utils import IntegrityError
from zerver.lib.initial_password import initial_password
from zerver.lib.db import TimeTrackingCursor
from zerver.lib.str_utils import force_text
from zerver.lib.utils import is_remote_server
from zerver.lib import cache
from zerver.tornado.handlers import allocate_handler_id
from zerver.worker import queue_processors
from zerver.lib.actions import (
check_send_message, create_stream_if_needed, bulk_add_subscriptions,
get_display_recipient, bulk_remove_subscriptions
)
from zerver.lib.test_helpers import (
instrument_url, find_key_by_email,
)
from zerver.models import (
get_stream,
get_user,
get_user_profile_by_email,
get_realm,
get_realm_by_email_domain,
Client,
Message,
Realm,
Recipient,
Stream,
Subscription,
UserMessage,
UserProfile,
)
from zerver.lib.request import JsonableError
from zilencer.models import get_remote_server_by_uuid
import base64
import mock
import os
import re
import time
import ujson
import unittest
from six.moves import urllib
from six import binary_type
from zerver.lib.str_utils import NonBinaryStr
from contextlib import contextmanager
import six
API_KEYS = {} # type: Dict[Text, Text]
def flush_caches_for_testing():
# type: () -> None
global API_KEYS
API_KEYS = {}
class UploadSerializeMixin(SerializeMixin):
"""
We cannot use override_settings to change upload directory because
because settings.LOCAL_UPLOADS_DIR is used in url pattern and urls
are compiled only once. Otherwise using a different upload directory
for conflicting test cases would have provided better performance
while providing the required isolation.
"""
lockfile = 'var/upload_lock'
@classmethod
def setUpClass(cls, *args, **kwargs):
# type: (*Any, **Any) -> None
if not os.path.exists(cls.lockfile):
with open(cls.lockfile, 'w'): # nocoverage - rare locking case
pass
super(UploadSerializeMixin, cls).setUpClass(*args, **kwargs)
class ZulipTestCase(TestCase):
# Ensure that the test system just shows us diffs
maxDiff = None # type: Optional[int]
'''
WRAPPER_COMMENT:
We wrap calls to self.client.{patch,put,get,post,delete} for various
reasons. Some of this has to do with fixing encodings before calling
into the Django code. Some of this has to do with providing a future
path for instrumentation. Some of it's just consistency.
The linter will prevent direct calls to self.client.foo, so the wrapper
functions have to fake out the linter by using a local variable called
django_client to fool the regext.
'''
def __init__(self, *args, **kwargs):
# type: (*Any, **Any) -> None
# This method should be removed when we migrate to version 3 of Python
import six
if six.PY2:
self.assertRaisesRegex = self.assertRaisesRegexp
super(ZulipTestCase, self).__init__(*args, **kwargs)
DEFAULT_REALM = Realm.objects.get(string_id='zulip')
@instrument_url
def client_patch(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
"""
We need to urlencode, since Django's function won't do it for us.
"""
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.patch(url, encoded, **kwargs)
@instrument_url
def client_patch_multipart(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
"""
Use this for patch requests that have file uploads or
that need some sort of multi-part content. In the future
Django's test client may become a bit more flexible,
so we can hopefully eliminate this. (When you post
with the Django test client, it deals with MULTIPART_CONTENT
automatically, but not patch.)
"""
encoded = encode_multipart(BOUNDARY, info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.patch(
url,
encoded,
content_type=MULTIPART_CONTENT,
**kwargs)
@instrument_url
def client_put(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.put(url, encoded, **kwargs)
@instrument_url
def client_delete(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.delete(url, encoded, **kwargs)
@instrument_url
def client_options(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.options(url, encoded, **kwargs)
@instrument_url
def client_post(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
django_client = self.client # see WRAPPER_COMMENT
return django_client.post(url, info, **kwargs)
@instrument_url
def client_post_request(self, url, req):
# type: (Text, Any) -> HttpResponse
"""
We simulate hitting an endpoint here, although we
actually resolve the URL manually and hit the view
directly. We have this helper method to allow our
instrumentation to work for /notify_tornado and
future similar methods that require doing funny
things to a request object.
"""
match = resolve(url)
return match.func(req)
@instrument_url
def client_get(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
django_client = self.client # see WRAPPER_COMMENT
return django_client.get(url, info, **kwargs)
example_user_map = dict(
hamlet=u'[email protected]',
cordelia=u'[email protected]',
iago=u'[email protected]',
prospero=u'[email protected]',
othello=u'[email protected]',
AARON=u'[email protected]',
aaron=u'[email protected]',
ZOE=u'[email protected]',
)
mit_user_map = dict(
sipbtest=u"[email protected]",
starnine=u"[email protected]",
espuser=u"[email protected]",
)
# Non-registered test users
nonreg_user_map = dict(
test=u'[email protected]',
test1=u'[email protected]',
alice=u'[email protected]',
newuser=u'[email protected]',
bob=u'[email protected]',
cordelia=u'[email protected]',
newguy=u'[email protected]',
me=u'[email protected]',
)
def nonreg_user(self, name):
# type: (str) -> UserProfile
email = self.nonreg_user_map[name]
return get_user(email, get_realm_by_email_domain(email))
def example_user(self, name):
# type: (str) -> UserProfile
email = self.example_user_map[name]
return get_user(email, get_realm('zulip'))
def mit_user(self, name):
# type: (str) -> UserProfile
email = self.mit_user_map[name]
return get_user(email, get_realm('zephyr'))
def nonreg_email(self, name):
# type: (str) -> Text
return self.nonreg_user_map[name]
def example_email(self, name):
# type: (str) -> Text
return self.example_user_map[name]
def | (self, name):
# type: (str) -> Text
return self.mit_user_map[name]
def notification_bot(self):
# type: () -> UserProfile
return get_user('[email protected]', get_realm('zulip'))
def login_with_return(self, email, password=None):
# type: (Text, Optional[Text]) -> HttpResponse
if password is None:
password = initial_password(email)
return self.client_post('/accounts/login/',
{'username': email, 'password': password})
def login(self, email, password=None, fails=False):
# type: (Text, Optional[Text], bool) -> HttpResponse
if password is None:
password = initial_password(email)
if not fails:
self.assertTrue(self.client.login(username=email, password=password))
else:
self.assertFalse(self.client.login(username=email, password=password))
def logout(self):
# type: () -> None
self.client.logout()
def register(self, email, password):
# type: (Text, Text) -> HttpResponse
self.client_post('/accounts/home/', {'email': email})
return self.submit_reg_form_for_user(email, password)
def submit_reg_form_for_user(self, email, password, realm_name="Zulip Test",
realm_subdomain="zuliptest",
from_confirmation='', full_name=None, timezone=u'', **kwargs):
# type: (Text, Text, Optional[Text], Optional[Text], Optional[Text], Optional[Text], Optional[Text], **Any) -> HttpResponse
"""
Stage two of the two-step registration process.
If things are working correctly the account should be fully
registered after this call.
You can pass the HTTP_HOST variable for subdomains via kwargs.
"""
if full_name is None:
full_name = email.replace("@", "_")
return self.client_post('/accounts/register/',
{'full_name': full_name,
'password': password,
'realm_name': realm_name,
'realm_subdomain': realm_subdomain,
'key': find_key_by_email(email),
'timezone': timezone,
'terms': True,
'from_confirmation': from_confirmation},
**kwargs)
def get_confirmation_url_from_outbox(self, email_address, path_pattern="(\S+)>"):
# type: (Text, Text) -> Text
from django.core.mail import outbox
for message in reversed(outbox):
if email_address in message.to:
return re.search(settings.EXTERNAL_HOST + path_pattern,
message.body).groups()[0]
else:
raise AssertionError("Couldn't find a confirmation email.")
def get_api_key(self, email):
# type: (Text) -> Text
if email not in API_KEYS:
API_KEYS[email] = get_user_profile_by_email(email).api_key
return API_KEYS[email]
def get_server_api_key(self, server_uuid):
# type: (Text) -> Text
if server_uuid not in API_KEYS:
API_KEYS[server_uuid] = get_remote_server_by_uuid(server_uuid).api_key
return API_KEYS[server_uuid]
def api_auth(self, identifier):
# type: (Text) -> Dict[str, Text]
"""
identifier: Can be an email or a remote server uuid.
"""
if is_remote_server(identifier):
api_key = self.get_server_api_key(identifier)
else:
api_key = self.get_api_key(identifier)
credentials = u"%s:%s" % (identifier, api_key)
return {
'HTTP_AUTHORIZATION': u'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
}
def get_streams(self, email, realm):
# type: (Text, Realm) -> List[Text]
"""
Helper function to get the stream names for a user
"""
user_profile = get_user(email, realm)
subs = Subscription.objects.filter(
user_profile=user_profile,
active=True,
recipient__type=Recipient.STREAM)
return [cast(Text, get_display_recipient(sub.recipient)) for sub in subs]
def send_message(self, sender_name, raw_recipients, message_type,
content=u"test content", subject=u"test", **kwargs):
# type: (Text, Union[Text, List[Text]], int, Text, Text, **Any) -> int
sender = get_user_profile_by_email(sender_name)
if message_type in [Recipient.PERSONAL, Recipient.HUDDLE]:
message_type_name = "private"
else:
message_type_name = "stream"
if isinstance(raw_recipients, six.string_types):
recipient_list = [raw_recipients]
else:
recipient_list = raw_recipients
(sending_client, _) = Client.objects.get_or_create(name="test suite")
return check_send_message(
sender, sending_client, message_type_name, recipient_list, subject,
content, forged=False, forged_timestamp=None,
forwarder_user_profile=sender, realm=sender.realm, **kwargs)
def get_messages(self, anchor=1, num_before=100, num_after=100,
use_first_unread_anchor=False):
# type: (int, int, int, bool) -> List[Dict[str, Any]]
post_params = {"anchor": anchor, "num_before": num_before,
"num_after": num_after,
"use_first_unread_anchor": ujson.dumps(use_first_unread_anchor)}
result = self.client_get("/json/messages", dict(post_params))
data = ujson.loads(result.content)
return data['messages']
def users_subscribed_to_stream(self, stream_name, realm):
# type: (Text, Realm) -> List[UserProfile]
stream = Stream.objects.get(name=stream_name, realm=realm)
recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
subscriptions = Subscription.objects.filter(recipient=recipient, active=True)
return [subscription.user_profile for subscription in subscriptions]
def assert_url_serves_contents_of_file(self, url, result):
# type: (str, bytes) -> None
response = self.client_get(url)
data = b"".join(response.streaming_content)
self.assertEqual(result, data)
def assert_json_success(self, result):
# type: (HttpResponse) -> Dict[str, Any]
"""
Successful POSTs return a 200 and JSON of the form {"result": "success",
"msg": ""}.
"""
self.assertEqual(result.status_code, 200, result)
json = ujson.loads(result.content)
self.assertEqual(json.get("result"), "success")
# We have a msg key for consistency with errors, but it typically has an
# empty value.
self.assertIn("msg", json)
return json
def get_json_error(self, result, status_code=400):
# type: (HttpResponse, int) -> Dict[str, Any]
self.assertEqual(result.status_code, status_code)
json = ujson.loads(result.content)
self.assertEqual(json.get("result"), "error")
return json['msg']
def assert_json_error(self, result, msg, status_code=400):
# type: (HttpResponse, Text, int) -> None
"""
Invalid POSTs return an error status code and JSON of the form
{"result": "error", "msg": "reason"}.
"""
self.assertEqual(self.get_json_error(result, status_code=status_code), msg)
def assert_length(self, queries, count):
# type: (Sized, int) -> None
actual_count = len(queries)
return self.assertTrue(actual_count == count,
"len(%s) == %s, != %s" % (queries, actual_count, count))
def assert_json_error_contains(self, result, msg_substring, status_code=400):
# type: (HttpResponse, Text, int) -> None
self.assertIn(msg_substring, self.get_json_error(result, status_code=status_code))
def assert_in_response(self, substring, response):
# type: (Text, HttpResponse) -> None
self.assertIn(substring, response.content.decode('utf-8'))
def assert_in_success_response(self, substrings, response):
# type: (List[Text], HttpResponse) -> None
self.assertEqual(response.status_code, 200)
decoded = response.content.decode('utf-8')
for substring in substrings:
self.assertIn(substring, decoded)
def assert_not_in_success_response(self, substrings, response):
# type: (List[Text], HttpResponse) -> None
self.assertEqual(response.status_code, 200)
decoded = response.content.decode('utf-8')
for substring in substrings:
self.assertNotIn(substring, decoded)
def fixture_data(self, type, action, file_type='json'):
# type: (Text, Text, Text) -> Text
return force_text(open(os.path.join(os.path.dirname(__file__),
"../webhooks/%s/fixtures/%s.%s" % (type, action, file_type))).read())
def make_stream(self, stream_name, realm=None, invite_only=False):
# type: (Text, Optional[Realm], Optional[bool]) -> Stream
if realm is None:
realm = self.DEFAULT_REALM
try:
stream = Stream.objects.create(
realm=realm,
name=stream_name,
invite_only=invite_only,
)
except IntegrityError: # nocoverage -- this is for bugs in the tests
raise Exception('''
%s already exists
Please call make_stream with a stream name
that is not already in use.''' % (stream_name,))
Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
return stream
# Subscribe to a stream directly
def subscribe_to_stream(self, email, stream_name, realm=None):
# type: (Text, Text, Optional[Realm]) -> Stream
if realm is None:
realm = get_realm_by_email_domain(email)
try:
stream = get_stream(stream_name, realm)
from_stream_creation = False
except Stream.DoesNotExist:
stream, from_stream_creation = create_stream_if_needed(realm, stream_name)
user_profile = get_user_profile_by_email(email)
bulk_add_subscriptions([stream], [user_profile], from_stream_creation=from_stream_creation)
return stream
def unsubscribe_from_stream(self, email, stream_name, realm):
# type: (Text, Text, Realm) -> None
user_profile = get_user(email, realm)
stream = get_stream(stream_name, realm)
bulk_remove_subscriptions([user_profile], [stream])
# Subscribe to a stream by making an API request
def common_subscribe_to_streams(self, email, streams, extra_post_data={}, invite_only=False):
# type: (Text, Iterable[Text], Dict[str, Any], bool) -> HttpResponse
post_data = {'subscriptions': ujson.dumps([{"name": stream} for stream in streams]),
'invite_only': ujson.dumps(invite_only)}
post_data.update(extra_post_data)
result = self.client_post("/api/v1/users/me/subscriptions", post_data, **self.api_auth(email))
return result
def send_json_payload(self, email, url, payload, stream_name=None, **post_params):
# type: (Text, Text, Union[Text, Dict[str, Any]], Optional[Text], **Any) -> Message
if stream_name is not None:
self.subscribe_to_stream(email, stream_name)
result = self.client_post(url, payload, **post_params)
self.assert_json_success(result)
# Check the correct message was sent
msg = self.get_last_message()
self.assertEqual(msg.sender.email, email)
if stream_name is not None:
self.assertEqual(get_display_recipient(msg.recipient), stream_name)
# TODO: should also validate recipient for private messages
return msg
def get_last_message(self):
# type: () -> Message
return Message.objects.latest('id')
def get_second_to_last_message(self):
# type: () -> Message
return Message.objects.all().order_by('-id')[1]
@contextmanager
def simulated_markdown_failure(self):
# type: () -> Iterator[None]
'''
This raises a failure inside of the try/except block of
bugdown.__init__.do_convert.
'''
with \
self.settings(ERROR_BOT=None), \
mock.patch('zerver.lib.bugdown.timeout', side_effect=KeyError('foo')), \
mock.patch('zerver.lib.bugdown.log_bugdown_error'):
yield
class WebhookTestCase(ZulipTestCase):
"""
Common for all webhooks tests
Override below class attributes and run send_and_test_message
If you create your url in uncommon way you can override build_webhook_url method
In case that you need modify body or create it without using fixture you can also override get_body method
"""
STREAM_NAME = None # type: Optional[Text]
TEST_USER_EMAIL = '[email protected]'
URL_TEMPLATE = None # type: Optional[Text]
FIXTURE_DIR_NAME = None # type: Optional[Text]
def setUp(self):
# type: () -> None
self.url = self.build_webhook_url()
def send_and_test_stream_message(self, fixture_name, expected_subject=None,
expected_message=None, content_type="application/json", **kwargs):
# type: (Text, Optional[Text], Optional[Text], Optional[Text], **Any) -> Message
payload = self.get_body(fixture_name)
if content_type is not None:
kwargs['content_type'] = content_type
msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
self.STREAM_NAME, **kwargs)
self.do_test_subject(msg, expected_subject)
self.do_test_message(msg, expected_message)
return msg
def send_and_test_private_message(self, fixture_name, expected_subject=None,
expected_message=None, content_type="application/json", **kwargs):
# type: (Text, Text, Text, str, **Any) -> Message
payload = self.get_body(fixture_name)
if content_type is not None:
kwargs['content_type'] = content_type
msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
stream_name=None, **kwargs)
self.do_test_message(msg, expected_message)
return msg
def build_webhook_url(self, *args, **kwargs):
# type: (*Any, **Any) -> Text
url = self.URL_TEMPLATE
if url.find("api_key") >= 0:
api_key = self.get_api_key(self.TEST_USER_EMAIL)
url = self.URL_TEMPLATE.format(api_key=api_key,
stream=self.STREAM_NAME)
else:
url = self.URL_TEMPLATE.format(stream=self.STREAM_NAME)
has_arguments = kwargs or args
if has_arguments and url.find('?') == -1:
url = "{}?".format(url)
else:
url = "{}&".format(url)
for key, value in kwargs.items():
url = "{}{}={}&".format(url, key, value)
for arg in args:
url = "{}{}&".format(url, arg)
return url[:-1] if has_arguments else url
def get_body(self, fixture_name):
# type: (Text) -> Union[Text, Dict[str, Text]]
"""Can be implemented either as returning a dictionary containing the
post parameters or as string containing the body of the request."""
return ujson.dumps(ujson.loads(self.fixture_data(self.FIXTURE_DIR_NAME, fixture_name)))
def do_test_subject(self, msg, expected_subject):
# type: (Message, Optional[Text]) -> None
if expected_subject is not None:
self.assertEqual(msg.topic_name(), expected_subject)
def do_test_message(self, msg, expected_message):
# type: (Message, Optional[Text]) -> None
if expected_message is not None:
self.assertEqual(msg.content, expected_message)
| mit_email | identifier_name |
test_classes.py | from __future__ import absolute_import
from __future__ import print_function
from contextlib import contextmanager
from typing import (cast, Any, Callable, Dict, Iterable, Iterator, List, Mapping, Optional,
Sized, Tuple, Union, Text)
from django.core.urlresolvers import resolve
from django.conf import settings
from django.test import TestCase
from django.test.client import (
BOUNDARY, MULTIPART_CONTENT, encode_multipart,
)
from django.template import loader
from django.test.testcases import SerializeMixin
from django.http import HttpResponse
from django.db.utils import IntegrityError
from zerver.lib.initial_password import initial_password
from zerver.lib.db import TimeTrackingCursor
from zerver.lib.str_utils import force_text
from zerver.lib.utils import is_remote_server
from zerver.lib import cache
from zerver.tornado.handlers import allocate_handler_id
from zerver.worker import queue_processors
from zerver.lib.actions import (
check_send_message, create_stream_if_needed, bulk_add_subscriptions,
get_display_recipient, bulk_remove_subscriptions
)
from zerver.lib.test_helpers import (
instrument_url, find_key_by_email,
)
from zerver.models import (
get_stream,
get_user,
get_user_profile_by_email,
get_realm,
get_realm_by_email_domain,
Client,
Message,
Realm,
Recipient,
Stream,
Subscription,
UserMessage,
UserProfile,
)
from zerver.lib.request import JsonableError
from zilencer.models import get_remote_server_by_uuid
import base64
import mock
import os
import re
import time
import ujson
import unittest
from six.moves import urllib
from six import binary_type
from zerver.lib.str_utils import NonBinaryStr
from contextlib import contextmanager
import six
API_KEYS = {} # type: Dict[Text, Text]
def flush_caches_for_testing():
# type: () -> None
global API_KEYS
API_KEYS = {}
class UploadSerializeMixin(SerializeMixin):
"""
We cannot use override_settings to change upload directory because
because settings.LOCAL_UPLOADS_DIR is used in url pattern and urls
are compiled only once. Otherwise using a different upload directory
for conflicting test cases would have provided better performance
while providing the required isolation.
"""
lockfile = 'var/upload_lock'
@classmethod
def setUpClass(cls, *args, **kwargs):
# type: (*Any, **Any) -> None
if not os.path.exists(cls.lockfile):
with open(cls.lockfile, 'w'): # nocoverage - rare locking case
pass
super(UploadSerializeMixin, cls).setUpClass(*args, **kwargs)
class ZulipTestCase(TestCase):
# Ensure that the test system just shows us diffs
maxDiff = None # type: Optional[int]
'''
WRAPPER_COMMENT:
We wrap calls to self.client.{patch,put,get,post,delete} for various
reasons. Some of this has to do with fixing encodings before calling
into the Django code. Some of this has to do with providing a future
path for instrumentation. Some of it's just consistency.
The linter will prevent direct calls to self.client.foo, so the wrapper
functions have to fake out the linter by using a local variable called
django_client to fool the regext.
'''
def __init__(self, *args, **kwargs):
# type: (*Any, **Any) -> None
# This method should be removed when we migrate to version 3 of Python
import six
if six.PY2:
self.assertRaisesRegex = self.assertRaisesRegexp
super(ZulipTestCase, self).__init__(*args, **kwargs)
DEFAULT_REALM = Realm.objects.get(string_id='zulip')
@instrument_url
def client_patch(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
"""
We need to urlencode, since Django's function won't do it for us.
"""
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.patch(url, encoded, **kwargs)
@instrument_url
def client_patch_multipart(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
"""
Use this for patch requests that have file uploads or
that need some sort of multi-part content. In the future
Django's test client may become a bit more flexible,
so we can hopefully eliminate this. (When you post
with the Django test client, it deals with MULTIPART_CONTENT
automatically, but not patch.)
"""
encoded = encode_multipart(BOUNDARY, info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.patch(
url,
encoded,
content_type=MULTIPART_CONTENT,
**kwargs)
@instrument_url
def client_put(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.put(url, encoded, **kwargs)
@instrument_url
def client_delete(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.delete(url, encoded, **kwargs)
@instrument_url
def client_options(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
encoded = urllib.parse.urlencode(info)
django_client = self.client # see WRAPPER_COMMENT
return django_client.options(url, encoded, **kwargs)
@instrument_url
def client_post(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
django_client = self.client # see WRAPPER_COMMENT
return django_client.post(url, info, **kwargs)
@instrument_url
def client_post_request(self, url, req):
# type: (Text, Any) -> HttpResponse
"""
We simulate hitting an endpoint here, although we
actually resolve the URL manually and hit the view
directly. We have this helper method to allow our
instrumentation to work for /notify_tornado and
future similar methods that require doing funny
things to a request object.
"""
match = resolve(url)
return match.func(req)
@instrument_url
def client_get(self, url, info={}, **kwargs):
# type: (Text, Dict[str, Any], **Any) -> HttpResponse
django_client = self.client # see WRAPPER_COMMENT
return django_client.get(url, info, **kwargs)
example_user_map = dict(
hamlet=u'[email protected]',
cordelia=u'[email protected]',
iago=u'[email protected]',
prospero=u'[email protected]',
othello=u'[email protected]',
AARON=u'[email protected]',
aaron=u'[email protected]',
ZOE=u'[email protected]',
)
mit_user_map = dict(
sipbtest=u"[email protected]",
starnine=u"[email protected]",
espuser=u"[email protected]",
)
# Non-registered test users
nonreg_user_map = dict(
test=u'[email protected]',
test1=u'[email protected]',
alice=u'[email protected]',
newuser=u'[email protected]',
bob=u'[email protected]',
cordelia=u'[email protected]',
newguy=u'[email protected]',
me=u'[email protected]',
)
def nonreg_user(self, name):
# type: (str) -> UserProfile
email = self.nonreg_user_map[name]
return get_user(email, get_realm_by_email_domain(email))
def example_user(self, name):
# type: (str) -> UserProfile
email = self.example_user_map[name]
return get_user(email, get_realm('zulip'))
def mit_user(self, name):
# type: (str) -> UserProfile
email = self.mit_user_map[name]
return get_user(email, get_realm('zephyr'))
def nonreg_email(self, name):
# type: (str) -> Text
return self.nonreg_user_map[name]
def example_email(self, name):
# type: (str) -> Text
return self.example_user_map[name]
def mit_email(self, name):
# type: (str) -> Text
return self.mit_user_map[name]
def notification_bot(self):
# type: () -> UserProfile
return get_user('[email protected]', get_realm('zulip'))
def login_with_return(self, email, password=None):
# type: (Text, Optional[Text]) -> HttpResponse
if password is None:
password = initial_password(email)
return self.client_post('/accounts/login/',
{'username': email, 'password': password})
def login(self, email, password=None, fails=False):
# type: (Text, Optional[Text], bool) -> HttpResponse
if password is None:
password = initial_password(email)
if not fails:
self.assertTrue(self.client.login(username=email, password=password))
else:
self.assertFalse(self.client.login(username=email, password=password))
def logout(self):
# type: () -> None
self.client.logout()
def register(self, email, password):
# type: (Text, Text) -> HttpResponse
self.client_post('/accounts/home/', {'email': email})
return self.submit_reg_form_for_user(email, password)
def submit_reg_form_for_user(self, email, password, realm_name="Zulip Test",
realm_subdomain="zuliptest",
from_confirmation='', full_name=None, timezone=u'', **kwargs):
# type: (Text, Text, Optional[Text], Optional[Text], Optional[Text], Optional[Text], Optional[Text], **Any) -> HttpResponse
"""
Stage two of the two-step registration process.
If things are working correctly the account should be fully
registered after this call.
You can pass the HTTP_HOST variable for subdomains via kwargs.
"""
if full_name is None:
full_name = email.replace("@", "_")
return self.client_post('/accounts/register/',
{'full_name': full_name,
'password': password,
'realm_name': realm_name,
'realm_subdomain': realm_subdomain,
'key': find_key_by_email(email),
'timezone': timezone,
'terms': True,
'from_confirmation': from_confirmation},
**kwargs)
def get_confirmation_url_from_outbox(self, email_address, path_pattern="(\S+)>"):
# type: (Text, Text) -> Text
from django.core.mail import outbox
for message in reversed(outbox):
if email_address in message.to:
return re.search(settings.EXTERNAL_HOST + path_pattern,
message.body).groups()[0]
else:
raise AssertionError("Couldn't find a confirmation email.")
def get_api_key(self, email):
# type: (Text) -> Text
if email not in API_KEYS:
API_KEYS[email] = get_user_profile_by_email(email).api_key
return API_KEYS[email]
def get_server_api_key(self, server_uuid):
# type: (Text) -> Text
if server_uuid not in API_KEYS:
API_KEYS[server_uuid] = get_remote_server_by_uuid(server_uuid).api_key
return API_KEYS[server_uuid]
def api_auth(self, identifier):
# type: (Text) -> Dict[str, Text]
"""
identifier: Can be an email or a remote server uuid.
"""
if is_remote_server(identifier):
api_key = self.get_server_api_key(identifier)
else:
api_key = self.get_api_key(identifier)
credentials = u"%s:%s" % (identifier, api_key)
return {
'HTTP_AUTHORIZATION': u'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
}
def get_streams(self, email, realm):
# type: (Text, Realm) -> List[Text]
"""
Helper function to get the stream names for a user
"""
user_profile = get_user(email, realm)
subs = Subscription.objects.filter(
user_profile=user_profile,
active=True,
recipient__type=Recipient.STREAM)
return [cast(Text, get_display_recipient(sub.recipient)) for sub in subs]
def send_message(self, sender_name, raw_recipients, message_type,
content=u"test content", subject=u"test", **kwargs):
# type: (Text, Union[Text, List[Text]], int, Text, Text, **Any) -> int
sender = get_user_profile_by_email(sender_name)
if message_type in [Recipient.PERSONAL, Recipient.HUDDLE]:
message_type_name = "private"
else:
message_type_name = "stream"
if isinstance(raw_recipients, six.string_types):
recipient_list = [raw_recipients]
else:
recipient_list = raw_recipients
(sending_client, _) = Client.objects.get_or_create(name="test suite")
return check_send_message(
sender, sending_client, message_type_name, recipient_list, subject,
content, forged=False, forged_timestamp=None,
forwarder_user_profile=sender, realm=sender.realm, **kwargs)
def get_messages(self, anchor=1, num_before=100, num_after=100,
use_first_unread_anchor=False):
# type: (int, int, int, bool) -> List[Dict[str, Any]]
post_params = {"anchor": anchor, "num_before": num_before,
"num_after": num_after,
"use_first_unread_anchor": ujson.dumps(use_first_unread_anchor)}
result = self.client_get("/json/messages", dict(post_params))
data = ujson.loads(result.content)
return data['messages']
def users_subscribed_to_stream(self, stream_name, realm):
# type: (Text, Realm) -> List[UserProfile]
stream = Stream.objects.get(name=stream_name, realm=realm)
recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
subscriptions = Subscription.objects.filter(recipient=recipient, active=True)
return [subscription.user_profile for subscription in subscriptions]
def assert_url_serves_contents_of_file(self, url, result):
# type: (str, bytes) -> None
response = self.client_get(url)
data = b"".join(response.streaming_content)
self.assertEqual(result, data)
def assert_json_success(self, result):
# type: (HttpResponse) -> Dict[str, Any]
"""
Successful POSTs return a 200 and JSON of the form {"result": "success",
"msg": ""}.
"""
self.assertEqual(result.status_code, 200, result)
json = ujson.loads(result.content)
self.assertEqual(json.get("result"), "success")
# We have a msg key for consistency with errors, but it typically has an
# empty value.
self.assertIn("msg", json)
return json
def get_json_error(self, result, status_code=400):
# type: (HttpResponse, int) -> Dict[str, Any]
self.assertEqual(result.status_code, status_code)
json = ujson.loads(result.content)
self.assertEqual(json.get("result"), "error")
return json['msg']
def assert_json_error(self, result, msg, status_code=400):
# type: (HttpResponse, Text, int) -> None
"""
Invalid POSTs return an error status code and JSON of the form
{"result": "error", "msg": "reason"}.
"""
self.assertEqual(self.get_json_error(result, status_code=status_code), msg)
def assert_length(self, queries, count):
# type: (Sized, int) -> None
actual_count = len(queries)
return self.assertTrue(actual_count == count,
"len(%s) == %s, != %s" % (queries, actual_count, count))
def assert_json_error_contains(self, result, msg_substring, status_code=400):
# type: (HttpResponse, Text, int) -> None
self.assertIn(msg_substring, self.get_json_error(result, status_code=status_code))
def assert_in_response(self, substring, response):
# type: (Text, HttpResponse) -> None
self.assertIn(substring, response.content.decode('utf-8'))
def assert_in_success_response(self, substrings, response):
# type: (List[Text], HttpResponse) -> None
self.assertEqual(response.status_code, 200)
decoded = response.content.decode('utf-8')
for substring in substrings:
self.assertIn(substring, decoded)
def assert_not_in_success_response(self, substrings, response):
# type: (List[Text], HttpResponse) -> None
self.assertEqual(response.status_code, 200)
decoded = response.content.decode('utf-8')
for substring in substrings:
self.assertNotIn(substring, decoded)
def fixture_data(self, type, action, file_type='json'):
# type: (Text, Text, Text) -> Text
return force_text(open(os.path.join(os.path.dirname(__file__),
"../webhooks/%s/fixtures/%s.%s" % (type, action, file_type))).read())
def make_stream(self, stream_name, realm=None, invite_only=False):
# type: (Text, Optional[Realm], Optional[bool]) -> Stream
if realm is None:
realm = self.DEFAULT_REALM
try:
stream = Stream.objects.create(
realm=realm,
name=stream_name,
invite_only=invite_only,
)
except IntegrityError: # nocoverage -- this is for bugs in the tests
raise Exception('''
%s already exists
Please call make_stream with a stream name
that is not already in use.''' % (stream_name,))
Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
return stream
# Subscribe to a stream directly
def subscribe_to_stream(self, email, stream_name, realm=None):
# type: (Text, Text, Optional[Realm]) -> Stream
if realm is None:
realm = get_realm_by_email_domain(email)
try:
stream = get_stream(stream_name, realm)
from_stream_creation = False
except Stream.DoesNotExist:
stream, from_stream_creation = create_stream_if_needed(realm, stream_name)
user_profile = get_user_profile_by_email(email)
bulk_add_subscriptions([stream], [user_profile], from_stream_creation=from_stream_creation)
return stream
def unsubscribe_from_stream(self, email, stream_name, realm):
# type: (Text, Text, Realm) -> None
user_profile = get_user(email, realm)
stream = get_stream(stream_name, realm)
bulk_remove_subscriptions([user_profile], [stream])
# Subscribe to a stream by making an API request
def common_subscribe_to_streams(self, email, streams, extra_post_data={}, invite_only=False):
# type: (Text, Iterable[Text], Dict[str, Any], bool) -> HttpResponse
post_data = {'subscriptions': ujson.dumps([{"name": stream} for stream in streams]),
'invite_only': ujson.dumps(invite_only)}
post_data.update(extra_post_data)
result = self.client_post("/api/v1/users/me/subscriptions", post_data, **self.api_auth(email))
return result
def send_json_payload(self, email, url, payload, stream_name=None, **post_params):
# type: (Text, Text, Union[Text, Dict[str, Any]], Optional[Text], **Any) -> Message
if stream_name is not None:
self.subscribe_to_stream(email, stream_name)
result = self.client_post(url, payload, **post_params)
self.assert_json_success(result)
# Check the correct message was sent
msg = self.get_last_message()
self.assertEqual(msg.sender.email, email)
if stream_name is not None:
self.assertEqual(get_display_recipient(msg.recipient), stream_name)
# TODO: should also validate recipient for private messages
return msg
def get_last_message(self):
# type: () -> Message
return Message.objects.latest('id')
def get_second_to_last_message(self):
# type: () -> Message
return Message.objects.all().order_by('-id')[1]
@contextmanager
def simulated_markdown_failure(self):
# type: () -> Iterator[None]
'''
This raises a failure inside of the try/except block of
bugdown.__init__.do_convert.
'''
with \
self.settings(ERROR_BOT=None), \
mock.patch('zerver.lib.bugdown.timeout', side_effect=KeyError('foo')), \
mock.patch('zerver.lib.bugdown.log_bugdown_error'):
yield
class WebhookTestCase(ZulipTestCase):
"""
Common for all webhooks tests
Override below class attributes and run send_and_test_message
If you create your url in uncommon way you can override build_webhook_url method
In case that you need modify body or create it without using fixture you can also override get_body method
"""
STREAM_NAME = None # type: Optional[Text]
TEST_USER_EMAIL = '[email protected]'
URL_TEMPLATE = None # type: Optional[Text]
FIXTURE_DIR_NAME = None # type: Optional[Text]
def setUp(self):
# type: () -> None
self.url = self.build_webhook_url()
def send_and_test_stream_message(self, fixture_name, expected_subject=None,
expected_message=None, content_type="application/json", **kwargs):
# type: (Text, Optional[Text], Optional[Text], Optional[Text], **Any) -> Message
payload = self.get_body(fixture_name)
if content_type is not None:
kwargs['content_type'] = content_type
msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
self.STREAM_NAME, **kwargs)
self.do_test_subject(msg, expected_subject)
self.do_test_message(msg, expected_message)
return msg
def send_and_test_private_message(self, fixture_name, expected_subject=None,
expected_message=None, content_type="application/json", **kwargs):
# type: (Text, Text, Text, str, **Any) -> Message
payload = self.get_body(fixture_name)
if content_type is not None:
kwargs['content_type'] = content_type
msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
stream_name=None, **kwargs)
self.do_test_message(msg, expected_message)
return msg
def build_webhook_url(self, *args, **kwargs):
# type: (*Any, **Any) -> Text
url = self.URL_TEMPLATE
if url.find("api_key") >= 0:
api_key = self.get_api_key(self.TEST_USER_EMAIL)
url = self.URL_TEMPLATE.format(api_key=api_key,
stream=self.STREAM_NAME)
else:
url = self.URL_TEMPLATE.format(stream=self.STREAM_NAME)
has_arguments = kwargs or args
if has_arguments and url.find('?') == -1:
url = "{}?".format(url)
else:
url = "{}&".format(url)
for key, value in kwargs.items():
url = "{}{}={}&".format(url, key, value)
for arg in args:
url = "{}{}&".format(url, arg)
return url[:-1] if has_arguments else url
def get_body(self, fixture_name):
# type: (Text) -> Union[Text, Dict[str, Text]]
"""Can be implemented either as returning a dictionary containing the
post parameters or as string containing the body of the request."""
return ujson.dumps(ujson.loads(self.fixture_data(self.FIXTURE_DIR_NAME, fixture_name)))
def do_test_subject(self, msg, expected_subject):
# type: (Message, Optional[Text]) -> None
if expected_subject is not None:
self.assertEqual(msg.topic_name(), expected_subject) |
def do_test_message(self, msg, expected_message):
# type: (Message, Optional[Text]) -> None
if expected_message is not None:
self.assertEqual(msg.content, expected_message) | random_line_split |
|
background.rs | use super::SkewTContext;
use crate::{
app::config::{self},
coords::TPCoords,
gui::DrawingArgs,
};
use metfor::{Celsius, CelsiusDiff, HectoPascal, Quantity};
impl SkewTContext {
pub fn draw_clear_background(&self, args: DrawingArgs<'_, '_>) {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.background_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
const MINT: Celsius = Celsius(-160.0);
const MAXT: Celsius = Celsius(100.0);
self.draw_temperature_band(MINT, MAXT, args);
}
pub fn draw_temperature_banding(&self, args: DrawingArgs<'_, '_>) {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.background_band_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
let mut start_line = -160i32;
while start_line < 100 {
let t1 = Celsius(f64::from(start_line));
let t2 = t1 + CelsiusDiff(10.0);
self.draw_temperature_band(t1, t2, args);
start_line += 20;
}
}
pub fn draw_hail_growth_zone(&self, args: DrawingArgs<'_, '_>) {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.hail_zone_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
self.draw_temperature_band(Celsius(-30.0), Celsius(-10.0), args);
}
pub fn draw_dendtritic_growth_zone(&self, args: DrawingArgs<'_, '_>) {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.dendritic_zone_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
self.draw_temperature_band(Celsius(-18.0), Celsius(-12.0), args);
}
fn draw_temperature_band(&self, cold_t: Celsius, warm_t: Celsius, args: DrawingArgs<'_, '_>) {
let cr = args.cr;
// Assume color has already been set up for us.
const MAXP: HectoPascal = config::MAXP;
const MINP: HectoPascal = config::MINP;
let mut coords = [
(warm_t.unpack(), MAXP.unpack()),
(warm_t.unpack(), MINP.unpack()),
(cold_t.unpack(), MINP.unpack()),
(cold_t.unpack(), MAXP.unpack()),
];
// Convert points to screen coords
for coord in &mut coords {
let screen_coords = self.convert_tp_to_screen(TPCoords {
temperature: Celsius(coord.0),
pressure: HectoPascal(coord.1),
});
coord.0 = screen_coords.x;
coord.1 = screen_coords.y;
}
let mut coord_iter = coords.iter();
for coord in coord_iter.by_ref().take(1) {
cr.move_to(coord.0, coord.1);
}
for coord in coord_iter {
cr.line_to(coord.0, coord.1); | }
} | }
cr.close_path();
cr.fill().unwrap(); | random_line_split |
background.rs | use super::SkewTContext;
use crate::{
app::config::{self},
coords::TPCoords,
gui::DrawingArgs,
};
use metfor::{Celsius, CelsiusDiff, HectoPascal, Quantity};
impl SkewTContext {
pub fn draw_clear_background(&self, args: DrawingArgs<'_, '_>) {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.background_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
const MINT: Celsius = Celsius(-160.0);
const MAXT: Celsius = Celsius(100.0);
self.draw_temperature_band(MINT, MAXT, args);
}
pub fn draw_temperature_banding(&self, args: DrawingArgs<'_, '_>) {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.background_band_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
let mut start_line = -160i32;
while start_line < 100 {
let t1 = Celsius(f64::from(start_line));
let t2 = t1 + CelsiusDiff(10.0);
self.draw_temperature_band(t1, t2, args);
start_line += 20;
}
}
pub fn draw_hail_growth_zone(&self, args: DrawingArgs<'_, '_>) {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.hail_zone_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
self.draw_temperature_band(Celsius(-30.0), Celsius(-10.0), args);
}
pub fn draw_dendtritic_growth_zone(&self, args: DrawingArgs<'_, '_>) |
fn draw_temperature_band(&self, cold_t: Celsius, warm_t: Celsius, args: DrawingArgs<'_, '_>) {
let cr = args.cr;
// Assume color has already been set up for us.
const MAXP: HectoPascal = config::MAXP;
const MINP: HectoPascal = config::MINP;
let mut coords = [
(warm_t.unpack(), MAXP.unpack()),
(warm_t.unpack(), MINP.unpack()),
(cold_t.unpack(), MINP.unpack()),
(cold_t.unpack(), MAXP.unpack()),
];
// Convert points to screen coords
for coord in &mut coords {
let screen_coords = self.convert_tp_to_screen(TPCoords {
temperature: Celsius(coord.0),
pressure: HectoPascal(coord.1),
});
coord.0 = screen_coords.x;
coord.1 = screen_coords.y;
}
let mut coord_iter = coords.iter();
for coord in coord_iter.by_ref().take(1) {
cr.move_to(coord.0, coord.1);
}
for coord in coord_iter {
cr.line_to(coord.0, coord.1);
}
cr.close_path();
cr.fill().unwrap();
}
}
| {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.dendritic_zone_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
self.draw_temperature_band(Celsius(-18.0), Celsius(-12.0), args);
} | identifier_body |
background.rs | use super::SkewTContext;
use crate::{
app::config::{self},
coords::TPCoords,
gui::DrawingArgs,
};
use metfor::{Celsius, CelsiusDiff, HectoPascal, Quantity};
impl SkewTContext {
pub fn draw_clear_background(&self, args: DrawingArgs<'_, '_>) {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.background_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
const MINT: Celsius = Celsius(-160.0);
const MAXT: Celsius = Celsius(100.0);
self.draw_temperature_band(MINT, MAXT, args);
}
pub fn draw_temperature_banding(&self, args: DrawingArgs<'_, '_>) {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.background_band_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
let mut start_line = -160i32;
while start_line < 100 {
let t1 = Celsius(f64::from(start_line));
let t2 = t1 + CelsiusDiff(10.0);
self.draw_temperature_band(t1, t2, args);
start_line += 20;
}
}
pub fn draw_hail_growth_zone(&self, args: DrawingArgs<'_, '_>) {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.hail_zone_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
self.draw_temperature_band(Celsius(-30.0), Celsius(-10.0), args);
}
pub fn draw_dendtritic_growth_zone(&self, args: DrawingArgs<'_, '_>) {
let (cr, config) = (args.cr, args.ac.config.borrow());
let rgba = config.dendritic_zone_rgba;
cr.set_source_rgba(rgba.0, rgba.1, rgba.2, rgba.3);
self.draw_temperature_band(Celsius(-18.0), Celsius(-12.0), args);
}
fn | (&self, cold_t: Celsius, warm_t: Celsius, args: DrawingArgs<'_, '_>) {
let cr = args.cr;
// Assume color has already been set up for us.
const MAXP: HectoPascal = config::MAXP;
const MINP: HectoPascal = config::MINP;
let mut coords = [
(warm_t.unpack(), MAXP.unpack()),
(warm_t.unpack(), MINP.unpack()),
(cold_t.unpack(), MINP.unpack()),
(cold_t.unpack(), MAXP.unpack()),
];
// Convert points to screen coords
for coord in &mut coords {
let screen_coords = self.convert_tp_to_screen(TPCoords {
temperature: Celsius(coord.0),
pressure: HectoPascal(coord.1),
});
coord.0 = screen_coords.x;
coord.1 = screen_coords.y;
}
let mut coord_iter = coords.iter();
for coord in coord_iter.by_ref().take(1) {
cr.move_to(coord.0, coord.1);
}
for coord in coord_iter {
cr.line_to(coord.0, coord.1);
}
cr.close_path();
cr.fill().unwrap();
}
}
| draw_temperature_band | identifier_name |
legacy.mouse.shim.js | (function($, undefined) {
/**
* Plugin to force OpenSeadragon to use the legacy mouse pointer event model
*/
$.MouseTracker.subscribeEvents = [ "click", "dblclick", "keypress", "focus", "blur", $.MouseTracker.wheelEventName ];
if( $.MouseTracker.wheelEventName == "DOMMouseScroll" ) |
$.MouseTracker.havePointerEvents = false;
if ( $.Browser.vendor === $.BROWSERS.IE && $.Browser.version < 9 ) {
$.MouseTracker.subscribeEvents.push( "mouseenter", "mouseleave" );
$.MouseTracker.haveMouseEnter = true;
} else {
$.MouseTracker.subscribeEvents.push( "mouseover", "mouseout" );
$.MouseTracker.haveMouseEnter = false;
}
$.MouseTracker.subscribeEvents.push( "mousedown", "mouseup", "mousemove" );
if ( 'ontouchstart' in window ) {
// iOS, Android, and other W3c Touch Event implementations
// (see http://www.w3.org/TR/touch-events/)
// (see https://developer.apple.com/library/ios/documentation/AppleApplications/Reference/SafariWebContent/HandlingEvents/HandlingEvents.html)
// (see https://developer.apple.com/library/safari/documentation/AppleApplications/Reference/SafariWebContent/HandlingEvents/HandlingEvents.html)
$.MouseTracker.subscribeEvents.push( "touchstart", "touchend", "touchmove", "touchcancel" );
}
if ( 'ongesturestart' in window ) {
// iOS (see https://developer.apple.com/library/ios/documentation/AppleApplications/Reference/SafariWebContent/HandlingEvents/HandlingEvents.html)
// Subscribe to these to prevent default gesture handling
$.MouseTracker.subscribeEvents.push( "gesturestart", "gesturechange" );
}
$.MouseTracker.mousePointerId = "legacy-mouse";
$.MouseTracker.maxTouchPoints = 10;
}(OpenSeadragon));
| {
// Older Firefox
$.MouseTracker.subscribeEvents.push( "MozMousePixelScroll" );
} | conditional_block |
legacy.mouse.shim.js | (function($, undefined) {
/**
* Plugin to force OpenSeadragon to use the legacy mouse pointer event model
*/ | $.MouseTracker.subscribeEvents.push( "MozMousePixelScroll" );
}
$.MouseTracker.havePointerEvents = false;
if ( $.Browser.vendor === $.BROWSERS.IE && $.Browser.version < 9 ) {
$.MouseTracker.subscribeEvents.push( "mouseenter", "mouseleave" );
$.MouseTracker.haveMouseEnter = true;
} else {
$.MouseTracker.subscribeEvents.push( "mouseover", "mouseout" );
$.MouseTracker.haveMouseEnter = false;
}
$.MouseTracker.subscribeEvents.push( "mousedown", "mouseup", "mousemove" );
if ( 'ontouchstart' in window ) {
// iOS, Android, and other W3c Touch Event implementations
// (see http://www.w3.org/TR/touch-events/)
// (see https://developer.apple.com/library/ios/documentation/AppleApplications/Reference/SafariWebContent/HandlingEvents/HandlingEvents.html)
// (see https://developer.apple.com/library/safari/documentation/AppleApplications/Reference/SafariWebContent/HandlingEvents/HandlingEvents.html)
$.MouseTracker.subscribeEvents.push( "touchstart", "touchend", "touchmove", "touchcancel" );
}
if ( 'ongesturestart' in window ) {
// iOS (see https://developer.apple.com/library/ios/documentation/AppleApplications/Reference/SafariWebContent/HandlingEvents/HandlingEvents.html)
// Subscribe to these to prevent default gesture handling
$.MouseTracker.subscribeEvents.push( "gesturestart", "gesturechange" );
}
$.MouseTracker.mousePointerId = "legacy-mouse";
$.MouseTracker.maxTouchPoints = 10;
}(OpenSeadragon)); |
$.MouseTracker.subscribeEvents = [ "click", "dblclick", "keypress", "focus", "blur", $.MouseTracker.wheelEventName ];
if( $.MouseTracker.wheelEventName == "DOMMouseScroll" ) {
// Older Firefox | random_line_split |
moh.py | #
# Medal of Honor Parser for BigBrotherBot(B3) (www.bigbrotherbot.net)
# Copyright (C) 2010 James 'Bakes' Baker ([email protected])
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
# CHANGELOG
# 2010/11/07 - 0.10 - Courgette
# * add new maps info
# 2010/11/08 - 0.9.2 - GrosBedo
# * messages can now be empty (no message broadcasted on kick/tempban/ban/unban)
# 2010/10/27 - 0.9.1 - GrosBedo
# * messages now support named $variables instead of %s
# 2010/10/27 - 0.9 - Courgette
# * when banning, also kick to take over MoH engine failure to enforce bans. This
# will need more test to determine how to make the MoH engine enforce temp bans.
# 2010/10/24 - 0.8 - Courgette
# * fix OnServerRoundover and OnServerRoundoverplayers
# 2010/10/24 - 0.7 - Courgette
# * add missing getTeam() method
# 2010/10/24 - 0.6 - Courgette
# * minor fixes
# 2010/10/23 - 0.5 - Courgette
# * create specific events : EVT_GAME_ROUND_PLAYER_SCORES and EVT_GAME_ROUND_TEAM_SCORES
# * now fires native B3 event EVT_GAME_ROUND_END
# * manage team changed event correctly
# 2010/10/23 - 0.4 - Courgette
# * refactor inheriting from frostbite AbstratParser
# * change available server var list
# 2010/10/10 - 0.3 - Bakes
# * getEasyName is now implemented and working, getHardName is implemented
# but not working.
# 2010/10/07 - 0.2 - Courgette
# * add gameName property. Fix SAY_LINE_MAX_LENGTH
# 2010/09/25 - 0.1 - Bakes
# * Initial version of MoH parser - hasn't been tested with OnKill events yet
# but basic commands seem to work.
# 2010-11-21 - 1.0 - Courgette
# * add rotateMap and changeMap to fix !maprotate and !map#
# 2011-02-01 - 1.1 - xlr8or
# * adapted to server R9 version 615937 - fixed onPlayerSpawn and vars.noCrosshairs errors
# 2011-03-05 - 1.2 - xlr8or
# * admin.kickPlayer after ban now in try/except to avoid error msg when player is already gone
# 2011-04-09 - 1.2.1 - Courgette
# * import missing time module
# 2011-05-22 - 1.2.2 - Courgette
# * move events EVT_GAME_ROUND_PLAYER_SCORES and EVT_GAME_ROUND_TEAM_SCORES to abstract Frostbite parser
# as they also exist in BFBC2
# 2011-06-04 - 1.3.0 - Courgette
# makes use of the new pluginsStarted parser hook
# 2011-06-05 - 1.4.0 - Courgette
# * change data format for EVT_CLIENT_BAN_TEMP and EVT_CLIENT_BAN events
# 2012-10-60 - 1.5 - Courgette
# reflect changes in abstract parser 1.6
#
__author__ = 'Bakes, Courgette'
__version__ = '1.5'
import time
import b3.events
from b3.parsers.frostbite.abstractParser import AbstractParser
from b3.parsers.frostbite.util import PlayerInfoBlock
import b3.functions
SAY_LINE_MAX_LENGTH = 100
class MohParser(AbstractParser):
gameName = 'moh'
_gameServerVars = (
'serverName', # vars.serverName [name] Set the server name
'gamePassword', # vars.gamePassword [password] Set the game password for the server
'punkBuster', # vars.punkBuster [enabled] Set if the server will use PunkBuster or not
'hardCore', # vars.hardCore[enabled] Set hardcore mode
'ranked', # vars.ranked [enabled] Set ranked or not
'skillLimit', # vars.skillLimit [lower, upper] Set the skill limits allowed on to the server
'noUnlocks', # vars.noUnlocks [enabled] Set if unlocks should be disabled
'noAmmoPickups', # vars.noAmmoPickups [enabled] Set if pickups should be disabled
'realisticHealth', # vars.realisticHealth [enabled] Set if health should be realistic
'supportAction', # vars.supportAction [enabled] Set if support action should be enabled
'preRoundLimit', # vars.preRoundLimit [upper, lower] Set pre round limits. Setting both to zero means the game uses whatever settings are used on the specific levels. On ranked servers, the lowest values allowed are lower = 2 and upper = 4.
'roundStartTimerPlayersLimit', # vars.roundStartTimerPlayersLimit [limit] Get/Set the number of players that need to spawn on each team for the round start timer to start counting down.
'roundStartTimerDelay', # vars.roundStartTimerDelay [delay] If set to other than -1, this value overrides the round start delay set on the individual levels.
'tdmScoreCounterMaxScore', # vars.tdmScoreCounterMaxScore [score] If set to other than -1, this value overrides the score needed to win a round of Team Assault, Sector Control or Hot Zone.
'clanTeams', # vars.clanTeams [enabled] Set if clan teams should be used
'friendlyFire', # vars.friendlyFire [enabled] Set if the server should allow team damage
'currentPlayerLimit', # vars.currentPlayerLimit Retrieve the current maximum number of players
'maxPlayerLimit', # vars.maxPlayerLimit Retrieve the server-enforced maximum number of players
'playerLimit', # vars.playerLimit [nr of players] Set desired maximum number of players
'bannerUrl', # vars.bannerUrl [url] Set banner url
'serverDescription', # vars.serverDescription [description] Set server description
'noCrosshairs', # vars.noCrosshairs [enabled] Set if crosshairs for all weapons is hidden
'noSpotting', # vars.noSpotting [enabled] Set if spotted targets are disabled in the 3d-world
'teamKillCountForKick', # vars.teamKillCountForKick [count] Set number of teamkills allowed during a round
'teamKillValueForKick', # vars.teamKillValueForKick [count] Set max kill-value allowed for a player before he/she is kicked
'teamKillValueIncrease', # vars.teamKillValueIncrease [count] Set kill-value increase for a teamkill
'teamKillValueDecreasePerSecond', # vars.teamKillValueDecreasePerSecond [count] Set kill-value decrease per second
'idleTimeout', # vars.idleTimeout [time] Set idle timeout vars.profanityFilter [enabled] Set if profanity filter is enabled
)
def startup(self):
AbstractParser.startup(self)
# create the 'Server' client
self.clients.newClient('Server', guid='Server', name='Server', hide=True, pbid='Server', team=b3.TEAM_UNKNOWN)
if self.config.has_option('moh', 'max_say_line_length'):
try:
maxlength = self.config.getint('moh', 'max_say_line_length')
if maxlength > SAY_LINE_MAX_LENGTH:
self.warning('max_say_line_length cannot be greater than %s' % SAY_LINE_MAX_LENGTH)
maxlength = SAY_LINE_MAX_LENGTH
if maxlength < 20:
self.warning('max_say_line_length is way too short. using default')
maxlength = self._settings['line_length']
self._settings['line_length'] = maxlength
self._settings['min_wrap_length'] = maxlength
except Exception, err:
self.error('failed to read max_say_line_length setting "%s" : %s' % (self.config.get('moh', 'max_say_line_length'), err))
self.debug('line_length: %s' % self._settings['line_length'])
self.verbose('GameType: %s, Map: %s' %(self.game.gameType, self.game.mapName))
def pluginsStarted(self):
|
def checkVersion(self):
version = self.output.write('version')
self.info('server version : %s' % version)
if version[0] != 'MOH':
raise Exception("the moh parser can only work with Medal of Honor")
def getClient(self, cid, _guid=None):
"""Get a connected client from storage or create it
B3 CID <--> MoH character name
B3 GUID <--> MoH EA_guid
"""
# try to get the client from the storage of already authed clients
client = self.clients.getByCID(cid)
if not client:
if cid == 'Server':
return self.clients.newClient('Server', guid='Server', name='Server', hide=True, pbid='Server', team=b3.TEAM_UNKNOWN)
# must be the first time we see this client
words = self.write(('admin.listPlayers', 'player', cid))
pib = PlayerInfoBlock(words)
if len(pib) == 0:
self.debug('no such client found')
return None
p = pib[0]
cid = p['name']
name = p['name']
# Let's see if we have a guid, either from the PlayerInfoBlock, or passed to us by OnPlayerAuthenticated()
if p['guid']:
guid = p['guid']
elif _guid:
guid = _guid
else:
# If we still don't have a guid, we cannot create a newclient without the guid!
self.debug('No guid for %s, waiting for next event.' %name)
return None
if 'clanTag' in p and len(p['clanTag']) > 0:
name = "[" + p['clanTag'] + "] " + p['name']
client = self.clients.newClient(cid, guid=guid, name=name, team=self.getTeam(p['teamId']), teamId=int(p['teamId']), data=p)
self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_JOIN, p, client))
return client
def getHardName(self, mapname):
""" Change real name to level name """
mapname = mapname.lower()
if mapname.startswith('mazar-i-sharif airfield'):
return 'levels/mp_01'
elif mapname.startswith('bagram hanger'):
return 'levels/mp_01_elimination'
elif mapname.startswith('shah-i-knot mountains'):
return 'levels/mp_02'
elif mapname.startswith('hindu kush pass'):
return 'levels/mp_02_koth'
elif mapname.startswith('khyber caves'):
return 'levels/mp_03'
#return 'levels/mp_03_elimination'
elif mapname.startswith('helmand valley'):
return 'levels/mp_04'
elif mapname.startswith('helmand river hill'):
return 'levels/mp_04_koth'
elif mapname.startswith('kandahar marketplace'):
return 'levels/mp_05'
elif mapname.startswith('diwagal camp'):
return 'levels/mp_06'
#return 'mp_06_elimination'
elif mapname.startswith('korengal outpost'):
return 'levels/mp_07_koth'
elif mapname.startswith('kunar base'):
return 'levels/mp_08'
elif mapname.startswith('kabul city ruins'):
return 'levels/mp_09'
#return 'levels/mp_09_elimination'
elif mapname.startswith('garmzir town'):
return 'levels/mp_10'
else:
self.warning('unknown level name \'%s\'. Please make sure you have entered a valid mapname' % mapname)
return mapname
def getEasyName(self, mapname):
""" Change levelname to real name """
if mapname.startswith('levels/mp_01_elimination'):
return 'Bagram Hanger'
elif mapname.startswith('levels/mp_01'):
return 'Mazar-i-Sharif Airfield'
elif mapname.startswith('levels/mp_02_koth'):
return 'Hindu Kush Pass'
elif mapname.startswith('levels/mp_02'):
return 'Shah-i-Knot Mountains'
elif mapname.startswith('levels/mp_03'):
return 'Khyber Caves'
elif mapname.startswith('levels/mp_04_koth'):
return 'Helmand River Hill'
elif mapname.startswith('levels/mp_04'):
return 'Helmand Valley'
elif mapname.startswith('levels/mp_05'):
return 'Kandahar Marketplace'
elif mapname.startswith('levels/mp_06'):
return 'Diwagal Camp'
elif mapname.startswith('levels/mp_07_koth'):
return 'Korengal Outpost'
elif mapname.startswith('levels/mp_08'):
return 'Kunar Base'
elif mapname.startswith('levels/mp_09'):
return 'Kabul City Ruins'
elif mapname.startswith('levels/mp_10'):
return 'Garmzir Town'
else:
self.warning('unknown level name \'%s\'. Please report this on B3 forums' % mapname)
return mapname
def getServerVars(self):
"""Update the game property from server fresh data"""
try: self.game.serverName = self.getCvar('serverName').getBoolean()
except: pass
try: self.game.gamePassword = self.getCvar('gamePassword').getBoolean()
except: pass
try: self.game.punkBuster = self.getCvar('punkBuster').getBoolean()
except: pass
try: self.game.hardCore = self.getCvar('hardCore').getBoolean()
except: pass
try: self.game.ranked = self.getCvar('ranked').getBoolean()
except: pass
try: self.game.skillLimit = self.getCvar('skillLimit').getBoolean()
except: pass
try: self.game.noUnlocks = self.getCvar('noUnlocks').getBoolean()
except: pass
try: self.game.noAmmoPickups = self.getCvar('noAmmoPickups').getBoolean()
except: pass
try: self.game.realisticHealth = self.getCvar('realisticHealth').getBoolean()
except: pass
try: self.game.supportAction = self.getCvar('supportAction').getBoolean()
except: pass
try: self.game.preRoundLimit = self.getCvar('preRoundLimit').getBoolean()
except: pass
try: self.game.roundStartTimerPlayersLimit = self.getCvar('roundStartTimerPlayersLimit').getBoolean()
except: pass
try: self.game.roundStartTimerDelay = self.getCvar('roundStartTimerDelay').getBoolean()
except: pass
try: self.game.tdmScoreCounterMaxScore = self.getCvar('tdmScoreCounterMaxScore').getBoolean()
except: pass
try: self.game.clanTeams = self.getCvar('clanTeams').getBoolean()
except: pass
try: self.game.friendlyFire = self.getCvar('friendlyFire').getBoolean()
except: pass
try: self.game.currentPlayerLimit = self.getCvar('currentPlayerLimit').getBoolean()
except: pass
try: self.game.maxPlayerLimit = self.getCvar('maxPlayerLimit').getBoolean()
except: pass
try: self.game.playerLimit = self.getCvar('playerLimit').getBoolean()
except: pass
try: self.game.bannerUrl = self.getCvar('bannerUrl').getBoolean()
except: pass
try: self.game.serverDescription = self.getCvar('serverDescription').getBoolean()
except: pass
try: self.game.noCrosshair = self.getCvar('noCrosshair').getBoolean()
except: pass
try: self.game.noSpotting = self.getCvar('noSpotting').getBoolean()
except: pass
try: self.game.teamKillCountForKick = self.getCvar('teamKillCountForKick').getBoolean()
except: pass
try: self.game.teamKillValueForKick = self.getCvar('teamKillValueForKick').getBoolean()
except: pass
try: self.game.teamKillValueIncrease = self.getCvar('teamKillValueIncrease').getBoolean()
except: pass
try: self.game.teamKillValueDecreasePerSecond = self.getCvar('teamKillValueDecreasePerSecond').getBoolean()
except: pass
try: self.game.idleTimeout = self.getCvar('idleTimeout').getBoolean()
except: pass
def getTeam(self, team):
"""convert MOH team numbers to B3 team numbers"""
team = int(team)
if team == 1:
return b3.TEAM_RED
elif team == 2:
return b3.TEAM_BLUE
elif team == 3:
return b3.TEAM_SPEC
else:
return b3.TEAM_UNKNOWN
def OnPlayerSpawn(self, action, data):
"""
Request: player.onSpawn <soldier name: string> <kit: string> <weapon: string> <specializations: 3 x string>
"""
if len(data) < 2:
return None
spawner = self.getClient(data[0])
kit = data[1]
weapon = data[2]
spec1 = data[3]
spec2 = data[4]
spec3 = data[5]
event = b3.events.EVT_CLIENT_SPAWN
return b3.events.Event(event, (kit, weapon, spec1, spec2, spec3), spawner)
def OnPlayerTeamchange(self, action, data):
"""
player.onTeamChange <soldier name: player name> <team: Team ID>
Effect: Player might have changed team
"""
#['player.onTeamChange', 'Dalich', '2']
client = self.getClient(data[0])
if client:
client.team = self.getTeam(data[1]) # .team setter will send team change event
client.teamId = int(data[1])
    def tempban(self, client, reason='', duration=2, admin=None, silent=False, *kwargs):
        """Temporarily ban a client from the game server.
        `duration` is normalized to minutes via b3.functions.time2minutes().
        NOTE(review): `*kwargs` was most likely meant to be `**kwargs`;
        left untouched so the signature stays identical for callers.
        """
        duration = b3.functions.time2minutes(duration)
        if isinstance(client, str):
            # we only got a raw cid instead of a Client object: just kick
            self.write(self.getCommand('kick', cid=client, reason=reason[:80]))
            return
        elif admin:
            fullreason = self.getMessage('temp_banned_by', self.getMessageVariables(client=client, reason=reason, admin=admin, banduration=b3.functions.minutesStr(duration)))
        else:
            fullreason = self.getMessage('temp_banned', self.getMessageVariables(client=client, reason=reason, banduration=b3.functions.minutesStr(duration)))
        fullreason = self.stripColors(fullreason)
        reason = self.stripColors(reason)
        if self.PunkBuster:
            # punkbuster acts odd if you ban for more than a day
            # tempban for a day here and let b3 re-ban if the player
            # comes back
            if duration > 1440:
                duration = 1440
            self.PunkBuster.kick(client, duration, reason)
        self.write(('banList.list',))
        self.write(self.getCommand('tempban', guid=client.guid, duration=duration*60, reason=reason[:80]))
        self.write(('banList.list',))
        ## also kick as the MoH server seems not to enforce all bans correctly
        self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
        if not silent and fullreason != '':
            self.say(fullreason)
        self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_BAN_TEMP, {'reason': reason,
                                                                        'duration': duration,
                                                                        'admin': admin}
                                        , client))
    def ban(self, client, reason='', admin=None, silent=False, *kwargs):
        """Permanent ban
        NOTE(review): `*kwargs` was most likely meant to be `**kwargs`;
        left untouched so the signature stays identical for callers.
        """
        self.debug('BAN : client: %s, reason: %s', client, reason)
        if isinstance(client, b3.clients.Client):
            self.write(self.getCommand('ban', guid=client.guid, reason=reason[:80]))
            try:
                # the player may already be gone, hence the try/except
                self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
            except:
                pass
            return
        # NOTE(review): everything below is unreachable for b3.clients.Client
        # instances because of the early return above, yet it reads Client
        # attributes (exactName, cid, ip). Possibly dead/legacy code - confirm.
        if admin:
            reason = self.getMessage('banned_by', client.exactName, admin.exactName, reason)
        else:
            reason = self.getMessage('banned', client.exactName, reason)
        reason = self.stripColors(reason)
        if client.cid is None:
            # ban by ip, this happens when we !permban @xx a player that is not connected
            self.debug('EFFECTIVE BAN : %s',self.getCommand('banByIp', ip=client.ip, reason=reason[:80]))
            self.write(self.getCommand('banByIp', ip=client.ip, reason=reason[:80]))
            if admin:
                admin.message('banned: %s (@%s). His last ip (%s) has been added to banlist'%(client.exactName, client.id, client.ip))
        else:
            # ban by cid
            # NOTE(review): the debug line formats the 'ban' command with guid=
            # while the actual write below uses cid= - confirm which is intended.
            self.debug('EFFECTIVE BAN : %s',self.getCommand('ban', guid=client.guid, reason=reason[:80]))
            self.write(('banList.list',))
            self.write(self.getCommand('ban', cid=client.cid, reason=reason[:80]))
            self.write(('banList.list',))
            self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
            if admin:
                admin.message('banned: %s (@%s) has been added to banlist'%(client.exactName, client.id))
        if self.PunkBuster:
            self.PunkBuster.banGUID(client, reason)
        if not silent:
            self.say(reason)
        self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_BAN, {'reason': reason, 'admin': admin}, client))
def rotateMap(self):
"""Load the next map (not level). If the current game mod plays each level twice
to get teams the chance to play both sides, then this rotate a second
time to really switch to the next map"""
nextIndex = self.getNextMapIndex()
if nextIndex == -1:
# No map in map rotation list, just call admin.runNextLevel
self.write(('admin.runNextRound',))
else:
self.write(('mapList.nextLevelIndex', nextIndex))
self.write(('admin.runNextRound',))
    def changeMap(self, map):
        """Change to the given map
        1) determine the level name
        If map is of the form 'mp_001' and 'Kaboul' is a supported
        level for the current game mod, then this level is loaded.
        In other cases, this method assumes it is given a 'easy map name' (like
        'Port Valdez') and it will do its best to find the level name that seems
        to be for 'Port Valdez' within the supported levels.
        If no match is found, then instead of loading the map, this method
        returns a list of candidate map names
        2) if we got a level name
        if the level is not in the current rotation list, then add it to
        the map list and load it
        """
        supportedMaps = self.getSupportedMaps()
        # accept short level names like 'mp_01' by prefixing 'levels/'
        if 'levels/%s'%map in supportedMaps:
            map = 'levels/%s'%map
        if map not in supportedMaps:
            # fuzzy match against the human friendly names
            match = self.getMapsSoundingLike(map)
            if len(match) == 1:
                map = match[0]
            else:
                # ambiguous: return the candidates so the caller can report them
                return match
        if map in supportedMaps:
            levelnames = self.write(('mapList.list',))
            if map not in levelnames:
                # add the map to the map list
                nextIndex = self.getNextMapIndex()
                if nextIndex == -1:
                    # empty rotation list: append and load from index 0
                    self.write(('mapList.append', map))
                    nextIndex = 0
                else:
                    if nextIndex == 0:
                        # case where the map list contains only 1 map
                        nextIndex = 1
                    self.write(('mapList.insert', nextIndex, map))
            else:
                # map already in the rotation list: locate its index
                nextIndex = 0
                while nextIndex < len(levelnames) and levelnames[nextIndex] != map:
                    nextIndex += 1
            self.say('Changing map to %s' % map)
            # give players a moment to read the announcement
            time.sleep(1)
            self.write(('mapList.nextLevelIndex', nextIndex))
            self.write(('admin.runNextRound', ))
| self.info('connecting all players...')
plist = self.getPlayerList()
for cid, p in plist.iteritems():
client = self.clients.getByCID(cid)
if not client:
#self.clients.newClient(playerdata['cid'], guid=playerdata['guid'], name=playerdata['name'], team=playerdata['team'], squad=playerdata['squad'])
name = p['name']
if 'clanTag' in p and len(p['clanTag']) > 0:
name = "[" + p['clanTag'] + "] " + p['name']
self.debug('client %s found on the server' % cid)
client = self.clients.newClient(cid, guid=p['guid'], name=name, team=p['teamId'], data=p)
self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_JOIN, p, client)) | identifier_body |
moh.py | #
# Medal of Honor Parser for BigBrotherBot(B3) (www.bigbrotherbot.net)
# Copyright (C) 2010 James 'Bakes' Baker ([email protected])
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
# CHANGELOG
# 2010/11/07 - 0.10 - Courgette
# * add new maps info
# 2010/11/08 - 0.9.2 - GrosBedo
# * messages can now be empty (no message broadcasted on kick/tempban/ban/unban)
# 2010/10/27 - 0.9.1 - GrosBedo
# * messages now support named $variables instead of %s
# 2010/10/27 - 0.9 - Courgette
# * when banning, also kick to take over MoH engine failure to enforce bans. This
# will need more test to determine how to make the MoH engine enforce temp bans.
# 2010/10/24 - 0.8 - Courgette
# * fix OnServerRoundover and OnServerRoundoverplayers
# 2010/10/24 - 0.7 - Courgette
# * add missing getTeam() method
# 2010/10/24 - 0.6 - Courgette
# * minor fixes
# 2010/10/23 - 0.5 - Courgette
# * create specific events : EVT_GAME_ROUND_PLAYER_SCORES and EVT_GAME_ROUND_TEAM_SCORES
# * now fires native B3 event EVT_GAME_ROUND_END
# * manage team changed event correctly
# 2010/10/23 - 0.4 - Courgette
# * refactor inheriting from frostbite AbstractParser
# * change available server var list
# 2010/10/10 - 0.3 - Bakes
# * getEasyName is now implemented and working, getHardName is implemented
# but not working.
# 2010/10/07 - 0.2 - Courgette
# * add gameName property. Fix SAY_LINE_MAX_LENGTH
# 2010/09/25 - 0.1 - Bakes
# * Initial version of MoH parser - hasn't been tested with OnKill events yet
# but basic commands seem to work.
# 2010-11-21 - 1.0 - Courgette
# * add rotateMap and changeMap to fix !maprotate and !map#
# 2011-02-01 - 1.1 - xlr8or
# * adapted to server R9 version 615937 - fixed onPlayerSpawn and vars.noCrosshairs errors
# 2011-03-05 - 1.2 - xlr8or
# * admin.kickPlayer after ban now in try/except to avoid error msg when player is already gone
# 2011-04-09 - 1.2.1 - Courgette
# * import missing time module
# 2011-05-22 - 1.2.2 - Courgette
# * move events EVT_GAME_ROUND_PLAYER_SCORES and EVT_GAME_ROUND_TEAM_SCORES to abstract Frostbite parser
# as they also exist in BFBC2
# 2011-06-04 - 1.3.0 - Courgette
# makes use of the new pluginsStarted parser hook
# 2011-06-05 - 1.4.0 - Courgette
# * change data format for EVT_CLIENT_BAN_TEMP and EVT_CLIENT_BAN events
# 2012-10-60 - 1.5 - Courgette
# reflect changes in abstract parser 1.6
#
__author__ = 'Bakes, Courgette'
__version__ = '1.5'
import time
import b3.events
from b3.parsers.frostbite.abstractParser import AbstractParser
from b3.parsers.frostbite.util import PlayerInfoBlock
import b3.functions
SAY_LINE_MAX_LENGTH = 100
class MohParser(AbstractParser):
gameName = 'moh'
_gameServerVars = (
'serverName', # vars.serverName [name] Set the server name
'gamePassword', # vars.gamePassword [password] Set the game password for the server
'punkBuster', # vars.punkBuster [enabled] Set if the server will use PunkBuster or not
'hardCore', # vars.hardCore[enabled] Set hardcore mode
'ranked', # vars.ranked [enabled] Set ranked or not
'skillLimit', # vars.skillLimit [lower, upper] Set the skill limits allowed on to the server
'noUnlocks', # vars.noUnlocks [enabled] Set if unlocks should be disabled
'noAmmoPickups', # vars.noAmmoPickups [enabled] Set if pickups should be disabled
'realisticHealth', # vars.realisticHealth [enabled] Set if health should be realistic
'supportAction', # vars.supportAction [enabled] Set if support action should be enabled
'preRoundLimit', # vars.preRoundLimit [upper, lower] Set pre round limits. Setting both to zero means the game uses whatever settings are used on the specific levels. On ranked servers, the lowest values allowed are lower = 2 and upper = 4.
'roundStartTimerPlayersLimit', # vars.roundStartTimerPlayersLimit [limit] Get/Set the number of players that need to spawn on each team for the round start timer to start counting down.
'roundStartTimerDelay', # vars.roundStartTimerDelay [delay] If set to other than -1, this value overrides the round start delay set on the individual levels.
'tdmScoreCounterMaxScore', # vars.tdmScoreCounterMaxScore [score] If set to other than -1, this value overrides the score needed to win a round of Team Assault, Sector Control or Hot Zone.
'clanTeams', # vars.clanTeams [enabled] Set if clan teams should be used
'friendlyFire', # vars.friendlyFire [enabled] Set if the server should allow team damage
'currentPlayerLimit', # vars.currentPlayerLimit Retrieve the current maximum number of players
'maxPlayerLimit', # vars.maxPlayerLimit Retrieve the server-enforced maximum number of players
'playerLimit', # vars.playerLimit [nr of players] Set desired maximum number of players
'bannerUrl', # vars.bannerUrl [url] Set banner url
'serverDescription', # vars.serverDescription [description] Set server description
'noCrosshairs', # vars.noCrosshairs [enabled] Set if crosshairs for all weapons is hidden
'noSpotting', # vars.noSpotting [enabled] Set if spotted targets are disabled in the 3d-world
'teamKillCountForKick', # vars.teamKillCountForKick [count] Set number of teamkills allowed during a round
'teamKillValueForKick', # vars.teamKillValueForKick [count] Set max kill-value allowed for a player before he/she is kicked
'teamKillValueIncrease', # vars.teamKillValueIncrease [count] Set kill-value increase for a teamkill
'teamKillValueDecreasePerSecond', # vars.teamKillValueDecreasePerSecond [count] Set kill-value decrease per second
'idleTimeout', # vars.idleTimeout [time] Set idle timeout vars.profanityFilter [enabled] Set if profanity filter is enabled
)
    def startup(self):
        """Parser startup: delegate to the frostbite AbstractParser, register
        the synthetic 'Server' client and apply the optional
        'max_say_line_length' setting from the [moh] config section."""
        AbstractParser.startup(self)
        # create the 'Server' client
        self.clients.newClient('Server', guid='Server', name='Server', hide=True, pbid='Server', team=b3.TEAM_UNKNOWN)
        if self.config.has_option('moh', 'max_say_line_length'):
            try:
                maxlength = self.config.getint('moh', 'max_say_line_length')
                if maxlength > SAY_LINE_MAX_LENGTH:
                    # hard upper bound imposed by the MoH chat display
                    self.warning('max_say_line_length cannot be greater than %s' % SAY_LINE_MAX_LENGTH)
                    maxlength = SAY_LINE_MAX_LENGTH
                if maxlength < 20:
                    self.warning('max_say_line_length is way too short. using default')
                    maxlength = self._settings['line_length']
                self._settings['line_length'] = maxlength
                self._settings['min_wrap_length'] = maxlength
            except Exception, err:
                self.error('failed to read max_say_line_length setting "%s" : %s' % (self.config.get('moh', 'max_say_line_length'), err))
        self.debug('line_length: %s' % self._settings['line_length'])
        self.verbose('GameType: %s, Map: %s' %(self.game.gameType, self.game.mapName))
    def pluginsStarted(self):
        """Called once all plugins are started: register the players that are
        already connected so B3 knows about them."""
        self.info('connecting all players...')
        plist = self.getPlayerList()
        for cid, p in plist.iteritems():
            client = self.clients.getByCID(cid)
            if not client:
                #self.clients.newClient(playerdata['cid'], guid=playerdata['guid'], name=playerdata['name'], team=playerdata['team'], squad=playerdata['squad'])
                name = p['name']
                if 'clanTag' in p and len(p['clanTag']) > 0:
                    # show the clan tag as part of the displayed name
                    name = "[" + p['clanTag'] + "] " + p['name']
                self.debug('client %s found on the server' % cid)
                # NOTE(review): team= is given the raw MoH teamId here, while
                # getClient() converts it with self.getTeam() first - confirm.
                client = self.clients.newClient(cid, guid=p['guid'], name=name, team=p['teamId'], data=p)
                self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_JOIN, p, client))
    def checkVersion(self):
        """Make sure we are talking to a Medal of Honor server; raise otherwise."""
        # the 'version' command answers with the game identifier first
        version = self.output.write('version')
        self.info('server version : %s' % version)
        if version[0] != 'MOH':
            raise Exception("the moh parser can only work with Medal of Honor")
    def getClient(self, cid, _guid=None):
        """Get a connected client from storage or create it
        B3 CID <--> MoH character name
        B3 GUID <--> MoH EA_guid
        Returns None when the server does not know the player, or when no
        guid is available yet; otherwise returns the (possibly new) client.
        """
        # try to get the client from the storage of already authed clients
        client = self.clients.getByCID(cid)
        if not client:
            if cid == 'Server':
                # synthetic client representing the game server itself
                return self.clients.newClient('Server', guid='Server', name='Server', hide=True, pbid='Server', team=b3.TEAM_UNKNOWN)
            # must be the first time we see this client
            # ask the game server for fresh data about that single player
            words = self.write(('admin.listPlayers', 'player', cid))
            pib = PlayerInfoBlock(words)
            if len(pib) == 0:
                self.debug('no such client found')
                return None
            p = pib[0]
            cid = p['name']
            name = p['name']
            # Let's see if we have a guid, either from the PlayerInfoBlock, or passed to us by OnPlayerAuthenticated()
            if p['guid']:
                guid = p['guid']
            elif _guid:
                guid = _guid
            else:
                # If we still don't have a guid, we cannot create a newclient without the guid!
                self.debug('No guid for %s, waiting for next event.' %name)
                return None
            if 'clanTag' in p and len(p['clanTag']) > 0:
                # show the clan tag as part of the displayed name
                name = "[" + p['clanTag'] + "] " + p['name']
            client = self.clients.newClient(cid, guid=guid, name=name, team=self.getTeam(p['teamId']), teamId=int(p['teamId']), data=p)
            self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_JOIN, p, client))
        return client
def getHardName(self, mapname):
""" Change real name to level name """
mapname = mapname.lower()
if mapname.startswith('mazar-i-sharif airfield'):
return 'levels/mp_01'
elif mapname.startswith('bagram hanger'):
return 'levels/mp_01_elimination'
elif mapname.startswith('shah-i-knot mountains'):
return 'levels/mp_02'
elif mapname.startswith('hindu kush pass'):
return 'levels/mp_02_koth'
elif mapname.startswith('khyber caves'):
return 'levels/mp_03'
#return 'levels/mp_03_elimination'
elif mapname.startswith('helmand valley'):
return 'levels/mp_04'
elif mapname.startswith('helmand river hill'):
return 'levels/mp_04_koth'
elif mapname.startswith('kandahar marketplace'):
return 'levels/mp_05'
elif mapname.startswith('diwagal camp'):
return 'levels/mp_06'
#return 'mp_06_elimination'
elif mapname.startswith('korengal outpost'):
return 'levels/mp_07_koth'
elif mapname.startswith('kunar base'):
return 'levels/mp_08'
elif mapname.startswith('kabul city ruins'):
return 'levels/mp_09'
#return 'levels/mp_09_elimination'
elif mapname.startswith('garmzir town'):
return 'levels/mp_10'
else:
self.warning('unknown level name \'%s\'. Please make sure you have entered a valid mapname' % mapname)
return mapname
def getEasyName(self, mapname):
""" Change levelname to real name """
if mapname.startswith('levels/mp_01_elimination'):
return 'Bagram Hanger'
elif mapname.startswith('levels/mp_01'):
return 'Mazar-i-Sharif Airfield'
elif mapname.startswith('levels/mp_02_koth'):
return 'Hindu Kush Pass'
elif mapname.startswith('levels/mp_02'):
return 'Shah-i-Knot Mountains'
elif mapname.startswith('levels/mp_03'):
return 'Khyber Caves'
elif mapname.startswith('levels/mp_04_koth'):
return 'Helmand River Hill'
elif mapname.startswith('levels/mp_04'):
return 'Helmand Valley'
elif mapname.startswith('levels/mp_05'):
return 'Kandahar Marketplace'
elif mapname.startswith('levels/mp_06'):
return 'Diwagal Camp'
elif mapname.startswith('levels/mp_07_koth'):
return 'Korengal Outpost'
elif mapname.startswith('levels/mp_08'):
return 'Kunar Base'
elif mapname.startswith('levels/mp_09'):
return 'Kabul City Ruins'
elif mapname.startswith('levels/mp_10'):
return 'Garmzir Town'
else:
self.warning('unknown level name \'%s\'. Please report this on B3 forums' % mapname)
return mapname
def getServerVars(self):
"""Update the game property from server fresh data"""
try: self.game.serverName = self.getCvar('serverName').getBoolean()
except: pass
try: self.game.gamePassword = self.getCvar('gamePassword').getBoolean()
except: pass
try: self.game.punkBuster = self.getCvar('punkBuster').getBoolean()
except: pass
try: self.game.hardCore = self.getCvar('hardCore').getBoolean()
except: pass
try: self.game.ranked = self.getCvar('ranked').getBoolean()
except: pass
try: self.game.skillLimit = self.getCvar('skillLimit').getBoolean()
except: pass
try: self.game.noUnlocks = self.getCvar('noUnlocks').getBoolean()
except: pass
try: self.game.noAmmoPickups = self.getCvar('noAmmoPickups').getBoolean()
except: pass
try: self.game.realisticHealth = self.getCvar('realisticHealth').getBoolean()
except: pass
try: self.game.supportAction = self.getCvar('supportAction').getBoolean()
except: pass
try: self.game.preRoundLimit = self.getCvar('preRoundLimit').getBoolean()
except: pass
try: self.game.roundStartTimerPlayersLimit = self.getCvar('roundStartTimerPlayersLimit').getBoolean()
except: pass
try: self.game.roundStartTimerDelay = self.getCvar('roundStartTimerDelay').getBoolean()
except: pass
try: self.game.tdmScoreCounterMaxScore = self.getCvar('tdmScoreCounterMaxScore').getBoolean()
except: pass
try: self.game.clanTeams = self.getCvar('clanTeams').getBoolean()
except: pass
try: self.game.friendlyFire = self.getCvar('friendlyFire').getBoolean()
except: pass
try: self.game.currentPlayerLimit = self.getCvar('currentPlayerLimit').getBoolean()
except: pass
try: self.game.maxPlayerLimit = self.getCvar('maxPlayerLimit').getBoolean()
except: pass
try: self.game.playerLimit = self.getCvar('playerLimit').getBoolean()
except: pass
try: self.game.bannerUrl = self.getCvar('bannerUrl').getBoolean()
except: pass
try: self.game.serverDescription = self.getCvar('serverDescription').getBoolean()
except: pass
try: self.game.noCrosshair = self.getCvar('noCrosshair').getBoolean()
except: pass
try: self.game.noSpotting = self.getCvar('noSpotting').getBoolean()
except: pass
try: self.game.teamKillCountForKick = self.getCvar('teamKillCountForKick').getBoolean()
except: pass
try: self.game.teamKillValueForKick = self.getCvar('teamKillValueForKick').getBoolean()
except: pass
try: self.game.teamKillValueIncrease = self.getCvar('teamKillValueIncrease').getBoolean()
except: pass
try: self.game.teamKillValueDecreasePerSecond = self.getCvar('teamKillValueDecreasePerSecond').getBoolean()
except: pass
try: self.game.idleTimeout = self.getCvar('idleTimeout').getBoolean()
except: pass
def getTeam(self, team):
"""convert MOH team numbers to B3 team numbers"""
team = int(team)
if team == 1:
return b3.TEAM_RED
elif team == 2:
return b3.TEAM_BLUE
elif team == 3:
return b3.TEAM_SPEC
else:
return b3.TEAM_UNKNOWN
def OnPlayerSpawn(self, action, data):
"""
Request: player.onSpawn <soldier name: string> <kit: string> <weapon: string> <specializations: 3 x string>
"""
if len(data) < 2:
return None
spawner = self.getClient(data[0])
kit = data[1]
weapon = data[2]
spec1 = data[3]
spec2 = data[4]
spec3 = data[5]
event = b3.events.EVT_CLIENT_SPAWN
return b3.events.Event(event, (kit, weapon, spec1, spec2, spec3), spawner)
def OnPlayerTeamchange(self, action, data):
"""
player.onTeamChange <soldier name: player name> <team: Team ID>
Effect: Player might have changed team
"""
#['player.onTeamChange', 'Dalich', '2']
client = self.getClient(data[0])
if client:
client.team = self.getTeam(data[1]) # .team setter will send team change event
client.teamId = int(data[1])
    def tempban(self, client, reason='', duration=2, admin=None, silent=False, *kwargs):
        """Temporarily ban a client from the game server.
        `duration` is normalized to minutes via b3.functions.time2minutes().
        NOTE(review): `*kwargs` was most likely meant to be `**kwargs`;
        left untouched so the signature stays identical for callers.
        """
        duration = b3.functions.time2minutes(duration)
        if isinstance(client, str):
            # we only got a raw cid instead of a Client object: just kick
            self.write(self.getCommand('kick', cid=client, reason=reason[:80]))
            return
        elif admin:
            fullreason = self.getMessage('temp_banned_by', self.getMessageVariables(client=client, reason=reason, admin=admin, banduration=b3.functions.minutesStr(duration)))
        else:
            fullreason = self.getMessage('temp_banned', self.getMessageVariables(client=client, reason=reason, banduration=b3.functions.minutesStr(duration)))
        fullreason = self.stripColors(fullreason)
        reason = self.stripColors(reason)
        if self.PunkBuster:
            # punkbuster acts odd if you ban for more than a day
            # tempban for a day here and let b3 re-ban if the player
            # comes back
            if duration > 1440:
                duration = 1440
            self.PunkBuster.kick(client, duration, reason)
        self.write(('banList.list',))
        self.write(self.getCommand('tempban', guid=client.guid, duration=duration*60, reason=reason[:80]))
        self.write(('banList.list',))
        ## also kick as the MoH server seems not to enforce all bans correctly
        self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
        if not silent and fullreason != '':
            self.say(fullreason)
        self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_BAN_TEMP, {'reason': reason,
                                                                        'duration': duration,
                                                                        'admin': admin}
                                        , client))
    def ban(self, client, reason='', admin=None, silent=False, *kwargs):
        """Permanent ban
        NOTE(review): `*kwargs` was most likely meant to be `**kwargs`;
        left untouched so the signature stays identical for callers.
        """
        self.debug('BAN : client: %s, reason: %s', client, reason)
        if isinstance(client, b3.clients.Client):
            self.write(self.getCommand('ban', guid=client.guid, reason=reason[:80]))
            try:
                # the player may already be gone, hence the try/except
                self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
            except:
                pass
            return
        # NOTE(review): everything below is unreachable for b3.clients.Client
        # instances because of the early return above, yet it reads Client
        # attributes (exactName, cid, ip). Possibly dead/legacy code - confirm.
        if admin:
            reason = self.getMessage('banned_by', client.exactName, admin.exactName, reason)
        else:
            reason = self.getMessage('banned', client.exactName, reason)
        reason = self.stripColors(reason)
        if client.cid is None:
            # ban by ip, this happens when we !permban @xx a player that is not connected
            self.debug('EFFECTIVE BAN : %s',self.getCommand('banByIp', ip=client.ip, reason=reason[:80]))
            self.write(self.getCommand('banByIp', ip=client.ip, reason=reason[:80]))
            if admin:
                admin.message('banned: %s (@%s). His last ip (%s) has been added to banlist'%(client.exactName, client.id, client.ip))
        else:
            # ban by cid
            # NOTE(review): the debug line formats the 'ban' command with guid=
            # while the actual write below uses cid= - confirm which is intended.
            self.debug('EFFECTIVE BAN : %s',self.getCommand('ban', guid=client.guid, reason=reason[:80]))
            self.write(('banList.list',))
            self.write(self.getCommand('ban', cid=client.cid, reason=reason[:80]))
            self.write(('banList.list',))
            self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
            if admin:
                admin.message('banned: %s (@%s) has been added to banlist'%(client.exactName, client.id))
        if self.PunkBuster:
            self.PunkBuster.banGUID(client, reason)
        if not silent:
            self.say(reason)
        self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_BAN, {'reason': reason, 'admin': admin}, client))
def rotateMap(self):
"""Load the next map (not level). If the current game mod plays each level twice
to get teams the chance to play both sides, then this rotate a second
time to really switch to the next map"""
nextIndex = self.getNextMapIndex()
if nextIndex == -1:
# No map in map rotation list, just call admin.runNextLevel
self.write(('admin.runNextRound',))
else:
self.write(('mapList.nextLevelIndex', nextIndex))
self.write(('admin.runNextRound',))
    def changeMap(self, map):
        """Change to the given map
        1) determine the level name
        If map is of the form 'mp_001' and 'Kaboul' is a supported
        level for the current game mod, then this level is loaded.
        In other cases, this method assumes it is given a 'easy map name' (like
        'Port Valdez') and it will do its best to find the level name that seems
        to be for 'Port Valdez' within the supported levels.
        If no match is found, then instead of loading the map, this method
        returns a list of candidate map names
        2) if we got a level name
        if the level is not in the current rotation list, then add it to
        the map list and load it
        """
        supportedMaps = self.getSupportedMaps()
        # accept short level names like 'mp_01' by prefixing 'levels/'
        if 'levels/%s'%map in supportedMaps:
            map = 'levels/%s'%map
        if map not in supportedMaps:
            # fuzzy match against the human friendly names
            match = self.getMapsSoundingLike(map)
            if len(match) == 1:
                map = match[0]
            else:
                # ambiguous: return the candidates so the caller can report them
                return match
        if map in supportedMaps:
            levelnames = self.write(('mapList.list',))
            if map not in levelnames:
                # add the map to the map list
                nextIndex = self.getNextMapIndex()
                if nextIndex == -1:
                    # empty rotation list: append and load from index 0
                    self.write(('mapList.append', map))
                    nextIndex = 0
                else:
                    if nextIndex == 0:
                        # case where the map list contains only 1 map
                        nextIndex = 1
                    self.write(('mapList.insert', nextIndex, map))
            else:
                # map already in the rotation list: locate its index
                nextIndex = 0
                while nextIndex < len(levelnames) and levelnames[nextIndex] != map:
                    nextIndex += 1
            self.say('Changing map to %s' % map)
            # give players a moment to read the announcement
            time.sleep(1)
            self.write(('mapList.nextLevelIndex', nextIndex))
            self.write(('admin.runNextRound', ))
| nextIndex += 1 | conditional_block |
moh.py | #
# Medal of Honor Parser for BigBrotherBot(B3) (www.bigbrotherbot.net)
# Copyright (C) 2010 James 'Bakes' Baker ([email protected])
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
# CHANGELOG
# 2010/11/07 - 0.10 - Courgette
# * add new maps info
# 2010/11/08 - 0.9.2 - GrosBedo
# * messages can now be empty (no message broadcasted on kick/tempban/ban/unban)
# 2010/10/27 - 0.9.1 - GrosBedo
# * messages now support named $variables instead of %s
# 2010/10/27 - 0.9 - Courgette
# * when banning, also kick to take over MoH engine failure to enforce bans. This
# will need more test to determine how to make the MoH engine enforce temp bans.
# 2010/10/24 - 0.8 - Courgette
# * fix OnServerRoundover and OnServerRoundoverplayers
# 2010/10/24 - 0.7 - Courgette
# * add missing getTeam() method
# 2010/10/24 - 0.6 - Courgette
# * minor fixes
# 2010/10/23 - 0.5 - Courgette
# * create specific events : EVT_GAME_ROUND_PLAYER_SCORES and EVT_GAME_ROUND_TEAM_SCORES
# * now fires native B3 event EVT_GAME_ROUND_END
# * manage team changed event correctly
# 2010/10/23 - 0.4 - Courgette
# * refactor inheriting from frostbite AbstractParser
# * change available server var list
# 2010/10/10 - 0.3 - Bakes
# * getEasyName is now implemented and working, getHardName is implemented
# but not working.
# 2010/10/07 - 0.2 - Courgette
# * add gameName property. Fix SAY_LINE_MAX_LENGTH
# 2010/09/25 - 0.1 - Bakes
# * Initial version of MoH parser - hasn't been tested with OnKill events yet
# but basic commands seem to work.
# 2010-11-21 - 1.0 - Courgette
# * add rotateMap and changeMap to fix !maprotate and !map#
# 2011-02-01 - 1.1 - xlr8or
# * adapted to server R9 version 615937 - fixed onPlayerSpawn and vars.noCrosshairs errors
# 2011-03-05 - 1.2 - xlr8or
# * admin.kickPlayer after ban now in try/except to avoid error msg when player is already gone
# 2011-04-09 - 1.2.1 - Courgette
# * import missing time module
# 2011-05-22 - 1.2.2 - Courgette
# * move events EVT_GAME_ROUND_PLAYER_SCORES and EVT_GAME_ROUND_TEAM_SCORES to abstract Frostbite parser
# as they also exist in BFBC2
# 2011-06-04 - 1.3.0 - Courgette
# makes use of the new pluginsStarted parser hook
# 2011-06-05 - 1.4.0 - Courgette
# * change data format for EVT_CLIENT_BAN_TEMP and EVT_CLIENT_BAN events
# 2012-10-60 - 1.5 - Courgette
# reflect changes in abstract parser 1.6
#
__author__ = 'Bakes, Courgette'
__version__ = '1.5'
import time
import b3.events
from b3.parsers.frostbite.abstractParser import AbstractParser
from b3.parsers.frostbite.util import PlayerInfoBlock
import b3.functions
SAY_LINE_MAX_LENGTH = 100
class MohParser(AbstractParser):
    """B3 parser for the Medal of Honor (2010) Frostbite game server."""

    # game identifier used throughout B3
    gameName = 'moh'

    # server variables (vars.*) supported by the MoH game server
    _gameServerVars = (
        'serverName',                      # vars.serverName [name] server name
        'gamePassword',                    # vars.gamePassword [password] game password
        'punkBuster',                      # vars.punkBuster [enabled] use PunkBuster or not
        'hardCore',                        # vars.hardCore [enabled] hardcore mode
        'ranked',                          # vars.ranked [enabled] ranked or not
        'skillLimit',                      # vars.skillLimit [lower, upper] allowed skill limits
        'noUnlocks',                       # vars.noUnlocks [enabled] disable unlocks
        'noAmmoPickups',                   # vars.noAmmoPickups [enabled] disable ammo pickups
        'realisticHealth',                 # vars.realisticHealth [enabled] realistic health
        'supportAction',                   # vars.supportAction [enabled] enable support action
        'preRoundLimit',                   # vars.preRoundLimit [upper, lower] pre-round limits (0/0 = per-level defaults; on ranked servers lowest allowed are lower=2, upper=4)
        'roundStartTimerPlayersLimit',     # vars.roundStartTimerPlayersLimit [limit] players that must spawn per team before the round start timer counts down
        'roundStartTimerDelay',            # vars.roundStartTimerDelay [delay] overrides the per-level round start delay unless -1
        'tdmScoreCounterMaxScore',         # vars.tdmScoreCounterMaxScore [score] overrides the score needed to win a round unless -1
        'clanTeams',                       # vars.clanTeams [enabled] use clan teams
        'friendlyFire',                    # vars.friendlyFire [enabled] allow team damage
        'currentPlayerLimit',              # vars.currentPlayerLimit current maximum number of players
        'maxPlayerLimit',                  # vars.maxPlayerLimit server-enforced maximum number of players
        'playerLimit',                     # vars.playerLimit [nr of players] desired maximum number of players
        'bannerUrl',                       # vars.bannerUrl [url] banner url
        'serverDescription',               # vars.serverDescription [description] server description
        'noCrosshairs',                    # vars.noCrosshairs [enabled] hide crosshairs for all weapons
        'noSpotting',                      # vars.noSpotting [enabled] disable spotted targets in the 3d-world
        'teamKillCountForKick',            # vars.teamKillCountForKick [count] teamkills allowed during a round
        'teamKillValueForKick',            # vars.teamKillValueForKick [count] max kill-value before a player is kicked
        'teamKillValueIncrease',           # vars.teamKillValueIncrease [count] kill-value increase per teamkill
        'teamKillValueDecreasePerSecond',  # vars.teamKillValueDecreasePerSecond [count] kill-value decrease per second
        'idleTimeout',                     # vars.idleTimeout [time] idle timeout
    )
def startup(self):
AbstractParser.startup(self)
# create the 'Server' client
self.clients.newClient('Server', guid='Server', name='Server', hide=True, pbid='Server', team=b3.TEAM_UNKNOWN)
if self.config.has_option('moh', 'max_say_line_length'):
try:
maxlength = self.config.getint('moh', 'max_say_line_length')
if maxlength > SAY_LINE_MAX_LENGTH:
self.warning('max_say_line_length cannot be greater than %s' % SAY_LINE_MAX_LENGTH)
maxlength = SAY_LINE_MAX_LENGTH
if maxlength < 20:
self.warning('max_say_line_length is way too short. using default')
maxlength = self._settings['line_length']
self._settings['line_length'] = maxlength
self._settings['min_wrap_length'] = maxlength
except Exception, err:
self.error('failed to read max_say_line_length setting "%s" : %s' % (self.config.get('moh', 'max_say_line_length'), err))
self.debug('line_length: %s' % self._settings['line_length'])
self.verbose('GameType: %s, Map: %s' %(self.game.gameType, self.game.mapName))
def pluginsStarted(self):
self.info('connecting all players...')
plist = self.getPlayerList()
for cid, p in plist.iteritems():
client = self.clients.getByCID(cid)
if not client:
#self.clients.newClient(playerdata['cid'], guid=playerdata['guid'], name=playerdata['name'], team=playerdata['team'], squad=playerdata['squad'])
name = p['name']
if 'clanTag' in p and len(p['clanTag']) > 0:
name = "[" + p['clanTag'] + "] " + p['name']
self.debug('client %s found on the server' % cid)
client = self.clients.newClient(cid, guid=p['guid'], name=name, team=p['teamId'], data=p)
self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_JOIN, p, client))
def checkVersion(self):
version = self.output.write('version')
self.info('server version : %s' % version)
if version[0] != 'MOH':
raise Exception("the moh parser can only work with Medal of Honor")
def getClient(self, cid, _guid=None):
"""Get a connected client from storage or create it
B3 CID <--> MoH character name
B3 GUID <--> MoH EA_guid
"""
# try to get the client from the storage of already authed clients
client = self.clients.getByCID(cid)
if not client:
if cid == 'Server':
return self.clients.newClient('Server', guid='Server', name='Server', hide=True, pbid='Server', team=b3.TEAM_UNKNOWN)
# must be the first time we see this client
words = self.write(('admin.listPlayers', 'player', cid))
pib = PlayerInfoBlock(words)
if len(pib) == 0:
self.debug('no such client found')
return None
p = pib[0]
cid = p['name']
name = p['name']
# Let's see if we have a guid, either from the PlayerInfoBlock, or passed to us by OnPlayerAuthenticated()
if p['guid']:
guid = p['guid']
elif _guid:
guid = _guid
else:
# If we still don't have a guid, we cannot create a newclient without the guid!
self.debug('No guid for %s, waiting for next event.' %name)
return None
if 'clanTag' in p and len(p['clanTag']) > 0:
name = "[" + p['clanTag'] + "] " + p['name']
client = self.clients.newClient(cid, guid=guid, name=name, team=self.getTeam(p['teamId']), teamId=int(p['teamId']), data=p)
self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_JOIN, p, client))
return client
def getHardName(self, mapname):
""" Change real name to level name """
mapname = mapname.lower()
if mapname.startswith('mazar-i-sharif airfield'):
return 'levels/mp_01'
elif mapname.startswith('bagram hanger'):
return 'levels/mp_01_elimination'
elif mapname.startswith('shah-i-knot mountains'):
return 'levels/mp_02'
elif mapname.startswith('hindu kush pass'):
return 'levels/mp_02_koth'
elif mapname.startswith('khyber caves'):
return 'levels/mp_03'
#return 'levels/mp_03_elimination'
elif mapname.startswith('helmand valley'):
return 'levels/mp_04'
elif mapname.startswith('helmand river hill'):
return 'levels/mp_04_koth'
elif mapname.startswith('kandahar marketplace'):
return 'levels/mp_05'
elif mapname.startswith('diwagal camp'):
return 'levels/mp_06'
#return 'mp_06_elimination'
elif mapname.startswith('korengal outpost'):
return 'levels/mp_07_koth'
elif mapname.startswith('kunar base'):
return 'levels/mp_08'
elif mapname.startswith('kabul city ruins'):
return 'levels/mp_09'
#return 'levels/mp_09_elimination'
elif mapname.startswith('garmzir town'):
return 'levels/mp_10'
else:
self.warning('unknown level name \'%s\'. Please make sure you have entered a valid mapname' % mapname)
return mapname
def getEasyName(self, mapname):
""" Change levelname to real name """
if mapname.startswith('levels/mp_01_elimination'):
return 'Bagram Hanger'
elif mapname.startswith('levels/mp_01'):
return 'Mazar-i-Sharif Airfield'
elif mapname.startswith('levels/mp_02_koth'):
return 'Hindu Kush Pass'
elif mapname.startswith('levels/mp_02'):
return 'Shah-i-Knot Mountains'
elif mapname.startswith('levels/mp_03'):
return 'Khyber Caves'
elif mapname.startswith('levels/mp_04_koth'):
return 'Helmand River Hill'
elif mapname.startswith('levels/mp_04'):
return 'Helmand Valley'
elif mapname.startswith('levels/mp_05'):
return 'Kandahar Marketplace'
elif mapname.startswith('levels/mp_06'):
return 'Diwagal Camp'
elif mapname.startswith('levels/mp_07_koth'):
return 'Korengal Outpost'
elif mapname.startswith('levels/mp_08'):
return 'Kunar Base'
elif mapname.startswith('levels/mp_09'):
return 'Kabul City Ruins'
elif mapname.startswith('levels/mp_10'):
return 'Garmzir Town'
else:
self.warning('unknown level name \'%s\'. Please report this on B3 forums' % mapname)
return mapname
def getServerVars(self):
"""Update the game property from server fresh data"""
try: self.game.serverName = self.getCvar('serverName').getBoolean()
except: pass
try: self.game.gamePassword = self.getCvar('gamePassword').getBoolean()
except: pass
try: self.game.punkBuster = self.getCvar('punkBuster').getBoolean()
except: pass
try: self.game.hardCore = self.getCvar('hardCore').getBoolean()
except: pass
try: self.game.ranked = self.getCvar('ranked').getBoolean()
except: pass
try: self.game.skillLimit = self.getCvar('skillLimit').getBoolean()
except: pass
try: self.game.noUnlocks = self.getCvar('noUnlocks').getBoolean()
except: pass
try: self.game.noAmmoPickups = self.getCvar('noAmmoPickups').getBoolean()
except: pass
try: self.game.realisticHealth = self.getCvar('realisticHealth').getBoolean()
except: pass
try: self.game.supportAction = self.getCvar('supportAction').getBoolean()
except: pass
try: self.game.preRoundLimit = self.getCvar('preRoundLimit').getBoolean()
except: pass
try: self.game.roundStartTimerPlayersLimit = self.getCvar('roundStartTimerPlayersLimit').getBoolean()
except: pass
try: self.game.roundStartTimerDelay = self.getCvar('roundStartTimerDelay').getBoolean()
except: pass
try: self.game.tdmScoreCounterMaxScore = self.getCvar('tdmScoreCounterMaxScore').getBoolean()
except: pass
try: self.game.clanTeams = self.getCvar('clanTeams').getBoolean()
except: pass
try: self.game.friendlyFire = self.getCvar('friendlyFire').getBoolean()
except: pass
try: self.game.currentPlayerLimit = self.getCvar('currentPlayerLimit').getBoolean()
except: pass
try: self.game.maxPlayerLimit = self.getCvar('maxPlayerLimit').getBoolean()
except: pass
try: self.game.playerLimit = self.getCvar('playerLimit').getBoolean()
except: pass
try: self.game.bannerUrl = self.getCvar('bannerUrl').getBoolean()
except: pass
try: self.game.serverDescription = self.getCvar('serverDescription').getBoolean()
except: pass
try: self.game.noCrosshair = self.getCvar('noCrosshair').getBoolean()
except: pass
try: self.game.noSpotting = self.getCvar('noSpotting').getBoolean()
except: pass
try: self.game.teamKillCountForKick = self.getCvar('teamKillCountForKick').getBoolean()
except: pass
try: self.game.teamKillValueForKick = self.getCvar('teamKillValueForKick').getBoolean()
except: pass
try: self.game.teamKillValueIncrease = self.getCvar('teamKillValueIncrease').getBoolean()
except: pass
try: self.game.teamKillValueDecreasePerSecond = self.getCvar('teamKillValueDecreasePerSecond').getBoolean()
except: pass
try: self.game.idleTimeout = self.getCvar('idleTimeout').getBoolean()
except: pass
def | (self, team):
"""convert MOH team numbers to B3 team numbers"""
team = int(team)
if team == 1:
return b3.TEAM_RED
elif team == 2:
return b3.TEAM_BLUE
elif team == 3:
return b3.TEAM_SPEC
else:
return b3.TEAM_UNKNOWN
def OnPlayerSpawn(self, action, data):
"""
Request: player.onSpawn <soldier name: string> <kit: string> <weapon: string> <specializations: 3 x string>
"""
if len(data) < 2:
return None
spawner = self.getClient(data[0])
kit = data[1]
weapon = data[2]
spec1 = data[3]
spec2 = data[4]
spec3 = data[5]
event = b3.events.EVT_CLIENT_SPAWN
return b3.events.Event(event, (kit, weapon, spec1, spec2, spec3), spawner)
def OnPlayerTeamchange(self, action, data):
"""
player.onTeamChange <soldier name: player name> <team: Team ID>
Effect: Player might have changed team
"""
#['player.onTeamChange', 'Dalich', '2']
client = self.getClient(data[0])
if client:
client.team = self.getTeam(data[1]) # .team setter will send team change event
client.teamId = int(data[1])
def tempban(self, client, reason='', duration=2, admin=None, silent=False, *kwargs):
duration = b3.functions.time2minutes(duration)
if isinstance(client, str):
self.write(self.getCommand('kick', cid=client, reason=reason[:80]))
return
elif admin:
fullreason = self.getMessage('temp_banned_by', self.getMessageVariables(client=client, reason=reason, admin=admin, banduration=b3.functions.minutesStr(duration)))
else:
fullreason = self.getMessage('temp_banned', self.getMessageVariables(client=client, reason=reason, banduration=b3.functions.minutesStr(duration)))
fullreason = self.stripColors(fullreason)
reason = self.stripColors(reason)
if self.PunkBuster:
# punkbuster acts odd if you ban for more than a day
# tempban for a day here and let b3 re-ban if the player
# comes back
if duration > 1440:
duration = 1440
self.PunkBuster.kick(client, duration, reason)
self.write(('banList.list',))
self.write(self.getCommand('tempban', guid=client.guid, duration=duration*60, reason=reason[:80]))
self.write(('banList.list',))
## also kick as the MoH server seems not to enforce all bans correctly
self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
if not silent and fullreason != '':
self.say(fullreason)
self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_BAN_TEMP, {'reason': reason,
'duration': duration,
'admin': admin}
, client))
def ban(self, client, reason='', admin=None, silent=False, *kwargs):
"""Permanent ban"""
self.debug('BAN : client: %s, reason: %s', client, reason)
if isinstance(client, b3.clients.Client):
self.write(self.getCommand('ban', guid=client.guid, reason=reason[:80]))
try:
self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
except:
pass
return
if admin:
reason = self.getMessage('banned_by', client.exactName, admin.exactName, reason)
else:
reason = self.getMessage('banned', client.exactName, reason)
reason = self.stripColors(reason)
if client.cid is None:
# ban by ip, this happens when we !permban @xx a player that is not connected
self.debug('EFFECTIVE BAN : %s',self.getCommand('banByIp', ip=client.ip, reason=reason[:80]))
self.write(self.getCommand('banByIp', ip=client.ip, reason=reason[:80]))
if admin:
admin.message('banned: %s (@%s). His last ip (%s) has been added to banlist'%(client.exactName, client.id, client.ip))
else:
# ban by cid
self.debug('EFFECTIVE BAN : %s',self.getCommand('ban', guid=client.guid, reason=reason[:80]))
self.write(('banList.list',))
self.write(self.getCommand('ban', cid=client.cid, reason=reason[:80]))
self.write(('banList.list',))
self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
if admin:
admin.message('banned: %s (@%s) has been added to banlist'%(client.exactName, client.id))
if self.PunkBuster:
self.PunkBuster.banGUID(client, reason)
if not silent:
self.say(reason)
self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_BAN, {'reason': reason, 'admin': admin}, client))
def rotateMap(self):
"""Load the next map (not level). If the current game mod plays each level twice
to get teams the chance to play both sides, then this rotate a second
time to really switch to the next map"""
nextIndex = self.getNextMapIndex()
if nextIndex == -1:
# No map in map rotation list, just call admin.runNextLevel
self.write(('admin.runNextRound',))
else:
self.write(('mapList.nextLevelIndex', nextIndex))
self.write(('admin.runNextRound',))
def changeMap(self, map):
"""Change to the given map
1) determine the level name
If map is of the form 'mp_001' and 'Kaboul' is a supported
level for the current game mod, then this level is loaded.
In other cases, this method assumes it is given a 'easy map name' (like
'Port Valdez') and it will do its best to find the level name that seems
to be for 'Port Valdez' within the supported levels.
If no match is found, then instead of loading the map, this method
returns a list of candidate map names
2) if we got a level name
if the level is not in the current rotation list, then add it to
the map list and load it
"""
supportedMaps = self.getSupportedMaps()
if 'levels/%s'%map in supportedMaps:
map = 'levels/%s'%map
if map not in supportedMaps:
match = self.getMapsSoundingLike(map)
if len(match) == 1:
map = match[0]
else:
return match
if map in supportedMaps:
levelnames = self.write(('mapList.list',))
if map not in levelnames:
# add the map to the map list
nextIndex = self.getNextMapIndex()
if nextIndex == -1:
self.write(('mapList.append', map))
nextIndex = 0
else:
if nextIndex == 0:
# case where the map list contains only 1 map
nextIndex = 1
self.write(('mapList.insert', nextIndex, map))
else:
nextIndex = 0
while nextIndex < len(levelnames) and levelnames[nextIndex] != map:
nextIndex += 1
self.say('Changing map to %s' % map)
time.sleep(1)
self.write(('mapList.nextLevelIndex', nextIndex))
self.write(('admin.runNextRound', ))
| getTeam | identifier_name |
moh.py | #
# Medal of Honor Parser for BigBrotherBot(B3) (www.bigbrotherbot.net)
# Copyright (C) 2010 James 'Bakes' Baker ([email protected])
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
# CHANGELOG
# 2010/11/07 - 0.10 - Courgette
# * add new maps info
# 2010/11/08 - 0.9.2 - GrosBedo
# * messages can now be empty (no message broadcasted on kick/tempban/ban/unban)
# 2010/10/27 - 0.9.1 - GrosBedo
# * messages now support named $variables instead of %s
# 2010/10/27 - 0.9 - Courgette
# * when banning, also kick to take over MoH engine failure to enforce bans. This
# will need more test to determine how to make the MoH engine enforce temp bans.
# 2010/10/24 - 0.8 - Courgette
# * fix OnServerRoundover and OnServerRoundoverplayers
# 2010/10/24 - 0.7 - Courgette
# * add missing getTeam() method
# 2010/10/24 - 0.6 - Courgette
# * minor fixes
# 2010/10/23 - 0.5 - Courgette
# * create specific events : EVT_GAME_ROUND_PLAYER_SCORES and EVT_GAME_ROUND_TEAM_SCORES
# * now fires native B3 event EVT_GAME_ROUND_END
# * manage team changed event correctly
# 2010/10/23 - 0.4 - Courgette
# * refactor inheriting from frostbite AbstratParser
# * change available server var list
# 2010/10/10 - 0.3 - Bakes
# * getEasyName is now implemented and working, getHardName is implemented
# but not working.
# 2010/10/07 - 0.2 - Courgette
# * add gameName property. Fix SAY_LINE_MAX_LENGTH
# 2010/09/25 - 0.1 - Bakes
# * Initial version of MoH parser - hasn't been tested with OnKill events yet
# but basic commands seem to work.
# 2010-11-21 - 1.0 - Courgette
# * add rotateMap and changeMap to fix !maprotate and !map#
# 2011-02-01 - 1.1 - xlr8or
# * adapted to server R9 version 615937 - fixed onPlayerSpawn and vars.noCrosshairs errors
# 2011-03-05 - 1.2 - xlr8or
# * admin.kickPlayer after ban now in try/except to avoid error msg when player is already gone
# 2011-04-09 - 1.2.1 - Courgette
# * import missing time module
# 2011-05-22 - 1.2.2 - Courgette
# * move events EVT_GAME_ROUND_PLAYER_SCORES and EVT_GAME_ROUND_TEAM_SCORES to abstract Frostbite parser
# as they also exist in BFBC2
# 2011-06-04 - 1.3.0 - Courgette
# makes use of the new pluginsStarted parser hook
# 2011-06-05 - 1.4.0 - Courgette
# * change data format for EVT_CLIENT_BAN_TEMP and EVT_CLIENT_BAN events
# 2012-10-06 - 1.5 - Courgette
# reflect changes in abstract parser 1.6
#
__author__ = 'Bakes, Courgette'
__version__ = '1.5'
import time
import b3.events
from b3.parsers.frostbite.abstractParser import AbstractParser
from b3.parsers.frostbite.util import PlayerInfoBlock
import b3.functions
SAY_LINE_MAX_LENGTH = 100
class MohParser(AbstractParser):
    """B3 parser for the Medal of Honor (2010) Frostbite game server."""

    # game identifier used throughout B3
    gameName = 'moh'

    # server variables (vars.*) supported by the MoH game server
    _gameServerVars = (
        'serverName',                      # vars.serverName [name] server name
        'gamePassword',                    # vars.gamePassword [password] game password
        'punkBuster',                      # vars.punkBuster [enabled] use PunkBuster or not
        'hardCore',                        # vars.hardCore [enabled] hardcore mode
        'ranked',                          # vars.ranked [enabled] ranked or not
        'skillLimit',                      # vars.skillLimit [lower, upper] allowed skill limits
        'noUnlocks',                       # vars.noUnlocks [enabled] disable unlocks
        'noAmmoPickups',                   # vars.noAmmoPickups [enabled] disable ammo pickups
        'realisticHealth',                 # vars.realisticHealth [enabled] realistic health
        'supportAction',                   # vars.supportAction [enabled] enable support action
        'preRoundLimit',                   # vars.preRoundLimit [upper, lower] pre-round limits (0/0 = per-level defaults; on ranked servers lowest allowed are lower=2, upper=4)
        'roundStartTimerPlayersLimit',     # vars.roundStartTimerPlayersLimit [limit] players that must spawn per team before the round start timer counts down
        'roundStartTimerDelay',            # vars.roundStartTimerDelay [delay] overrides the per-level round start delay unless -1
        'tdmScoreCounterMaxScore',         # vars.tdmScoreCounterMaxScore [score] overrides the score needed to win a round unless -1
        'clanTeams',                       # vars.clanTeams [enabled] use clan teams
        'friendlyFire',                    # vars.friendlyFire [enabled] allow team damage
        'currentPlayerLimit',              # vars.currentPlayerLimit current maximum number of players
        'maxPlayerLimit',                  # vars.maxPlayerLimit server-enforced maximum number of players
        'playerLimit',                     # vars.playerLimit [nr of players] desired maximum number of players
        'bannerUrl',                       # vars.bannerUrl [url] banner url
        'serverDescription',               # vars.serverDescription [description] server description
        'noCrosshairs',                    # vars.noCrosshairs [enabled] hide crosshairs for all weapons
        'noSpotting',                      # vars.noSpotting [enabled] disable spotted targets in the 3d-world
        'teamKillCountForKick',            # vars.teamKillCountForKick [count] teamkills allowed during a round
        'teamKillValueForKick',            # vars.teamKillValueForKick [count] max kill-value before a player is kicked
        'teamKillValueIncrease',           # vars.teamKillValueIncrease [count] kill-value increase per teamkill
        'teamKillValueDecreasePerSecond',  # vars.teamKillValueDecreasePerSecond [count] kill-value decrease per second
        'idleTimeout',                     # vars.idleTimeout [time] idle timeout
    )
def startup(self):
AbstractParser.startup(self)
# create the 'Server' client
self.clients.newClient('Server', guid='Server', name='Server', hide=True, pbid='Server', team=b3.TEAM_UNKNOWN)
if self.config.has_option('moh', 'max_say_line_length'):
try:
maxlength = self.config.getint('moh', 'max_say_line_length')
if maxlength > SAY_LINE_MAX_LENGTH:
self.warning('max_say_line_length cannot be greater than %s' % SAY_LINE_MAX_LENGTH)
maxlength = SAY_LINE_MAX_LENGTH
if maxlength < 20:
self.warning('max_say_line_length is way too short. using default')
maxlength = self._settings['line_length']
self._settings['line_length'] = maxlength
self._settings['min_wrap_length'] = maxlength
except Exception, err:
self.error('failed to read max_say_line_length setting "%s" : %s' % (self.config.get('moh', 'max_say_line_length'), err))
self.debug('line_length: %s' % self._settings['line_length'])
self.verbose('GameType: %s, Map: %s' %(self.game.gameType, self.game.mapName))
def pluginsStarted(self):
self.info('connecting all players...')
plist = self.getPlayerList()
for cid, p in plist.iteritems():
client = self.clients.getByCID(cid)
if not client:
#self.clients.newClient(playerdata['cid'], guid=playerdata['guid'], name=playerdata['name'], team=playerdata['team'], squad=playerdata['squad'])
name = p['name']
if 'clanTag' in p and len(p['clanTag']) > 0:
name = "[" + p['clanTag'] + "] " + p['name']
self.debug('client %s found on the server' % cid)
client = self.clients.newClient(cid, guid=p['guid'], name=name, team=p['teamId'], data=p)
self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_JOIN, p, client))
def checkVersion(self):
version = self.output.write('version')
self.info('server version : %s' % version)
if version[0] != 'MOH':
raise Exception("the moh parser can only work with Medal of Honor")
def getClient(self, cid, _guid=None):
"""Get a connected client from storage or create it
B3 CID <--> MoH character name
B3 GUID <--> MoH EA_guid
"""
# try to get the client from the storage of already authed clients
client = self.clients.getByCID(cid)
if not client:
if cid == 'Server':
return self.clients.newClient('Server', guid='Server', name='Server', hide=True, pbid='Server', team=b3.TEAM_UNKNOWN)
# must be the first time we see this client
words = self.write(('admin.listPlayers', 'player', cid))
pib = PlayerInfoBlock(words)
if len(pib) == 0:
self.debug('no such client found')
return None
p = pib[0]
cid = p['name']
name = p['name']
# Let's see if we have a guid, either from the PlayerInfoBlock, or passed to us by OnPlayerAuthenticated()
if p['guid']:
guid = p['guid']
elif _guid:
guid = _guid
else:
# If we still don't have a guid, we cannot create a newclient without the guid!
self.debug('No guid for %s, waiting for next event.' %name)
return None
if 'clanTag' in p and len(p['clanTag']) > 0:
name = "[" + p['clanTag'] + "] " + p['name']
client = self.clients.newClient(cid, guid=guid, name=name, team=self.getTeam(p['teamId']), teamId=int(p['teamId']), data=p)
self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_JOIN, p, client))
return client
def getHardName(self, mapname):
""" Change real name to level name """
mapname = mapname.lower()
if mapname.startswith('mazar-i-sharif airfield'):
return 'levels/mp_01'
elif mapname.startswith('bagram hanger'):
return 'levels/mp_01_elimination'
elif mapname.startswith('shah-i-knot mountains'):
return 'levels/mp_02'
elif mapname.startswith('hindu kush pass'):
return 'levels/mp_02_koth'
elif mapname.startswith('khyber caves'):
return 'levels/mp_03'
#return 'levels/mp_03_elimination'
elif mapname.startswith('helmand valley'):
return 'levels/mp_04'
elif mapname.startswith('helmand river hill'):
return 'levels/mp_04_koth'
elif mapname.startswith('kandahar marketplace'):
return 'levels/mp_05'
elif mapname.startswith('diwagal camp'):
return 'levels/mp_06'
#return 'mp_06_elimination'
elif mapname.startswith('korengal outpost'):
return 'levels/mp_07_koth'
elif mapname.startswith('kunar base'):
return 'levels/mp_08'
elif mapname.startswith('kabul city ruins'):
return 'levels/mp_09'
#return 'levels/mp_09_elimination'
elif mapname.startswith('garmzir town'):
return 'levels/mp_10'
else:
self.warning('unknown level name \'%s\'. Please make sure you have entered a valid mapname' % mapname)
return mapname
def getEasyName(self, mapname):
""" Change levelname to real name """
if mapname.startswith('levels/mp_01_elimination'):
return 'Bagram Hanger'
elif mapname.startswith('levels/mp_01'):
return 'Mazar-i-Sharif Airfield'
elif mapname.startswith('levels/mp_02_koth'):
return 'Hindu Kush Pass'
elif mapname.startswith('levels/mp_02'):
return 'Shah-i-Knot Mountains'
elif mapname.startswith('levels/mp_03'):
return 'Khyber Caves'
elif mapname.startswith('levels/mp_04_koth'):
return 'Helmand River Hill'
elif mapname.startswith('levels/mp_04'):
return 'Helmand Valley'
elif mapname.startswith('levels/mp_05'):
return 'Kandahar Marketplace'
elif mapname.startswith('levels/mp_06'):
return 'Diwagal Camp'
elif mapname.startswith('levels/mp_07_koth'):
return 'Korengal Outpost'
elif mapname.startswith('levels/mp_08'):
return 'Kunar Base'
elif mapname.startswith('levels/mp_09'):
return 'Kabul City Ruins'
elif mapname.startswith('levels/mp_10'):
return 'Garmzir Town'
else:
self.warning('unknown level name \'%s\'. Please report this on B3 forums' % mapname)
return mapname
def getServerVars(self):
"""Update the game property from server fresh data"""
try: self.game.serverName = self.getCvar('serverName').getBoolean()
except: pass
try: self.game.gamePassword = self.getCvar('gamePassword').getBoolean()
except: pass
try: self.game.punkBuster = self.getCvar('punkBuster').getBoolean()
except: pass
try: self.game.hardCore = self.getCvar('hardCore').getBoolean()
except: pass
try: self.game.ranked = self.getCvar('ranked').getBoolean()
except: pass
try: self.game.skillLimit = self.getCvar('skillLimit').getBoolean()
except: pass
try: self.game.noUnlocks = self.getCvar('noUnlocks').getBoolean()
except: pass
try: self.game.noAmmoPickups = self.getCvar('noAmmoPickups').getBoolean()
except: pass
try: self.game.realisticHealth = self.getCvar('realisticHealth').getBoolean()
except: pass
try: self.game.supportAction = self.getCvar('supportAction').getBoolean()
except: pass
try: self.game.preRoundLimit = self.getCvar('preRoundLimit').getBoolean()
except: pass
try: self.game.roundStartTimerPlayersLimit = self.getCvar('roundStartTimerPlayersLimit').getBoolean()
except: pass
try: self.game.roundStartTimerDelay = self.getCvar('roundStartTimerDelay').getBoolean()
except: pass
try: self.game.tdmScoreCounterMaxScore = self.getCvar('tdmScoreCounterMaxScore').getBoolean()
except: pass
try: self.game.clanTeams = self.getCvar('clanTeams').getBoolean()
except: pass
try: self.game.friendlyFire = self.getCvar('friendlyFire').getBoolean()
except: pass
try: self.game.currentPlayerLimit = self.getCvar('currentPlayerLimit').getBoolean()
except: pass
try: self.game.maxPlayerLimit = self.getCvar('maxPlayerLimit').getBoolean()
except: pass
try: self.game.playerLimit = self.getCvar('playerLimit').getBoolean()
except: pass
try: self.game.bannerUrl = self.getCvar('bannerUrl').getBoolean()
except: pass
try: self.game.serverDescription = self.getCvar('serverDescription').getBoolean()
except: pass
try: self.game.noCrosshair = self.getCvar('noCrosshair').getBoolean()
except: pass
try: self.game.noSpotting = self.getCvar('noSpotting').getBoolean()
except: pass
try: self.game.teamKillCountForKick = self.getCvar('teamKillCountForKick').getBoolean()
except: pass
try: self.game.teamKillValueForKick = self.getCvar('teamKillValueForKick').getBoolean()
except: pass
try: self.game.teamKillValueIncrease = self.getCvar('teamKillValueIncrease').getBoolean()
except: pass
try: self.game.teamKillValueDecreasePerSecond = self.getCvar('teamKillValueDecreasePerSecond').getBoolean()
except: pass
try: self.game.idleTimeout = self.getCvar('idleTimeout').getBoolean()
except: pass
def getTeam(self, team):
"""convert MOH team numbers to B3 team numbers"""
team = int(team)
if team == 1:
return b3.TEAM_RED
elif team == 2:
return b3.TEAM_BLUE
elif team == 3:
return b3.TEAM_SPEC
else:
return b3.TEAM_UNKNOWN
def OnPlayerSpawn(self, action, data):
"""
Request: player.onSpawn <soldier name: string> <kit: string> <weapon: string> <specializations: 3 x string>
"""
if len(data) < 2:
return None
spawner = self.getClient(data[0])
kit = data[1]
weapon = data[2]
spec1 = data[3]
spec2 = data[4]
spec3 = data[5]
event = b3.events.EVT_CLIENT_SPAWN
return b3.events.Event(event, (kit, weapon, spec1, spec2, spec3), spawner)
def OnPlayerTeamchange(self, action, data):
"""
player.onTeamChange <soldier name: player name> <team: Team ID>
Effect: Player might have changed team
"""
#['player.onTeamChange', 'Dalich', '2']
client = self.getClient(data[0])
if client:
client.team = self.getTeam(data[1]) # .team setter will send team change event
client.teamId = int(data[1])
def tempban(self, client, reason='', duration=2, admin=None, silent=False, *kwargs):
duration = b3.functions.time2minutes(duration)
if isinstance(client, str):
self.write(self.getCommand('kick', cid=client, reason=reason[:80]))
return
elif admin:
fullreason = self.getMessage('temp_banned_by', self.getMessageVariables(client=client, reason=reason, admin=admin, banduration=b3.functions.minutesStr(duration)))
else:
fullreason = self.getMessage('temp_banned', self.getMessageVariables(client=client, reason=reason, banduration=b3.functions.minutesStr(duration)))
fullreason = self.stripColors(fullreason)
reason = self.stripColors(reason)
if self.PunkBuster:
# punkbuster acts odd if you ban for more than a day
# tempban for a day here and let b3 re-ban if the player
# comes back
if duration > 1440:
duration = 1440
self.PunkBuster.kick(client, duration, reason)
self.write(('banList.list',))
self.write(self.getCommand('tempban', guid=client.guid, duration=duration*60, reason=reason[:80]))
self.write(('banList.list',))
## also kick as the MoH server seems not to enforce all bans correctly
self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
if not silent and fullreason != '':
self.say(fullreason)
self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_BAN_TEMP, {'reason': reason,
'duration': duration,
'admin': admin}
, client))
def ban(self, client, reason='', admin=None, silent=False, *kwargs):
"""Permanent ban"""
self.debug('BAN : client: %s, reason: %s', client, reason)
if isinstance(client, b3.clients.Client):
self.write(self.getCommand('ban', guid=client.guid, reason=reason[:80]))
try:
self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
except: | pass
return
if admin:
reason = self.getMessage('banned_by', client.exactName, admin.exactName, reason)
else:
reason = self.getMessage('banned', client.exactName, reason)
reason = self.stripColors(reason)
if client.cid is None:
# ban by ip, this happens when we !permban @xx a player that is not connected
self.debug('EFFECTIVE BAN : %s',self.getCommand('banByIp', ip=client.ip, reason=reason[:80]))
self.write(self.getCommand('banByIp', ip=client.ip, reason=reason[:80]))
if admin:
admin.message('banned: %s (@%s). His last ip (%s) has been added to banlist'%(client.exactName, client.id, client.ip))
else:
# ban by cid
self.debug('EFFECTIVE BAN : %s',self.getCommand('ban', guid=client.guid, reason=reason[:80]))
self.write(('banList.list',))
self.write(self.getCommand('ban', cid=client.cid, reason=reason[:80]))
self.write(('banList.list',))
self.write(self.getCommand('kick', cid=client.cid, reason=reason[:80]))
if admin:
admin.message('banned: %s (@%s) has been added to banlist'%(client.exactName, client.id))
if self.PunkBuster:
self.PunkBuster.banGUID(client, reason)
if not silent:
self.say(reason)
self.queueEvent(b3.events.Event(b3.events.EVT_CLIENT_BAN, {'reason': reason, 'admin': admin}, client))
def rotateMap(self):
"""Load the next map (not level). If the current game mod plays each level twice
to get teams the chance to play both sides, then this rotate a second
time to really switch to the next map"""
nextIndex = self.getNextMapIndex()
if nextIndex == -1:
# No map in map rotation list, just call admin.runNextLevel
self.write(('admin.runNextRound',))
else:
self.write(('mapList.nextLevelIndex', nextIndex))
self.write(('admin.runNextRound',))
def changeMap(self, map):
"""Change to the given map
1) determine the level name
If map is of the form 'mp_001' and 'Kaboul' is a supported
level for the current game mod, then this level is loaded.
In other cases, this method assumes it is given a 'easy map name' (like
'Port Valdez') and it will do its best to find the level name that seems
to be for 'Port Valdez' within the supported levels.
If no match is found, then instead of loading the map, this method
returns a list of candidate map names
2) if we got a level name
if the level is not in the current rotation list, then add it to
the map list and load it
"""
supportedMaps = self.getSupportedMaps()
if 'levels/%s'%map in supportedMaps:
map = 'levels/%s'%map
if map not in supportedMaps:
match = self.getMapsSoundingLike(map)
if len(match) == 1:
map = match[0]
else:
return match
if map in supportedMaps:
levelnames = self.write(('mapList.list',))
if map not in levelnames:
# add the map to the map list
nextIndex = self.getNextMapIndex()
if nextIndex == -1:
self.write(('mapList.append', map))
nextIndex = 0
else:
if nextIndex == 0:
# case where the map list contains only 1 map
nextIndex = 1
self.write(('mapList.insert', nextIndex, map))
else:
nextIndex = 0
while nextIndex < len(levelnames) and levelnames[nextIndex] != map:
nextIndex += 1
self.say('Changing map to %s' % map)
time.sleep(1)
self.write(('mapList.nextLevelIndex', nextIndex))
self.write(('admin.runNextRound', )) | random_line_split |
|
asm.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
* Inline assembly support.
*/
use self::State::*;
use ast;
use codemap;
use codemap::Span;
use ext::base;
use ext::base::*;
use parse::token::InternedString;
use parse::token;
use ptr::P;
enum State {
Asm,
Outputs,
Inputs,
Clobbers,
Options,
StateNone
}
impl State {
fn | (&self) -> State {
match *self {
Asm => Outputs,
Outputs => Inputs,
Inputs => Clobbers,
Clobbers => Options,
Options => StateNone,
StateNone => StateNone
}
}
}
static OPTIONS: &'static [&'static str] = &["volatile", "alignstack", "intel"];
pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {
let mut p = cx.new_parser_from_tts(tts);
let mut asm = InternedString::new("");
let mut asm_str_style = None;
let mut outputs = Vec::new();
let mut inputs = Vec::new();
let mut clobs = Vec::new();
let mut volatile = false;
let mut alignstack = false;
let mut dialect = ast::AsmAtt;
let mut state = Asm;
'statement: loop {
match state {
Asm => {
let (s, style) = match expr_to_string(cx, p.parse_expr(),
"inline assembly must be a string literal") {
Some((s, st)) => (s, st),
// let compilation continue
None => return DummyResult::expr(sp),
};
asm = s;
asm_str_style = Some(style);
}
Outputs => {
while p.token != token::Eof &&
p.token != token::Colon &&
p.token != token::ModSep {
if outputs.len() != 0 {
p.eat(&token::Comma);
}
let (constraint, _str_style) = p.parse_str();
let span = p.last_span;
p.expect(&token::OpenDelim(token::Paren));
let out = p.parse_expr();
p.expect(&token::CloseDelim(token::Paren));
// Expands a read+write operand into two operands.
//
// Use '+' modifier when you want the same expression
// to be both an input and an output at the same time.
// It's the opposite of '=&' which means that the memory
// cannot be shared with any other operand (usually when
// a register is clobbered early.)
let output = match constraint.get().slice_shift_char() {
Some(('=', _)) => None,
Some(('+', operand)) => {
Some(token::intern_and_get_ident(format!(
"={}",
operand).as_slice()))
}
_ => {
cx.span_err(span, "output operand constraint lacks '=' or '+'");
None
}
};
let is_rw = output.is_some();
outputs.push((output.unwrap_or(constraint), out, is_rw));
}
}
Inputs => {
while p.token != token::Eof &&
p.token != token::Colon &&
p.token != token::ModSep {
if inputs.len() != 0 {
p.eat(&token::Comma);
}
let (constraint, _str_style) = p.parse_str();
if constraint.get().starts_with("=") {
cx.span_err(p.last_span, "input operand constraint contains '='");
} else if constraint.get().starts_with("+") {
cx.span_err(p.last_span, "input operand constraint contains '+'");
}
p.expect(&token::OpenDelim(token::Paren));
let input = p.parse_expr();
p.expect(&token::CloseDelim(token::Paren));
inputs.push((constraint, input));
}
}
Clobbers => {
while p.token != token::Eof &&
p.token != token::Colon &&
p.token != token::ModSep {
if clobs.len() != 0 {
p.eat(&token::Comma);
}
let (s, _str_style) = p.parse_str();
if OPTIONS.iter().any(|opt| s.equiv(opt)) {
cx.span_warn(p.last_span, "expected a clobber, found an option");
}
clobs.push(s);
}
}
Options => {
let (option, _str_style) = p.parse_str();
if option.equiv(&("volatile")) {
// Indicates that the inline assembly has side effects
// and must not be optimized out along with its outputs.
volatile = true;
} else if option.equiv(&("alignstack")) {
alignstack = true;
} else if option.equiv(&("intel")) {
dialect = ast::AsmIntel;
} else {
cx.span_warn(p.last_span, "unrecognized option");
}
if p.token == token::Comma {
p.eat(&token::Comma);
}
}
StateNone => ()
}
loop {
// MOD_SEP is a double colon '::' without space in between.
// When encountered, the state must be advanced twice.
match (&p.token, state.next(), state.next().next()) {
(&token::Colon, StateNone, _) |
(&token::ModSep, _, StateNone) => {
p.bump();
break 'statement;
}
(&token::Colon, st, _) |
(&token::ModSep, _, st) => {
p.bump();
state = st;
}
(&token::Eof, _, _) => break 'statement,
_ => break
}
}
}
let expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {
call_site: sp,
callee: codemap::NameAndSpan {
name: "asm".to_string(),
format: codemap::MacroBang,
span: None,
},
});
MacExpr::new(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprInlineAsm(ast::InlineAsm {
asm: token::intern_and_get_ident(asm.get()),
asm_str_style: asm_str_style.unwrap(),
outputs: outputs,
inputs: inputs,
clobbers: clobs,
volatile: volatile,
alignstack: alignstack,
dialect: dialect,
expn_id: expn_id,
}),
span: sp
}))
}
| next | identifier_name |
asm.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
* Inline assembly support.
*/
use self::State::*;
use ast;
use codemap;
use codemap::Span;
use ext::base;
use ext::base::*;
use parse::token::InternedString;
use parse::token;
use ptr::P;
enum State {
Asm,
Outputs,
Inputs,
Clobbers,
Options,
StateNone
}
impl State {
fn next(&self) -> State {
match *self {
Asm => Outputs,
Outputs => Inputs,
Inputs => Clobbers,
Clobbers => Options,
Options => StateNone,
StateNone => StateNone
}
}
}
static OPTIONS: &'static [&'static str] = &["volatile", "alignstack", "intel"];
pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {
let mut p = cx.new_parser_from_tts(tts);
let mut asm = InternedString::new("");
let mut asm_str_style = None;
let mut outputs = Vec::new();
let mut inputs = Vec::new();
let mut clobs = Vec::new();
let mut volatile = false;
let mut alignstack = false;
let mut dialect = ast::AsmAtt;
let mut state = Asm;
'statement: loop {
match state {
Asm => {
let (s, style) = match expr_to_string(cx, p.parse_expr(),
"inline assembly must be a string literal") {
Some((s, st)) => (s, st),
// let compilation continue
None => return DummyResult::expr(sp),
};
asm = s;
asm_str_style = Some(style);
}
Outputs => {
while p.token != token::Eof &&
p.token != token::Colon &&
p.token != token::ModSep {
if outputs.len() != 0 {
p.eat(&token::Comma);
}
|
let span = p.last_span;
p.expect(&token::OpenDelim(token::Paren));
let out = p.parse_expr();
p.expect(&token::CloseDelim(token::Paren));
// Expands a read+write operand into two operands.
//
// Use '+' modifier when you want the same expression
// to be both an input and an output at the same time.
// It's the opposite of '=&' which means that the memory
// cannot be shared with any other operand (usually when
// a register is clobbered early.)
let output = match constraint.get().slice_shift_char() {
Some(('=', _)) => None,
Some(('+', operand)) => {
Some(token::intern_and_get_ident(format!(
"={}",
operand).as_slice()))
}
_ => {
cx.span_err(span, "output operand constraint lacks '=' or '+'");
None
}
};
let is_rw = output.is_some();
outputs.push((output.unwrap_or(constraint), out, is_rw));
}
}
Inputs => {
while p.token != token::Eof &&
p.token != token::Colon &&
p.token != token::ModSep {
if inputs.len() != 0 {
p.eat(&token::Comma);
}
let (constraint, _str_style) = p.parse_str();
if constraint.get().starts_with("=") {
cx.span_err(p.last_span, "input operand constraint contains '='");
} else if constraint.get().starts_with("+") {
cx.span_err(p.last_span, "input operand constraint contains '+'");
}
p.expect(&token::OpenDelim(token::Paren));
let input = p.parse_expr();
p.expect(&token::CloseDelim(token::Paren));
inputs.push((constraint, input));
}
}
Clobbers => {
while p.token != token::Eof &&
p.token != token::Colon &&
p.token != token::ModSep {
if clobs.len() != 0 {
p.eat(&token::Comma);
}
let (s, _str_style) = p.parse_str();
if OPTIONS.iter().any(|opt| s.equiv(opt)) {
cx.span_warn(p.last_span, "expected a clobber, found an option");
}
clobs.push(s);
}
}
Options => {
let (option, _str_style) = p.parse_str();
if option.equiv(&("volatile")) {
// Indicates that the inline assembly has side effects
// and must not be optimized out along with its outputs.
volatile = true;
} else if option.equiv(&("alignstack")) {
alignstack = true;
} else if option.equiv(&("intel")) {
dialect = ast::AsmIntel;
} else {
cx.span_warn(p.last_span, "unrecognized option");
}
if p.token == token::Comma {
p.eat(&token::Comma);
}
}
StateNone => ()
}
loop {
// MOD_SEP is a double colon '::' without space in between.
// When encountered, the state must be advanced twice.
match (&p.token, state.next(), state.next().next()) {
(&token::Colon, StateNone, _) |
(&token::ModSep, _, StateNone) => {
p.bump();
break 'statement;
}
(&token::Colon, st, _) |
(&token::ModSep, _, st) => {
p.bump();
state = st;
}
(&token::Eof, _, _) => break 'statement,
_ => break
}
}
}
let expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {
call_site: sp,
callee: codemap::NameAndSpan {
name: "asm".to_string(),
format: codemap::MacroBang,
span: None,
},
});
MacExpr::new(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprInlineAsm(ast::InlineAsm {
asm: token::intern_and_get_ident(asm.get()),
asm_str_style: asm_str_style.unwrap(),
outputs: outputs,
inputs: inputs,
clobbers: clobs,
volatile: volatile,
alignstack: alignstack,
dialect: dialect,
expn_id: expn_id,
}),
span: sp
}))
} | let (constraint, _str_style) = p.parse_str(); | random_line_split |
asm.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
* Inline assembly support.
*/
use self::State::*;
use ast;
use codemap;
use codemap::Span;
use ext::base;
use ext::base::*;
use parse::token::InternedString;
use parse::token;
use ptr::P;
enum State {
Asm,
Outputs,
Inputs,
Clobbers,
Options,
StateNone
}
impl State {
fn next(&self) -> State |
}
static OPTIONS: &'static [&'static str] = &["volatile", "alignstack", "intel"];
pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {
let mut p = cx.new_parser_from_tts(tts);
let mut asm = InternedString::new("");
let mut asm_str_style = None;
let mut outputs = Vec::new();
let mut inputs = Vec::new();
let mut clobs = Vec::new();
let mut volatile = false;
let mut alignstack = false;
let mut dialect = ast::AsmAtt;
let mut state = Asm;
'statement: loop {
match state {
Asm => {
let (s, style) = match expr_to_string(cx, p.parse_expr(),
"inline assembly must be a string literal") {
Some((s, st)) => (s, st),
// let compilation continue
None => return DummyResult::expr(sp),
};
asm = s;
asm_str_style = Some(style);
}
Outputs => {
while p.token != token::Eof &&
p.token != token::Colon &&
p.token != token::ModSep {
if outputs.len() != 0 {
p.eat(&token::Comma);
}
let (constraint, _str_style) = p.parse_str();
let span = p.last_span;
p.expect(&token::OpenDelim(token::Paren));
let out = p.parse_expr();
p.expect(&token::CloseDelim(token::Paren));
// Expands a read+write operand into two operands.
//
// Use '+' modifier when you want the same expression
// to be both an input and an output at the same time.
// It's the opposite of '=&' which means that the memory
// cannot be shared with any other operand (usually when
// a register is clobbered early.)
let output = match constraint.get().slice_shift_char() {
Some(('=', _)) => None,
Some(('+', operand)) => {
Some(token::intern_and_get_ident(format!(
"={}",
operand).as_slice()))
}
_ => {
cx.span_err(span, "output operand constraint lacks '=' or '+'");
None
}
};
let is_rw = output.is_some();
outputs.push((output.unwrap_or(constraint), out, is_rw));
}
}
Inputs => {
while p.token != token::Eof &&
p.token != token::Colon &&
p.token != token::ModSep {
if inputs.len() != 0 {
p.eat(&token::Comma);
}
let (constraint, _str_style) = p.parse_str();
if constraint.get().starts_with("=") {
cx.span_err(p.last_span, "input operand constraint contains '='");
} else if constraint.get().starts_with("+") {
cx.span_err(p.last_span, "input operand constraint contains '+'");
}
p.expect(&token::OpenDelim(token::Paren));
let input = p.parse_expr();
p.expect(&token::CloseDelim(token::Paren));
inputs.push((constraint, input));
}
}
Clobbers => {
while p.token != token::Eof &&
p.token != token::Colon &&
p.token != token::ModSep {
if clobs.len() != 0 {
p.eat(&token::Comma);
}
let (s, _str_style) = p.parse_str();
if OPTIONS.iter().any(|opt| s.equiv(opt)) {
cx.span_warn(p.last_span, "expected a clobber, found an option");
}
clobs.push(s);
}
}
Options => {
let (option, _str_style) = p.parse_str();
if option.equiv(&("volatile")) {
// Indicates that the inline assembly has side effects
// and must not be optimized out along with its outputs.
volatile = true;
} else if option.equiv(&("alignstack")) {
alignstack = true;
} else if option.equiv(&("intel")) {
dialect = ast::AsmIntel;
} else {
cx.span_warn(p.last_span, "unrecognized option");
}
if p.token == token::Comma {
p.eat(&token::Comma);
}
}
StateNone => ()
}
loop {
// MOD_SEP is a double colon '::' without space in between.
// When encountered, the state must be advanced twice.
match (&p.token, state.next(), state.next().next()) {
(&token::Colon, StateNone, _) |
(&token::ModSep, _, StateNone) => {
p.bump();
break 'statement;
}
(&token::Colon, st, _) |
(&token::ModSep, _, st) => {
p.bump();
state = st;
}
(&token::Eof, _, _) => break 'statement,
_ => break
}
}
}
let expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {
call_site: sp,
callee: codemap::NameAndSpan {
name: "asm".to_string(),
format: codemap::MacroBang,
span: None,
},
});
MacExpr::new(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprInlineAsm(ast::InlineAsm {
asm: token::intern_and_get_ident(asm.get()),
asm_str_style: asm_str_style.unwrap(),
outputs: outputs,
inputs: inputs,
clobbers: clobs,
volatile: volatile,
alignstack: alignstack,
dialect: dialect,
expn_id: expn_id,
}),
span: sp
}))
}
| {
match *self {
Asm => Outputs,
Outputs => Inputs,
Inputs => Clobbers,
Clobbers => Options,
Options => StateNone,
StateNone => StateNone
}
} | identifier_body |
porn91.py | # encoding: utf-8
from __future__ import unicode_literals
from ..compat import (
compat_urllib_parse_unquote,
compat_urllib_parse_urlencode,
)
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
ExtractorError,
)
class Porn91IE(InfoExtractor):
IE_NAME = '91porn'
_VALID_URL = r'(?:https?://)(?:www\.|)91porn\.com/.+?\?viewkey=(?P<id>[\w\d]+)'
_TEST = {
'url': 'http://91porn.com/view_video.php?viewkey=7e42283b4f5ab36da134',
'md5': '6df8f6d028bc8b14f5dbd73af742fb20',
'info_dict': {
'id': '7e42283b4f5ab36da134',
'title': '18岁大一漂亮学妹,水嫩性感,再爽一次!',
'ext': 'mp4',
'duration': 431,
'age_limit': 18,
}
}
def _real_extract(self, url):
vi | ._match_id(url)
self._set_cookie('91porn.com', 'language', 'cn_CN')
webpage = self._download_webpage(
'http://91porn.com/view_video.php?viewkey=%s' % video_id, video_id)
if '作为游客,你每天只可观看10个视频' in webpage:
raise ExtractorError('91 Porn says: Daily limit 10 videos exceeded', expected=True)
title = self._search_regex(
r'<div id="viewvideo-title">([^<]+)</div>', webpage, 'title')
title = title.replace('\n', '')
# get real url
file_id = self._search_regex(
r'so.addVariable\(\'file\',\'(\d+)\'', webpage, 'file id')
sec_code = self._search_regex(
r'so.addVariable\(\'seccode\',\'([^\']+)\'', webpage, 'sec code')
max_vid = self._search_regex(
r'so.addVariable\(\'max_vid\',\'(\d+)\'', webpage, 'max vid')
url_params = compat_urllib_parse_urlencode({
'VID': file_id,
'mp4': '1',
'seccode': sec_code,
'max_vid': max_vid,
})
info_cn = self._download_webpage(
'http://91porn.com/getfile.php?' + url_params, video_id,
'Downloading real video url')
video_url = compat_urllib_parse_unquote(self._search_regex(
r'file=([^&]+)&', info_cn, 'url'))
duration = parse_duration(self._search_regex(
r'时长:\s*</span>\s*(\d+:\d+)', webpage, 'duration', fatal=False))
comment_count = int_or_none(self._search_regex(
r'留言:\s*</span>\s*(\d+)', webpage, 'comment count', fatal=False))
return {
'id': video_id,
'title': title,
'url': video_url,
'duration': duration,
'comment_count': comment_count,
'age_limit': self._rta_search(webpage),
}
| deo_id = self | identifier_name |
porn91.py | # encoding: utf-8
from __future__ import unicode_literals
from ..compat import (
compat_urllib_parse_unquote,
compat_urllib_parse_urlencode,
)
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
ExtractorError,
)
class Porn91IE(InfoExtractor):
| IE_NAME = '91porn'
_VALID_URL = r'(?:https?://)(?:www\.|)91porn\.com/.+?\?viewkey=(?P<id>[\w\d]+)'
_TEST = {
'url': 'http://91porn.com/view_video.php?viewkey=7e42283b4f5ab36da134',
'md5': '6df8f6d028bc8b14f5dbd73af742fb20',
'info_dict': {
'id': '7e42283b4f5ab36da134',
'title': '18岁大一漂亮学妹,水嫩性感,再爽一次!',
'ext': 'mp4',
'duration': 431,
'age_limit': 18,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
self._set_cookie('91porn.com', 'language', 'cn_CN')
webpage = self._download_webpage(
'http://91porn.com/view_video.php?viewkey=%s' % video_id, video_id)
if '作为游客,你每天只可观看10个视频' in webpage:
raise ExtractorError('91 Porn says: Daily limit 10 videos exceeded', expected=True)
title = self._search_regex(
r'<div id="viewvideo-title">([^<]+)</div>', webpage, 'title')
title = title.replace('\n', '')
# get real url
file_id = self._search_regex(
r'so.addVariable\(\'file\',\'(\d+)\'', webpage, 'file id')
sec_code = self._search_regex(
r'so.addVariable\(\'seccode\',\'([^\']+)\'', webpage, 'sec code')
max_vid = self._search_regex(
r'so.addVariable\(\'max_vid\',\'(\d+)\'', webpage, 'max vid')
url_params = compat_urllib_parse_urlencode({
'VID': file_id,
'mp4': '1',
'seccode': sec_code,
'max_vid': max_vid,
})
info_cn = self._download_webpage(
'http://91porn.com/getfile.php?' + url_params, video_id,
'Downloading real video url')
video_url = compat_urllib_parse_unquote(self._search_regex(
r'file=([^&]+)&', info_cn, 'url'))
duration = parse_duration(self._search_regex(
r'时长:\s*</span>\s*(\d+:\d+)', webpage, 'duration', fatal=False))
comment_count = int_or_none(self._search_regex(
r'留言:\s*</span>\s*(\d+)', webpage, 'comment count', fatal=False))
return {
'id': video_id,
'title': title,
'url': video_url,
'duration': duration,
'comment_count': comment_count,
'age_limit': self._rta_search(webpage),
}
| identifier_body |
|
porn91.py | # encoding: utf-8
from __future__ import unicode_literals
from ..compat import (
compat_urllib_parse_unquote,
compat_urllib_parse_urlencode,
)
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
ExtractorError,
)
class Porn91IE(InfoExtractor):
IE_NAME = '91porn'
_VALID_URL = r'(?:https?://)(?:www\.|)91porn\.com/.+?\?viewkey=(?P<id>[\w\d]+)'
_TEST = {
'url': 'http://91porn.com/view_video.php?viewkey=7e42283b4f5ab36da134',
'md5': '6df8f6d028bc8b14f5dbd73af742fb20',
'info_dict': {
'id': '7e42283b4f5ab36da134',
'title': '18岁大一漂亮学妹,水嫩性感,再爽一次!',
'ext': 'mp4',
'duration': 431,
'age_limit': 18,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
self._set_cookie('91porn.com', 'language', 'cn_CN')
webpage = self._download_webpage(
'http://91porn.com/view_video.php?viewkey=%s' % video_id, video_id)
if '作为游客,你每天只可观看10个视频' in webpage:
raise ExtractorError('91 Porn says: Daily limit 10 videos exceeded | ideo-title">([^<]+)</div>', webpage, 'title')
title = title.replace('\n', '')
# get real url
file_id = self._search_regex(
r'so.addVariable\(\'file\',\'(\d+)\'', webpage, 'file id')
sec_code = self._search_regex(
r'so.addVariable\(\'seccode\',\'([^\']+)\'', webpage, 'sec code')
max_vid = self._search_regex(
r'so.addVariable\(\'max_vid\',\'(\d+)\'', webpage, 'max vid')
url_params = compat_urllib_parse_urlencode({
'VID': file_id,
'mp4': '1',
'seccode': sec_code,
'max_vid': max_vid,
})
info_cn = self._download_webpage(
'http://91porn.com/getfile.php?' + url_params, video_id,
'Downloading real video url')
video_url = compat_urllib_parse_unquote(self._search_regex(
r'file=([^&]+)&', info_cn, 'url'))
duration = parse_duration(self._search_regex(
r'时长:\s*</span>\s*(\d+:\d+)', webpage, 'duration', fatal=False))
comment_count = int_or_none(self._search_regex(
r'留言:\s*</span>\s*(\d+)', webpage, 'comment count', fatal=False))
return {
'id': video_id,
'title': title,
'url': video_url,
'duration': duration,
'comment_count': comment_count,
'age_limit': self._rta_search(webpage),
}
| ', expected=True)
title = self._search_regex(
r'<div id="viewv | conditional_block |
porn91.py | # encoding: utf-8
from __future__ import unicode_literals
from ..compat import (
compat_urllib_parse_unquote,
compat_urllib_parse_urlencode,
)
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
ExtractorError,
)
class Porn91IE(InfoExtractor):
IE_NAME = '91porn'
_VALID_URL = r'(?:https?://)(?:www\.|)91porn\.com/.+?\?viewkey=(?P<id>[\w\d]+)'
_TEST = {
'url': 'http://91porn.com/view_video.php?viewkey=7e42283b4f5ab36da134',
'md5': '6df8f6d028bc8b14f5dbd73af742fb20',
'info_dict': {
'id': '7e42283b4f5ab36da134',
'title': '18岁大一漂亮学妹,水嫩性感,再爽一次!',
'ext': 'mp4', | 'age_limit': 18,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
self._set_cookie('91porn.com', 'language', 'cn_CN')
webpage = self._download_webpage(
'http://91porn.com/view_video.php?viewkey=%s' % video_id, video_id)
if '作为游客,你每天只可观看10个视频' in webpage:
raise ExtractorError('91 Porn says: Daily limit 10 videos exceeded', expected=True)
title = self._search_regex(
r'<div id="viewvideo-title">([^<]+)</div>', webpage, 'title')
title = title.replace('\n', '')
# get real url
file_id = self._search_regex(
r'so.addVariable\(\'file\',\'(\d+)\'', webpage, 'file id')
sec_code = self._search_regex(
r'so.addVariable\(\'seccode\',\'([^\']+)\'', webpage, 'sec code')
max_vid = self._search_regex(
r'so.addVariable\(\'max_vid\',\'(\d+)\'', webpage, 'max vid')
url_params = compat_urllib_parse_urlencode({
'VID': file_id,
'mp4': '1',
'seccode': sec_code,
'max_vid': max_vid,
})
info_cn = self._download_webpage(
'http://91porn.com/getfile.php?' + url_params, video_id,
'Downloading real video url')
video_url = compat_urllib_parse_unquote(self._search_regex(
r'file=([^&]+)&', info_cn, 'url'))
duration = parse_duration(self._search_regex(
r'时长:\s*</span>\s*(\d+:\d+)', webpage, 'duration', fatal=False))
comment_count = int_or_none(self._search_regex(
r'留言:\s*</span>\s*(\d+)', webpage, 'comment count', fatal=False))
return {
'id': video_id,
'title': title,
'url': video_url,
'duration': duration,
'comment_count': comment_count,
'age_limit': self._rta_search(webpage),
} | 'duration': 431, | random_line_split |
index.d.ts | // Type definitions for ember-test-helpers 0.7
// Project: https://github.com/emberjs/ember-test-helpers#readme
// Definitions by: Derek Wickern <https://github.com/dwickern>
// Mike North <https://github.com/mike-north>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 3.7
/// <reference types="jquery" />
declare module 'ember-test-helpers' {
import Ember from 'ember';
import { TemplateFactory } from 'htmlbars-inline-precompile';
import RSVP from 'rsvp';
interface ModuleCallbacks {
integration?: boolean | undefined;
unit?: boolean | undefined;
needs?: string[] | undefined;
beforeSetup?(assert?: any): void;
setup?(assert?: any): void;
teardown?(assert?: any): void;
afterTeardown?(assert?: any): void;
[key: string]: any;
}
interface TestContext {
get(key: string): any;
getProperties<K extends string>(...keys: K[]): Pick<any, K>;
set<V>(key: string, value: V): V;
setProperties<P extends { [key: string]: any }>(hash: P): P;
on(actionName: string, handler: (this: TestContext, ...args: any[]) => any): void;
send(actionName: string): void;
$: JQueryStatic;
subject(options?: {}): any;
render(template?: string | string[] | TemplateFactory): Promise<void>;
clearRender(): void;
registry: Ember.Registry;
container: Ember.Container;
dispatcher: Ember.EventDispatcher;
application: Ember.Application;
register(fullName: string, factory: any): void;
factory(fullName: string): any;
inject: {
controller(name: string, options?: { as: string }): any;
service(name: string, options?: { as: string }): any;
}; | owner: Ember.ApplicationInstance & {
factoryFor(fullName: string, options?: {}): any;
};
pauseTest(): Promise<void>;
resumeTest(): void;
element: Element;
}
class TestModule {
constructor(name: string, callbacks?: ModuleCallbacks);
constructor(name: string, description?: string, callbacks?: ModuleCallbacks);
name: string;
subjectName: string;
description: string;
isIntegration: boolean;
callbacks: ModuleCallbacks;
context: TestContext;
resolver: Ember.Resolver;
setup(assert?: any): RSVP.Promise<void>;
teardown(assert?: any): RSVP.Promise<void>;
getContext(): TestContext;
setContext(context: TestContext): void;
}
class TestModuleForAcceptance extends TestModule {}
class TestModuleForIntegration extends TestModule {}
class TestModuleForComponent extends TestModule {}
class TestModuleForModel extends TestModule {}
function getContext(): TestContext | undefined;
function setContext(context: TestContext): void;
function unsetContext(): void;
function setResolver(resolver: Ember.Resolver): void;
}
declare module 'ember-test-helpers/wait' {
import RSVP from 'rsvp';
interface WaitOptions {
waitForTimers?: boolean | undefined;
waitForAJAX?: boolean | undefined;
waitForWaiters?: boolean | undefined;
}
export default function wait(options?: WaitOptions): RSVP.Promise<void>;
}
declare module 'ember-test-helpers/has-ember-version' {
export default function hasEmberVersion(major: number, minor: number): boolean;
} | random_line_split |
|
index.d.ts | // Type definitions for ember-test-helpers 0.7
// Project: https://github.com/emberjs/ember-test-helpers#readme
// Definitions by: Derek Wickern <https://github.com/dwickern>
// Mike North <https://github.com/mike-north>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 3.7
/// <reference types="jquery" />
declare module 'ember-test-helpers' {
import Ember from 'ember';
import { TemplateFactory } from 'htmlbars-inline-precompile';
import RSVP from 'rsvp';
interface ModuleCallbacks {
integration?: boolean | undefined;
unit?: boolean | undefined;
needs?: string[] | undefined;
beforeSetup?(assert?: any): void;
setup?(assert?: any): void;
teardown?(assert?: any): void;
afterTeardown?(assert?: any): void;
[key: string]: any;
}
interface TestContext {
get(key: string): any;
getProperties<K extends string>(...keys: K[]): Pick<any, K>;
set<V>(key: string, value: V): V;
setProperties<P extends { [key: string]: any }>(hash: P): P;
on(actionName: string, handler: (this: TestContext, ...args: any[]) => any): void;
send(actionName: string): void;
$: JQueryStatic;
subject(options?: {}): any;
render(template?: string | string[] | TemplateFactory): Promise<void>;
clearRender(): void;
registry: Ember.Registry;
container: Ember.Container;
dispatcher: Ember.EventDispatcher;
application: Ember.Application;
register(fullName: string, factory: any): void;
factory(fullName: string): any;
inject: {
controller(name: string, options?: { as: string }): any;
service(name: string, options?: { as: string }): any;
};
owner: Ember.ApplicationInstance & {
factoryFor(fullName: string, options?: {}): any;
};
pauseTest(): Promise<void>;
resumeTest(): void;
element: Element;
}
class TestModule {
constructor(name: string, callbacks?: ModuleCallbacks);
constructor(name: string, description?: string, callbacks?: ModuleCallbacks);
name: string;
subjectName: string;
description: string;
isIntegration: boolean;
callbacks: ModuleCallbacks;
context: TestContext;
resolver: Ember.Resolver;
setup(assert?: any): RSVP.Promise<void>;
teardown(assert?: any): RSVP.Promise<void>;
getContext(): TestContext;
setContext(context: TestContext): void;
}
class TestModuleForAcceptance extends TestModule {}
class TestModuleForIntegration extends TestModule {}
class | extends TestModule {}
class TestModuleForModel extends TestModule {}
function getContext(): TestContext | undefined;
function setContext(context: TestContext): void;
function unsetContext(): void;
function setResolver(resolver: Ember.Resolver): void;
}
declare module 'ember-test-helpers/wait' {
import RSVP from 'rsvp';
interface WaitOptions {
waitForTimers?: boolean | undefined;
waitForAJAX?: boolean | undefined;
waitForWaiters?: boolean | undefined;
}
export default function wait(options?: WaitOptions): RSVP.Promise<void>;
}
declare module 'ember-test-helpers/has-ember-version' {
export default function hasEmberVersion(major: number, minor: number): boolean;
}
| TestModuleForComponent | identifier_name |
__init__.py | import os
import glob
import importlib
import logging
logger = logging.getLogger(__name__)
class UnknownFormatError(RuntimeError):
pass
item_classes = []
supported_protocols = []
def constructor_exists(path):
for klass in item_classes:
if klass.applies(path):
return True
return False
def find_constructors(path, **kwargs):
"""Available constructors for a path.
"""
applies = ((float(klass.applies(path, **kwargs)), klass) for klass in item_classes)
contructors = list(sorted((c for c in applies if c[0] > 0.0), key=lambda c: (c[0], c[1].__name__)))
logging.debug("For {0}, constructors found: {1}".format(path, contructors))
return contructors
def find_best_constructor(path, **kwargs):
constructors = find_constructors(path, **kwargs)
if constructors:
return find_constructors(path)[-1][1]
else:
return None
def read_item(name, storage, path):
|
def register(klass):
"""Make this class available to storage for creating items.
:type klass: type
Works as decorator for both classes and functions.
"""
item_classes.append(klass)
logger.info("Registered item class %s." % klass.__name__)
supported_protocols.extend(getattr(klass, "protocols", []))
return klass
def _auto_import():
"""Automatically imports all modules from this directory."""
for subdir in ["formats", "pseudoitems"]:
py_files = glob.glob(os.path.join(os.path.dirname(__file__), subdir) +"/*.py")
py_files = filter(lambda f: os.path.isfile(f) and not os.path.basename(f).startswith('_'),
py_files)
modules = [os.path.basename(py_file[:-3]) for py_file in py_files]
subpackage = importlib.import_module("." + subdir, __package__)
for module_name in modules:
try:
importlib.import_module("." + module_name, subpackage.__name__)
except Exception as i:
logger.warning("Module %s could not be imported, reason: %s" % (module_name, i))
from . import directory
_auto_import()
| """Try to create item using one of the registered constructors."""
klass = find_best_constructor(path)
if klass:
return klass(storage, name, path)
else:
raise UnknownFormatError("Cannot interpret file %s" % path) | identifier_body |
__init__.py | import os
import glob
import importlib
import logging
logger = logging.getLogger(__name__)
class UnknownFormatError(RuntimeError):
pass
item_classes = []
supported_protocols = []
def constructor_exists(path):
for klass in item_classes:
if klass.applies(path):
return True
return False
def find_constructors(path, **kwargs):
"""Available constructors for a path.
"""
applies = ((float(klass.applies(path, **kwargs)), klass) for klass in item_classes)
contructors = list(sorted((c for c in applies if c[0] > 0.0), key=lambda c: (c[0], c[1].__name__)))
logging.debug("For {0}, constructors found: {1}".format(path, contructors))
return contructors
def find_best_constructor(path, **kwargs):
constructors = find_constructors(path, **kwargs)
if constructors:
return find_constructors(path)[-1][1]
else:
return None
def read_item(name, storage, path):
"""Try to create item using one of the registered constructors."""
klass = find_best_constructor(path)
if klass:
|
else:
raise UnknownFormatError("Cannot interpret file %s" % path)
def register(klass):
"""Make this class available to storage for creating items.
:type klass: type
Works as decorator for both classes and functions.
"""
item_classes.append(klass)
logger.info("Registered item class %s." % klass.__name__)
supported_protocols.extend(getattr(klass, "protocols", []))
return klass
def _auto_import():
"""Automatically imports all modules from this directory."""
for subdir in ["formats", "pseudoitems"]:
py_files = glob.glob(os.path.join(os.path.dirname(__file__), subdir) +"/*.py")
py_files = filter(lambda f: os.path.isfile(f) and not os.path.basename(f).startswith('_'),
py_files)
modules = [os.path.basename(py_file[:-3]) for py_file in py_files]
subpackage = importlib.import_module("." + subdir, __package__)
for module_name in modules:
try:
importlib.import_module("." + module_name, subpackage.__name__)
except Exception as i:
logger.warning("Module %s could not be imported, reason: %s" % (module_name, i))
from . import directory
_auto_import()
| return klass(storage, name, path) | conditional_block |
__init__.py | import os
import glob
import importlib
import logging
logger = logging.getLogger(__name__)
class UnknownFormatError(RuntimeError):
pass
item_classes = []
supported_protocols = []
def constructor_exists(path):
for klass in item_classes:
if klass.applies(path):
return True
return False
| """Available constructors for a path.
"""
applies = ((float(klass.applies(path, **kwargs)), klass) for klass in item_classes)
contructors = list(sorted((c for c in applies if c[0] > 0.0), key=lambda c: (c[0], c[1].__name__)))
logging.debug("For {0}, constructors found: {1}".format(path, contructors))
return contructors
def find_best_constructor(path, **kwargs):
constructors = find_constructors(path, **kwargs)
if constructors:
return find_constructors(path)[-1][1]
else:
return None
def read_item(name, storage, path):
"""Try to create item using one of the registered constructors."""
klass = find_best_constructor(path)
if klass:
return klass(storage, name, path)
else:
raise UnknownFormatError("Cannot interpret file %s" % path)
def register(klass):
"""Make this class available to storage for creating items.
:type klass: type
Works as decorator for both classes and functions.
"""
item_classes.append(klass)
logger.info("Registered item class %s." % klass.__name__)
supported_protocols.extend(getattr(klass, "protocols", []))
return klass
def _auto_import():
"""Automatically imports all modules from this directory."""
for subdir in ["formats", "pseudoitems"]:
py_files = glob.glob(os.path.join(os.path.dirname(__file__), subdir) +"/*.py")
py_files = filter(lambda f: os.path.isfile(f) and not os.path.basename(f).startswith('_'),
py_files)
modules = [os.path.basename(py_file[:-3]) for py_file in py_files]
subpackage = importlib.import_module("." + subdir, __package__)
for module_name in modules:
try:
importlib.import_module("." + module_name, subpackage.__name__)
except Exception as i:
logger.warning("Module %s could not be imported, reason: %s" % (module_name, i))
from . import directory
_auto_import() | def find_constructors(path, **kwargs): | random_line_split |
__init__.py | import os
import glob
import importlib
import logging
logger = logging.getLogger(__name__)
class UnknownFormatError(RuntimeError):
pass
item_classes = []
supported_protocols = []
def constructor_exists(path):
for klass in item_classes:
if klass.applies(path):
return True
return False
def find_constructors(path, **kwargs):
"""Available constructors for a path.
"""
applies = ((float(klass.applies(path, **kwargs)), klass) for klass in item_classes)
contructors = list(sorted((c for c in applies if c[0] > 0.0), key=lambda c: (c[0], c[1].__name__)))
logging.debug("For {0}, constructors found: {1}".format(path, contructors))
return contructors
def find_best_constructor(path, **kwargs):
constructors = find_constructors(path, **kwargs)
if constructors:
return find_constructors(path)[-1][1]
else:
return None
def | (name, storage, path):
"""Try to create item using one of the registered constructors."""
klass = find_best_constructor(path)
if klass:
return klass(storage, name, path)
else:
raise UnknownFormatError("Cannot interpret file %s" % path)
def register(klass):
"""Make this class available to storage for creating items.
:type klass: type
Works as decorator for both classes and functions.
"""
item_classes.append(klass)
logger.info("Registered item class %s." % klass.__name__)
supported_protocols.extend(getattr(klass, "protocols", []))
return klass
def _auto_import():
"""Automatically imports all modules from this directory."""
for subdir in ["formats", "pseudoitems"]:
py_files = glob.glob(os.path.join(os.path.dirname(__file__), subdir) +"/*.py")
py_files = filter(lambda f: os.path.isfile(f) and not os.path.basename(f).startswith('_'),
py_files)
modules = [os.path.basename(py_file[:-3]) for py_file in py_files]
subpackage = importlib.import_module("." + subdir, __package__)
for module_name in modules:
try:
importlib.import_module("." + module_name, subpackage.__name__)
except Exception as i:
logger.warning("Module %s could not be imported, reason: %s" % (module_name, i))
from . import directory
_auto_import()
| read_item | identifier_name |
upload.py | #!/usr/bin/env python
import argparse
import errno
import hashlib
import os
import shutil
import subprocess
import sys
import tempfile
from io import StringIO
from lib.config import PLATFORM, get_target_arch, get_env_var, s3_config, \
get_zip_name
from lib.util import electron_gyp, execute, get_electron_version, \
parse_version, scoped_cwd, s3put
from lib.github import GitHub
ELECTRON_REPO = 'electron/electron'
ELECTRON_VERSION = get_electron_version()
PROJECT_NAME = electron_gyp()['project_name%']
PRODUCT_NAME = electron_gyp()['product_name%']
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
DIST_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION)
SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
def main():
args = parse_args()
if not args.publish_release:
if not dist_newer_than_head():
run_python_script('create-dist.py')
build_version = get_electron_build_version()
if not ELECTRON_VERSION.startswith(build_version):
error = 'Tag name ({0}) should match build version ({1})\n'.format(
ELECTRON_VERSION, build_version)
sys.stderr.write(error)
sys.stderr.flush()
return 1
github = GitHub(auth_token())
releases = github.repos(ELECTRON_REPO).releases.get()
tag_exists = False
for release in releases:
if not release['draft'] and release['tag_name'] == args.version:
tag_exists = True
break
release = create_or_get_release_draft(github, releases, args.version,
tag_exists)
if args.publish_release:
# Upload the Node SHASUMS*.txt.
run_python_script('upload-node-checksums.py', '-v', ELECTRON_VERSION)
# Upload the index.json.
run_python_script('upload-index-json.py')
# Create and upload the Electron SHASUMS*.txt
release_electron_checksums(github, release)
# Press the publish button.
publish_release(github, release['id'])
# Do not upload other files when passed "-p".
return
# Upload Electron with GitHub Releases API.
upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME))
upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME))
if PLATFORM == 'darwin':
upload_electron(github, release, os.path.join(DIST_DIR,
'electron-api.json'))
upload_electron(github, release, os.path.join(DIST_DIR, 'electron.d.ts'))
upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME))
elif PLATFORM == 'win32':
upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME))
# Upload free version of ffmpeg.
ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, ffmpeg))
# Upload chromedriver and mksnapshot for minor version update.
if parse_version(args.version)[2] == '0':
chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, chromedriver))
mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot))
if PLATFORM == 'win32' and not tag_exists:
# Upload PDBs to Windows symbol server.
run_python_script('upload-windows-pdb.py')
# Upload node headers.
run_python_script('create-node-headers.py', '-v', args.version)
run_python_script('upload-node-headers.py', '-v', args.version)
def | ():
parser = argparse.ArgumentParser(description='upload distribution file')
parser.add_argument('-v', '--version', help='Specify the version',
default=ELECTRON_VERSION)
parser.add_argument('-p', '--publish-release',
help='Publish the release',
action='store_true')
return parser.parse_args()
def run_python_script(script, *args):
script_path = os.path.join(SOURCE_ROOT, 'script', script)
return execute([sys.executable, script_path] + list(args))
def get_electron_build_version():
if get_target_arch() == 'arm' or os.environ.has_key('CI'):
# In CI we just build as told.
return ELECTRON_VERSION
if PLATFORM == 'darwin':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.app'.format(PRODUCT_NAME), 'Contents',
'MacOS', PRODUCT_NAME)
elif PLATFORM == 'win32':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.exe'.format(PROJECT_NAME))
else:
electron = os.path.join(SOURCE_ROOT, 'out', 'R', PROJECT_NAME)
return subprocess.check_output([electron, '--version']).strip()
def dist_newer_than_head():
with scoped_cwd(SOURCE_ROOT):
try:
head_time = subprocess.check_output(['git', 'log', '--pretty=format:%at',
'-n', '1']).strip()
dist_time = os.path.getmtime(os.path.join(DIST_DIR, DIST_NAME))
except OSError as e:
if e.errno != errno.ENOENT:
raise
return False
return dist_time > int(head_time)
def get_text_with_editor(name):
editor = os.environ.get('EDITOR', 'nano')
initial_message = '\n# Please enter the body of your release note for %s.' \
% name
t = tempfile.NamedTemporaryFile(suffix='.tmp', delete=False)
t.write(initial_message)
t.close()
subprocess.call([editor, t.name])
text = ''
for line in open(t.name, 'r'):
if len(line) == 0 or line[0] != '#':
text += line
os.unlink(t.name)
return text
def create_or_get_release_draft(github, releases, tag, tag_exists):
# Search for existing draft.
for release in releases:
if release['draft']:
return release
if tag_exists:
tag = 'do-not-publish-me'
return create_release_draft(github, tag)
def create_release_draft(github, tag):
name = '{0} {1}'.format(PROJECT_NAME, tag)
if os.environ.has_key('CI'):
body = '(placeholder)'
else:
body = get_text_with_editor(name)
if body == '':
sys.stderr.write('Quit due to empty release note.\n')
sys.exit(0)
data = dict(tag_name=tag, name=name, body=body, draft=True)
r = github.repos(ELECTRON_REPO).releases.post(data=data)
return r
def release_electron_checksums(github, release):
checksums = run_python_script('merge-electron-checksums.py',
'-v', ELECTRON_VERSION)
upload_io_to_github(github, release, 'SHASUMS256.txt',
StringIO(checksums.decode('utf-8')), 'text/plain')
def upload_electron(github, release, file_path):
# Delete the original file before uploading in CI.
filename = os.path.basename(file_path)
if os.environ.has_key('CI'):
try:
for asset in release['assets']:
if asset['name'] == filename:
github.repos(ELECTRON_REPO).releases.assets(asset['id']).delete()
except Exception:
pass
# Upload the file.
with open(file_path, 'rb') as f:
upload_io_to_github(github, release, filename, f, 'application/zip')
# Upload the checksum file.
upload_sha256_checksum(release['tag_name'], file_path)
# Upload ARM assets without the v7l suffix for backwards compatibility
# TODO Remove for 2.0
if 'armv7l' in filename:
arm_filename = filename.replace('armv7l', 'arm')
arm_file_path = os.path.join(os.path.dirname(file_path), arm_filename)
shutil.copy2(file_path, arm_file_path)
upload_electron(github, release, arm_file_path)
def upload_io_to_github(github, release, name, io, content_type):
params = {'name': name}
headers = {'Content-Type': content_type}
github.repos(ELECTRON_REPO).releases(release['id']).assets.post(
params=params, headers=headers, data=io, verify=False)
def upload_sha256_checksum(version, file_path):
bucket, access_key, secret_key = s3_config()
checksum_path = '{}.sha256sum'.format(file_path)
sha256 = hashlib.sha256()
with open(file_path, 'rb') as f:
sha256.update(f.read())
filename = os.path.basename(file_path)
with open(checksum_path, 'w') as checksum:
checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
'atom-shell/tmp/{0}'.format(version), [checksum_path])
def publish_release(github, release_id):
data = dict(draft=False)
github.repos(ELECTRON_REPO).releases(release_id).patch(data=data)
def auth_token():
token = get_env_var('GITHUB_TOKEN')
message = ('Error: Please set the $ELECTRON_GITHUB_TOKEN '
'environment variable, which is your personal token')
assert token, message
return token
if __name__ == '__main__':
import sys
sys.exit(main())
| parse_args | identifier_name |
upload.py | #!/usr/bin/env python
import argparse
import errno
import hashlib
import os
import shutil
import subprocess
import sys
import tempfile
from io import StringIO
from lib.config import PLATFORM, get_target_arch, get_env_var, s3_config, \
get_zip_name
from lib.util import electron_gyp, execute, get_electron_version, \
parse_version, scoped_cwd, s3put
from lib.github import GitHub
ELECTRON_REPO = 'electron/electron'
ELECTRON_VERSION = get_electron_version()
PROJECT_NAME = electron_gyp()['project_name%']
PRODUCT_NAME = electron_gyp()['product_name%']
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
DIST_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION)
SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
def main():
args = parse_args()
if not args.publish_release:
if not dist_newer_than_head():
run_python_script('create-dist.py')
build_version = get_electron_build_version()
if not ELECTRON_VERSION.startswith(build_version):
error = 'Tag name ({0}) should match build version ({1})\n'.format(
ELECTRON_VERSION, build_version)
sys.stderr.write(error)
sys.stderr.flush()
return 1
github = GitHub(auth_token())
releases = github.repos(ELECTRON_REPO).releases.get()
tag_exists = False
for release in releases:
if not release['draft'] and release['tag_name'] == args.version:
tag_exists = True
break
release = create_or_get_release_draft(github, releases, args.version,
tag_exists)
if args.publish_release:
# Upload the Node SHASUMS*.txt.
run_python_script('upload-node-checksums.py', '-v', ELECTRON_VERSION)
# Upload the index.json.
run_python_script('upload-index-json.py')
# Create and upload the Electron SHASUMS*.txt
release_electron_checksums(github, release)
# Press the publish button.
publish_release(github, release['id'])
# Do not upload other files when passed "-p".
return
# Upload Electron with GitHub Releases API.
upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME))
upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME))
if PLATFORM == 'darwin':
upload_electron(github, release, os.path.join(DIST_DIR,
'electron-api.json'))
upload_electron(github, release, os.path.join(DIST_DIR, 'electron.d.ts'))
upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME))
elif PLATFORM == 'win32':
upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME))
# Upload free version of ffmpeg.
ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, ffmpeg))
# Upload chromedriver and mksnapshot for minor version update.
if parse_version(args.version)[2] == '0':
chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, chromedriver))
mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot))
if PLATFORM == 'win32' and not tag_exists:
# Upload PDBs to Windows symbol server.
run_python_script('upload-windows-pdb.py')
# Upload node headers.
run_python_script('create-node-headers.py', '-v', args.version)
run_python_script('upload-node-headers.py', '-v', args.version)
def parse_args():
parser = argparse.ArgumentParser(description='upload distribution file')
parser.add_argument('-v', '--version', help='Specify the version',
default=ELECTRON_VERSION)
parser.add_argument('-p', '--publish-release',
help='Publish the release',
action='store_true')
return parser.parse_args()
def run_python_script(script, *args):
|
def get_electron_build_version():
if get_target_arch() == 'arm' or os.environ.has_key('CI'):
# In CI we just build as told.
return ELECTRON_VERSION
if PLATFORM == 'darwin':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.app'.format(PRODUCT_NAME), 'Contents',
'MacOS', PRODUCT_NAME)
elif PLATFORM == 'win32':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.exe'.format(PROJECT_NAME))
else:
electron = os.path.join(SOURCE_ROOT, 'out', 'R', PROJECT_NAME)
return subprocess.check_output([electron, '--version']).strip()
def dist_newer_than_head():
with scoped_cwd(SOURCE_ROOT):
try:
head_time = subprocess.check_output(['git', 'log', '--pretty=format:%at',
'-n', '1']).strip()
dist_time = os.path.getmtime(os.path.join(DIST_DIR, DIST_NAME))
except OSError as e:
if e.errno != errno.ENOENT:
raise
return False
return dist_time > int(head_time)
def get_text_with_editor(name):
editor = os.environ.get('EDITOR', 'nano')
initial_message = '\n# Please enter the body of your release note for %s.' \
% name
t = tempfile.NamedTemporaryFile(suffix='.tmp', delete=False)
t.write(initial_message)
t.close()
subprocess.call([editor, t.name])
text = ''
for line in open(t.name, 'r'):
if len(line) == 0 or line[0] != '#':
text += line
os.unlink(t.name)
return text
def create_or_get_release_draft(github, releases, tag, tag_exists):
# Search for existing draft.
for release in releases:
if release['draft']:
return release
if tag_exists:
tag = 'do-not-publish-me'
return create_release_draft(github, tag)
def create_release_draft(github, tag):
name = '{0} {1}'.format(PROJECT_NAME, tag)
if os.environ.has_key('CI'):
body = '(placeholder)'
else:
body = get_text_with_editor(name)
if body == '':
sys.stderr.write('Quit due to empty release note.\n')
sys.exit(0)
data = dict(tag_name=tag, name=name, body=body, draft=True)
r = github.repos(ELECTRON_REPO).releases.post(data=data)
return r
def release_electron_checksums(github, release):
checksums = run_python_script('merge-electron-checksums.py',
'-v', ELECTRON_VERSION)
upload_io_to_github(github, release, 'SHASUMS256.txt',
StringIO(checksums.decode('utf-8')), 'text/plain')
def upload_electron(github, release, file_path):
# Delete the original file before uploading in CI.
filename = os.path.basename(file_path)
if os.environ.has_key('CI'):
try:
for asset in release['assets']:
if asset['name'] == filename:
github.repos(ELECTRON_REPO).releases.assets(asset['id']).delete()
except Exception:
pass
# Upload the file.
with open(file_path, 'rb') as f:
upload_io_to_github(github, release, filename, f, 'application/zip')
# Upload the checksum file.
upload_sha256_checksum(release['tag_name'], file_path)
# Upload ARM assets without the v7l suffix for backwards compatibility
# TODO Remove for 2.0
if 'armv7l' in filename:
arm_filename = filename.replace('armv7l', 'arm')
arm_file_path = os.path.join(os.path.dirname(file_path), arm_filename)
shutil.copy2(file_path, arm_file_path)
upload_electron(github, release, arm_file_path)
def upload_io_to_github(github, release, name, io, content_type):
params = {'name': name}
headers = {'Content-Type': content_type}
github.repos(ELECTRON_REPO).releases(release['id']).assets.post(
params=params, headers=headers, data=io, verify=False)
def upload_sha256_checksum(version, file_path):
bucket, access_key, secret_key = s3_config()
checksum_path = '{}.sha256sum'.format(file_path)
sha256 = hashlib.sha256()
with open(file_path, 'rb') as f:
sha256.update(f.read())
filename = os.path.basename(file_path)
with open(checksum_path, 'w') as checksum:
checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
'atom-shell/tmp/{0}'.format(version), [checksum_path])
def publish_release(github, release_id):
data = dict(draft=False)
github.repos(ELECTRON_REPO).releases(release_id).patch(data=data)
def auth_token():
token = get_env_var('GITHUB_TOKEN')
message = ('Error: Please set the $ELECTRON_GITHUB_TOKEN '
'environment variable, which is your personal token')
assert token, message
return token
if __name__ == '__main__':
import sys
sys.exit(main())
| script_path = os.path.join(SOURCE_ROOT, 'script', script)
return execute([sys.executable, script_path] + list(args)) | identifier_body |
upload.py | #!/usr/bin/env python
import argparse
import errno
import hashlib
import os
import shutil
import subprocess
import sys
import tempfile
from io import StringIO
from lib.config import PLATFORM, get_target_arch, get_env_var, s3_config, \
get_zip_name
from lib.util import electron_gyp, execute, get_electron_version, \
parse_version, scoped_cwd, s3put
from lib.github import GitHub
ELECTRON_REPO = 'electron/electron'
ELECTRON_VERSION = get_electron_version()
PROJECT_NAME = electron_gyp()['project_name%']
PRODUCT_NAME = electron_gyp()['product_name%']
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
DIST_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION)
SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
def main():
args = parse_args()
if not args.publish_release:
if not dist_newer_than_head():
run_python_script('create-dist.py')
build_version = get_electron_build_version()
if not ELECTRON_VERSION.startswith(build_version):
error = 'Tag name ({0}) should match build version ({1})\n'.format(
ELECTRON_VERSION, build_version)
sys.stderr.write(error)
sys.stderr.flush()
return 1
github = GitHub(auth_token())
releases = github.repos(ELECTRON_REPO).releases.get()
tag_exists = False
for release in releases:
if not release['draft'] and release['tag_name'] == args.version:
tag_exists = True
break
release = create_or_get_release_draft(github, releases, args.version,
tag_exists)
if args.publish_release:
# Upload the Node SHASUMS*.txt.
run_python_script('upload-node-checksums.py', '-v', ELECTRON_VERSION)
# Upload the index.json.
run_python_script('upload-index-json.py')
# Create and upload the Electron SHASUMS*.txt
release_electron_checksums(github, release)
# Press the publish button.
publish_release(github, release['id'])
# Do not upload other files when passed "-p".
return
# Upload Electron with GitHub Releases API.
upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME))
upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME))
if PLATFORM == 'darwin':
upload_electron(github, release, os.path.join(DIST_DIR,
'electron-api.json'))
upload_electron(github, release, os.path.join(DIST_DIR, 'electron.d.ts'))
upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME))
elif PLATFORM == 'win32':
upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME))
# Upload free version of ffmpeg.
ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, ffmpeg))
# Upload chromedriver and mksnapshot for minor version update.
if parse_version(args.version)[2] == '0':
chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, chromedriver))
mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot))
if PLATFORM == 'win32' and not tag_exists:
# Upload PDBs to Windows symbol server.
run_python_script('upload-windows-pdb.py')
# Upload node headers.
run_python_script('create-node-headers.py', '-v', args.version)
run_python_script('upload-node-headers.py', '-v', args.version)
def parse_args():
parser = argparse.ArgumentParser(description='upload distribution file')
parser.add_argument('-v', '--version', help='Specify the version',
default=ELECTRON_VERSION)
parser.add_argument('-p', '--publish-release',
help='Publish the release',
action='store_true')
return parser.parse_args()
def run_python_script(script, *args):
script_path = os.path.join(SOURCE_ROOT, 'script', script)
return execute([sys.executable, script_path] + list(args))
def get_electron_build_version():
if get_target_arch() == 'arm' or os.environ.has_key('CI'):
# In CI we just build as told.
return ELECTRON_VERSION
if PLATFORM == 'darwin':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.app'.format(PRODUCT_NAME), 'Contents',
'MacOS', PRODUCT_NAME)
elif PLATFORM == 'win32':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.exe'.format(PROJECT_NAME))
else:
electron = os.path.join(SOURCE_ROOT, 'out', 'R', PROJECT_NAME)
return subprocess.check_output([electron, '--version']).strip()
def dist_newer_than_head():
with scoped_cwd(SOURCE_ROOT):
try:
head_time = subprocess.check_output(['git', 'log', '--pretty=format:%at',
'-n', '1']).strip()
dist_time = os.path.getmtime(os.path.join(DIST_DIR, DIST_NAME))
except OSError as e:
if e.errno != errno.ENOENT:
raise
return False
return dist_time > int(head_time)
def get_text_with_editor(name):
editor = os.environ.get('EDITOR', 'nano')
initial_message = '\n# Please enter the body of your release note for %s.' \
% name
t = tempfile.NamedTemporaryFile(suffix='.tmp', delete=False)
t.write(initial_message)
t.close()
subprocess.call([editor, t.name])
text = ''
for line in open(t.name, 'r'):
if len(line) == 0 or line[0] != '#':
text += line
os.unlink(t.name)
return text
def create_or_get_release_draft(github, releases, tag, tag_exists):
# Search for existing draft.
for release in releases:
if release['draft']:
return release
if tag_exists:
tag = 'do-not-publish-me'
return create_release_draft(github, tag)
def create_release_draft(github, tag):
name = '{0} {1}'.format(PROJECT_NAME, tag)
if os.environ.has_key('CI'):
body = '(placeholder)'
else:
body = get_text_with_editor(name)
if body == '':
sys.stderr.write('Quit due to empty release note.\n')
sys.exit(0)
data = dict(tag_name=tag, name=name, body=body, draft=True)
r = github.repos(ELECTRON_REPO).releases.post(data=data)
return r
def release_electron_checksums(github, release):
checksums = run_python_script('merge-electron-checksums.py',
'-v', ELECTRON_VERSION)
upload_io_to_github(github, release, 'SHASUMS256.txt',
StringIO(checksums.decode('utf-8')), 'text/plain')
def upload_electron(github, release, file_path):
# Delete the original file before uploading in CI.
filename = os.path.basename(file_path)
if os.environ.has_key('CI'):
try:
for asset in release['assets']: | pass
# Upload the file.
with open(file_path, 'rb') as f:
upload_io_to_github(github, release, filename, f, 'application/zip')
# Upload the checksum file.
upload_sha256_checksum(release['tag_name'], file_path)
# Upload ARM assets without the v7l suffix for backwards compatibility
# TODO Remove for 2.0
if 'armv7l' in filename:
arm_filename = filename.replace('armv7l', 'arm')
arm_file_path = os.path.join(os.path.dirname(file_path), arm_filename)
shutil.copy2(file_path, arm_file_path)
upload_electron(github, release, arm_file_path)
def upload_io_to_github(github, release, name, io, content_type):
params = {'name': name}
headers = {'Content-Type': content_type}
github.repos(ELECTRON_REPO).releases(release['id']).assets.post(
params=params, headers=headers, data=io, verify=False)
def upload_sha256_checksum(version, file_path):
bucket, access_key, secret_key = s3_config()
checksum_path = '{}.sha256sum'.format(file_path)
sha256 = hashlib.sha256()
with open(file_path, 'rb') as f:
sha256.update(f.read())
filename = os.path.basename(file_path)
with open(checksum_path, 'w') as checksum:
checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
'atom-shell/tmp/{0}'.format(version), [checksum_path])
def publish_release(github, release_id):
data = dict(draft=False)
github.repos(ELECTRON_REPO).releases(release_id).patch(data=data)
def auth_token():
token = get_env_var('GITHUB_TOKEN')
message = ('Error: Please set the $ELECTRON_GITHUB_TOKEN '
'environment variable, which is your personal token')
assert token, message
return token
if __name__ == '__main__':
import sys
sys.exit(main()) | if asset['name'] == filename:
github.repos(ELECTRON_REPO).releases.assets(asset['id']).delete()
except Exception: | random_line_split |
upload.py | #!/usr/bin/env python
import argparse
import errno
import hashlib
import os
import shutil
import subprocess
import sys
import tempfile
from io import StringIO
from lib.config import PLATFORM, get_target_arch, get_env_var, s3_config, \
get_zip_name
from lib.util import electron_gyp, execute, get_electron_version, \
parse_version, scoped_cwd, s3put
from lib.github import GitHub
ELECTRON_REPO = 'electron/electron'
ELECTRON_VERSION = get_electron_version()
PROJECT_NAME = electron_gyp()['project_name%']
PRODUCT_NAME = electron_gyp()['product_name%']
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
DIST_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION)
SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
def main():
args = parse_args()
if not args.publish_release:
if not dist_newer_than_head():
run_python_script('create-dist.py')
build_version = get_electron_build_version()
if not ELECTRON_VERSION.startswith(build_version):
error = 'Tag name ({0}) should match build version ({1})\n'.format(
ELECTRON_VERSION, build_version)
sys.stderr.write(error)
sys.stderr.flush()
return 1
github = GitHub(auth_token())
releases = github.repos(ELECTRON_REPO).releases.get()
tag_exists = False
for release in releases:
if not release['draft'] and release['tag_name'] == args.version:
tag_exists = True
break
release = create_or_get_release_draft(github, releases, args.version,
tag_exists)
if args.publish_release:
# Upload the Node SHASUMS*.txt.
run_python_script('upload-node-checksums.py', '-v', ELECTRON_VERSION)
# Upload the index.json.
run_python_script('upload-index-json.py')
# Create and upload the Electron SHASUMS*.txt
release_electron_checksums(github, release)
# Press the publish button.
publish_release(github, release['id'])
# Do not upload other files when passed "-p".
return
# Upload Electron with GitHub Releases API.
upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME))
upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME))
if PLATFORM == 'darwin':
upload_electron(github, release, os.path.join(DIST_DIR,
'electron-api.json'))
upload_electron(github, release, os.path.join(DIST_DIR, 'electron.d.ts'))
upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME))
elif PLATFORM == 'win32':
upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME))
# Upload free version of ffmpeg.
ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, ffmpeg))
# Upload chromedriver and mksnapshot for minor version update.
if parse_version(args.version)[2] == '0':
chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, chromedriver))
mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot))
if PLATFORM == 'win32' and not tag_exists:
# Upload PDBs to Windows symbol server.
run_python_script('upload-windows-pdb.py')
# Upload node headers.
run_python_script('create-node-headers.py', '-v', args.version)
run_python_script('upload-node-headers.py', '-v', args.version)
def parse_args():
parser = argparse.ArgumentParser(description='upload distribution file')
parser.add_argument('-v', '--version', help='Specify the version',
default=ELECTRON_VERSION)
parser.add_argument('-p', '--publish-release',
help='Publish the release',
action='store_true')
return parser.parse_args()
def run_python_script(script, *args):
script_path = os.path.join(SOURCE_ROOT, 'script', script)
return execute([sys.executable, script_path] + list(args))
def get_electron_build_version():
if get_target_arch() == 'arm' or os.environ.has_key('CI'):
# In CI we just build as told.
|
if PLATFORM == 'darwin':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.app'.format(PRODUCT_NAME), 'Contents',
'MacOS', PRODUCT_NAME)
elif PLATFORM == 'win32':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.exe'.format(PROJECT_NAME))
else:
electron = os.path.join(SOURCE_ROOT, 'out', 'R', PROJECT_NAME)
return subprocess.check_output([electron, '--version']).strip()
def dist_newer_than_head():
with scoped_cwd(SOURCE_ROOT):
try:
head_time = subprocess.check_output(['git', 'log', '--pretty=format:%at',
'-n', '1']).strip()
dist_time = os.path.getmtime(os.path.join(DIST_DIR, DIST_NAME))
except OSError as e:
if e.errno != errno.ENOENT:
raise
return False
return dist_time > int(head_time)
def get_text_with_editor(name):
editor = os.environ.get('EDITOR', 'nano')
initial_message = '\n# Please enter the body of your release note for %s.' \
% name
t = tempfile.NamedTemporaryFile(suffix='.tmp', delete=False)
t.write(initial_message)
t.close()
subprocess.call([editor, t.name])
text = ''
for line in open(t.name, 'r'):
if len(line) == 0 or line[0] != '#':
text += line
os.unlink(t.name)
return text
def create_or_get_release_draft(github, releases, tag, tag_exists):
# Search for existing draft.
for release in releases:
if release['draft']:
return release
if tag_exists:
tag = 'do-not-publish-me'
return create_release_draft(github, tag)
def create_release_draft(github, tag):
name = '{0} {1}'.format(PROJECT_NAME, tag)
if os.environ.has_key('CI'):
body = '(placeholder)'
else:
body = get_text_with_editor(name)
if body == '':
sys.stderr.write('Quit due to empty release note.\n')
sys.exit(0)
data = dict(tag_name=tag, name=name, body=body, draft=True)
r = github.repos(ELECTRON_REPO).releases.post(data=data)
return r
def release_electron_checksums(github, release):
checksums = run_python_script('merge-electron-checksums.py',
'-v', ELECTRON_VERSION)
upload_io_to_github(github, release, 'SHASUMS256.txt',
StringIO(checksums.decode('utf-8')), 'text/plain')
def upload_electron(github, release, file_path):
# Delete the original file before uploading in CI.
filename = os.path.basename(file_path)
if os.environ.has_key('CI'):
try:
for asset in release['assets']:
if asset['name'] == filename:
github.repos(ELECTRON_REPO).releases.assets(asset['id']).delete()
except Exception:
pass
# Upload the file.
with open(file_path, 'rb') as f:
upload_io_to_github(github, release, filename, f, 'application/zip')
# Upload the checksum file.
upload_sha256_checksum(release['tag_name'], file_path)
# Upload ARM assets without the v7l suffix for backwards compatibility
# TODO Remove for 2.0
if 'armv7l' in filename:
arm_filename = filename.replace('armv7l', 'arm')
arm_file_path = os.path.join(os.path.dirname(file_path), arm_filename)
shutil.copy2(file_path, arm_file_path)
upload_electron(github, release, arm_file_path)
def upload_io_to_github(github, release, name, io, content_type):
params = {'name': name}
headers = {'Content-Type': content_type}
github.repos(ELECTRON_REPO).releases(release['id']).assets.post(
params=params, headers=headers, data=io, verify=False)
def upload_sha256_checksum(version, file_path):
bucket, access_key, secret_key = s3_config()
checksum_path = '{}.sha256sum'.format(file_path)
sha256 = hashlib.sha256()
with open(file_path, 'rb') as f:
sha256.update(f.read())
filename = os.path.basename(file_path)
with open(checksum_path, 'w') as checksum:
checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
'atom-shell/tmp/{0}'.format(version), [checksum_path])
def publish_release(github, release_id):
data = dict(draft=False)
github.repos(ELECTRON_REPO).releases(release_id).patch(data=data)
def auth_token():
token = get_env_var('GITHUB_TOKEN')
message = ('Error: Please set the $ELECTRON_GITHUB_TOKEN '
'environment variable, which is your personal token')
assert token, message
return token
if __name__ == '__main__':
import sys
sys.exit(main())
| return ELECTRON_VERSION | conditional_block |
error.rs | use std::{env,
error,
fmt,
io,
net,
path::PathBuf,
result,
str,
string};
use crate::{api_client,
hcore::{self,
package::PackageIdent}};
pub type Result<T> = result::Result<T, Error>;
#[derive(Debug)]
pub enum Error {
APIClient(api_client::Error),
ArtifactIdentMismatch((String, String, String)),
/// Occurs when there is no valid toml of json in the environment variable
BadEnvConfig(String),
BadGlyphStyle(String),
CantUploadGossipToml,
ChannelNotFound,
CryptoKeyError(String),
DownloadFailed(String),
EditorEnv(env::VarError),
EditStatus,
FileNameError,
/// Occurs when a file that should exist does not or could not be read.
FileNotFound(String),
GossipFileRelativePath(String),
HabitatCore(hcore::Error),
InstallHookFailed(PackageIdent),
InterpreterNotFound(PackageIdent, Box<Self>),
InvalidEventStreamToken(String),
InvalidInstallHookMode(String),
/// Occurs when making lower level IO calls.
IO(io::Error),
/// Errors when joining paths :)
JoinPathsError(env::JoinPathsError),
MissingCLIInputError(String),
NamedPipeTimeoutOnStart(String, String, io::Error),
NativeTls(native_tls::Error),
NetParseError(net::AddrParseError),
OfflineArtifactNotFound(PackageIdent),
OfflineOriginKeyNotFound(String),
OfflinePackageNotFound(PackageIdent),
PackageNotFound(String),
/// Occurs upon errors related to file or directory permissions.
PermissionFailed(String),
/// When an error occurs serializing rendering context
RenderContextSerialization(serde_json::Error),
RootRequired,
StatusFileCorrupt(PathBuf),
StrFromUtf8Error(str::Utf8Error),
StringFromUtf8Error(string::FromUtf8Error),
/// When an error occurs registering template file
// Boxed due to clippy::large_enum_variant
TemplateFileError(Box<handlebars::TemplateFileError>),
/// When an error occurs rendering template
/// The error is constructed with a handlebars::RenderError's format string instead
/// of the handlebars::RenderError itself because the cause field of the
/// handlebars::RenderError in the handlebars crate version we use implements send
/// and not sync which can lead to upstream compile errors when dealing with the
/// failure crate. We should change this to a RenderError after we update the
/// handlebars crate. See https://github.com/sunng87/handlebars-rust/issues/194
TemplateRenderError(String),
/// When an error occurs merging toml
TomlMergeError(String),
/// When an error occurs parsing toml
TomlParser(toml::de::Error),
TomlSerializeError(toml::ser::Error),
WireDecode(String),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let msg = match *self {
Error::APIClient(ref err) => format!("{}", err),
Error::ArtifactIdentMismatch((ref a, ref ai, ref i)) => {
format!("Artifact ident {} for `{}' does not match expected ident {}",
ai, a, i)
}
Error::BadEnvConfig(ref varname) => {
format!("Unable to find valid TOML or JSON in {} ENVVAR", varname)
}
Error::BadGlyphStyle(ref style) => format!("Unknown symbol style '{}'", style),
Error::CantUploadGossipToml => {
"Can't upload gossip.toml, it's a reserved file name".to_string()
}
Error::ChannelNotFound => "Channel not found".to_string(),
Error::CryptoKeyError(ref s) => format!("Missing or invalid key: {}", s),
Error::DownloadFailed(ref msg) => msg.to_string(),
Error::EditorEnv(ref e) => format!("Missing EDITOR environment variable: {}", e),
Error::EditStatus => "Failed edit text command".to_string(),
Error::FileNameError => "Failed to extract a filename".to_string(),
Error::FileNotFound(ref e) => format!("File not found at: {}", e),
Error::GossipFileRelativePath(ref s) => {
format!("Path for gossip file cannot have relative components (eg: ..): {}",
s)
}
Error::HabitatCore(ref e) => format!("{}", e),
Error::MissingCLIInputError(ref arg) => {
format!("Missing required CLI argument!: {}", arg)
}
Error::InstallHookFailed(ref ident) => {
format!("Install hook exited unsuccessfully: {}", ident)
}
Error::InterpreterNotFound(ref ident, ref e) => {
format!("Unable to install interpreter ident: {} - {}", ident, e)
}
Error::InvalidEventStreamToken(ref s) => {
format!("Invalid event stream token provided: '{}'", s)
}
Error::InvalidInstallHookMode(ref e) => {
format!("Invalid InstallHookMode conversion from {}", e)
}
Error::IO(ref err) => format!("{}", err),
Error::JoinPathsError(ref err) => format!("{}", err),
Error::NamedPipeTimeoutOnStart(ref group, ref hook, ref err) => {
format!("Unable to start powershell named pipe for {} hook of {}: {}",
hook, group, err)
}
Error::NativeTls(ref err) => format!("TLS error '{}'", err),
Error::NetParseError(ref err) => format!("{}", err),
Error::OfflineArtifactNotFound(ref ident) => {
format!("Cached artifact not found in offline mode: {}", ident)
}
Error::OfflineOriginKeyNotFound(ref name_with_rev) => {
format!("Cached origin key not found in offline mode: {}",
name_with_rev)
}
Error::OfflinePackageNotFound(ref ident) => {
format!("No installed package or cached artifact could be found locally in \
offline mode: {}",
ident)
}
Error::PackageNotFound(ref e) => format!("Package not found. {}", e),
Error::PermissionFailed(ref e) => e.to_string(),
Error::RenderContextSerialization(ref e) => {
format!("Unable to serialize rendering context, {}", e)
}
Error::RootRequired => {
"Root or administrator permissions required to complete operation".to_string()
}
Error::StatusFileCorrupt(ref path) => {
format!("Unable to decode contents of INSTALL_STATUS file, {}",
path.display())
}
Error::StrFromUtf8Error(ref e) => format!("{}", e),
Error::StringFromUtf8Error(ref e) => format!("{}", e),
Error::TemplateFileError(ref err) => format!("{:?}", err),
Error::TemplateRenderError(ref err) => err.to_string(),
Error::TomlMergeError(ref e) => format!("Failed to merge TOML: {}", e),
Error::TomlParser(ref err) => format!("Failed to parse TOML: {}", err),
Error::TomlSerializeError(ref e) => format!("Can't serialize TOML: {}", e),
Error::WireDecode(ref m) => format!("Failed to decode wire message: {}", m),
};
write!(f, "{}", msg)
}
}
impl error::Error for Error {}
impl From<api_client::Error> for Error {
fn from(err: api_client::Error) -> Self { Error::APIClient(err) }
}
impl From<handlebars::TemplateFileError> for Error {
fn from(err: handlebars::TemplateFileError) -> Self { Error::TemplateFileError(Box::new(err)) }
}
impl From<hcore::Error> for Error {
fn from(err: hcore::Error) -> Self { Error::HabitatCore(err) }
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Self { Error::IO(err) }
}
impl From<env::JoinPathsError> for Error {
fn from(err: env::JoinPathsError) -> Self { Error::JoinPathsError(err) }
}
impl From<str::Utf8Error> for Error {
fn from(err: str::Utf8Error) -> Self { Error::StrFromUtf8Error(err) }
}
impl From<string::FromUtf8Error> for Error {
fn from(err: string::FromUtf8Error) -> Self { Error::StringFromUtf8Error(err) }
}
impl From<toml::ser::Error> for Error {
fn from(err: toml::ser::Error) -> Self { Error::TomlSerializeError(err) }
}
impl From<net::AddrParseError> for Error {
fn | (err: net::AddrParseError) -> Self { Error::NetParseError(err) }
}
impl From<native_tls::Error> for Error {
fn from(error: native_tls::Error) -> Self { Error::NativeTls(error) }
}
| from | identifier_name |
error.rs | use std::{env,
error,
fmt,
io,
net,
path::PathBuf,
result,
str,
string};
use crate::{api_client,
hcore::{self,
package::PackageIdent}};
pub type Result<T> = result::Result<T, Error>;
#[derive(Debug)]
pub enum Error {
APIClient(api_client::Error),
ArtifactIdentMismatch((String, String, String)),
/// Occurs when there is no valid toml of json in the environment variable
BadEnvConfig(String),
BadGlyphStyle(String),
CantUploadGossipToml,
ChannelNotFound,
CryptoKeyError(String),
DownloadFailed(String),
EditorEnv(env::VarError),
EditStatus,
FileNameError,
/// Occurs when a file that should exist does not or could not be read.
FileNotFound(String),
GossipFileRelativePath(String),
HabitatCore(hcore::Error),
InstallHookFailed(PackageIdent),
InterpreterNotFound(PackageIdent, Box<Self>),
InvalidEventStreamToken(String),
InvalidInstallHookMode(String),
/// Occurs when making lower level IO calls.
IO(io::Error),
/// Errors when joining paths :)
JoinPathsError(env::JoinPathsError),
MissingCLIInputError(String),
NamedPipeTimeoutOnStart(String, String, io::Error),
NativeTls(native_tls::Error),
NetParseError(net::AddrParseError),
OfflineArtifactNotFound(PackageIdent),
OfflineOriginKeyNotFound(String),
OfflinePackageNotFound(PackageIdent),
PackageNotFound(String),
/// Occurs upon errors related to file or directory permissions.
PermissionFailed(String),
/// When an error occurs serializing rendering context
RenderContextSerialization(serde_json::Error),
RootRequired,
StatusFileCorrupt(PathBuf),
StrFromUtf8Error(str::Utf8Error),
StringFromUtf8Error(string::FromUtf8Error),
/// When an error occurs registering template file
// Boxed due to clippy::large_enum_variant
TemplateFileError(Box<handlebars::TemplateFileError>),
/// When an error occurs rendering template
/// The error is constructed with a handlebars::RenderError's format string instead
/// of the handlebars::RenderError itself because the cause field of the
/// handlebars::RenderError in the handlebars crate version we use implements send
/// and not sync which can lead to upstream compile errors when dealing with the
/// failure crate. We should change this to a RenderError after we update the
/// handlebars crate. See https://github.com/sunng87/handlebars-rust/issues/194
TemplateRenderError(String),
/// When an error occurs merging toml
TomlMergeError(String),
/// When an error occurs parsing toml
TomlParser(toml::de::Error),
TomlSerializeError(toml::ser::Error),
WireDecode(String),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let msg = match *self {
Error::APIClient(ref err) => format!("{}", err),
Error::ArtifactIdentMismatch((ref a, ref ai, ref i)) => {
format!("Artifact ident {} for `{}' does not match expected ident {}",
ai, a, i)
}
Error::BadEnvConfig(ref varname) => {
format!("Unable to find valid TOML or JSON in {} ENVVAR", varname)
}
Error::BadGlyphStyle(ref style) => format!("Unknown symbol style '{}'", style),
Error::CantUploadGossipToml => {
"Can't upload gossip.toml, it's a reserved file name".to_string()
}
Error::ChannelNotFound => "Channel not found".to_string(),
Error::CryptoKeyError(ref s) => format!("Missing or invalid key: {}", s),
Error::DownloadFailed(ref msg) => msg.to_string(),
Error::EditorEnv(ref e) => format!("Missing EDITOR environment variable: {}", e),
Error::EditStatus => "Failed edit text command".to_string(),
Error::FileNameError => "Failed to extract a filename".to_string(),
Error::FileNotFound(ref e) => format!("File not found at: {}", e),
Error::GossipFileRelativePath(ref s) => {
format!("Path for gossip file cannot have relative components (eg: ..): {}",
s)
}
Error::HabitatCore(ref e) => format!("{}", e),
Error::MissingCLIInputError(ref arg) => {
format!("Missing required CLI argument!: {}", arg)
}
Error::InstallHookFailed(ref ident) => {
format!("Install hook exited unsuccessfully: {}", ident)
}
Error::InterpreterNotFound(ref ident, ref e) => {
format!("Unable to install interpreter ident: {} - {}", ident, e)
}
Error::InvalidEventStreamToken(ref s) => {
format!("Invalid event stream token provided: '{}'", s)
}
Error::InvalidInstallHookMode(ref e) => {
format!("Invalid InstallHookMode conversion from {}", e)
}
Error::IO(ref err) => format!("{}", err),
Error::JoinPathsError(ref err) => format!("{}", err),
Error::NamedPipeTimeoutOnStart(ref group, ref hook, ref err) => {
format!("Unable to start powershell named pipe for {} hook of {}: {}",
hook, group, err)
}
Error::NativeTls(ref err) => format!("TLS error '{}'", err),
Error::NetParseError(ref err) => format!("{}", err),
Error::OfflineArtifactNotFound(ref ident) => {
format!("Cached artifact not found in offline mode: {}", ident)
}
Error::OfflineOriginKeyNotFound(ref name_with_rev) => {
format!("Cached origin key not found in offline mode: {}",
name_with_rev)
}
Error::OfflinePackageNotFound(ref ident) => {
format!("No installed package or cached artifact could be found locally in \
offline mode: {}",
ident)
}
Error::PackageNotFound(ref e) => format!("Package not found. {}", e),
Error::PermissionFailed(ref e) => e.to_string(),
Error::RenderContextSerialization(ref e) => {
format!("Unable to serialize rendering context, {}", e)
}
Error::RootRequired => {
"Root or administrator permissions required to complete operation".to_string()
}
Error::StatusFileCorrupt(ref path) => {
format!("Unable to decode contents of INSTALL_STATUS file, {}",
path.display())
}
Error::StrFromUtf8Error(ref e) => format!("{}", e),
Error::StringFromUtf8Error(ref e) => format!("{}", e),
Error::TemplateFileError(ref err) => format!("{:?}", err),
Error::TemplateRenderError(ref err) => err.to_string(),
Error::TomlMergeError(ref e) => format!("Failed to merge TOML: {}", e),
Error::TomlParser(ref err) => format!("Failed to parse TOML: {}", err),
Error::TomlSerializeError(ref e) => format!("Can't serialize TOML: {}", e),
Error::WireDecode(ref m) => format!("Failed to decode wire message: {}", m),
};
write!(f, "{}", msg)
}
}
impl error::Error for Error {}
impl From<api_client::Error> for Error {
fn from(err: api_client::Error) -> Self { Error::APIClient(err) }
}
impl From<handlebars::TemplateFileError> for Error {
fn from(err: handlebars::TemplateFileError) -> Self { Error::TemplateFileError(Box::new(err)) }
}
impl From<hcore::Error> for Error {
fn from(err: hcore::Error) -> Self { Error::HabitatCore(err) }
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Self { Error::IO(err) }
}
impl From<env::JoinPathsError> for Error {
fn from(err: env::JoinPathsError) -> Self { Error::JoinPathsError(err) }
}
impl From<str::Utf8Error> for Error {
fn from(err: str::Utf8Error) -> Self { Error::StrFromUtf8Error(err) }
}
impl From<string::FromUtf8Error> for Error {
fn from(err: string::FromUtf8Error) -> Self { Error::StringFromUtf8Error(err) }
}
impl From<toml::ser::Error> for Error { | }
impl From<net::AddrParseError> for Error {
fn from(err: net::AddrParseError) -> Self { Error::NetParseError(err) }
}
impl From<native_tls::Error> for Error {
fn from(error: native_tls::Error) -> Self { Error::NativeTls(error) }
} | fn from(err: toml::ser::Error) -> Self { Error::TomlSerializeError(err) } | random_line_split |
error.rs | use std::{env,
error,
fmt,
io,
net,
path::PathBuf,
result,
str,
string};
use crate::{api_client,
hcore::{self,
package::PackageIdent}};
pub type Result<T> = result::Result<T, Error>;
#[derive(Debug)]
pub enum Error {
APIClient(api_client::Error),
ArtifactIdentMismatch((String, String, String)),
/// Occurs when there is no valid toml of json in the environment variable
BadEnvConfig(String),
BadGlyphStyle(String),
CantUploadGossipToml,
ChannelNotFound,
CryptoKeyError(String),
DownloadFailed(String),
EditorEnv(env::VarError),
EditStatus,
FileNameError,
/// Occurs when a file that should exist does not or could not be read.
FileNotFound(String),
GossipFileRelativePath(String),
HabitatCore(hcore::Error),
InstallHookFailed(PackageIdent),
InterpreterNotFound(PackageIdent, Box<Self>),
InvalidEventStreamToken(String),
InvalidInstallHookMode(String),
/// Occurs when making lower level IO calls.
IO(io::Error),
/// Errors when joining paths :)
JoinPathsError(env::JoinPathsError),
MissingCLIInputError(String),
NamedPipeTimeoutOnStart(String, String, io::Error),
NativeTls(native_tls::Error),
NetParseError(net::AddrParseError),
OfflineArtifactNotFound(PackageIdent),
OfflineOriginKeyNotFound(String),
OfflinePackageNotFound(PackageIdent),
PackageNotFound(String),
/// Occurs upon errors related to file or directory permissions.
PermissionFailed(String),
/// When an error occurs serializing rendering context
RenderContextSerialization(serde_json::Error),
RootRequired,
StatusFileCorrupt(PathBuf),
StrFromUtf8Error(str::Utf8Error),
StringFromUtf8Error(string::FromUtf8Error),
/// When an error occurs registering template file
// Boxed due to clippy::large_enum_variant
TemplateFileError(Box<handlebars::TemplateFileError>),
/// When an error occurs rendering template
/// The error is constructed with a handlebars::RenderError's format string instead
/// of the handlebars::RenderError itself because the cause field of the
/// handlebars::RenderError in the handlebars crate version we use implements send
/// and not sync which can lead to upstream compile errors when dealing with the
/// failure crate. We should change this to a RenderError after we update the
/// handlebars crate. See https://github.com/sunng87/handlebars-rust/issues/194
TemplateRenderError(String),
/// When an error occurs merging toml
TomlMergeError(String),
/// When an error occurs parsing toml
TomlParser(toml::de::Error),
TomlSerializeError(toml::ser::Error),
WireDecode(String),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let msg = match *self {
Error::APIClient(ref err) => format!("{}", err),
Error::ArtifactIdentMismatch((ref a, ref ai, ref i)) => {
format!("Artifact ident {} for `{}' does not match expected ident {}",
ai, a, i)
}
Error::BadEnvConfig(ref varname) => {
format!("Unable to find valid TOML or JSON in {} ENVVAR", varname)
}
Error::BadGlyphStyle(ref style) => format!("Unknown symbol style '{}'", style),
Error::CantUploadGossipToml => {
"Can't upload gossip.toml, it's a reserved file name".to_string()
}
Error::ChannelNotFound => "Channel not found".to_string(),
Error::CryptoKeyError(ref s) => format!("Missing or invalid key: {}", s),
Error::DownloadFailed(ref msg) => msg.to_string(),
Error::EditorEnv(ref e) => format!("Missing EDITOR environment variable: {}", e),
Error::EditStatus => "Failed edit text command".to_string(),
Error::FileNameError => "Failed to extract a filename".to_string(),
Error::FileNotFound(ref e) => format!("File not found at: {}", e),
Error::GossipFileRelativePath(ref s) => {
format!("Path for gossip file cannot have relative components (eg: ..): {}",
s)
}
Error::HabitatCore(ref e) => format!("{}", e),
Error::MissingCLIInputError(ref arg) => {
format!("Missing required CLI argument!: {}", arg)
}
Error::InstallHookFailed(ref ident) => {
format!("Install hook exited unsuccessfully: {}", ident)
}
Error::InterpreterNotFound(ref ident, ref e) => {
format!("Unable to install interpreter ident: {} - {}", ident, e)
}
Error::InvalidEventStreamToken(ref s) => {
format!("Invalid event stream token provided: '{}'", s)
}
Error::InvalidInstallHookMode(ref e) => {
format!("Invalid InstallHookMode conversion from {}", e)
}
Error::IO(ref err) => format!("{}", err),
Error::JoinPathsError(ref err) => format!("{}", err),
Error::NamedPipeTimeoutOnStart(ref group, ref hook, ref err) => {
format!("Unable to start powershell named pipe for {} hook of {}: {}",
hook, group, err)
}
Error::NativeTls(ref err) => format!("TLS error '{}'", err),
Error::NetParseError(ref err) => format!("{}", err),
Error::OfflineArtifactNotFound(ref ident) => {
format!("Cached artifact not found in offline mode: {}", ident)
}
Error::OfflineOriginKeyNotFound(ref name_with_rev) => |
Error::OfflinePackageNotFound(ref ident) => {
format!("No installed package or cached artifact could be found locally in \
offline mode: {}",
ident)
}
Error::PackageNotFound(ref e) => format!("Package not found. {}", e),
Error::PermissionFailed(ref e) => e.to_string(),
Error::RenderContextSerialization(ref e) => {
format!("Unable to serialize rendering context, {}", e)
}
Error::RootRequired => {
"Root or administrator permissions required to complete operation".to_string()
}
Error::StatusFileCorrupt(ref path) => {
format!("Unable to decode contents of INSTALL_STATUS file, {}",
path.display())
}
Error::StrFromUtf8Error(ref e) => format!("{}", e),
Error::StringFromUtf8Error(ref e) => format!("{}", e),
Error::TemplateFileError(ref err) => format!("{:?}", err),
Error::TemplateRenderError(ref err) => err.to_string(),
Error::TomlMergeError(ref e) => format!("Failed to merge TOML: {}", e),
Error::TomlParser(ref err) => format!("Failed to parse TOML: {}", err),
Error::TomlSerializeError(ref e) => format!("Can't serialize TOML: {}", e),
Error::WireDecode(ref m) => format!("Failed to decode wire message: {}", m),
};
write!(f, "{}", msg)
}
}
impl error::Error for Error {}
impl From<api_client::Error> for Error {
fn from(err: api_client::Error) -> Self { Error::APIClient(err) }
}
impl From<handlebars::TemplateFileError> for Error {
fn from(err: handlebars::TemplateFileError) -> Self { Error::TemplateFileError(Box::new(err)) }
}
impl From<hcore::Error> for Error {
fn from(err: hcore::Error) -> Self { Error::HabitatCore(err) }
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Self { Error::IO(err) }
}
impl From<env::JoinPathsError> for Error {
fn from(err: env::JoinPathsError) -> Self { Error::JoinPathsError(err) }
}
impl From<str::Utf8Error> for Error {
fn from(err: str::Utf8Error) -> Self { Error::StrFromUtf8Error(err) }
}
impl From<string::FromUtf8Error> for Error {
fn from(err: string::FromUtf8Error) -> Self { Error::StringFromUtf8Error(err) }
}
impl From<toml::ser::Error> for Error {
fn from(err: toml::ser::Error) -> Self { Error::TomlSerializeError(err) }
}
impl From<net::AddrParseError> for Error {
fn from(err: net::AddrParseError) -> Self { Error::NetParseError(err) }
}
impl From<native_tls::Error> for Error {
fn from(error: native_tls::Error) -> Self { Error::NativeTls(error) }
}
| {
format!("Cached origin key not found in offline mode: {}",
name_with_rev)
} | conditional_block |
error.rs | use std::{env,
error,
fmt,
io,
net,
path::PathBuf,
result,
str,
string};
use crate::{api_client,
hcore::{self,
package::PackageIdent}};
pub type Result<T> = result::Result<T, Error>;
#[derive(Debug)]
pub enum Error {
APIClient(api_client::Error),
ArtifactIdentMismatch((String, String, String)),
/// Occurs when there is no valid toml of json in the environment variable
BadEnvConfig(String),
BadGlyphStyle(String),
CantUploadGossipToml,
ChannelNotFound,
CryptoKeyError(String),
DownloadFailed(String),
EditorEnv(env::VarError),
EditStatus,
FileNameError,
/// Occurs when a file that should exist does not or could not be read.
FileNotFound(String),
GossipFileRelativePath(String),
HabitatCore(hcore::Error),
InstallHookFailed(PackageIdent),
InterpreterNotFound(PackageIdent, Box<Self>),
InvalidEventStreamToken(String),
InvalidInstallHookMode(String),
/// Occurs when making lower level IO calls.
IO(io::Error),
/// Errors when joining paths :)
JoinPathsError(env::JoinPathsError),
MissingCLIInputError(String),
NamedPipeTimeoutOnStart(String, String, io::Error),
NativeTls(native_tls::Error),
NetParseError(net::AddrParseError),
OfflineArtifactNotFound(PackageIdent),
OfflineOriginKeyNotFound(String),
OfflinePackageNotFound(PackageIdent),
PackageNotFound(String),
/// Occurs upon errors related to file or directory permissions.
PermissionFailed(String),
/// When an error occurs serializing rendering context
RenderContextSerialization(serde_json::Error),
RootRequired,
StatusFileCorrupt(PathBuf),
StrFromUtf8Error(str::Utf8Error),
StringFromUtf8Error(string::FromUtf8Error),
/// When an error occurs registering template file
// Boxed due to clippy::large_enum_variant
TemplateFileError(Box<handlebars::TemplateFileError>),
/// When an error occurs rendering template
/// The error is constructed with a handlebars::RenderError's format string instead
/// of the handlebars::RenderError itself because the cause field of the
/// handlebars::RenderError in the handlebars crate version we use implements send
/// and not sync which can lead to upstream compile errors when dealing with the
/// failure crate. We should change this to a RenderError after we update the
/// handlebars crate. See https://github.com/sunng87/handlebars-rust/issues/194
TemplateRenderError(String),
/// When an error occurs merging toml
TomlMergeError(String),
/// When an error occurs parsing toml
TomlParser(toml::de::Error),
TomlSerializeError(toml::ser::Error),
WireDecode(String),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let msg = match *self {
Error::APIClient(ref err) => format!("{}", err),
Error::ArtifactIdentMismatch((ref a, ref ai, ref i)) => {
format!("Artifact ident {} for `{}' does not match expected ident {}",
ai, a, i)
}
Error::BadEnvConfig(ref varname) => {
format!("Unable to find valid TOML or JSON in {} ENVVAR", varname)
}
Error::BadGlyphStyle(ref style) => format!("Unknown symbol style '{}'", style),
Error::CantUploadGossipToml => {
"Can't upload gossip.toml, it's a reserved file name".to_string()
}
Error::ChannelNotFound => "Channel not found".to_string(),
Error::CryptoKeyError(ref s) => format!("Missing or invalid key: {}", s),
Error::DownloadFailed(ref msg) => msg.to_string(),
Error::EditorEnv(ref e) => format!("Missing EDITOR environment variable: {}", e),
Error::EditStatus => "Failed edit text command".to_string(),
Error::FileNameError => "Failed to extract a filename".to_string(),
Error::FileNotFound(ref e) => format!("File not found at: {}", e),
Error::GossipFileRelativePath(ref s) => {
format!("Path for gossip file cannot have relative components (eg: ..): {}",
s)
}
Error::HabitatCore(ref e) => format!("{}", e),
Error::MissingCLIInputError(ref arg) => {
format!("Missing required CLI argument!: {}", arg)
}
Error::InstallHookFailed(ref ident) => {
format!("Install hook exited unsuccessfully: {}", ident)
}
Error::InterpreterNotFound(ref ident, ref e) => {
format!("Unable to install interpreter ident: {} - {}", ident, e)
}
Error::InvalidEventStreamToken(ref s) => {
format!("Invalid event stream token provided: '{}'", s)
}
Error::InvalidInstallHookMode(ref e) => {
format!("Invalid InstallHookMode conversion from {}", e)
}
Error::IO(ref err) => format!("{}", err),
Error::JoinPathsError(ref err) => format!("{}", err),
Error::NamedPipeTimeoutOnStart(ref group, ref hook, ref err) => {
format!("Unable to start powershell named pipe for {} hook of {}: {}",
hook, group, err)
}
Error::NativeTls(ref err) => format!("TLS error '{}'", err),
Error::NetParseError(ref err) => format!("{}", err),
Error::OfflineArtifactNotFound(ref ident) => {
format!("Cached artifact not found in offline mode: {}", ident)
}
Error::OfflineOriginKeyNotFound(ref name_with_rev) => {
format!("Cached origin key not found in offline mode: {}",
name_with_rev)
}
Error::OfflinePackageNotFound(ref ident) => {
format!("No installed package or cached artifact could be found locally in \
offline mode: {}",
ident)
}
Error::PackageNotFound(ref e) => format!("Package not found. {}", e),
Error::PermissionFailed(ref e) => e.to_string(),
Error::RenderContextSerialization(ref e) => {
format!("Unable to serialize rendering context, {}", e)
}
Error::RootRequired => {
"Root or administrator permissions required to complete operation".to_string()
}
Error::StatusFileCorrupt(ref path) => {
format!("Unable to decode contents of INSTALL_STATUS file, {}",
path.display())
}
Error::StrFromUtf8Error(ref e) => format!("{}", e),
Error::StringFromUtf8Error(ref e) => format!("{}", e),
Error::TemplateFileError(ref err) => format!("{:?}", err),
Error::TemplateRenderError(ref err) => err.to_string(),
Error::TomlMergeError(ref e) => format!("Failed to merge TOML: {}", e),
Error::TomlParser(ref err) => format!("Failed to parse TOML: {}", err),
Error::TomlSerializeError(ref e) => format!("Can't serialize TOML: {}", e),
Error::WireDecode(ref m) => format!("Failed to decode wire message: {}", m),
};
write!(f, "{}", msg)
}
}
impl error::Error for Error {}
impl From<api_client::Error> for Error {
fn from(err: api_client::Error) -> Self { Error::APIClient(err) }
}
impl From<handlebars::TemplateFileError> for Error {
fn from(err: handlebars::TemplateFileError) -> Self { Error::TemplateFileError(Box::new(err)) }
}
impl From<hcore::Error> for Error {
fn from(err: hcore::Error) -> Self { Error::HabitatCore(err) }
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Self |
}
impl From<env::JoinPathsError> for Error {
fn from(err: env::JoinPathsError) -> Self { Error::JoinPathsError(err) }
}
impl From<str::Utf8Error> for Error {
fn from(err: str::Utf8Error) -> Self { Error::StrFromUtf8Error(err) }
}
impl From<string::FromUtf8Error> for Error {
fn from(err: string::FromUtf8Error) -> Self { Error::StringFromUtf8Error(err) }
}
impl From<toml::ser::Error> for Error {
fn from(err: toml::ser::Error) -> Self { Error::TomlSerializeError(err) }
}
impl From<net::AddrParseError> for Error {
fn from(err: net::AddrParseError) -> Self { Error::NetParseError(err) }
}
impl From<native_tls::Error> for Error {
fn from(error: native_tls::Error) -> Self { Error::NativeTls(error) }
}
| { Error::IO(err) } | identifier_body |
Decompose.js | /**
* @author Richard Davey <[email protected]>
* @copyright 2019 Photon Storm Ltd.
* @license {@link https://opensource.org/licenses/MIT|MIT License}
*/
/**
* Create an array of points for each corner of a Rectangle
* If an array is specified, each point object will be added to the end of the array, otherwise a new array will be created.
*
* @function Phaser.Geom.Rectangle.Decompose
* @since 3.0.0
*
* @param {Phaser.Geom.Rectangle} rect - The Rectangle object to be decomposed.
* @param {array} [out] - If provided, each point will be added to this array.
*
* @return {array} Will return the array you specified or a new array containing the points of the Rectangle.
*/
var Decompose = function (rect, out)
{
if (out === undefined) |
out.push({ x: rect.x, y: rect.y });
out.push({ x: rect.right, y: rect.y });
out.push({ x: rect.right, y: rect.bottom });
out.push({ x: rect.x, y: rect.bottom });
return out;
};
module.exports = Decompose;
| { out = []; } | conditional_block |
Decompose.js | /**
* @author Richard Davey <[email protected]>
* @copyright 2019 Photon Storm Ltd.
* @license {@link https://opensource.org/licenses/MIT|MIT License}
*/
/**
* Create an array of points for each corner of a Rectangle
* If an array is specified, each point object will be added to the end of the array, otherwise a new array will be created.
*
* @function Phaser.Geom.Rectangle.Decompose
* @since 3.0.0
*
* @param {Phaser.Geom.Rectangle} rect - The Rectangle object to be decomposed.
* @param {array} [out] - If provided, each point will be added to this array.
*
* @return {array} Will return the array you specified or a new array containing the points of the Rectangle.
*/
var Decompose = function (rect, out)
{
if (out === undefined) { out = []; }
out.push({ x: rect.x, y: rect.y });
out.push({ x: rect.right, y: rect.y }); | return out;
};
module.exports = Decompose; | out.push({ x: rect.right, y: rect.bottom });
out.push({ x: rect.x, y: rect.bottom });
| random_line_split |
dweet.py | """
A component which allows you to send data to Dweet.io.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/dweet/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.const import EVENT_STATE_CHANGED, STATE_UNKNOWN
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import state as state_helper
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DOMAIN = "dweet"
DEPENDENCIES = []
REQUIREMENTS = ['dweepy==0.2.0']
CONF_NAME = 'name'
CONF_WHITELIST = 'whitelist'
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1)
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_WHITELIST): cv.string,
}),
}, extra=vol.ALLOW_EXTRA)
# pylint: disable=too-many-locals
def setup(hass, config):
"""Setup the Dweet.io component."""
conf = config[DOMAIN]
name = conf[CONF_NAME]
whitelist = conf.get(CONF_WHITELIST, [])
json_body = {}
def dweet_event_listener(event):
"""Listen for new messages on the bus and sends them to Dweet.io."""
state = event.data.get('new_state')
if state is None or state.state in (STATE_UNKNOWN, '') \
or state.entity_id not in whitelist:
|
try:
_state = state_helper.state_as_number(state)
except ValueError:
_state = state.state
json_body[state.attributes.get('friendly_name')] = _state
send_data(name, json_body)
hass.bus.listen(EVENT_STATE_CHANGED, dweet_event_listener)
return True
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def send_data(name, msg):
"""Send the collected data to Dweet.io."""
import dweepy
try:
dweepy.dweet_for(name, msg)
except dweepy.DweepyError:
_LOGGER.error("Error saving data '%s' to Dweet.io", msg)
| return | conditional_block |
dweet.py | """
A component which allows you to send data to Dweet.io.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/dweet/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.const import EVENT_STATE_CHANGED, STATE_UNKNOWN
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import state as state_helper
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DOMAIN = "dweet"
DEPENDENCIES = []
REQUIREMENTS = ['dweepy==0.2.0']
CONF_NAME = 'name'
CONF_WHITELIST = 'whitelist'
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1)
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_WHITELIST): cv.string,
}),
}, extra=vol.ALLOW_EXTRA)
# pylint: disable=too-many-locals
def setup(hass, config):
"""Setup the Dweet.io component."""
conf = config[DOMAIN]
name = conf[CONF_NAME]
whitelist = conf.get(CONF_WHITELIST, [])
json_body = {}
def | (event):
"""Listen for new messages on the bus and sends them to Dweet.io."""
state = event.data.get('new_state')
if state is None or state.state in (STATE_UNKNOWN, '') \
or state.entity_id not in whitelist:
return
try:
_state = state_helper.state_as_number(state)
except ValueError:
_state = state.state
json_body[state.attributes.get('friendly_name')] = _state
send_data(name, json_body)
hass.bus.listen(EVENT_STATE_CHANGED, dweet_event_listener)
return True
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def send_data(name, msg):
"""Send the collected data to Dweet.io."""
import dweepy
try:
dweepy.dweet_for(name, msg)
except dweepy.DweepyError:
_LOGGER.error("Error saving data '%s' to Dweet.io", msg)
| dweet_event_listener | identifier_name |
dweet.py | """
A component which allows you to send data to Dweet.io.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/dweet/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.const import EVENT_STATE_CHANGED, STATE_UNKNOWN
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import state as state_helper
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DOMAIN = "dweet"
DEPENDENCIES = []
REQUIREMENTS = ['dweepy==0.2.0']
CONF_NAME = 'name'
CONF_WHITELIST = 'whitelist'
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1)
| DOMAIN: vol.Schema({
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_WHITELIST): cv.string,
}),
}, extra=vol.ALLOW_EXTRA)
# pylint: disable=too-many-locals
def setup(hass, config):
"""Setup the Dweet.io component."""
conf = config[DOMAIN]
name = conf[CONF_NAME]
whitelist = conf.get(CONF_WHITELIST, [])
json_body = {}
def dweet_event_listener(event):
"""Listen for new messages on the bus and sends them to Dweet.io."""
state = event.data.get('new_state')
if state is None or state.state in (STATE_UNKNOWN, '') \
or state.entity_id not in whitelist:
return
try:
_state = state_helper.state_as_number(state)
except ValueError:
_state = state.state
json_body[state.attributes.get('friendly_name')] = _state
send_data(name, json_body)
hass.bus.listen(EVENT_STATE_CHANGED, dweet_event_listener)
return True
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def send_data(name, msg):
"""Send the collected data to Dweet.io."""
import dweepy
try:
dweepy.dweet_for(name, msg)
except dweepy.DweepyError:
_LOGGER.error("Error saving data '%s' to Dweet.io", msg) | CONFIG_SCHEMA = vol.Schema({ | random_line_split |
dweet.py | """
A component which allows you to send data to Dweet.io.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/dweet/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.const import EVENT_STATE_CHANGED, STATE_UNKNOWN
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import state as state_helper
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DOMAIN = "dweet"
DEPENDENCIES = []
REQUIREMENTS = ['dweepy==0.2.0']
CONF_NAME = 'name'
CONF_WHITELIST = 'whitelist'
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1)
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_WHITELIST): cv.string,
}),
}, extra=vol.ALLOW_EXTRA)
# pylint: disable=too-many-locals
def setup(hass, config):
"""Setup the Dweet.io component."""
conf = config[DOMAIN]
name = conf[CONF_NAME]
whitelist = conf.get(CONF_WHITELIST, [])
json_body = {}
def dweet_event_listener(event):
|
hass.bus.listen(EVENT_STATE_CHANGED, dweet_event_listener)
return True
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def send_data(name, msg):
"""Send the collected data to Dweet.io."""
import dweepy
try:
dweepy.dweet_for(name, msg)
except dweepy.DweepyError:
_LOGGER.error("Error saving data '%s' to Dweet.io", msg)
| """Listen for new messages on the bus and sends them to Dweet.io."""
state = event.data.get('new_state')
if state is None or state.state in (STATE_UNKNOWN, '') \
or state.entity_id not in whitelist:
return
try:
_state = state_helper.state_as_number(state)
except ValueError:
_state = state.state
json_body[state.attributes.get('friendly_name')] = _state
send_data(name, json_body) | identifier_body |
read_spec.rs | // This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use lig::read::{read, read_attribute, read_entity, read_value};
use lig::*;
use ligature::{Attribute, Entity, Statement, Value};
#[test]
fn read_entities() {
let e = "<test>";
assert_eq!(
read_entity(e),
Entity::new("test").map_err(|_| LigError("Could not create entity.".into())) | fn read_attributes() -> Result<(), LigError> {
let a = "@<test>";
assert_eq!(read_attribute(a)?, Attribute::new("test")?);
Ok(())
}
#[test]
fn read_string_literals() -> Result<(), LigError> {
let s = "\"test\"";
assert_eq!(read_value(s)?, Value::StringLiteral("test".to_string()));
Ok(())
}
#[test]
fn read_integer_literals() -> Result<(), LigError> {
let i = "243";
assert_eq!(read_value(i)?, Value::IntegerLiteral(243));
Ok(())
}
#[test]
fn read_float_literals() -> Result<(), LigError> {
let f = "1.2";
assert_eq!(read_value(f)?, Value::FloatLiteral(1.2));
Ok(())
}
#[test]
fn read_byte_arrays_literals() -> Result<(), LigError> {
let b = "0x00ff";
assert_eq!(read_value(b)?, Value::BytesLiteral(vec![0, 255]));
Ok(())
}
#[test]
fn read_entity_as_value() -> Result<(), LigError> {
let e = "<test>";
assert_eq!(read_value(e)?, Value::Entity(Entity::new("test")?));
Ok(())
}
#[test]
fn read_empty_set_of_statements() -> Result<(), LigError> {
let s = "";
let expected: Vec<Statement> = vec![];
assert_eq!(read(s)?, expected);
Ok(())
}
#[test]
fn read_set_of_statements() -> Result<(), LigError> {
let s = "<e> @<a> 123 <c>\n<e2> @<a> <e> <c2>\n";
let expected = vec![
Statement {
entity: Entity::new("e")?,
attribute: Attribute::new("a")?,
value: Value::IntegerLiteral(123),
context: Entity::new("c")?,
},
Statement {
entity: Entity::new("e2")?,
attribute: Attribute::new("a")?,
value: Value::Entity(Entity::new("e")?),
context: Entity::new("c2")?,
},
];
assert_eq!(read(s)?, expected);
Ok(())
} | );
}
#[test] | random_line_split |
read_spec.rs | // This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use lig::read::{read, read_attribute, read_entity, read_value};
use lig::*;
use ligature::{Attribute, Entity, Statement, Value};
#[test]
fn read_entities() |
#[test]
fn read_attributes() -> Result<(), LigError> {
let a = "@<test>";
assert_eq!(read_attribute(a)?, Attribute::new("test")?);
Ok(())
}
#[test]
fn read_string_literals() -> Result<(), LigError> {
let s = "\"test\"";
assert_eq!(read_value(s)?, Value::StringLiteral("test".to_string()));
Ok(())
}
#[test]
fn read_integer_literals() -> Result<(), LigError> {
let i = "243";
assert_eq!(read_value(i)?, Value::IntegerLiteral(243));
Ok(())
}
#[test]
fn read_float_literals() -> Result<(), LigError> {
let f = "1.2";
assert_eq!(read_value(f)?, Value::FloatLiteral(1.2));
Ok(())
}
#[test]
fn read_byte_arrays_literals() -> Result<(), LigError> {
let b = "0x00ff";
assert_eq!(read_value(b)?, Value::BytesLiteral(vec![0, 255]));
Ok(())
}
#[test]
fn read_entity_as_value() -> Result<(), LigError> {
let e = "<test>";
assert_eq!(read_value(e)?, Value::Entity(Entity::new("test")?));
Ok(())
}
#[test]
fn read_empty_set_of_statements() -> Result<(), LigError> {
let s = "";
let expected: Vec<Statement> = vec![];
assert_eq!(read(s)?, expected);
Ok(())
}
#[test]
fn read_set_of_statements() -> Result<(), LigError> {
let s = "<e> @<a> 123 <c>\n<e2> @<a> <e> <c2>\n";
let expected = vec![
Statement {
entity: Entity::new("e")?,
attribute: Attribute::new("a")?,
value: Value::IntegerLiteral(123),
context: Entity::new("c")?,
},
Statement {
entity: Entity::new("e2")?,
attribute: Attribute::new("a")?,
value: Value::Entity(Entity::new("e")?),
context: Entity::new("c2")?,
},
];
assert_eq!(read(s)?, expected);
Ok(())
}
| {
let e = "<test>";
assert_eq!(
read_entity(e),
Entity::new("test").map_err(|_| LigError("Could not create entity.".into()))
);
} | identifier_body |
read_spec.rs | // This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use lig::read::{read, read_attribute, read_entity, read_value};
use lig::*;
use ligature::{Attribute, Entity, Statement, Value};
#[test]
fn read_entities() {
let e = "<test>";
assert_eq!(
read_entity(e),
Entity::new("test").map_err(|_| LigError("Could not create entity.".into()))
);
}
#[test]
fn read_attributes() -> Result<(), LigError> {
let a = "@<test>";
assert_eq!(read_attribute(a)?, Attribute::new("test")?);
Ok(())
}
#[test]
fn read_string_literals() -> Result<(), LigError> {
let s = "\"test\"";
assert_eq!(read_value(s)?, Value::StringLiteral("test".to_string()));
Ok(())
}
#[test]
fn read_integer_literals() -> Result<(), LigError> {
let i = "243";
assert_eq!(read_value(i)?, Value::IntegerLiteral(243));
Ok(())
}
#[test]
fn | () -> Result<(), LigError> {
let f = "1.2";
assert_eq!(read_value(f)?, Value::FloatLiteral(1.2));
Ok(())
}
#[test]
fn read_byte_arrays_literals() -> Result<(), LigError> {
let b = "0x00ff";
assert_eq!(read_value(b)?, Value::BytesLiteral(vec![0, 255]));
Ok(())
}
#[test]
fn read_entity_as_value() -> Result<(), LigError> {
let e = "<test>";
assert_eq!(read_value(e)?, Value::Entity(Entity::new("test")?));
Ok(())
}
#[test]
fn read_empty_set_of_statements() -> Result<(), LigError> {
let s = "";
let expected: Vec<Statement> = vec![];
assert_eq!(read(s)?, expected);
Ok(())
}
#[test]
fn read_set_of_statements() -> Result<(), LigError> {
let s = "<e> @<a> 123 <c>\n<e2> @<a> <e> <c2>\n";
let expected = vec![
Statement {
entity: Entity::new("e")?,
attribute: Attribute::new("a")?,
value: Value::IntegerLiteral(123),
context: Entity::new("c")?,
},
Statement {
entity: Entity::new("e2")?,
attribute: Attribute::new("a")?,
value: Value::Entity(Entity::new("e")?),
context: Entity::new("c2")?,
},
];
assert_eq!(read(s)?, expected);
Ok(())
}
| read_float_literals | identifier_name |
response.py | __author__ = 'Joe Linn'
#import pylastica
import pylastica.response
class Response(pylastica.response.Response): | @param response_data:
@type response_data: dict or str
@param action:
@type action: pylastica.bulk.action.Action
@param op_type: bulk operation type
@type op_type: str
"""
assert isinstance(action, pylastica.bulk.action.Action), "action must be an instance of Action: %r" % action
super(Response, self).__init__(response_data)
self._action = action
self._op_type = op_type
@property
def action(self):
"""
@return:
@rtype: pylastica.bulk.action.Action
"""
return self._action
@property
def op_type(self):
"""
@return:
@rtype: str
"""
return self._op_type | def __init__(self, response_data, action, op_type):
"""
| random_line_split |
response.py | __author__ = 'Joe Linn'
#import pylastica
import pylastica.response
class Response(pylastica.response.Response):
def __init__(self, response_data, action, op_type):
"""
@param response_data:
@type response_data: dict or str
@param action:
@type action: pylastica.bulk.action.Action
@param op_type: bulk operation type
@type op_type: str
"""
assert isinstance(action, pylastica.bulk.action.Action), "action must be an instance of Action: %r" % action
super(Response, self).__init__(response_data)
self._action = action
self._op_type = op_type
@property
def action(self):
|
@property
def op_type(self):
"""
@return:
@rtype: str
"""
return self._op_type
| """
@return:
@rtype: pylastica.bulk.action.Action
"""
return self._action | identifier_body |
response.py | __author__ = 'Joe Linn'
#import pylastica
import pylastica.response
class Response(pylastica.response.Response):
def | (self, response_data, action, op_type):
"""
@param response_data:
@type response_data: dict or str
@param action:
@type action: pylastica.bulk.action.Action
@param op_type: bulk operation type
@type op_type: str
"""
assert isinstance(action, pylastica.bulk.action.Action), "action must be an instance of Action: %r" % action
super(Response, self).__init__(response_data)
self._action = action
self._op_type = op_type
@property
def action(self):
"""
@return:
@rtype: pylastica.bulk.action.Action
"""
return self._action
@property
def op_type(self):
"""
@return:
@rtype: str
"""
return self._op_type
| __init__ | identifier_name |
task.py | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleAssertionError
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_native
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.plugins.loader import lookup_loader
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.block import Block
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.loop_control import LoopControl
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.utils.collection_loader import AnsibleCollectionLoader
from ansible.utils.display import Display
from ansible.utils.sentinel import Sentinel
# Only the Task class is part of this module's public API.
__all__ = ['Task']
# Module-level Display instance for user-facing output (warnings,
# deprecations, debug messages).
display = Display()
class Task(Base, Conditional, Taggable, CollectionSearch):
    """
    A task is a language feature that represents a call to a module, with given arguments and other parameters.
    A handler is a subclass of a task.
    Usage:
        Task.load(datastructure) -> Task
        Task.something(...)
    """
    # =================================================================================
    # ATTRIBUTES
    # load_<attribute_name> and
    # validate_<attribute_name>
    # will be used if defined
    # might be possible to define others
    # NOTE: ONLY set defaults on task attributes that are not inheritable,
    # inheritance is only triggered if the 'current value' is None,
    # default can be set at play/top level object and inheritance will take it's course.
    # --- module invocation ---
    _args = FieldAttribute(isa='dict', default=dict)
    _action = FieldAttribute(isa='string')
    # 'async' is a reserved word in recent Python, hence the alias.
    _async_val = FieldAttribute(isa='int', default=0, alias='async')
    _changed_when = FieldAttribute(isa='list', default=list)
    _delay = FieldAttribute(isa='int', default=5)
    _delegate_to = FieldAttribute(isa='string')
    _delegate_facts = FieldAttribute(isa='bool')
    _failed_when = FieldAttribute(isa='list', default=list)
    # --- looping ---
    _loop = FieldAttribute()
    _loop_control = FieldAttribute(isa='class', class_type=LoopControl, inherit=False)
    _notify = FieldAttribute(isa='list')
    _poll = FieldAttribute(isa='int', default=C.DEFAULT_POLL_INTERVAL)
    _register = FieldAttribute(isa='string', static=True)
    _retries = FieldAttribute(isa='int', default=3)
    _until = FieldAttribute(isa='list', default=list)
    # deprecated, used to be loop and loop_args but loop has been repurposed
    _loop_with = FieldAttribute(isa='string', private=True, inherit=False)
def __init__(self, block=None, role=None, task_include=None):
''' constructors a task, without the Task.load classmethod, it will be pretty blank '''
self._role = role
self._parent = None
if task_include:
self._parent = task_include
else:
self._parent = block
super(Task, self).__init__()
def get_path(self):
''' return the absolute path of the task with its line number '''
path = ""
if hasattr(self, '_ds') and hasattr(self._ds, '_data_source') and hasattr(self._ds, '_line_number'):
path = "%s:%s" % (self._ds._data_source, self._ds._line_number)
elif hasattr(self._parent._play, '_ds') and hasattr(self._parent._play._ds, '_data_source') and hasattr(self._parent._play._ds, '_line_number'):
path = "%s:%s" % (self._parent._play._ds._data_source, self._parent._play._ds._line_number)
return path
def get_name(self, include_role_fqcn=True):
''' return the name of the task '''
if self._role:
role_name = self._role.get_name(include_role_fqcn=include_role_fqcn)
if self._role and self.name and role_name not in self.name:
return "%s : %s" % (role_name, self.name)
elif self.name:
return self.name
else:
if self._role:
return "%s : %s" % (role_name, self.action)
else:
return "%s" % (self.action,)
def _merge_kv(self, ds):
if ds is None:
return ""
elif isinstance(ds, string_types):
return ds
elif isinstance(ds, dict):
buf = ""
for (k, v) in iteritems(ds):
if k.startswith('_'):
continue
buf = buf + "%s=%s " % (k, v)
buf = buf.strip()
return buf
@staticmethod
def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
t = Task(block=block, role=role, task_include=task_include)
return t.load_data(data, variable_manager=variable_manager, loader=loader)
def __repr__(self):
''' returns a human readable representation of the task '''
if self.get_name() == 'meta':
return "TASK: meta (%s)" % self.args['_raw_params']
else:
return "TASK: %s" % self.get_name()
def _preprocess_with_loop(self, ds, new_ds, k, v):
''' take a lookup plugin name and store it correctly '''
loop_name = k.replace("with_", "")
if new_ds.get('loop') is not None or new_ds.get('loop_with') is not None:
raise AnsibleError("duplicate loop in task: %s" % loop_name, obj=ds)
if v is None:
raise AnsibleError("you must specify a value when using %s" % k, obj=ds)
new_ds['loop_with'] = loop_name
new_ds['loop'] = v
# display.deprecated("with_ type loops are being phased out, use the 'loop' keyword instead", version="2.10")
def preprocess_data(self, ds):
'''
tasks are especially complex arguments so need pre-processing.
keep it short.
'''
if not isinstance(ds, dict):
raise AnsibleAssertionError('ds (%s) should be a dict but was a %s' % (ds, type(ds)))
# the new, cleaned datastructure, which will have legacy
# items reduced to a standard structure suitable for the
# attributes of the task class
new_ds = AnsibleMapping()
if isinstance(ds, AnsibleBaseYAMLObject):
new_ds.ansible_pos = ds.ansible_pos
# since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
default_collection = AnsibleCollectionLoader().default_collection
# use the parent value if our ds doesn't define it
collections_list = ds.get('collections', self.collections)
if collections_list is None:
collections_list = []
if isinstance(collections_list, string_types):
collections_list = [collections_list]
if default_collection and not self._role: # FIXME: and not a collections role
if collections_list:
if default_collection not in collections_list:
collections_list.insert(0, default_collection)
else:
collections_list = [default_collection]
if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
collections_list.append('ansible.legacy')
if collections_list:
ds['collections'] = collections_list
# use the args parsing class to determine the action, args,
# and the delegate_to value from the various possible forms
# supported as legacy
args_parser = ModuleArgsParser(task_ds=ds, collection_list=collections_list)
try:
(action, args, delegate_to) = args_parser.parse()
except AnsibleParserError as e:
# if the raises exception was created with obj=ds args, then it includes the detail
# so we dont need to add it so we can just re raise.
if e._obj:
raise
# But if it wasn't, we can add the yaml object now to get more detail
raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)
# the command/shell/script modules used to support the `cmd` arg,
# which corresponds to what we now call _raw_params, so move that
# value over to _raw_params (assuming it is empty)
if action in ('command', 'shell', 'script'):
if 'cmd' in args:
if args.get('_raw_params', '') != '':
raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
" Please put everything in one or the other place.", obj=ds)
args['_raw_params'] = args.pop('cmd')
new_ds['action'] = action
new_ds['args'] = args
new_ds['delegate_to'] = delegate_to
# we handle any 'vars' specified in the ds here, as we may
# be adding things to them below (special handling for includes).
# When that deprecated feature is removed, this can be too.
if 'vars' in ds:
# _load_vars is defined in Base, and is used to load a dictionary
# or list of dictionaries in a standard way
new_ds['vars'] = self._load_vars(None, ds.get('vars'))
else:
new_ds['vars'] = dict()
for (k, v) in iteritems(ds):
if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
# we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
continue
elif k.startswith('with_') and k.replace("with_", "") in lookup_loader:
# transform into loop property
self._preprocess_with_loop(ds, new_ds, k, v)
else:
# pre-2.0 syntax allowed variables for include statements at the top level of the task,
# so we move those into the 'vars' dictionary here, and show a deprecation message
# as we will remove this at some point in the future.
if action in ('include',) and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
display.deprecated("Specifying include variables at the top-level of the task is deprecated."
" Please see:\nhttps://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
" for currently supported syntax regarding included files and variables", version="2.12")
new_ds['vars'][k] = v
elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self._valid_attrs:
new_ds[k] = v
else:
display.warning("Ignoring invalid attribute: %s" % k)
return super(Task, self).preprocess_data(new_ds)
def _load_loop_control(self, attr, ds):
if not isinstance(ds, dict):
raise AnsibleParserError(
"the `loop_control` value must be specified as a dictionary and cannot "
"be a variable itself (though it can contain variables)",
obj=ds,
)
return LoopControl.load(data=ds, variable_manager=self._variable_manager, loader=self._loader)
def _validate_attributes(self, ds):
try:
super(Task, self)._validate_attributes(ds)
except AnsibleParserError as e:
e.message += '\nThis error can be suppressed as a warning using the "invalid_task_attribute_failed" configuration'
raise e
def post_validate(self, templar):
'''
Override of base class post_validate, to also do final validation on
the block and task include (if any) to which this task belongs.
'''
if self._parent:
self._parent.post_validate(templar)
if AnsibleCollectionLoader().default_collection:
pass
super(Task, self).post_validate(templar)
def _post_validate_loop(self, attr, value, templar):
'''
Override post validation for the loop field, which is templated
specially in the TaskExecutor class when evaluating loops.
'''
return value
def _post_validate_environment(self, attr, value, templar):
'''
Override post validation of vars on the play, as we don't want to
template these too early.
'''
env = {}
if value is not None:
def _parse_env_kv(k, v):
try:
env[k] = templar.template(v, convert_bare=False)
except AnsibleUndefinedVariable as e:
error = to_native(e)
if self.action in ('setup', 'gather_facts') and 'ansible_facts.env' in error or 'ansible_env' in error:
# ignore as fact gathering is required for 'env' facts
return
raise
if isinstance(value, list):
for env_item in value:
if isinstance(env_item, dict):
for k in env_item:
_parse_env_kv(k, env_item[k])
else:
isdict = templar.template(env_item, convert_bare=False)
if isinstance(isdict, dict):
env.update(isdict)
else:
display.warning("could not parse environment value, skipping: %s" % value)
elif isinstance(value, dict):
# should not really happen
env = dict()
for env_item in value:
_parse_env_kv(env_item, value[env_item])
else:
# at this point it should be a simple string, also should not happen
env = templar.template(value, convert_bare=False)
return env
def _post_validate_changed_when(self, attr, value, templar):
'''
changed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_failed_when(self, attr, value, templar):
'''
failed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_until(self, attr, value, templar):
'''
until is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def get_vars(self):
all_vars = dict()
if self._parent:
all_vars.update(self._parent.get_vars())
all_vars.update(self.vars)
if 'tags' in all_vars:
del all_vars['tags']
if 'when' in all_vars:
del all_vars['when']
return all_vars
def get_include_params(self):
all_vars = dict()
if self._parent:
all_vars.update(self._parent.get_include_params())
if self.action in ('include', 'include_tasks', 'include_role'):
all_vars.update(self.vars)
return all_vars
def copy(self, exclude_parent=False, exclude_tasks=False):
new_me = super(Task, self).copy()
new_me._parent = None
if self._parent and not exclude_parent:
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
new_me._role = None
if self._role:
new_me._role = self._role
return new_me
def serialize(self):
data = super(Task, self).serialize()
if not self._squashed and not self._finalized:
if self._parent:
data['parent'] = self._parent.serialize()
data['parent_type'] = self._parent.__class__.__name__
if self._role:
data['role'] = self._role.serialize()
return data
def deserialize(self, data):
# import is here to avoid import loops
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.handler_task_include import HandlerTaskInclude
parent_data = data.get('parent', None)
if parent_data:
parent_type = data.get('parent_type')
if parent_type == 'Block':
p = Block()
elif parent_type == 'TaskInclude':
p = TaskInclude()
elif parent_type == 'HandlerTaskInclude':
p = HandlerTaskInclude()
p.deserialize(parent_data)
self._parent = p
del data['parent']
role_data = data.get('role')
if role_data:
r = Role()
r.deserialize(role_data)
self._role = r
del data['role']
super(Task, self).deserialize(data)
def set_loader(self, loader):
'''
Sets the loader on this object and recursively on parent, child objects.
This is used primarily after the Task has been serialized/deserialized, which
does not preserve the loader.
'''
self._loader = loader
if self._parent:
self._parent.set_loader(loader)
def _get_parent_attribute(self, attr, extend=False, prepend=False):
|
def get_dep_chain(self):
if self._parent:
return self._parent.get_dep_chain()
else:
return None
def get_search_path(self):
'''
Return the list of paths you should search for files, in order.
This follows role/playbook dependency chain.
'''
path_stack = []
dep_chain = self.get_dep_chain()
# inside role: add the dependency chain from current to dependent
if dep_chain:
path_stack.extend(reversed([x._role_path for x in dep_chain]))
# add path of task itself, unless it is already in the list
task_dir = os.path.dirname(self.get_path())
if task_dir not in path_stack:
path_stack.append(task_dir)
return path_stack
def all_parents_static(self):
if self._parent:
return self._parent.all_parents_static()
return True
def get_first_parent_include(self):
from ansible.playbook.task_include import TaskInclude
if self._parent:
if isinstance(self._parent, TaskInclude):
return self._parent
return self._parent.get_first_parent_include()
return None
| '''
Generic logic to get the attribute or parent attribute for a task value.
'''
extend = self._valid_attrs[attr].extend
prepend = self._valid_attrs[attr].prepend
try:
value = self._attributes[attr]
# If parent is static, we can grab attrs from the parent
# otherwise, defer to the grandparent
if getattr(self._parent, 'statically_loaded', True):
_parent = self._parent
else:
_parent = self._parent._parent
if _parent and (value is Sentinel or extend):
if getattr(_parent, 'statically_loaded', True):
# vars are always inheritable, other attributes might not be for the parent but still should be for other ancestors
if attr != 'vars' and hasattr(_parent, '_get_parent_attribute'):
parent_value = _parent._get_parent_attribute(attr)
else:
parent_value = _parent._attributes.get(attr, Sentinel)
if extend:
value = self._extend_value(value, parent_value, prepend)
else:
value = parent_value
except KeyError:
pass
return value | identifier_body |
task.py | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleAssertionError
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_native
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.plugins.loader import lookup_loader
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.block import Block
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.loop_control import LoopControl
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.utils.collection_loader import AnsibleCollectionLoader
from ansible.utils.display import Display
from ansible.utils.sentinel import Sentinel
__all__ = ['Task']
display = Display()
class Task(Base, Conditional, Taggable, CollectionSearch):
"""
A task is a language feature that represents a call to a module, with given arguments and other parameters.
A handler is a subclass of a task.
Usage:
Task.load(datastructure) -> Task
Task.something(...)
"""
# =================================================================================
# ATTRIBUTES
# load_<attribute_name> and
# validate_<attribute_name>
# will be used if defined
# might be possible to define others
# NOTE: ONLY set defaults on task attributes that are not inheritable,
# inheritance is only triggered if the 'current value' is None,
# default can be set at play/top level object and inheritance will take it's course.
_args = FieldAttribute(isa='dict', default=dict)
_action = FieldAttribute(isa='string')
_async_val = FieldAttribute(isa='int', default=0, alias='async')
_changed_when = FieldAttribute(isa='list', default=list)
_delay = FieldAttribute(isa='int', default=5)
_delegate_to = FieldAttribute(isa='string')
_delegate_facts = FieldAttribute(isa='bool')
_failed_when = FieldAttribute(isa='list', default=list)
_loop = FieldAttribute()
_loop_control = FieldAttribute(isa='class', class_type=LoopControl, inherit=False)
_notify = FieldAttribute(isa='list')
_poll = FieldAttribute(isa='int', default=C.DEFAULT_POLL_INTERVAL)
_register = FieldAttribute(isa='string', static=True)
_retries = FieldAttribute(isa='int', default=3)
_until = FieldAttribute(isa='list', default=list)
# deprecated, used to be loop and loop_args but loop has been repurposed
_loop_with = FieldAttribute(isa='string', private=True, inherit=False)
def __init__(self, block=None, role=None, task_include=None):
''' constructors a task, without the Task.load classmethod, it will be pretty blank '''
self._role = role
self._parent = None
if task_include:
self._parent = task_include
else:
self._parent = block
super(Task, self).__init__()
def get_path(self):
''' return the absolute path of the task with its line number '''
path = ""
if hasattr(self, '_ds') and hasattr(self._ds, '_data_source') and hasattr(self._ds, '_line_number'):
path = "%s:%s" % (self._ds._data_source, self._ds._line_number)
elif hasattr(self._parent._play, '_ds') and hasattr(self._parent._play._ds, '_data_source') and hasattr(self._parent._play._ds, '_line_number'):
path = "%s:%s" % (self._parent._play._ds._data_source, self._parent._play._ds._line_number)
return path
def get_name(self, include_role_fqcn=True):
''' return the name of the task '''
if self._role:
role_name = self._role.get_name(include_role_fqcn=include_role_fqcn)
if self._role and self.name and role_name not in self.name:
return "%s : %s" % (role_name, self.name)
elif self.name:
return self.name
else:
if self._role:
return "%s : %s" % (role_name, self.action)
else:
return "%s" % (self.action,)
def _merge_kv(self, ds):
if ds is None:
return ""
elif isinstance(ds, string_types):
return ds
elif isinstance(ds, dict):
buf = ""
for (k, v) in iteritems(ds):
if k.startswith('_'):
continue
buf = buf + "%s=%s " % (k, v)
buf = buf.strip()
return buf
@staticmethod
def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
t = Task(block=block, role=role, task_include=task_include)
return t.load_data(data, variable_manager=variable_manager, loader=loader)
def __repr__(self):
''' returns a human readable representation of the task '''
if self.get_name() == 'meta':
return "TASK: meta (%s)" % self.args['_raw_params']
else:
return "TASK: %s" % self.get_name()
def _preprocess_with_loop(self, ds, new_ds, k, v):
''' take a lookup plugin name and store it correctly '''
loop_name = k.replace("with_", "")
if new_ds.get('loop') is not None or new_ds.get('loop_with') is not None:
raise AnsibleError("duplicate loop in task: %s" % loop_name, obj=ds)
if v is None:
raise AnsibleError("you must specify a value when using %s" % k, obj=ds)
new_ds['loop_with'] = loop_name
new_ds['loop'] = v
# display.deprecated("with_ type loops are being phased out, use the 'loop' keyword instead", version="2.10")
def preprocess_data(self, ds):
'''
tasks are especially complex arguments so need pre-processing.
keep it short.
'''
if not isinstance(ds, dict):
raise AnsibleAssertionError('ds (%s) should be a dict but was a %s' % (ds, type(ds)))
# the new, cleaned datastructure, which will have legacy
# items reduced to a standard structure suitable for the
# attributes of the task class
new_ds = AnsibleMapping()
if isinstance(ds, AnsibleBaseYAMLObject):
new_ds.ansible_pos = ds.ansible_pos
# since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
default_collection = AnsibleCollectionLoader().default_collection
# use the parent value if our ds doesn't define it
collections_list = ds.get('collections', self.collections)
if collections_list is None:
collections_list = []
if isinstance(collections_list, string_types):
collections_list = [collections_list]
if default_collection and not self._role: # FIXME: and not a collections role
if collections_list:
if default_collection not in collections_list:
collections_list.insert(0, default_collection)
else:
collections_list = [default_collection]
if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
collections_list.append('ansible.legacy')
if collections_list:
ds['collections'] = collections_list
# use the args parsing class to determine the action, args,
# and the delegate_to value from the various possible forms
# supported as legacy
args_parser = ModuleArgsParser(task_ds=ds, collection_list=collections_list)
try:
(action, args, delegate_to) = args_parser.parse()
except AnsibleParserError as e:
# if the raises exception was created with obj=ds args, then it includes the detail
# so we dont need to add it so we can just re raise.
if e._obj:
raise
# But if it wasn't, we can add the yaml object now to get more detail
raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)
# the command/shell/script modules used to support the `cmd` arg,
# which corresponds to what we now call _raw_params, so move that
# value over to _raw_params (assuming it is empty)
if action in ('command', 'shell', 'script'):
if 'cmd' in args:
if args.get('_raw_params', '') != '':
raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
" Please put everything in one or the other place.", obj=ds)
args['_raw_params'] = args.pop('cmd')
new_ds['action'] = action
new_ds['args'] = args
new_ds['delegate_to'] = delegate_to
# we handle any 'vars' specified in the ds here, as we may
# be adding things to them below (special handling for includes).
# When that deprecated feature is removed, this can be too.
if 'vars' in ds:
# _load_vars is defined in Base, and is used to load a dictionary
# or list of dictionaries in a standard way
new_ds['vars'] = self._load_vars(None, ds.get('vars'))
else:
new_ds['vars'] = dict()
for (k, v) in iteritems(ds):
if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
# we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
continue
elif k.startswith('with_') and k.replace("with_", "") in lookup_loader:
# transform into loop property
self._preprocess_with_loop(ds, new_ds, k, v)
else:
# pre-2.0 syntax allowed variables for include statements at the top level of the task,
# so we move those into the 'vars' dictionary here, and show a deprecation message
# as we will remove this at some point in the future.
if action in ('include',) and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
display.deprecated("Specifying include variables at the top-level of the task is deprecated."
" Please see:\nhttps://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
" for currently supported syntax regarding included files and variables", version="2.12")
new_ds['vars'][k] = v
elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self._valid_attrs:
new_ds[k] = v
else:
display.warning("Ignoring invalid attribute: %s" % k)
return super(Task, self).preprocess_data(new_ds)
def _load_loop_control(self, attr, ds):
if not isinstance(ds, dict):
raise AnsibleParserError( | "be a variable itself (though it can contain variables)",
obj=ds,
)
return LoopControl.load(data=ds, variable_manager=self._variable_manager, loader=self._loader)
def _validate_attributes(self, ds):
try:
super(Task, self)._validate_attributes(ds)
except AnsibleParserError as e:
e.message += '\nThis error can be suppressed as a warning using the "invalid_task_attribute_failed" configuration'
raise e
def post_validate(self, templar):
'''
Override of base class post_validate, to also do final validation on
the block and task include (if any) to which this task belongs.
'''
if self._parent:
self._parent.post_validate(templar)
if AnsibleCollectionLoader().default_collection:
pass
super(Task, self).post_validate(templar)
def _post_validate_loop(self, attr, value, templar):
'''
Override post validation for the loop field, which is templated
specially in the TaskExecutor class when evaluating loops.
'''
return value
def _post_validate_environment(self, attr, value, templar):
'''
Override post validation of vars on the play, as we don't want to
template these too early.
'''
env = {}
if value is not None:
def _parse_env_kv(k, v):
try:
env[k] = templar.template(v, convert_bare=False)
except AnsibleUndefinedVariable as e:
error = to_native(e)
if self.action in ('setup', 'gather_facts') and 'ansible_facts.env' in error or 'ansible_env' in error:
# ignore as fact gathering is required for 'env' facts
return
raise
if isinstance(value, list):
for env_item in value:
if isinstance(env_item, dict):
for k in env_item:
_parse_env_kv(k, env_item[k])
else:
isdict = templar.template(env_item, convert_bare=False)
if isinstance(isdict, dict):
env.update(isdict)
else:
display.warning("could not parse environment value, skipping: %s" % value)
elif isinstance(value, dict):
# should not really happen
env = dict()
for env_item in value:
_parse_env_kv(env_item, value[env_item])
else:
# at this point it should be a simple string, also should not happen
env = templar.template(value, convert_bare=False)
return env
def _post_validate_changed_when(self, attr, value, templar):
'''
changed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_failed_when(self, attr, value, templar):
'''
failed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_until(self, attr, value, templar):
'''
until is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def get_vars(self):
all_vars = dict()
if self._parent:
all_vars.update(self._parent.get_vars())
all_vars.update(self.vars)
if 'tags' in all_vars:
del all_vars['tags']
if 'when' in all_vars:
del all_vars['when']
return all_vars
def get_include_params(self):
all_vars = dict()
if self._parent:
all_vars.update(self._parent.get_include_params())
if self.action in ('include', 'include_tasks', 'include_role'):
all_vars.update(self.vars)
return all_vars
def copy(self, exclude_parent=False, exclude_tasks=False):
new_me = super(Task, self).copy()
new_me._parent = None
if self._parent and not exclude_parent:
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
new_me._role = None
if self._role:
new_me._role = self._role
return new_me
def serialize(self):
data = super(Task, self).serialize()
if not self._squashed and not self._finalized:
if self._parent:
data['parent'] = self._parent.serialize()
data['parent_type'] = self._parent.__class__.__name__
if self._role:
data['role'] = self._role.serialize()
return data
def deserialize(self, data):
# import is here to avoid import loops
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.handler_task_include import HandlerTaskInclude
parent_data = data.get('parent', None)
if parent_data:
parent_type = data.get('parent_type')
if parent_type == 'Block':
p = Block()
elif parent_type == 'TaskInclude':
p = TaskInclude()
elif parent_type == 'HandlerTaskInclude':
p = HandlerTaskInclude()
p.deserialize(parent_data)
self._parent = p
del data['parent']
role_data = data.get('role')
if role_data:
r = Role()
r.deserialize(role_data)
self._role = r
del data['role']
super(Task, self).deserialize(data)
def set_loader(self, loader):
'''
Sets the loader on this object and recursively on parent, child objects.
This is used primarily after the Task has been serialized/deserialized, which
does not preserve the loader.
'''
self._loader = loader
if self._parent:
self._parent.set_loader(loader)
def _get_parent_attribute(self, attr, extend=False, prepend=False):
'''
Generic logic to get the attribute or parent attribute for a task value.
'''
extend = self._valid_attrs[attr].extend
prepend = self._valid_attrs[attr].prepend
try:
value = self._attributes[attr]
# If parent is static, we can grab attrs from the parent
# otherwise, defer to the grandparent
if getattr(self._parent, 'statically_loaded', True):
_parent = self._parent
else:
_parent = self._parent._parent
if _parent and (value is Sentinel or extend):
if getattr(_parent, 'statically_loaded', True):
# vars are always inheritable, other attributes might not be for the parent but still should be for other ancestors
if attr != 'vars' and hasattr(_parent, '_get_parent_attribute'):
parent_value = _parent._get_parent_attribute(attr)
else:
parent_value = _parent._attributes.get(attr, Sentinel)
if extend:
value = self._extend_value(value, parent_value, prepend)
else:
value = parent_value
except KeyError:
pass
return value
def get_dep_chain(self):
if self._parent:
return self._parent.get_dep_chain()
else:
return None
def get_search_path(self):
'''
Return the list of paths you should search for files, in order.
This follows role/playbook dependency chain.
'''
path_stack = []
dep_chain = self.get_dep_chain()
# inside role: add the dependency chain from current to dependent
if dep_chain:
path_stack.extend(reversed([x._role_path for x in dep_chain]))
# add path of task itself, unless it is already in the list
task_dir = os.path.dirname(self.get_path())
if task_dir not in path_stack:
path_stack.append(task_dir)
return path_stack
def all_parents_static(self):
if self._parent:
return self._parent.all_parents_static()
return True
def get_first_parent_include(self):
from ansible.playbook.task_include import TaskInclude
if self._parent:
if isinstance(self._parent, TaskInclude):
return self._parent
return self._parent.get_first_parent_include()
return None | "the `loop_control` value must be specified as a dictionary and cannot " | random_line_split |
task.py | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleAssertionError
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_native
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.plugins.loader import lookup_loader
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.block import Block
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.loop_control import LoopControl
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.utils.collection_loader import AnsibleCollectionLoader
from ansible.utils.display import Display
from ansible.utils.sentinel import Sentinel
# Public interface of this module.
__all__ = ['Task']
# Module-level Display instance used for warnings and deprecation messages.
display = Display()
class Task(Base, Conditional, Taggable, CollectionSearch):
    """
    A task is a language feature that represents a call to a module, with given arguments and other parameters.
    A handler is a subclass of a task.
    Usage:
    Task.load(datastructure) -> Task
    Task.something(...)
    """
    # =================================================================================
    # ATTRIBUTES
    # load_<attribute_name> and
    # validate_<attribute_name>
    # will be used if defined
    # might be possible to define others
    # NOTE: ONLY set defaults on task attributes that are not inheritable,
    # inheritance is only triggered if the 'current value' is None,
    # default can be set at play/top level object and inheritance will take it's course.
    _args = FieldAttribute(isa='dict', default=dict)  # parsed module arguments
    _action = FieldAttribute(isa='string')  # resolved module/action name
    _async_val = FieldAttribute(isa='int', default=0, alias='async')  # async timeout; 0 disables async
    _changed_when = FieldAttribute(isa='list', default=list)
    _delay = FieldAttribute(isa='int', default=5)  # seconds between retries
    _delegate_to = FieldAttribute(isa='string')
    _delegate_facts = FieldAttribute(isa='bool')
    _failed_when = FieldAttribute(isa='list', default=list)
    _loop = FieldAttribute()
    _loop_control = FieldAttribute(isa='class', class_type=LoopControl, inherit=False)
    _notify = FieldAttribute(isa='list')  # handler names to notify on change
    _poll = FieldAttribute(isa='int', default=C.DEFAULT_POLL_INTERVAL)
    _register = FieldAttribute(isa='string', static=True)  # variable name for the task result
    _retries = FieldAttribute(isa='int', default=3)
    _until = FieldAttribute(isa='list', default=list)
    # deprecated, used to be loop and loop_args but loop has been repurposed
    _loop_with = FieldAttribute(isa='string', private=True, inherit=False)
def | (self, block=None, role=None, task_include=None):
''' constructors a task, without the Task.load classmethod, it will be pretty blank '''
self._role = role
self._parent = None
if task_include:
self._parent = task_include
else:
self._parent = block
super(Task, self).__init__()
def get_path(self):
''' return the absolute path of the task with its line number '''
path = ""
if hasattr(self, '_ds') and hasattr(self._ds, '_data_source') and hasattr(self._ds, '_line_number'):
path = "%s:%s" % (self._ds._data_source, self._ds._line_number)
elif hasattr(self._parent._play, '_ds') and hasattr(self._parent._play._ds, '_data_source') and hasattr(self._parent._play._ds, '_line_number'):
path = "%s:%s" % (self._parent._play._ds._data_source, self._parent._play._ds._line_number)
return path
def get_name(self, include_role_fqcn=True):
''' return the name of the task '''
if self._role:
role_name = self._role.get_name(include_role_fqcn=include_role_fqcn)
if self._role and self.name and role_name not in self.name:
return "%s : %s" % (role_name, self.name)
elif self.name:
return self.name
else:
if self._role:
return "%s : %s" % (role_name, self.action)
else:
return "%s" % (self.action,)
def _merge_kv(self, ds):
if ds is None:
return ""
elif isinstance(ds, string_types):
return ds
elif isinstance(ds, dict):
buf = ""
for (k, v) in iteritems(ds):
if k.startswith('_'):
continue
buf = buf + "%s=%s " % (k, v)
buf = buf.strip()
return buf
@staticmethod
def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
t = Task(block=block, role=role, task_include=task_include)
return t.load_data(data, variable_manager=variable_manager, loader=loader)
def __repr__(self):
''' returns a human readable representation of the task '''
if self.get_name() == 'meta':
return "TASK: meta (%s)" % self.args['_raw_params']
else:
return "TASK: %s" % self.get_name()
def _preprocess_with_loop(self, ds, new_ds, k, v):
''' take a lookup plugin name and store it correctly '''
loop_name = k.replace("with_", "")
if new_ds.get('loop') is not None or new_ds.get('loop_with') is not None:
raise AnsibleError("duplicate loop in task: %s" % loop_name, obj=ds)
if v is None:
raise AnsibleError("you must specify a value when using %s" % k, obj=ds)
new_ds['loop_with'] = loop_name
new_ds['loop'] = v
# display.deprecated("with_ type loops are being phased out, use the 'loop' keyword instead", version="2.10")
def preprocess_data(self, ds):
'''
tasks are especially complex arguments so need pre-processing.
keep it short.
'''
if not isinstance(ds, dict):
raise AnsibleAssertionError('ds (%s) should be a dict but was a %s' % (ds, type(ds)))
# the new, cleaned datastructure, which will have legacy
# items reduced to a standard structure suitable for the
# attributes of the task class
new_ds = AnsibleMapping()
if isinstance(ds, AnsibleBaseYAMLObject):
new_ds.ansible_pos = ds.ansible_pos
# since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
default_collection = AnsibleCollectionLoader().default_collection
# use the parent value if our ds doesn't define it
collections_list = ds.get('collections', self.collections)
if collections_list is None:
collections_list = []
if isinstance(collections_list, string_types):
collections_list = [collections_list]
if default_collection and not self._role: # FIXME: and not a collections role
if collections_list:
if default_collection not in collections_list:
collections_list.insert(0, default_collection)
else:
collections_list = [default_collection]
if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
collections_list.append('ansible.legacy')
if collections_list:
ds['collections'] = collections_list
# use the args parsing class to determine the action, args,
# and the delegate_to value from the various possible forms
# supported as legacy
args_parser = ModuleArgsParser(task_ds=ds, collection_list=collections_list)
try:
(action, args, delegate_to) = args_parser.parse()
except AnsibleParserError as e:
# if the raises exception was created with obj=ds args, then it includes the detail
# so we dont need to add it so we can just re raise.
if e._obj:
raise
# But if it wasn't, we can add the yaml object now to get more detail
raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)
# the command/shell/script modules used to support the `cmd` arg,
# which corresponds to what we now call _raw_params, so move that
# value over to _raw_params (assuming it is empty)
if action in ('command', 'shell', 'script'):
if 'cmd' in args:
if args.get('_raw_params', '') != '':
raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
" Please put everything in one or the other place.", obj=ds)
args['_raw_params'] = args.pop('cmd')
new_ds['action'] = action
new_ds['args'] = args
new_ds['delegate_to'] = delegate_to
# we handle any 'vars' specified in the ds here, as we may
# be adding things to them below (special handling for includes).
# When that deprecated feature is removed, this can be too.
if 'vars' in ds:
# _load_vars is defined in Base, and is used to load a dictionary
# or list of dictionaries in a standard way
new_ds['vars'] = self._load_vars(None, ds.get('vars'))
else:
new_ds['vars'] = dict()
for (k, v) in iteritems(ds):
if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
# we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
continue
elif k.startswith('with_') and k.replace("with_", "") in lookup_loader:
# transform into loop property
self._preprocess_with_loop(ds, new_ds, k, v)
else:
# pre-2.0 syntax allowed variables for include statements at the top level of the task,
# so we move those into the 'vars' dictionary here, and show a deprecation message
# as we will remove this at some point in the future.
if action in ('include',) and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
display.deprecated("Specifying include variables at the top-level of the task is deprecated."
" Please see:\nhttps://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
" for currently supported syntax regarding included files and variables", version="2.12")
new_ds['vars'][k] = v
elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self._valid_attrs:
new_ds[k] = v
else:
display.warning("Ignoring invalid attribute: %s" % k)
return super(Task, self).preprocess_data(new_ds)
def _load_loop_control(self, attr, ds):
if not isinstance(ds, dict):
raise AnsibleParserError(
"the `loop_control` value must be specified as a dictionary and cannot "
"be a variable itself (though it can contain variables)",
obj=ds,
)
return LoopControl.load(data=ds, variable_manager=self._variable_manager, loader=self._loader)
def _validate_attributes(self, ds):
try:
super(Task, self)._validate_attributes(ds)
except AnsibleParserError as e:
e.message += '\nThis error can be suppressed as a warning using the "invalid_task_attribute_failed" configuration'
raise e
def post_validate(self, templar):
'''
Override of base class post_validate, to also do final validation on
the block and task include (if any) to which this task belongs.
'''
if self._parent:
self._parent.post_validate(templar)
if AnsibleCollectionLoader().default_collection:
pass
super(Task, self).post_validate(templar)
def _post_validate_loop(self, attr, value, templar):
'''
Override post validation for the loop field, which is templated
specially in the TaskExecutor class when evaluating loops.
'''
return value
def _post_validate_environment(self, attr, value, templar):
'''
Override post validation of vars on the play, as we don't want to
template these too early.
'''
env = {}
if value is not None:
def _parse_env_kv(k, v):
try:
env[k] = templar.template(v, convert_bare=False)
except AnsibleUndefinedVariable as e:
error = to_native(e)
if self.action in ('setup', 'gather_facts') and 'ansible_facts.env' in error or 'ansible_env' in error:
# ignore as fact gathering is required for 'env' facts
return
raise
if isinstance(value, list):
for env_item in value:
if isinstance(env_item, dict):
for k in env_item:
_parse_env_kv(k, env_item[k])
else:
isdict = templar.template(env_item, convert_bare=False)
if isinstance(isdict, dict):
env.update(isdict)
else:
display.warning("could not parse environment value, skipping: %s" % value)
elif isinstance(value, dict):
# should not really happen
env = dict()
for env_item in value:
_parse_env_kv(env_item, value[env_item])
else:
# at this point it should be a simple string, also should not happen
env = templar.template(value, convert_bare=False)
return env
def _post_validate_changed_when(self, attr, value, templar):
'''
changed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_failed_when(self, attr, value, templar):
'''
failed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_until(self, attr, value, templar):
'''
until is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def get_vars(self):
all_vars = dict()
if self._parent:
all_vars.update(self._parent.get_vars())
all_vars.update(self.vars)
if 'tags' in all_vars:
del all_vars['tags']
if 'when' in all_vars:
del all_vars['when']
return all_vars
def get_include_params(self):
all_vars = dict()
if self._parent:
all_vars.update(self._parent.get_include_params())
if self.action in ('include', 'include_tasks', 'include_role'):
all_vars.update(self.vars)
return all_vars
def copy(self, exclude_parent=False, exclude_tasks=False):
new_me = super(Task, self).copy()
new_me._parent = None
if self._parent and not exclude_parent:
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
new_me._role = None
if self._role:
new_me._role = self._role
return new_me
def serialize(self):
data = super(Task, self).serialize()
if not self._squashed and not self._finalized:
if self._parent:
data['parent'] = self._parent.serialize()
data['parent_type'] = self._parent.__class__.__name__
if self._role:
data['role'] = self._role.serialize()
return data
def deserialize(self, data):
# import is here to avoid import loops
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.handler_task_include import HandlerTaskInclude
parent_data = data.get('parent', None)
if parent_data:
parent_type = data.get('parent_type')
if parent_type == 'Block':
p = Block()
elif parent_type == 'TaskInclude':
p = TaskInclude()
elif parent_type == 'HandlerTaskInclude':
p = HandlerTaskInclude()
p.deserialize(parent_data)
self._parent = p
del data['parent']
role_data = data.get('role')
if role_data:
r = Role()
r.deserialize(role_data)
self._role = r
del data['role']
super(Task, self).deserialize(data)
def set_loader(self, loader):
'''
Sets the loader on this object and recursively on parent, child objects.
This is used primarily after the Task has been serialized/deserialized, which
does not preserve the loader.
'''
self._loader = loader
if self._parent:
self._parent.set_loader(loader)
def _get_parent_attribute(self, attr, extend=False, prepend=False):
'''
Generic logic to get the attribute or parent attribute for a task value.
'''
extend = self._valid_attrs[attr].extend
prepend = self._valid_attrs[attr].prepend
try:
value = self._attributes[attr]
# If parent is static, we can grab attrs from the parent
# otherwise, defer to the grandparent
if getattr(self._parent, 'statically_loaded', True):
_parent = self._parent
else:
_parent = self._parent._parent
if _parent and (value is Sentinel or extend):
if getattr(_parent, 'statically_loaded', True):
# vars are always inheritable, other attributes might not be for the parent but still should be for other ancestors
if attr != 'vars' and hasattr(_parent, '_get_parent_attribute'):
parent_value = _parent._get_parent_attribute(attr)
else:
parent_value = _parent._attributes.get(attr, Sentinel)
if extend:
value = self._extend_value(value, parent_value, prepend)
else:
value = parent_value
except KeyError:
pass
return value
def get_dep_chain(self):
if self._parent:
return self._parent.get_dep_chain()
else:
return None
def get_search_path(self):
'''
Return the list of paths you should search for files, in order.
This follows role/playbook dependency chain.
'''
path_stack = []
dep_chain = self.get_dep_chain()
# inside role: add the dependency chain from current to dependent
if dep_chain:
path_stack.extend(reversed([x._role_path for x in dep_chain]))
# add path of task itself, unless it is already in the list
task_dir = os.path.dirname(self.get_path())
if task_dir not in path_stack:
path_stack.append(task_dir)
return path_stack
def all_parents_static(self):
if self._parent:
return self._parent.all_parents_static()
return True
def get_first_parent_include(self):
from ansible.playbook.task_include import TaskInclude
if self._parent:
if isinstance(self._parent, TaskInclude):
return self._parent
return self._parent.get_first_parent_include()
return None
| __init__ | identifier_name |
task.py | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleAssertionError
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_native
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.plugins.loader import lookup_loader
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.block import Block
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.loop_control import LoopControl
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.utils.collection_loader import AnsibleCollectionLoader
from ansible.utils.display import Display
from ansible.utils.sentinel import Sentinel
__all__ = ['Task']
display = Display()
class Task(Base, Conditional, Taggable, CollectionSearch):
"""
A task is a language feature that represents a call to a module, with given arguments and other parameters.
A handler is a subclass of a task.
Usage:
Task.load(datastructure) -> Task
Task.something(...)
"""
# =================================================================================
# ATTRIBUTES
# load_<attribute_name> and
# validate_<attribute_name>
# will be used if defined
# might be possible to define others
# NOTE: ONLY set defaults on task attributes that are not inheritable,
# inheritance is only triggered if the 'current value' is None,
# default can be set at play/top level object and inheritance will take it's course.
_args = FieldAttribute(isa='dict', default=dict)
_action = FieldAttribute(isa='string')
_async_val = FieldAttribute(isa='int', default=0, alias='async')
_changed_when = FieldAttribute(isa='list', default=list)
_delay = FieldAttribute(isa='int', default=5)
_delegate_to = FieldAttribute(isa='string')
_delegate_facts = FieldAttribute(isa='bool')
_failed_when = FieldAttribute(isa='list', default=list)
_loop = FieldAttribute()
_loop_control = FieldAttribute(isa='class', class_type=LoopControl, inherit=False)
_notify = FieldAttribute(isa='list')
_poll = FieldAttribute(isa='int', default=C.DEFAULT_POLL_INTERVAL)
_register = FieldAttribute(isa='string', static=True)
_retries = FieldAttribute(isa='int', default=3)
_until = FieldAttribute(isa='list', default=list)
# deprecated, used to be loop and loop_args but loop has been repurposed
_loop_with = FieldAttribute(isa='string', private=True, inherit=False)
def __init__(self, block=None, role=None, task_include=None):
''' constructors a task, without the Task.load classmethod, it will be pretty blank '''
self._role = role
self._parent = None
if task_include:
self._parent = task_include
else:
self._parent = block
super(Task, self).__init__()
def get_path(self):
''' return the absolute path of the task with its line number '''
path = ""
if hasattr(self, '_ds') and hasattr(self._ds, '_data_source') and hasattr(self._ds, '_line_number'):
path = "%s:%s" % (self._ds._data_source, self._ds._line_number)
elif hasattr(self._parent._play, '_ds') and hasattr(self._parent._play._ds, '_data_source') and hasattr(self._parent._play._ds, '_line_number'):
path = "%s:%s" % (self._parent._play._ds._data_source, self._parent._play._ds._line_number)
return path
def get_name(self, include_role_fqcn=True):
''' return the name of the task '''
if self._role:
role_name = self._role.get_name(include_role_fqcn=include_role_fqcn)
if self._role and self.name and role_name not in self.name:
return "%s : %s" % (role_name, self.name)
elif self.name:
return self.name
else:
if self._role:
return "%s : %s" % (role_name, self.action)
else:
return "%s" % (self.action,)
def _merge_kv(self, ds):
if ds is None:
return ""
elif isinstance(ds, string_types):
return ds
elif isinstance(ds, dict):
buf = ""
for (k, v) in iteritems(ds):
if k.startswith('_'):
continue
buf = buf + "%s=%s " % (k, v)
buf = buf.strip()
return buf
@staticmethod
def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
t = Task(block=block, role=role, task_include=task_include)
return t.load_data(data, variable_manager=variable_manager, loader=loader)
def __repr__(self):
''' returns a human readable representation of the task '''
if self.get_name() == 'meta':
return "TASK: meta (%s)" % self.args['_raw_params']
else:
return "TASK: %s" % self.get_name()
def _preprocess_with_loop(self, ds, new_ds, k, v):
''' take a lookup plugin name and store it correctly '''
loop_name = k.replace("with_", "")
if new_ds.get('loop') is not None or new_ds.get('loop_with') is not None:
raise AnsibleError("duplicate loop in task: %s" % loop_name, obj=ds)
if v is None:
raise AnsibleError("you must specify a value when using %s" % k, obj=ds)
new_ds['loop_with'] = loop_name
new_ds['loop'] = v
# display.deprecated("with_ type loops are being phased out, use the 'loop' keyword instead", version="2.10")
def preprocess_data(self, ds):
'''
tasks are especially complex arguments so need pre-processing.
keep it short.
'''
if not isinstance(ds, dict):
raise AnsibleAssertionError('ds (%s) should be a dict but was a %s' % (ds, type(ds)))
# the new, cleaned datastructure, which will have legacy
# items reduced to a standard structure suitable for the
# attributes of the task class
new_ds = AnsibleMapping()
if isinstance(ds, AnsibleBaseYAMLObject):
new_ds.ansible_pos = ds.ansible_pos
# since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
default_collection = AnsibleCollectionLoader().default_collection
# use the parent value if our ds doesn't define it
collections_list = ds.get('collections', self.collections)
if collections_list is None:
collections_list = []
if isinstance(collections_list, string_types):
collections_list = [collections_list]
if default_collection and not self._role: # FIXME: and not a collections role
if collections_list:
if default_collection not in collections_list:
collections_list.insert(0, default_collection)
else:
collections_list = [default_collection]
if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
collections_list.append('ansible.legacy')
if collections_list:
ds['collections'] = collections_list
# use the args parsing class to determine the action, args,
# and the delegate_to value from the various possible forms
# supported as legacy
args_parser = ModuleArgsParser(task_ds=ds, collection_list=collections_list)
try:
(action, args, delegate_to) = args_parser.parse()
except AnsibleParserError as e:
# if the raises exception was created with obj=ds args, then it includes the detail
# so we dont need to add it so we can just re raise.
if e._obj:
raise
# But if it wasn't, we can add the yaml object now to get more detail
raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)
# the command/shell/script modules used to support the `cmd` arg,
# which corresponds to what we now call _raw_params, so move that
# value over to _raw_params (assuming it is empty)
if action in ('command', 'shell', 'script'):
if 'cmd' in args:
if args.get('_raw_params', '') != '':
raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
" Please put everything in one or the other place.", obj=ds)
args['_raw_params'] = args.pop('cmd')
new_ds['action'] = action
new_ds['args'] = args
new_ds['delegate_to'] = delegate_to
# we handle any 'vars' specified in the ds here, as we may
# be adding things to them below (special handling for includes).
# When that deprecated feature is removed, this can be too.
if 'vars' in ds:
# _load_vars is defined in Base, and is used to load a dictionary
# or list of dictionaries in a standard way
new_ds['vars'] = self._load_vars(None, ds.get('vars'))
else:
new_ds['vars'] = dict()
for (k, v) in iteritems(ds):
if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
# we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
continue
elif k.startswith('with_') and k.replace("with_", "") in lookup_loader:
# transform into loop property
self._preprocess_with_loop(ds, new_ds, k, v)
else:
# pre-2.0 syntax allowed variables for include statements at the top level of the task,
# so we move those into the 'vars' dictionary here, and show a deprecation message
# as we will remove this at some point in the future.
if action in ('include',) and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
display.deprecated("Specifying include variables at the top-level of the task is deprecated."
" Please see:\nhttps://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
" for currently supported syntax regarding included files and variables", version="2.12")
new_ds['vars'][k] = v
elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self._valid_attrs:
new_ds[k] = v
else:
display.warning("Ignoring invalid attribute: %s" % k)
return super(Task, self).preprocess_data(new_ds)
def _load_loop_control(self, attr, ds):
if not isinstance(ds, dict):
raise AnsibleParserError(
"the `loop_control` value must be specified as a dictionary and cannot "
"be a variable itself (though it can contain variables)",
obj=ds,
)
return LoopControl.load(data=ds, variable_manager=self._variable_manager, loader=self._loader)
def _validate_attributes(self, ds):
try:
super(Task, self)._validate_attributes(ds)
except AnsibleParserError as e:
e.message += '\nThis error can be suppressed as a warning using the "invalid_task_attribute_failed" configuration'
raise e
def post_validate(self, templar):
'''
Override of base class post_validate, to also do final validation on
the block and task include (if any) to which this task belongs.
'''
if self._parent:
self._parent.post_validate(templar)
if AnsibleCollectionLoader().default_collection:
pass
super(Task, self).post_validate(templar)
def _post_validate_loop(self, attr, value, templar):
'''
Override post validation for the loop field, which is templated
specially in the TaskExecutor class when evaluating loops.
'''
return value
def _post_validate_environment(self, attr, value, templar):
'''
Override post validation of vars on the play, as we don't want to
template these too early.
'''
env = {}
if value is not None:
def _parse_env_kv(k, v):
try:
env[k] = templar.template(v, convert_bare=False)
except AnsibleUndefinedVariable as e:
error = to_native(e)
if self.action in ('setup', 'gather_facts') and 'ansible_facts.env' in error or 'ansible_env' in error:
# ignore as fact gathering is required for 'env' facts
return
raise
if isinstance(value, list):
for env_item in value:
if isinstance(env_item, dict):
for k in env_item:
_parse_env_kv(k, env_item[k])
else:
isdict = templar.template(env_item, convert_bare=False)
if isinstance(isdict, dict):
|
else:
display.warning("could not parse environment value, skipping: %s" % value)
elif isinstance(value, dict):
# should not really happen
env = dict()
for env_item in value:
_parse_env_kv(env_item, value[env_item])
else:
# at this point it should be a simple string, also should not happen
env = templar.template(value, convert_bare=False)
return env
def _post_validate_changed_when(self, attr, value, templar):
'''
changed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_failed_when(self, attr, value, templar):
'''
failed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_until(self, attr, value, templar):
'''
until is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def get_vars(self):
all_vars = dict()
if self._parent:
all_vars.update(self._parent.get_vars())
all_vars.update(self.vars)
if 'tags' in all_vars:
del all_vars['tags']
if 'when' in all_vars:
del all_vars['when']
return all_vars
def get_include_params(self):
all_vars = dict()
if self._parent:
all_vars.update(self._parent.get_include_params())
if self.action in ('include', 'include_tasks', 'include_role'):
all_vars.update(self.vars)
return all_vars
def copy(self, exclude_parent=False, exclude_tasks=False):
new_me = super(Task, self).copy()
new_me._parent = None
if self._parent and not exclude_parent:
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
new_me._role = None
if self._role:
new_me._role = self._role
return new_me
def serialize(self):
data = super(Task, self).serialize()
if not self._squashed and not self._finalized:
if self._parent:
data['parent'] = self._parent.serialize()
data['parent_type'] = self._parent.__class__.__name__
if self._role:
data['role'] = self._role.serialize()
return data
def deserialize(self, data):
# import is here to avoid import loops
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.handler_task_include import HandlerTaskInclude
parent_data = data.get('parent', None)
if parent_data:
parent_type = data.get('parent_type')
if parent_type == 'Block':
p = Block()
elif parent_type == 'TaskInclude':
p = TaskInclude()
elif parent_type == 'HandlerTaskInclude':
p = HandlerTaskInclude()
p.deserialize(parent_data)
self._parent = p
del data['parent']
role_data = data.get('role')
if role_data:
r = Role()
r.deserialize(role_data)
self._role = r
del data['role']
super(Task, self).deserialize(data)
def set_loader(self, loader):
'''
Sets the loader on this object and recursively on parent, child objects.
This is used primarily after the Task has been serialized/deserialized, which
does not preserve the loader.
'''
self._loader = loader
if self._parent:
self._parent.set_loader(loader)
def _get_parent_attribute(self, attr, extend=False, prepend=False):
'''
Generic logic to get the attribute or parent attribute for a task value.
'''
extend = self._valid_attrs[attr].extend
prepend = self._valid_attrs[attr].prepend
try:
value = self._attributes[attr]
# If parent is static, we can grab attrs from the parent
# otherwise, defer to the grandparent
if getattr(self._parent, 'statically_loaded', True):
_parent = self._parent
else:
_parent = self._parent._parent
if _parent and (value is Sentinel or extend):
if getattr(_parent, 'statically_loaded', True):
# vars are always inheritable, other attributes might not be for the parent but still should be for other ancestors
if attr != 'vars' and hasattr(_parent, '_get_parent_attribute'):
parent_value = _parent._get_parent_attribute(attr)
else:
parent_value = _parent._attributes.get(attr, Sentinel)
if extend:
value = self._extend_value(value, parent_value, prepend)
else:
value = parent_value
except KeyError:
pass
return value
def get_dep_chain(self):
if self._parent:
return self._parent.get_dep_chain()
else:
return None
def get_search_path(self):
'''
Return the list of paths you should search for files, in order.
This follows role/playbook dependency chain.
'''
path_stack = []
dep_chain = self.get_dep_chain()
# inside role: add the dependency chain from current to dependent
if dep_chain:
path_stack.extend(reversed([x._role_path for x in dep_chain]))
# add path of task itself, unless it is already in the list
task_dir = os.path.dirname(self.get_path())
if task_dir not in path_stack:
path_stack.append(task_dir)
return path_stack
def all_parents_static(self):
if self._parent:
return self._parent.all_parents_static()
return True
def get_first_parent_include(self):
from ansible.playbook.task_include import TaskInclude
if self._parent:
if isinstance(self._parent, TaskInclude):
return self._parent
return self._parent.get_first_parent_include()
return None
| env.update(isdict) | conditional_block |
axis.js | // "horizontalaxis" : {
// "id" : STRING, "type" : DATATYPE(number), "length" : RELLEN(1.0), "base" : POINT(-1,1), "anchor" : DOUBLE(-1), "position" : POINT(0,0),
// "min" : DATAVALUEORAUTO(auto), "max" : DATAVALUEORAUTO(auto), "minposition" : RELPOS(-1.0), "maxposition" : RELPOS(1.0), "color" : COLOR(black), "linewidth" : INTEGER(1),
// "tickmin" : INTEGER(-3), "tickmax" : INTEGER(3), "tickcolor" : COLOR(black),
// "labels" : {
// "format" : STRING, "start" : DATAVALUE(0), "angle" : DOUBLE(0), "position" : POINT,
// "anchor" : POINT, "color" : COLOR(black), "spacing" : STRING, "densityfactor" : DOUBLE(1.0),
// "label" : [
// { "format" : STRING, "start" : STRING, "angle" : DOUBLE, "position" : POINT, "anchor" : POINT, "spacing" : STRING, "densityfactor" : DOUBLE },
// { "format" : STRING, "start" : STRING, "angle" : DOUBLE, "position" : POINT, "anchor" : POINT, "spacing" : STRING, "densityfactor" : DOUBLE },
// ...
// ]
// }
// "title" : { "base" : DOUBLE(0), "anchor" : POINT, "position" : POINT, "angle" : DOUBLE(0), "text" : "TITLETEXT", "font": STRING },
// "grid" : { "color" : COLOR(0xeeeeee), "visible" : BOOLEAN(false) },
// "pan" : { "allowed" : BOOLEAN(yes), "min" : DATAVALUE, "max" : DATAVALUE },
// "zoom" : { "allowed" : BOOLEAN(yes), "min" : DATAMEASURE, "max" : DATAMEASURE, "anchor" : DATAVALUE },
// "binding" : { "id" : STRING!, "min" : DATAVALUE!, "max" : DATAVALUE! }
// "visible" : BOOLEAN(true)
// }
// these are needed so that their .parseJSON methods will be defined when called below:
require('./labeler.js');
require('./axis_title.js');
require('./grid.js');
require('./pan.js');
require('./zoom.js');
var Axis = require('../../core/axis.js'),
pF = require('../../util/parsingFunctions.js'),
vF = require('../../util/validationFunctions.js'),
uF = require('../../util/utilityFunctions.js');
var parseLabels = function (json, axis) {
var spacings,
labelers = axis.labelers(),
Labeler = require('../../core/labeler.js'),
DataValue = require('../../core/data_value.js'),
i;
spacings = [];
if (json !== undefined) {
if (json.spacing !== undefined) { | // If there was a spacing attr on the <labels> tag, create a new labeler for
// each spacing present in it, using the other values from the <labels> tag
for (i = 0; i < spacings.length; ++i) {
labelers.add(Labeler.parseJSON(json, axis, undefined, spacings[i]));
}
} else if (json !== undefined && json.label !== undefined && json.label.length > 0) {
// If there are <label> tags, parse the <labels> tag to get default values
var defaults = Labeler.parseJSON(json, axis, undefined, null);
// And loop over each <label> tag, creating labelers for each, splitting multiple
// spacings on the same <label> tag into multiple labelers:
json.label.forEach(function(e) {
var spacing = [];
if (e.spacing !== undefined) {
spacing = vF.typeOf(e.spacing) === 'array' ? e.spacing : [ e.spacing ];
}
spacing.forEach(function(s) {
labelers.add( Labeler.parseJSON(e, axis, defaults, s) );
});
});
} else {
// Otherwise create labelers using the default spacing, with the other values
// from the <labels> tag
var defaultValues = (uF.getDefaultValuesFromXSD()).horizontalaxis.labels;
var defaultSpacings = axis.type() === DataValue.NUMBER ?
defaultValues.defaultNumberSpacing :
defaultValues.defaultDatetimeSpacing;
for (i = 0; i < defaultSpacings.length; ++i) {
labelers.add(Labeler.parseJSON(json, axis, undefined, defaultSpacings[i]));
}
}
};
Axis.parseJSON = function (json, orientation, messageHandler, multigraph) {
var DataValue = require('../../core/data_value.js'),
Point = require('../../math/point.js'),
RGBColor = require('../../math/rgb_color.js'),
Displacement = require('../../math/displacement.js'),
AxisTitle = require('../../core/axis_title.js'),
Grid = require('../../core/grid.js'),
Pan = require('../../core/pan.js'),
Zoom = require('../../core/zoom.js'),
AxisBinding = require('../../core/axis_binding.js'),
axis = new Axis(orientation),
parseAttribute = pF.parseAttribute,
parseDisplacement = Displacement.parse,
parseJSONPoint = function(p) { return new Point(p[0], p[1]); },
parseRGBColor = RGBColor.parse,
attr, child,
value;
if (json) {
parseAttribute(json.id, axis.id);
parseAttribute(json.type, axis.type, DataValue.parseType);
parseAttribute(json.length, axis.length, parseDisplacement);
//
// The following provides support for the deprecated "positionbase" axis attribute;
// MUGL files should use the "base" attribute instead. When we're ready to remove
// support for the deprecated attribute, delete this block of code:
//
(function () {
var positionbase = json.positionbase;
if (positionbase) {
messageHandler.warning('Use of deprecated axis attribute "positionbase"; use "base" attribute instead');
if ((positionbase === "left") || (positionbase === "bottom")) {
axis.base(new Point(-1, -1));
} else if (positionbase === "right") {
axis.base(new Point(1, -1));
} else if (positionbase === "top") {
axis.base(new Point(-1, 1));
}
}
}());
//
// End of code to delete when removing support for deprecated "positionbase"
// attribute.
//
attr = json.position;
if (attr !== undefined) {
if (vF.typeOf(attr) === 'array') {
axis.position(parseJSONPoint(attr));
} else {
// If position is not an array, and if it can be interpreted
// as a number, construct the position point by interpreting that
// number as an offset from the 0 location along the perpendicular
// direction.
if (vF.isNumberNotNaN(attr)) {
if (orientation === Axis.HORIZONTAL) {
axis.position(new Point(0, attr));
} else {
axis.position(new Point(attr, 0));
}
} else {
throw new Error("axis position '"+attr+"' is of the wrong type; it should be a number or a point");
}
}
}
// Note: we coerce the min and max values to strings here, because the "min" and "max" attrs
// of the Axis object require strings. See the comments about these properties in src/core/axis.js
// for a discussion of why this is the case.
if ("min" in json) {
axis.min(uF.coerceToString(json.min));
}
if (axis.min() !== "auto") {
axis.dataMin(DataValue.parse(axis.type(), axis.min()));
}
if ("max" in json) {
axis.max(uF.coerceToString(json.max));
}
if (axis.max() !== "auto") {
axis.dataMax(DataValue.parse(axis.type(), axis.max()));
}
parseAttribute(json.pregap, axis.pregap);
parseAttribute(json.postgap, axis.postgap);
parseAttribute(json.anchor, axis.anchor);
parseAttribute(json.base, axis.base, parseJSONPoint);
parseAttribute(json.minposition, axis.minposition, parseDisplacement);
parseAttribute(json.maxposition, axis.maxposition, parseDisplacement);
parseAttribute(json.minoffset, axis.minoffset);
parseAttribute(json.maxoffset, axis.maxoffset);
parseAttribute(json.color, axis.color, parseRGBColor);
parseAttribute(json.tickcolor, axis.tickcolor, parseRGBColor);
parseAttribute(json.tickwidth, axis.tickwidth);
parseAttribute(json.tickmin, axis.tickmin);
parseAttribute(json.tickmax, axis.tickmax);
parseAttribute(json.highlightstyle, axis.highlightstyle);
parseAttribute(json.linewidth, axis.linewidth);
if ("title" in json) {
if (typeof(json.title) === 'boolean') {
if (json.title) {
axis.title(new AxisTitle(axis));
} else {
axis.title(AxisTitle.parseJSON({}, axis));
}
} else {
axis.title(AxisTitle.parseJSON(json.title, axis));
}
} else {
axis.title(new AxisTitle(axis));
}
if (json.grid) {
axis.grid(Grid.parseJSON(json.grid));
}
if (json.visible !== undefined) {
axis.visible(json.visible);
}
if ("pan" in json) {
axis.pan(Pan.parseJSON(json.pan, axis.type()));
}
if ("zoom" in json) {
axis.zoom(Zoom.parseJSON(json.zoom, axis.type()));
}
if (json.labels) {
parseLabels(json.labels, axis);
}
if (json.binding) {
var bindingMinDataValue = DataValue.parse(axis.type(), json.binding.min),
bindingMaxDataValue = DataValue.parse(axis.type(), json.binding.max);
if (typeof(json.binding.id) !== "string") {
throw new Error("invalid axis binding id: '" + json.binding.id + "'");
}
if (! DataValue.isInstance(bindingMinDataValue)) {
throw new Error("invalid axis binding min: '" + json.binding.min + "'");
}
if (! DataValue.isInstance(bindingMaxDataValue)) {
throw new Error("invalid axis binding max: '" + json.binding.max + "'");
}
AxisBinding.findByIdOrCreateNew(json.binding.id).addAxis(axis, bindingMinDataValue, bindingMaxDataValue, multigraph);
}
}
return axis;
};
module.exports = Axis; | spacings = vF.typeOf(json.spacing) === 'array' ? json.spacing : [ json.spacing ];
}
}
if (spacings.length > 0) { | random_line_split |
axis.js |
// "horizontalaxis" : {
// "id" : STRING, "type" : DATATYPE(number), "length" : RELLEN(1.0), "base" : POINT(-1,1), "anchor" : DOUBLE(-1), "position" : POINT(0,0),
// "min" : DATAVALUEORAUTO(auto), "max" : DATAVALUEORAUTO(auto), "minposition" : RELPOS(-1.0), "maxposition" : RELPOS(1.0), "color" : COLOR(black), "linewidth" : INTEGER(1),
// "tickmin" : INTEGER(-3), "tickmax" : INTEGER(3), "tickcolor" : COLOR(black),
// "labels" : {
// "format" : STRING, "start" : DATAVALUE(0), "angle" : DOUBLE(0), "position" : POINT,
// "anchor" : POINT, "color" : COLOR(black), "spacing" : STRING, "densityfactor" : DOUBLE(1.0),
// "label" : [
// { "format" : STRING, "start" : STRING, "angle" : DOUBLE, "position" : POINT, "anchor" : POINT, "spacing" : STRING, "densityfactor" : DOUBLE },
// { "format" : STRING, "start" : STRING, "angle" : DOUBLE, "position" : POINT, "anchor" : POINT, "spacing" : STRING, "densityfactor" : DOUBLE },
// ...
// ]
// }
// "title" : { "base" : DOUBLE(0), "anchor" : POINT, "position" : POINT, "angle" : DOUBLE(0), "text" : "TITLETEXT", "font": STRING },
// "grid" : { "color" : COLOR(0xeeeeee), "visible" : BOOLEAN(false) },
// "pan" : { "allowed" : BOOLEAN(yes), "min" : DATAVALUE, "max" : DATAVALUE },
// "zoom" : { "allowed" : BOOLEAN(yes), "min" : DATAMEASURE, "max" : DATAMEASURE, "anchor" : DATAVALUE },
// "binding" : { "id" : STRING!, "min" : DATAVALUE!, "max" : DATAVALUE! }
// "visible" : BOOLEAN(true)
// }
// these are needed so that their .parseJSON methods will be defined when called below:
require('./labeler.js');
require('./axis_title.js');
require('./grid.js');
require('./pan.js');
require('./zoom.js');
var Axis = require('../../core/axis.js'),
pF = require('../../util/parsingFunctions.js'),
vF = require('../../util/validationFunctions.js'),
uF = require('../../util/utilityFunctions.js');
var parseLabels = function (json, axis) {
var spacings,
labelers = axis.labelers(),
Labeler = require('../../core/labeler.js'),
DataValue = require('../../core/data_value.js'),
i;
spacings = [];
if (json !== undefined) {
if (json.spacing !== undefined) {
spacings = vF.typeOf(json.spacing) === 'array' ? json.spacing : [ json.spacing ];
}
}
if (spacings.length > 0) {
// If there was a spacing attr on the <labels> tag, create a new labeler for
// each spacing present in it, using the other values from the <labels> tag
for (i = 0; i < spacings.length; ++i) {
labelers.add(Labeler.parseJSON(json, axis, undefined, spacings[i]));
}
} else if (json !== undefined && json.label !== undefined && json.label.length > 0) {
// If there are <label> tags, parse the <labels> tag to get default values
var defaults = Labeler.parseJSON(json, axis, undefined, null);
// And loop over each <label> tag, creating labelers for each, splitting multiple
// spacings on the same <label> tag into multiple labelers:
json.label.forEach(function(e) {
var spacing = [];
if (e.spacing !== undefined) {
spacing = vF.typeOf(e.spacing) === 'array' ? e.spacing : [ e.spacing ];
}
spacing.forEach(function(s) {
labelers.add( Labeler.parseJSON(e, axis, defaults, s) );
});
});
} else {
// Otherwise create labelers using the default spacing, with the other values
// from the <labels> tag
var defaultValues = (uF.getDefaultValuesFromXSD()).horizontalaxis.labels;
var defaultSpacings = axis.type() === DataValue.NUMBER ?
defaultValues.defaultNumberSpacing :
defaultValues.defaultDatetimeSpacing;
for (i = 0; i < defaultSpacings.length; ++i) {
labelers.add(Labeler.parseJSON(json, axis, undefined, defaultSpacings[i]));
}
}
};
Axis.parseJSON = function (json, orientation, messageHandler, multigraph) {
var DataValue = require('../../core/data_value.js'),
Point = require('../../math/point.js'),
RGBColor = require('../../math/rgb_color.js'),
Displacement = require('../../math/displacement.js'),
AxisTitle = require('../../core/axis_title.js'),
Grid = require('../../core/grid.js'),
Pan = require('../../core/pan.js'),
Zoom = require('../../core/zoom.js'),
AxisBinding = require('../../core/axis_binding.js'),
axis = new Axis(orientation),
parseAttribute = pF.parseAttribute,
parseDisplacement = Displacement.parse,
parseJSONPoint = function(p) { return new Point(p[0], p[1]); },
parseRGBColor = RGBColor.parse,
attr, child,
value;
if (json) {
parseAttribute(json.id, axis.id);
parseAttribute(json.type, axis.type, DataValue.parseType);
parseAttribute(json.length, axis.length, parseDisplacement);
//
// The following provides support for the deprecated "positionbase" axis attribute;
// MUGL files should use the "base" attribute instead. When we're ready to remove
// support for the deprecated attribute, delete this block of code:
//
(function () {
var positionbase = json.positionbase;
if (positionbase) {
messageHandler.warning('Use of deprecated axis attribute "positionbase"; use "base" attribute instead');
if ((positionbase === "left") || (positionbase === "bottom")) {
axis.base(new Point(-1, -1));
} else if (positionbase === "right") {
axis.base(new Point(1, -1));
} else if (positionbase === "top") {
axis.base(new Point(-1, 1));
}
}
}());
//
// End of code to delete when removing support for deprecated "positionbase"
// attribute.
//
attr = json.position;
if (attr !== undefined) {
if (vF.typeOf(attr) === 'array') {
axis.position(parseJSONPoint(attr));
} else {
// If position is not an array, and if it can be interpreted
// as a number, construct the position point by interpreting that
// number as an offset from the 0 location along the perpendicular
// direction.
if (vF.isNumberNotNaN(attr)) {
if (orientation === Axis.HORIZONTAL) {
axis.position(new Point(0, attr));
} else {
axis.position(new Point(attr, 0));
}
} else {
throw new Error("axis position '"+attr+"' is of the wrong type; it should be a number or a point");
}
}
}
// Note: we coerce the min and max values to strings here, because the "min" and "max" attrs
// of the Axis object require strings. See the comments about these properties in src/core/axis.js
// for a discussion of why this is the case.
if ("min" in json) {
axis.min(uF.coerceToString(json.min));
}
if (axis.min() !== "auto") {
axis.dataMin(DataValue.parse(axis.type(), axis.min()));
}
if ("max" in json) {
axis.max(uF.coerceToString(json.max));
}
if (axis.max() !== "auto") {
axis.dataMax(DataValue.parse(axis.type(), axis.max()));
}
parseAttribute(json.pregap, axis.pregap);
parseAttribute(json.postgap, axis.postgap);
parseAttribute(json.anchor, axis.anchor);
parseAttribute(json.base, axis.base, parseJSONPoint);
parseAttribute(json.minposition, axis.minposition, parseDisplacement);
parseAttribute(json.maxposition, axis.maxposition, parseDisplacement);
parseAttribute(json.minoffset, axis.minoffset);
parseAttribute(json.maxoffset, axis.maxoffset);
parseAttribute(json.color, axis.color, parseRGBColor);
parseAttribute(json.tickcolor, axis.tickcolor, parseRGBColor);
parseAttribute(json.tickwidth, axis.tickwidth);
parseAttribute(json.tickmin, axis.tickmin);
parseAttribute(json.tickmax, axis.tickmax);
parseAttribute(json.highlightstyle, axis.highlightstyle);
parseAttribute(json.linewidth, axis.linewidth);
if ("title" in json) {
if (typeof(json.title) === 'boolean') | else {
axis.title(AxisTitle.parseJSON(json.title, axis));
}
} else {
axis.title(new AxisTitle(axis));
}
if (json.grid) {
axis.grid(Grid.parseJSON(json.grid));
}
if (json.visible !== undefined) {
axis.visible(json.visible);
}
if ("pan" in json) {
axis.pan(Pan.parseJSON(json.pan, axis.type()));
}
if ("zoom" in json) {
axis.zoom(Zoom.parseJSON(json.zoom, axis.type()));
}
if (json.labels) {
parseLabels(json.labels, axis);
}
if (json.binding) {
var bindingMinDataValue = DataValue.parse(axis.type(), json.binding.min),
bindingMaxDataValue = DataValue.parse(axis.type(), json.binding.max);
if (typeof(json.binding.id) !== "string") {
throw new Error("invalid axis binding id: '" + json.binding.id + "'");
}
if (! DataValue.isInstance(bindingMinDataValue)) {
throw new Error("invalid axis binding min: '" + json.binding.min + "'");
}
if (! DataValue.isInstance(bindingMaxDataValue)) {
throw new Error("invalid axis binding max: '" + json.binding.max + "'");
}
AxisBinding.findByIdOrCreateNew(json.binding.id).addAxis(axis, bindingMinDataValue, bindingMaxDataValue, multigraph);
}
}
return axis;
};
module.exports = Axis;
| {
if (json.title) {
axis.title(new AxisTitle(axis));
} else {
axis.title(AxisTitle.parseJSON({}, axis));
}
} | conditional_block |
input.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use script_traits::MouseButton;
use std::path::Path;
use std::mem::size_of;
use std::mem::transmute;
use std::mem::zeroed;
use std::os::errno;
use std::os::unix::AsRawFd;
use std::num::Float;
use std::fs::File;
use std::thread;
use std::sync::mpsc::Sender;
use std::io::Read;
use geom::point::TypedPoint2D;
use libc::c_int;
use libc::c_long;
use libc::time_t;
use compositing::windowing::WindowEvent;
use compositing::windowing::MouseWindowEvent;
extern {
// XXX: no variadic form in std libs?
fn ioctl(fd: c_int, req: c_int, ...) -> c_int;
}
#[repr(C)]
struct linux_input_event {
sec: time_t,
msec: c_long,
evt_type: u16,
code: u16,
value: i32,
}
#[repr(C)]
struct linux_input_absinfo {
value: i32,
minimum: i32,
maximum: i32,
fuzz: i32,
flat: i32,
resolution: i32,
}
const IOC_NONE: c_int = 0;
const IOC_WRITE: c_int = 1;
const IOC_READ: c_int = 2;
fn ioc(dir: c_int, ioctype: c_int, nr: c_int, size: c_int) -> c_int {
dir << 30 | size << 16 | ioctype << 8 | nr
}
fn ev_ioc_g_abs(abs: u16) -> c_int {
ioc(IOC_READ, 'E' as c_int, (0x40 + abs) as i32, size_of::<linux_input_absinfo>() as i32)
}
const EV_SYN: u16 = 0;
const EV_ABS: u16 = 3;
const EV_REPORT: u16 = 0;
const ABS_MT_SLOT: u16 = 0x2F;
const ABS_MT_TOUCH_MAJOR: u16 = 0x30;
const ABS_MT_TOUCH_MINOR: u16 = 0x31;
const ABS_MT_WIDTH_MAJOR: u16 = 0x32;
const ABS_MT_WIDTH_MINOR: u16 = 0x33;
const ABS_MT_ORIENTATION: u16 = 0x34;
const ABS_MT_POSITION_X: u16 = 0x35;
const ABS_MT_POSITION_Y: u16 = 0x36;
const ABS_MT_TRACKING_ID: u16 = 0x39;
struct InputSlot {
tracking_id: i32,
x: i32,
y: i32,
}
fn dist(x1: i32, x2: i32, y1: i32, y2: i32) -> f32 {
let deltaX = (x2 - x1) as f32;
let deltaY = (y2 - y1) as f32;
(deltaX * deltaX + deltaY * deltaY).sqrt()
}
fn read_input_device(device_path: &Path,
sender: &Sender<WindowEvent>) {
let mut device = match File::open(device_path) {
Ok(dev) => dev,
Err(e) => {
println!("Couldn't open device! {}", e);
return;
},
};
let fd = device.as_raw_fd();
let mut x_info: linux_input_absinfo = unsafe { zeroed() };
let mut y_info: linux_input_absinfo = unsafe { zeroed() };
unsafe {
let ret = ioctl(fd, ev_ioc_g_abs(ABS_MT_POSITION_X), &mut x_info);
if ret < 0 {
println!("Couldn't get ABS_MT_POSITION_X info {} {}", ret, errno());
}
}
unsafe {
let ret = ioctl(fd, ev_ioc_g_abs(ABS_MT_POSITION_Y), &mut y_info);
if ret < 0 {
println!("Couldn't get ABS_MT_POSITION_Y info {} {}", ret, errno());
}
}
let touchWidth = x_info.maximum - x_info.minimum;
let touchHeight = y_info.maximum - y_info.minimum;
println!("xMin: {}, yMin: {}, touchWidth: {}, touchHeight: {}", x_info.minimum, y_info.minimum, touchWidth, touchHeight);
// XXX: Why isn't size_of treated as constant?
// let buf: [u8; (16 * size_of::<linux_input_event>())];
let mut buf: [u8; (16 * 16)] = unsafe { zeroed() };
let mut slots: [InputSlot; 10] = unsafe { zeroed() };
for slot in slots.iter_mut() {
slot.tracking_id = -1;
}
let mut last_x = 0;
let mut last_y = 0;
let mut first_x = 0;
let mut first_y = 0;
let mut last_dist: f32 = 0f32;
let mut touch_count: i32 = 0;
let mut current_slot: uint = 0;
// XXX: Need to use the real dimensions of the screen
let screen_dist = dist(0, 480, 854, 0);
loop {
let read = match device.read(buf.as_mut_slice()) {
Ok(count) => {
assert!(count % size_of::<linux_input_event>() == 0,
"Unexpected input device read length!");
count
},
Err(e) => {
println!("Couldn't read device! {}", e);
return;
}
};
let count = read / size_of::<linux_input_event>();
let events: *mut linux_input_event = unsafe { transmute(buf.as_mut_ptr()) };
let mut tracking_updated = false;
for idx in range(0, count as int) {
let event: &linux_input_event = unsafe { transmute(events.offset(idx)) };
match (event.evt_type, event.code) {
(EV_SYN, EV_REPORT) => {
let slotA = &slots[0];
if tracking_updated {
tracking_updated = false;
if slotA.tracking_id == -1 {
println!("Touch up");
let delta_x = slotA.x - first_x;
let delta_y = slotA.y - first_y;
let dist = delta_x * delta_x + delta_y * delta_y;
if dist < 16 {
let click_pt = TypedPoint2D(slotA.x as f32, slotA.y as f32);
println!("Dispatching click!");
sender.send(WindowEvent::MouseWindowEventClass(MouseWindowEvent::MouseDown(MouseButton::Left, click_pt)))
.ok().unwrap();
sender.send(WindowEvent::MouseWindowEventClass(MouseWindowEvent::MouseUp(MouseButton::Left, click_pt)))
.ok().unwrap();
sender.send(WindowEvent::MouseWindowEventClass(MouseWindowEvent::Click(MouseButton::Left, click_pt)))
.ok().unwrap();
}
} else {
println!("Touch down");
last_x = slotA.x;
last_y = slotA.y;
first_x = slotA.x;
first_y = slotA.y;
if touch_count >= 2 {
let slotB = &slots[1];
last_dist = dist(slotA.x, slotB.x, slotA.y, slotB.y);
}
}
} else {
println!("Touch move x: {}, y: {}", slotA.x, slotA.y);
sender.send(WindowEvent::Scroll(TypedPoint2D((slotA.x - last_x) as f32, (slotA.y - last_y) as f32),
TypedPoint2D(slotA.x, slotA.y))).ok().unwrap();
last_x = slotA.x;
last_y = slotA.y;
if touch_count >= 2 {
let slotB = &slots[1];
let cur_dist = dist(slotA.x, slotB.x, slotA.y, slotB.y);
println!("Zooming {} {} {} {}", cur_dist, last_dist, screen_dist, ((screen_dist + (cur_dist - last_dist))/screen_dist));
sender.send(WindowEvent::Zoom((screen_dist + (cur_dist - last_dist))/screen_dist)).ok().unwrap();
last_dist = cur_dist;
}
}
},
(EV_SYN, _) => println!("Unknown SYN code {}", event.code),
(EV_ABS, ABS_MT_SLOT) => {
if (event.value as uint) < slots.len() {
current_slot = event.value as uint;
} else {
println!("Invalid slot! {}", event.value);
}
},
(EV_ABS, ABS_MT_TOUCH_MAJOR) => (),
(EV_ABS, ABS_MT_TOUCH_MINOR) => (),
(EV_ABS, ABS_MT_WIDTH_MAJOR) => (),
(EV_ABS, ABS_MT_WIDTH_MINOR) => (),
(EV_ABS, ABS_MT_ORIENTATION) => (),
(EV_ABS, ABS_MT_POSITION_X) => {
slots[current_slot].x = event.value - x_info.minimum;
},
(EV_ABS, ABS_MT_POSITION_Y) => {
slots[current_slot].y = event.value - y_info.minimum;
},
(EV_ABS, ABS_MT_TRACKING_ID) => {
let current_id = slots[current_slot].tracking_id;
if current_id != event.value &&
(current_id == -1 || event.value == -1) {
tracking_updated = true;
if event.value == -1 | else {
touch_count += 1;
}
}
slots[current_slot].tracking_id = event.value;
},
(EV_ABS, _) => println!("Unknown ABS code {}", event.code),
(_, _) => println!("Unknown event type {}", event.evt_type),
}
}
}
}
pub fn run_input_loop(event_sender: &Sender<WindowEvent>) {
let sender = event_sender.clone();
thread::spawn(move || {
// XXX need to scan all devices and read every one.
let touchinputdev = Path::new("/dev/input/event0");
read_input_device(&touchinputdev, &sender);
});
}
| {
touch_count -= 1;
} | conditional_block |
input.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use script_traits::MouseButton;
use std::path::Path;
use std::mem::size_of;
use std::mem::transmute;
use std::mem::zeroed;
use std::os::errno;
use std::os::unix::AsRawFd;
use std::num::Float;
use std::fs::File;
use std::thread;
use std::sync::mpsc::Sender;
use std::io::Read;
use geom::point::TypedPoint2D;
use libc::c_int;
use libc::c_long;
use libc::time_t;
use compositing::windowing::WindowEvent;
use compositing::windowing::MouseWindowEvent;
extern {
// XXX: no variadic form in std libs?
fn ioctl(fd: c_int, req: c_int, ...) -> c_int;
}
#[repr(C)]
struct linux_input_event {
sec: time_t,
msec: c_long,
evt_type: u16,
code: u16,
value: i32,
}
#[repr(C)]
struct linux_input_absinfo {
value: i32,
minimum: i32,
maximum: i32,
fuzz: i32,
flat: i32,
resolution: i32,
}
const IOC_NONE: c_int = 0;
const IOC_WRITE: c_int = 1;
const IOC_READ: c_int = 2;
fn ioc(dir: c_int, ioctype: c_int, nr: c_int, size: c_int) -> c_int {
dir << 30 | size << 16 | ioctype << 8 | nr
}
fn ev_ioc_g_abs(abs: u16) -> c_int {
ioc(IOC_READ, 'E' as c_int, (0x40 + abs) as i32, size_of::<linux_input_absinfo>() as i32)
}
const EV_SYN: u16 = 0;
const EV_ABS: u16 = 3;
const EV_REPORT: u16 = 0;
const ABS_MT_SLOT: u16 = 0x2F;
const ABS_MT_TOUCH_MAJOR: u16 = 0x30;
const ABS_MT_TOUCH_MINOR: u16 = 0x31;
const ABS_MT_WIDTH_MAJOR: u16 = 0x32;
const ABS_MT_WIDTH_MINOR: u16 = 0x33;
const ABS_MT_ORIENTATION: u16 = 0x34;
const ABS_MT_POSITION_X: u16 = 0x35;
const ABS_MT_POSITION_Y: u16 = 0x36;
const ABS_MT_TRACKING_ID: u16 = 0x39;
struct InputSlot {
tracking_id: i32,
x: i32,
y: i32,
}
fn dist(x1: i32, x2: i32, y1: i32, y2: i32) -> f32 {
let deltaX = (x2 - x1) as f32;
let deltaY = (y2 - y1) as f32;
(deltaX * deltaX + deltaY * deltaY).sqrt()
}
fn | (device_path: &Path,
sender: &Sender<WindowEvent>) {
let mut device = match File::open(device_path) {
Ok(dev) => dev,
Err(e) => {
println!("Couldn't open device! {}", e);
return;
},
};
let fd = device.as_raw_fd();
let mut x_info: linux_input_absinfo = unsafe { zeroed() };
let mut y_info: linux_input_absinfo = unsafe { zeroed() };
unsafe {
let ret = ioctl(fd, ev_ioc_g_abs(ABS_MT_POSITION_X), &mut x_info);
if ret < 0 {
println!("Couldn't get ABS_MT_POSITION_X info {} {}", ret, errno());
}
}
unsafe {
let ret = ioctl(fd, ev_ioc_g_abs(ABS_MT_POSITION_Y), &mut y_info);
if ret < 0 {
println!("Couldn't get ABS_MT_POSITION_Y info {} {}", ret, errno());
}
}
let touchWidth = x_info.maximum - x_info.minimum;
let touchHeight = y_info.maximum - y_info.minimum;
println!("xMin: {}, yMin: {}, touchWidth: {}, touchHeight: {}", x_info.minimum, y_info.minimum, touchWidth, touchHeight);
// XXX: Why isn't size_of treated as constant?
// let buf: [u8; (16 * size_of::<linux_input_event>())];
let mut buf: [u8; (16 * 16)] = unsafe { zeroed() };
let mut slots: [InputSlot; 10] = unsafe { zeroed() };
for slot in slots.iter_mut() {
slot.tracking_id = -1;
}
let mut last_x = 0;
let mut last_y = 0;
let mut first_x = 0;
let mut first_y = 0;
let mut last_dist: f32 = 0f32;
let mut touch_count: i32 = 0;
let mut current_slot: uint = 0;
// XXX: Need to use the real dimensions of the screen
let screen_dist = dist(0, 480, 854, 0);
loop {
let read = match device.read(buf.as_mut_slice()) {
Ok(count) => {
assert!(count % size_of::<linux_input_event>() == 0,
"Unexpected input device read length!");
count
},
Err(e) => {
println!("Couldn't read device! {}", e);
return;
}
};
let count = read / size_of::<linux_input_event>();
let events: *mut linux_input_event = unsafe { transmute(buf.as_mut_ptr()) };
let mut tracking_updated = false;
for idx in range(0, count as int) {
let event: &linux_input_event = unsafe { transmute(events.offset(idx)) };
match (event.evt_type, event.code) {
(EV_SYN, EV_REPORT) => {
let slotA = &slots[0];
if tracking_updated {
tracking_updated = false;
if slotA.tracking_id == -1 {
println!("Touch up");
let delta_x = slotA.x - first_x;
let delta_y = slotA.y - first_y;
let dist = delta_x * delta_x + delta_y * delta_y;
if dist < 16 {
let click_pt = TypedPoint2D(slotA.x as f32, slotA.y as f32);
println!("Dispatching click!");
sender.send(WindowEvent::MouseWindowEventClass(MouseWindowEvent::MouseDown(MouseButton::Left, click_pt)))
.ok().unwrap();
sender.send(WindowEvent::MouseWindowEventClass(MouseWindowEvent::MouseUp(MouseButton::Left, click_pt)))
.ok().unwrap();
sender.send(WindowEvent::MouseWindowEventClass(MouseWindowEvent::Click(MouseButton::Left, click_pt)))
.ok().unwrap();
}
} else {
println!("Touch down");
last_x = slotA.x;
last_y = slotA.y;
first_x = slotA.x;
first_y = slotA.y;
if touch_count >= 2 {
let slotB = &slots[1];
last_dist = dist(slotA.x, slotB.x, slotA.y, slotB.y);
}
}
} else {
println!("Touch move x: {}, y: {}", slotA.x, slotA.y);
sender.send(WindowEvent::Scroll(TypedPoint2D((slotA.x - last_x) as f32, (slotA.y - last_y) as f32),
TypedPoint2D(slotA.x, slotA.y))).ok().unwrap();
last_x = slotA.x;
last_y = slotA.y;
if touch_count >= 2 {
let slotB = &slots[1];
let cur_dist = dist(slotA.x, slotB.x, slotA.y, slotB.y);
println!("Zooming {} {} {} {}", cur_dist, last_dist, screen_dist, ((screen_dist + (cur_dist - last_dist))/screen_dist));
sender.send(WindowEvent::Zoom((screen_dist + (cur_dist - last_dist))/screen_dist)).ok().unwrap();
last_dist = cur_dist;
}
}
},
(EV_SYN, _) => println!("Unknown SYN code {}", event.code),
(EV_ABS, ABS_MT_SLOT) => {
if (event.value as uint) < slots.len() {
current_slot = event.value as uint;
} else {
println!("Invalid slot! {}", event.value);
}
},
(EV_ABS, ABS_MT_TOUCH_MAJOR) => (),
(EV_ABS, ABS_MT_TOUCH_MINOR) => (),
(EV_ABS, ABS_MT_WIDTH_MAJOR) => (),
(EV_ABS, ABS_MT_WIDTH_MINOR) => (),
(EV_ABS, ABS_MT_ORIENTATION) => (),
(EV_ABS, ABS_MT_POSITION_X) => {
slots[current_slot].x = event.value - x_info.minimum;
},
(EV_ABS, ABS_MT_POSITION_Y) => {
slots[current_slot].y = event.value - y_info.minimum;
},
(EV_ABS, ABS_MT_TRACKING_ID) => {
let current_id = slots[current_slot].tracking_id;
if current_id != event.value &&
(current_id == -1 || event.value == -1) {
tracking_updated = true;
if event.value == -1 {
touch_count -= 1;
} else {
touch_count += 1;
}
}
slots[current_slot].tracking_id = event.value;
},
(EV_ABS, _) => println!("Unknown ABS code {}", event.code),
(_, _) => println!("Unknown event type {}", event.evt_type),
}
}
}
}
pub fn run_input_loop(event_sender: &Sender<WindowEvent>) {
let sender = event_sender.clone();
thread::spawn(move || {
// XXX need to scan all devices and read every one.
let touchinputdev = Path::new("/dev/input/event0");
read_input_device(&touchinputdev, &sender);
});
}
| read_input_device | identifier_name |
input.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use script_traits::MouseButton;
use std::path::Path;
use std::mem::size_of;
use std::mem::transmute;
use std::mem::zeroed;
use std::os::errno;
use std::os::unix::AsRawFd;
use std::num::Float;
use std::fs::File;
use std::thread;
use std::sync::mpsc::Sender;
use std::io::Read;
use geom::point::TypedPoint2D;
use libc::c_int;
use libc::c_long;
use libc::time_t;
use compositing::windowing::WindowEvent;
use compositing::windowing::MouseWindowEvent;
extern {
// XXX: no variadic form in std libs?
fn ioctl(fd: c_int, req: c_int, ...) -> c_int;
}
#[repr(C)]
struct linux_input_event {
sec: time_t,
msec: c_long,
evt_type: u16,
code: u16,
value: i32,
}
#[repr(C)]
struct linux_input_absinfo {
value: i32,
minimum: i32,
maximum: i32,
fuzz: i32,
flat: i32,
resolution: i32,
}
const IOC_NONE: c_int = 0;
const IOC_WRITE: c_int = 1;
const IOC_READ: c_int = 2;
fn ioc(dir: c_int, ioctype: c_int, nr: c_int, size: c_int) -> c_int {
dir << 30 | size << 16 | ioctype << 8 | nr
}
fn ev_ioc_g_abs(abs: u16) -> c_int {
ioc(IOC_READ, 'E' as c_int, (0x40 + abs) as i32, size_of::<linux_input_absinfo>() as i32)
}
const EV_SYN: u16 = 0;
const EV_ABS: u16 = 3;
const EV_REPORT: u16 = 0;
const ABS_MT_SLOT: u16 = 0x2F;
const ABS_MT_TOUCH_MAJOR: u16 = 0x30;
const ABS_MT_TOUCH_MINOR: u16 = 0x31;
const ABS_MT_WIDTH_MAJOR: u16 = 0x32;
const ABS_MT_WIDTH_MINOR: u16 = 0x33;
const ABS_MT_ORIENTATION: u16 = 0x34;
const ABS_MT_POSITION_X: u16 = 0x35;
const ABS_MT_POSITION_Y: u16 = 0x36;
const ABS_MT_TRACKING_ID: u16 = 0x39;
struct InputSlot {
tracking_id: i32,
x: i32,
y: i32,
}
fn dist(x1: i32, x2: i32, y1: i32, y2: i32) -> f32 {
let deltaX = (x2 - x1) as f32;
let deltaY = (y2 - y1) as f32;
(deltaX * deltaX + deltaY * deltaY).sqrt()
}
fn read_input_device(device_path: &Path,
sender: &Sender<WindowEvent>) {
let mut device = match File::open(device_path) {
Ok(dev) => dev,
Err(e) => {
println!("Couldn't open device! {}", e);
return;
},
};
let fd = device.as_raw_fd();
let mut x_info: linux_input_absinfo = unsafe { zeroed() };
let mut y_info: linux_input_absinfo = unsafe { zeroed() }; | println!("Couldn't get ABS_MT_POSITION_X info {} {}", ret, errno());
}
}
unsafe {
let ret = ioctl(fd, ev_ioc_g_abs(ABS_MT_POSITION_Y), &mut y_info);
if ret < 0 {
println!("Couldn't get ABS_MT_POSITION_Y info {} {}", ret, errno());
}
}
let touchWidth = x_info.maximum - x_info.minimum;
let touchHeight = y_info.maximum - y_info.minimum;
println!("xMin: {}, yMin: {}, touchWidth: {}, touchHeight: {}", x_info.minimum, y_info.minimum, touchWidth, touchHeight);
// XXX: Why isn't size_of treated as constant?
// let buf: [u8; (16 * size_of::<linux_input_event>())];
let mut buf: [u8; (16 * 16)] = unsafe { zeroed() };
let mut slots: [InputSlot; 10] = unsafe { zeroed() };
for slot in slots.iter_mut() {
slot.tracking_id = -1;
}
let mut last_x = 0;
let mut last_y = 0;
let mut first_x = 0;
let mut first_y = 0;
let mut last_dist: f32 = 0f32;
let mut touch_count: i32 = 0;
let mut current_slot: uint = 0;
// XXX: Need to use the real dimensions of the screen
let screen_dist = dist(0, 480, 854, 0);
loop {
let read = match device.read(buf.as_mut_slice()) {
Ok(count) => {
assert!(count % size_of::<linux_input_event>() == 0,
"Unexpected input device read length!");
count
},
Err(e) => {
println!("Couldn't read device! {}", e);
return;
}
};
let count = read / size_of::<linux_input_event>();
let events: *mut linux_input_event = unsafe { transmute(buf.as_mut_ptr()) };
let mut tracking_updated = false;
for idx in range(0, count as int) {
let event: &linux_input_event = unsafe { transmute(events.offset(idx)) };
match (event.evt_type, event.code) {
(EV_SYN, EV_REPORT) => {
let slotA = &slots[0];
if tracking_updated {
tracking_updated = false;
if slotA.tracking_id == -1 {
println!("Touch up");
let delta_x = slotA.x - first_x;
let delta_y = slotA.y - first_y;
let dist = delta_x * delta_x + delta_y * delta_y;
if dist < 16 {
let click_pt = TypedPoint2D(slotA.x as f32, slotA.y as f32);
println!("Dispatching click!");
sender.send(WindowEvent::MouseWindowEventClass(MouseWindowEvent::MouseDown(MouseButton::Left, click_pt)))
.ok().unwrap();
sender.send(WindowEvent::MouseWindowEventClass(MouseWindowEvent::MouseUp(MouseButton::Left, click_pt)))
.ok().unwrap();
sender.send(WindowEvent::MouseWindowEventClass(MouseWindowEvent::Click(MouseButton::Left, click_pt)))
.ok().unwrap();
}
} else {
println!("Touch down");
last_x = slotA.x;
last_y = slotA.y;
first_x = slotA.x;
first_y = slotA.y;
if touch_count >= 2 {
let slotB = &slots[1];
last_dist = dist(slotA.x, slotB.x, slotA.y, slotB.y);
}
}
} else {
println!("Touch move x: {}, y: {}", slotA.x, slotA.y);
sender.send(WindowEvent::Scroll(TypedPoint2D((slotA.x - last_x) as f32, (slotA.y - last_y) as f32),
TypedPoint2D(slotA.x, slotA.y))).ok().unwrap();
last_x = slotA.x;
last_y = slotA.y;
if touch_count >= 2 {
let slotB = &slots[1];
let cur_dist = dist(slotA.x, slotB.x, slotA.y, slotB.y);
println!("Zooming {} {} {} {}", cur_dist, last_dist, screen_dist, ((screen_dist + (cur_dist - last_dist))/screen_dist));
sender.send(WindowEvent::Zoom((screen_dist + (cur_dist - last_dist))/screen_dist)).ok().unwrap();
last_dist = cur_dist;
}
}
},
(EV_SYN, _) => println!("Unknown SYN code {}", event.code),
(EV_ABS, ABS_MT_SLOT) => {
if (event.value as uint) < slots.len() {
current_slot = event.value as uint;
} else {
println!("Invalid slot! {}", event.value);
}
},
(EV_ABS, ABS_MT_TOUCH_MAJOR) => (),
(EV_ABS, ABS_MT_TOUCH_MINOR) => (),
(EV_ABS, ABS_MT_WIDTH_MAJOR) => (),
(EV_ABS, ABS_MT_WIDTH_MINOR) => (),
(EV_ABS, ABS_MT_ORIENTATION) => (),
(EV_ABS, ABS_MT_POSITION_X) => {
slots[current_slot].x = event.value - x_info.minimum;
},
(EV_ABS, ABS_MT_POSITION_Y) => {
slots[current_slot].y = event.value - y_info.minimum;
},
(EV_ABS, ABS_MT_TRACKING_ID) => {
let current_id = slots[current_slot].tracking_id;
if current_id != event.value &&
(current_id == -1 || event.value == -1) {
tracking_updated = true;
if event.value == -1 {
touch_count -= 1;
} else {
touch_count += 1;
}
}
slots[current_slot].tracking_id = event.value;
},
(EV_ABS, _) => println!("Unknown ABS code {}", event.code),
(_, _) => println!("Unknown event type {}", event.evt_type),
}
}
}
}
pub fn run_input_loop(event_sender: &Sender<WindowEvent>) {
let sender = event_sender.clone();
thread::spawn(move || {
// XXX need to scan all devices and read every one.
let touchinputdev = Path::new("/dev/input/event0");
read_input_device(&touchinputdev, &sender);
});
} | unsafe {
let ret = ioctl(fd, ev_ioc_g_abs(ABS_MT_POSITION_X), &mut x_info);
if ret < 0 { | random_line_split |
htmlframeelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLFrameElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLFrameElementDerived;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector}; | use servo_util::str::DOMString;
#[deriving(Encodable)]
#[must_root]
pub struct HTMLFrameElement {
pub htmlelement: HTMLElement
}
impl HTMLFrameElementDerived for EventTarget {
fn is_htmlframeelement(&self) -> bool {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLFrameElementTypeId))
}
}
impl HTMLFrameElement {
pub fn new_inherited(localName: DOMString, document: JSRef<Document>) -> HTMLFrameElement {
HTMLFrameElement {
htmlelement: HTMLElement::new_inherited(HTMLFrameElementTypeId, localName, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, document: JSRef<Document>) -> Temporary<HTMLFrameElement> {
let element = HTMLFrameElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLFrameElementBinding::Wrap)
}
}
impl Reflectable for HTMLFrameElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
} | use dom::document::Document;
use dom::element::HTMLFrameElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId}; | random_line_split |
htmlframeelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLFrameElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLFrameElementDerived;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLFrameElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId};
use servo_util::str::DOMString;
#[deriving(Encodable)]
#[must_root]
pub struct HTMLFrameElement {
pub htmlelement: HTMLElement
}
impl HTMLFrameElementDerived for EventTarget {
fn is_htmlframeelement(&self) -> bool {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLFrameElementTypeId))
}
}
impl HTMLFrameElement {
pub fn new_inherited(localName: DOMString, document: JSRef<Document>) -> HTMLFrameElement {
HTMLFrameElement {
htmlelement: HTMLElement::new_inherited(HTMLFrameElementTypeId, localName, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, document: JSRef<Document>) -> Temporary<HTMLFrameElement> {
let element = HTMLFrameElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLFrameElementBinding::Wrap)
}
}
impl Reflectable for HTMLFrameElement {
fn reflector<'a>(&'a self) -> &'a Reflector |
}
| {
self.htmlelement.reflector()
} | identifier_body |
htmlframeelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLFrameElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLFrameElementDerived;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::HTMLFrameElementTypeId;
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, ElementNodeTypeId};
use servo_util::str::DOMString;
#[deriving(Encodable)]
#[must_root]
pub struct HTMLFrameElement {
pub htmlelement: HTMLElement
}
impl HTMLFrameElementDerived for EventTarget {
fn is_htmlframeelement(&self) -> bool {
self.type_id == NodeTargetTypeId(ElementNodeTypeId(HTMLFrameElementTypeId))
}
}
impl HTMLFrameElement {
pub fn new_inherited(localName: DOMString, document: JSRef<Document>) -> HTMLFrameElement {
HTMLFrameElement {
htmlelement: HTMLElement::new_inherited(HTMLFrameElementTypeId, localName, document)
}
}
#[allow(unrooted_must_root)]
pub fn | (localName: DOMString, document: JSRef<Document>) -> Temporary<HTMLFrameElement> {
let element = HTMLFrameElement::new_inherited(localName, document);
Node::reflect_node(box element, document, HTMLFrameElementBinding::Wrap)
}
}
impl Reflectable for HTMLFrameElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
}
| new | identifier_name |
scrollable.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Directionality} from '@angular/cdk/bidi';
import {
getRtlScrollAxisType,
RtlScrollAxisType,
supportsScrollBehavior
} from '@angular/cdk/platform';
import {Directive, ElementRef, NgZone, OnDestroy, OnInit, Optional} from '@angular/core';
import {fromEvent, Observable, Subject, Observer} from 'rxjs';
import {takeUntil} from 'rxjs/operators';
import {ScrollDispatcher} from './scroll-dispatcher';
export type _Without<T> = {[P in keyof T]?: never};
export type _XOR<T, U> = (_Without<T> & U) | (_Without<U> & T);
export type _Top = {top?: number};
export type _Bottom = {bottom?: number};
export type _Left = {left?: number};
export type _Right = {right?: number};
export type _Start = {start?: number};
export type _End = {end?: number};
export type _XAxis = _XOR<_XOR<_Left, _Right>, _XOR<_Start, _End>>;
export type _YAxis = _XOR<_Top, _Bottom>;
/**
* An extended version of ScrollToOptions that allows expressing scroll offsets relative to the
* top, bottom, left, right, start, or end of the viewport rather than just the top and left.
* Please note: the top and bottom properties are mutually exclusive, as are the left, right,
* start, and end properties.
*/
export type ExtendedScrollToOptions = _XAxis & _YAxis & ScrollOptions;
/**
* Sends an event when the directive's element is scrolled. Registers itself with the
* ScrollDispatcher service to include itself as part of its collection of scrolling events that it
* can be listened to through the service.
*/
@Directive({
selector: '[cdk-scrollable], [cdkScrollable]'
})
export class CdkScrollable implements OnInit, OnDestroy {
private _destroyed = new Subject();
private _elementScrolled: Observable<Event> = new Observable((observer: Observer<Event>) =>
this.ngZone.runOutsideAngular(() =>
fromEvent(this.elementRef.nativeElement, 'scroll').pipe(takeUntil(this._destroyed))
.subscribe(observer)));
constructor(protected elementRef: ElementRef<HTMLElement>,
protected scrollDispatcher: ScrollDispatcher,
protected ngZone: NgZone,
@Optional() protected dir?: Directionality) {}
ngOnInit() {
this.scrollDispatcher.register(this);
}
ngOnDestroy() { | /** Returns observable that emits when a scroll event is fired on the host element. */
elementScrolled(): Observable<Event> {
return this._elementScrolled;
}
/** Gets the ElementRef for the viewport. */
getElementRef(): ElementRef<HTMLElement> {
return this.elementRef;
}
/**
* Scrolls to the specified offsets. This is a normalized version of the browser's native scrollTo
* method, since browsers are not consistent about what scrollLeft means in RTL. For this method
* left and right always refer to the left and right side of the scrolling container irrespective
* of the layout direction. start and end refer to left and right in an LTR context and vice-versa
* in an RTL context.
* @param options specified the offsets to scroll to.
*/
scrollTo(options: ExtendedScrollToOptions): void {
const el = this.elementRef.nativeElement;
const isRtl = this.dir && this.dir.value == 'rtl';
// Rewrite start & end offsets as right or left offsets.
options.left = options.left == null ? (isRtl ? options.end : options.start) : options.left;
options.right = options.right == null ? (isRtl ? options.start : options.end) : options.right;
// Rewrite the bottom offset as a top offset.
if (options.bottom != null) {
(options as _Without<_Bottom> & _Top).top =
el.scrollHeight - el.clientHeight - options.bottom;
}
// Rewrite the right offset as a left offset.
if (isRtl && getRtlScrollAxisType() != RtlScrollAxisType.NORMAL) {
if (options.left != null) {
(options as _Without<_Left> & _Right).right =
el.scrollWidth - el.clientWidth - options.left;
}
if (getRtlScrollAxisType() == RtlScrollAxisType.INVERTED) {
options.left = options.right;
} else if (getRtlScrollAxisType() == RtlScrollAxisType.NEGATED) {
options.left = options.right ? -options.right : options.right;
}
} else {
if (options.right != null) {
(options as _Without<_Right> & _Left).left =
el.scrollWidth - el.clientWidth - options.right;
}
}
this._applyScrollToOptions(options);
}
private _applyScrollToOptions(options: ScrollToOptions): void {
const el = this.elementRef.nativeElement;
if (supportsScrollBehavior()) {
el.scrollTo(options);
} else {
if (options.top != null) {
el.scrollTop = options.top;
}
if (options.left != null) {
el.scrollLeft = options.left;
}
}
}
/**
* Measures the scroll offset relative to the specified edge of the viewport. This method can be
* used instead of directly checking scrollLeft or scrollTop, since browsers are not consistent
* about what scrollLeft means in RTL. The values returned by this method are normalized such that
* left and right always refer to the left and right side of the scrolling container irrespective
* of the layout direction. start and end refer to left and right in an LTR context and vice-versa
* in an RTL context.
* @param from The edge to measure from.
*/
measureScrollOffset(from: 'top' | 'left' | 'right' | 'bottom' | 'start' | 'end'): number {
const LEFT = 'left';
const RIGHT = 'right';
const el = this.elementRef.nativeElement;
if (from == 'top') {
return el.scrollTop;
}
if (from == 'bottom') {
return el.scrollHeight - el.clientHeight - el.scrollTop;
}
// Rewrite start & end as left or right offsets.
const isRtl = this.dir && this.dir.value == 'rtl';
if (from == 'start') {
from = isRtl ? RIGHT : LEFT;
} else if (from == 'end') {
from = isRtl ? LEFT : RIGHT;
}
if (isRtl && getRtlScrollAxisType() == RtlScrollAxisType.INVERTED) {
// For INVERTED, scrollLeft is (scrollWidth - clientWidth) when scrolled all the way left and
// 0 when scrolled all the way right.
if (from == LEFT) {
return el.scrollWidth - el.clientWidth - el.scrollLeft;
} else {
return el.scrollLeft;
}
} else if (isRtl && getRtlScrollAxisType() == RtlScrollAxisType.NEGATED) {
// For NEGATED, scrollLeft is -(scrollWidth - clientWidth) when scrolled all the way left and
// 0 when scrolled all the way right.
if (from == LEFT) {
return el.scrollLeft + el.scrollWidth - el.clientWidth;
} else {
return -el.scrollLeft;
}
} else {
// For NORMAL, as well as non-RTL contexts, scrollLeft is 0 when scrolled all the way left and
// (scrollWidth - clientWidth) when scrolled all the way right.
if (from == LEFT) {
return el.scrollLeft;
} else {
return el.scrollWidth - el.clientWidth - el.scrollLeft;
}
}
}
} | this.scrollDispatcher.deregister(this);
this._destroyed.next();
this._destroyed.complete();
}
| random_line_split |
scrollable.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Directionality} from '@angular/cdk/bidi';
import {
getRtlScrollAxisType,
RtlScrollAxisType,
supportsScrollBehavior
} from '@angular/cdk/platform';
import {Directive, ElementRef, NgZone, OnDestroy, OnInit, Optional} from '@angular/core';
import {fromEvent, Observable, Subject, Observer} from 'rxjs';
import {takeUntil} from 'rxjs/operators';
import {ScrollDispatcher} from './scroll-dispatcher';
export type _Without<T> = {[P in keyof T]?: never};
export type _XOR<T, U> = (_Without<T> & U) | (_Without<U> & T);
export type _Top = {top?: number};
export type _Bottom = {bottom?: number};
export type _Left = {left?: number};
export type _Right = {right?: number};
export type _Start = {start?: number};
export type _End = {end?: number};
export type _XAxis = _XOR<_XOR<_Left, _Right>, _XOR<_Start, _End>>;
export type _YAxis = _XOR<_Top, _Bottom>;
/**
* An extended version of ScrollToOptions that allows expressing scroll offsets relative to the
* top, bottom, left, right, start, or end of the viewport rather than just the top and left.
* Please note: the top and bottom properties are mutually exclusive, as are the left, right,
* start, and end properties.
*/
export type ExtendedScrollToOptions = _XAxis & _YAxis & ScrollOptions;
/**
* Sends an event when the directive's element is scrolled. Registers itself with the
* ScrollDispatcher service to include itself as part of its collection of scrolling events that it
* can be listened to through the service.
*/
@Directive({
selector: '[cdk-scrollable], [cdkScrollable]'
})
export class CdkScrollable implements OnInit, OnDestroy {
private _destroyed = new Subject();
private _elementScrolled: Observable<Event> = new Observable((observer: Observer<Event>) =>
this.ngZone.runOutsideAngular(() =>
fromEvent(this.elementRef.nativeElement, 'scroll').pipe(takeUntil(this._destroyed))
.subscribe(observer)));
constructor(protected elementRef: ElementRef<HTMLElement>,
protected scrollDispatcher: ScrollDispatcher,
protected ngZone: NgZone,
@Optional() protected dir?: Directionality) {}
ngOnInit() {
this.scrollDispatcher.register(this);
}
ngOnDestroy() {
this.scrollDispatcher.deregister(this);
this._destroyed.next();
this._destroyed.complete();
}
/** Returns observable that emits when a scroll event is fired on the host element. */
elementScrolled(): Observable<Event> {
return this._elementScrolled;
}
/** Gets the ElementRef for the viewport. */
getElementRef(): ElementRef<HTMLElement> {
return this.elementRef;
}
/**
* Scrolls to the specified offsets. This is a normalized version of the browser's native scrollTo
* method, since browsers are not consistent about what scrollLeft means in RTL. For this method
* left and right always refer to the left and right side of the scrolling container irrespective
* of the layout direction. start and end refer to left and right in an LTR context and vice-versa
* in an RTL context.
* @param options specified the offsets to scroll to.
*/
scrollTo(options: ExtendedScrollToOptions): void {
const el = this.elementRef.nativeElement;
const isRtl = this.dir && this.dir.value == 'rtl';
// Rewrite start & end offsets as right or left offsets.
options.left = options.left == null ? (isRtl ? options.end : options.start) : options.left;
options.right = options.right == null ? (isRtl ? options.start : options.end) : options.right;
// Rewrite the bottom offset as a top offset.
if (options.bottom != null) {
(options as _Without<_Bottom> & _Top).top =
el.scrollHeight - el.clientHeight - options.bottom;
}
// Rewrite the right offset as a left offset.
if (isRtl && getRtlScrollAxisType() != RtlScrollAxisType.NORMAL) {
if (options.left != null) {
(options as _Without<_Left> & _Right).right =
el.scrollWidth - el.clientWidth - options.left;
}
if (getRtlScrollAxisType() == RtlScrollAxisType.INVERTED) {
options.left = options.right;
} else if (getRtlScrollAxisType() == RtlScrollAxisType.NEGATED) {
options.left = options.right ? -options.right : options.right;
}
} else {
if (options.right != null) {
(options as _Without<_Right> & _Left).left =
el.scrollWidth - el.clientWidth - options.right;
}
}
this._applyScrollToOptions(options);
}
private _applyScrollToOptions(options: ScrollToOptions): void {
const el = this.elementRef.nativeElement;
if (supportsScrollBehavior()) {
el.scrollTo(options);
} else {
if (options.top != null) {
el.scrollTop = options.top;
}
if (options.left != null) {
el.scrollLeft = options.left;
}
}
}
/**
* Measures the scroll offset relative to the specified edge of the viewport. This method can be
* used instead of directly checking scrollLeft or scrollTop, since browsers are not consistent
* about what scrollLeft means in RTL. The values returned by this method are normalized such that
* left and right always refer to the left and right side of the scrolling container irrespective
* of the layout direction. start and end refer to left and right in an LTR context and vice-versa
* in an RTL context.
* @param from The edge to measure from.
*/
measureScrollOffset(from: 'top' | 'left' | 'right' | 'bottom' | 'start' | 'end'): number {
const LEFT = 'left';
const RIGHT = 'right';
const el = this.elementRef.nativeElement;
if (from == 'top') {
return el.scrollTop;
}
if (from == 'bottom') {
return el.scrollHeight - el.clientHeight - el.scrollTop;
}
// Rewrite start & end as left or right offsets.
const isRtl = this.dir && this.dir.value == 'rtl';
if (from == 'start') | else if (from == 'end') {
from = isRtl ? LEFT : RIGHT;
}
if (isRtl && getRtlScrollAxisType() == RtlScrollAxisType.INVERTED) {
// For INVERTED, scrollLeft is (scrollWidth - clientWidth) when scrolled all the way left and
// 0 when scrolled all the way right.
if (from == LEFT) {
return el.scrollWidth - el.clientWidth - el.scrollLeft;
} else {
return el.scrollLeft;
}
} else if (isRtl && getRtlScrollAxisType() == RtlScrollAxisType.NEGATED) {
// For NEGATED, scrollLeft is -(scrollWidth - clientWidth) when scrolled all the way left and
// 0 when scrolled all the way right.
if (from == LEFT) {
return el.scrollLeft + el.scrollWidth - el.clientWidth;
} else {
return -el.scrollLeft;
}
} else {
// For NORMAL, as well as non-RTL contexts, scrollLeft is 0 when scrolled all the way left and
// (scrollWidth - clientWidth) when scrolled all the way right.
if (from == LEFT) {
return el.scrollLeft;
} else {
return el.scrollWidth - el.clientWidth - el.scrollLeft;
}
}
}
}
| {
from = isRtl ? RIGHT : LEFT;
} | conditional_block |
scrollable.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Directionality} from '@angular/cdk/bidi';
import {
getRtlScrollAxisType,
RtlScrollAxisType,
supportsScrollBehavior
} from '@angular/cdk/platform';
import {Directive, ElementRef, NgZone, OnDestroy, OnInit, Optional} from '@angular/core';
import {fromEvent, Observable, Subject, Observer} from 'rxjs';
import {takeUntil} from 'rxjs/operators';
import {ScrollDispatcher} from './scroll-dispatcher';
export type _Without<T> = {[P in keyof T]?: never};
export type _XOR<T, U> = (_Without<T> & U) | (_Without<U> & T);
export type _Top = {top?: number};
export type _Bottom = {bottom?: number};
export type _Left = {left?: number};
export type _Right = {right?: number};
export type _Start = {start?: number};
export type _End = {end?: number};
export type _XAxis = _XOR<_XOR<_Left, _Right>, _XOR<_Start, _End>>;
export type _YAxis = _XOR<_Top, _Bottom>;
/**
* An extended version of ScrollToOptions that allows expressing scroll offsets relative to the
* top, bottom, left, right, start, or end of the viewport rather than just the top and left.
* Please note: the top and bottom properties are mutually exclusive, as are the left, right,
* start, and end properties.
*/
export type ExtendedScrollToOptions = _XAxis & _YAxis & ScrollOptions;
/**
* Sends an event when the directive's element is scrolled. Registers itself with the
* ScrollDispatcher service to include itself as part of its collection of scrolling events that it
* can be listened to through the service.
*/
@Directive({
selector: '[cdk-scrollable], [cdkScrollable]'
})
export class CdkScrollable implements OnInit, OnDestroy {
private _destroyed = new Subject();
private _elementScrolled: Observable<Event> = new Observable((observer: Observer<Event>) =>
this.ngZone.runOutsideAngular(() =>
fromEvent(this.elementRef.nativeElement, 'scroll').pipe(takeUntil(this._destroyed))
.subscribe(observer)));
constructor(protected elementRef: ElementRef<HTMLElement>,
protected scrollDispatcher: ScrollDispatcher,
protected ngZone: NgZone,
@Optional() protected dir?: Directionality) {}
ngOnInit() {
this.scrollDispatcher.register(this);
}
ngOnDestroy() {
this.scrollDispatcher.deregister(this);
this._destroyed.next();
this._destroyed.complete();
}
/** Returns observable that emits when a scroll event is fired on the host element. */
elementScrolled(): Observable<Event> |
/** Gets the ElementRef for the viewport. */
getElementRef(): ElementRef<HTMLElement> {
return this.elementRef;
}
/**
* Scrolls to the specified offsets. This is a normalized version of the browser's native scrollTo
* method, since browsers are not consistent about what scrollLeft means in RTL. For this method
* left and right always refer to the left and right side of the scrolling container irrespective
* of the layout direction. start and end refer to left and right in an LTR context and vice-versa
* in an RTL context.
* @param options specified the offsets to scroll to.
*/
scrollTo(options: ExtendedScrollToOptions): void {
const el = this.elementRef.nativeElement;
const isRtl = this.dir && this.dir.value == 'rtl';
// Rewrite start & end offsets as right or left offsets.
options.left = options.left == null ? (isRtl ? options.end : options.start) : options.left;
options.right = options.right == null ? (isRtl ? options.start : options.end) : options.right;
// Rewrite the bottom offset as a top offset.
if (options.bottom != null) {
(options as _Without<_Bottom> & _Top).top =
el.scrollHeight - el.clientHeight - options.bottom;
}
// Rewrite the right offset as a left offset.
if (isRtl && getRtlScrollAxisType() != RtlScrollAxisType.NORMAL) {
if (options.left != null) {
(options as _Without<_Left> & _Right).right =
el.scrollWidth - el.clientWidth - options.left;
}
if (getRtlScrollAxisType() == RtlScrollAxisType.INVERTED) {
options.left = options.right;
} else if (getRtlScrollAxisType() == RtlScrollAxisType.NEGATED) {
options.left = options.right ? -options.right : options.right;
}
} else {
if (options.right != null) {
(options as _Without<_Right> & _Left).left =
el.scrollWidth - el.clientWidth - options.right;
}
}
this._applyScrollToOptions(options);
}
private _applyScrollToOptions(options: ScrollToOptions): void {
const el = this.elementRef.nativeElement;
if (supportsScrollBehavior()) {
el.scrollTo(options);
} else {
if (options.top != null) {
el.scrollTop = options.top;
}
if (options.left != null) {
el.scrollLeft = options.left;
}
}
}
/**
* Measures the scroll offset relative to the specified edge of the viewport. This method can be
* used instead of directly checking scrollLeft or scrollTop, since browsers are not consistent
* about what scrollLeft means in RTL. The values returned by this method are normalized such that
* left and right always refer to the left and right side of the scrolling container irrespective
* of the layout direction. start and end refer to left and right in an LTR context and vice-versa
* in an RTL context.
* @param from The edge to measure from.
*/
measureScrollOffset(from: 'top' | 'left' | 'right' | 'bottom' | 'start' | 'end'): number {
const LEFT = 'left';
const RIGHT = 'right';
const el = this.elementRef.nativeElement;
if (from == 'top') {
return el.scrollTop;
}
if (from == 'bottom') {
return el.scrollHeight - el.clientHeight - el.scrollTop;
}
// Rewrite start & end as left or right offsets.
const isRtl = this.dir && this.dir.value == 'rtl';
if (from == 'start') {
from = isRtl ? RIGHT : LEFT;
} else if (from == 'end') {
from = isRtl ? LEFT : RIGHT;
}
if (isRtl && getRtlScrollAxisType() == RtlScrollAxisType.INVERTED) {
// For INVERTED, scrollLeft is (scrollWidth - clientWidth) when scrolled all the way left and
// 0 when scrolled all the way right.
if (from == LEFT) {
return el.scrollWidth - el.clientWidth - el.scrollLeft;
} else {
return el.scrollLeft;
}
} else if (isRtl && getRtlScrollAxisType() == RtlScrollAxisType.NEGATED) {
// For NEGATED, scrollLeft is -(scrollWidth - clientWidth) when scrolled all the way left and
// 0 when scrolled all the way right.
if (from == LEFT) {
return el.scrollLeft + el.scrollWidth - el.clientWidth;
} else {
return -el.scrollLeft;
}
} else {
// For NORMAL, as well as non-RTL contexts, scrollLeft is 0 when scrolled all the way left and
// (scrollWidth - clientWidth) when scrolled all the way right.
if (from == LEFT) {
return el.scrollLeft;
} else {
return el.scrollWidth - el.clientWidth - el.scrollLeft;
}
}
}
}
| {
return this._elementScrolled;
} | identifier_body |
scrollable.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Directionality} from '@angular/cdk/bidi';
import {
getRtlScrollAxisType,
RtlScrollAxisType,
supportsScrollBehavior
} from '@angular/cdk/platform';
import {Directive, ElementRef, NgZone, OnDestroy, OnInit, Optional} from '@angular/core';
import {fromEvent, Observable, Subject, Observer} from 'rxjs';
import {takeUntil} from 'rxjs/operators';
import {ScrollDispatcher} from './scroll-dispatcher';
export type _Without<T> = {[P in keyof T]?: never};
export type _XOR<T, U> = (_Without<T> & U) | (_Without<U> & T);
export type _Top = {top?: number};
export type _Bottom = {bottom?: number};
export type _Left = {left?: number};
export type _Right = {right?: number};
export type _Start = {start?: number};
export type _End = {end?: number};
export type _XAxis = _XOR<_XOR<_Left, _Right>, _XOR<_Start, _End>>;
export type _YAxis = _XOR<_Top, _Bottom>;
/**
* An extended version of ScrollToOptions that allows expressing scroll offsets relative to the
* top, bottom, left, right, start, or end of the viewport rather than just the top and left.
* Please note: the top and bottom properties are mutually exclusive, as are the left, right,
* start, and end properties.
*/
export type ExtendedScrollToOptions = _XAxis & _YAxis & ScrollOptions;
/**
* Sends an event when the directive's element is scrolled. Registers itself with the
* ScrollDispatcher service to include itself as part of its collection of scrolling events that it
* can be listened to through the service.
*/
@Directive({
selector: '[cdk-scrollable], [cdkScrollable]'
})
export class CdkScrollable implements OnInit, OnDestroy {
private _destroyed = new Subject();
private _elementScrolled: Observable<Event> = new Observable((observer: Observer<Event>) =>
this.ngZone.runOutsideAngular(() =>
fromEvent(this.elementRef.nativeElement, 'scroll').pipe(takeUntil(this._destroyed))
.subscribe(observer)));
constructor(protected elementRef: ElementRef<HTMLElement>,
protected scrollDispatcher: ScrollDispatcher,
protected ngZone: NgZone,
@Optional() protected dir?: Directionality) {}
| () {
this.scrollDispatcher.register(this);
}
ngOnDestroy() {
this.scrollDispatcher.deregister(this);
this._destroyed.next();
this._destroyed.complete();
}
/** Returns observable that emits when a scroll event is fired on the host element. */
elementScrolled(): Observable<Event> {
return this._elementScrolled;
}
/** Gets the ElementRef for the viewport. */
getElementRef(): ElementRef<HTMLElement> {
return this.elementRef;
}
/**
* Scrolls to the specified offsets. This is a normalized version of the browser's native scrollTo
* method, since browsers are not consistent about what scrollLeft means in RTL. For this method
* left and right always refer to the left and right side of the scrolling container irrespective
* of the layout direction. start and end refer to left and right in an LTR context and vice-versa
* in an RTL context.
* @param options specified the offsets to scroll to.
*/
scrollTo(options: ExtendedScrollToOptions): void {
const el = this.elementRef.nativeElement;
const isRtl = this.dir && this.dir.value == 'rtl';
// Rewrite start & end offsets as right or left offsets.
options.left = options.left == null ? (isRtl ? options.end : options.start) : options.left;
options.right = options.right == null ? (isRtl ? options.start : options.end) : options.right;
// Rewrite the bottom offset as a top offset.
if (options.bottom != null) {
(options as _Without<_Bottom> & _Top).top =
el.scrollHeight - el.clientHeight - options.bottom;
}
// Rewrite the right offset as a left offset.
if (isRtl && getRtlScrollAxisType() != RtlScrollAxisType.NORMAL) {
if (options.left != null) {
(options as _Without<_Left> & _Right).right =
el.scrollWidth - el.clientWidth - options.left;
}
if (getRtlScrollAxisType() == RtlScrollAxisType.INVERTED) {
options.left = options.right;
} else if (getRtlScrollAxisType() == RtlScrollAxisType.NEGATED) {
options.left = options.right ? -options.right : options.right;
}
} else {
if (options.right != null) {
(options as _Without<_Right> & _Left).left =
el.scrollWidth - el.clientWidth - options.right;
}
}
this._applyScrollToOptions(options);
}
/**
 * Applies already-normalized scroll offsets to the host element, using the
 * native `scrollTo` when the browser supports the scroll-behavior option and
 * falling back to direct `scrollTop`/`scrollLeft` assignment otherwise.
 */
private _applyScrollToOptions(options: ScrollToOptions): void {
  const element = this.elementRef.nativeElement;

  if (supportsScrollBehavior()) {
    element.scrollTo(options);
    return;
  }

  // Fallback path: set each axis individually, skipping axes that were not
  // specified so the other axis keeps its current position.
  if (options.top != null) {
    element.scrollTop = options.top;
  }
  if (options.left != null) {
    element.scrollLeft = options.left;
  }
}
/**
 * Measures the scroll offset relative to the specified edge of the viewport. This method can be
 * used instead of directly checking scrollLeft or scrollTop, since browsers are not consistent
 * about what scrollLeft means in RTL. The values returned by this method are normalized such that
 * left and right always refer to the left and right side of the scrolling container irrespective
 * of the layout direction. start and end refer to left and right in an LTR context and vice-versa
 * in an RTL context.
 * @param from The edge to measure from.
 * @returns The scroll offset from the given edge, in the same units as
 *     `scrollTop`/`scrollLeft` (CSS pixels).
 */
measureScrollOffset(from: 'top' | 'left' | 'right' | 'bottom' | 'start' | 'end'): number {
  const LEFT = 'left';
  const RIGHT = 'right';
  const el = this.elementRef.nativeElement;
  // Vertical offsets need no RTL normalization.
  if (from == 'top') {
    return el.scrollTop;
  }
  if (from == 'bottom') {
    return el.scrollHeight - el.clientHeight - el.scrollTop;
  }
  // Rewrite start & end as left or right offsets.
  const isRtl = this.dir && this.dir.value == 'rtl';
  if (from == 'start') {
    from = isRtl ? RIGHT : LEFT;
  } else if (from == 'end') {
    from = isRtl ? LEFT : RIGHT;
  }
  // Normalize according to how the current browser reports scrollLeft in RTL.
  if (isRtl && getRtlScrollAxisType() == RtlScrollAxisType.INVERTED) {
    // For INVERTED, scrollLeft is (scrollWidth - clientWidth) when scrolled all the way left and
    // 0 when scrolled all the way right.
    if (from == LEFT) {
      return el.scrollWidth - el.clientWidth - el.scrollLeft;
    } else {
      return el.scrollLeft;
    }
  } else if (isRtl && getRtlScrollAxisType() == RtlScrollAxisType.NEGATED) {
    // For NEGATED, scrollLeft is -(scrollWidth - clientWidth) when scrolled all the way left and
    // 0 when scrolled all the way right.
    if (from == LEFT) {
      return el.scrollLeft + el.scrollWidth - el.clientWidth;
    } else {
      return -el.scrollLeft;
    }
  } else {
    // For NORMAL, as well as non-RTL contexts, scrollLeft is 0 when scrolled all the way left and
    // (scrollWidth - clientWidth) when scrolled all the way right.
    if (from == LEFT) {
      return el.scrollLeft;
    } else {
      return el.scrollWidth - el.clientWidth - el.scrollLeft;
    }
  }
}
}
| ngOnInit | identifier_name |
mod.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Native thread-blocking I/O implementation
//!
//! This module contains the implementation of native thread-blocking
//! implementations of I/O on all platforms. This module is not intended to be
//! used directly, but rather the rust runtime will fall back to using it if
//! necessary.
//!
//! Rust code normally runs inside of green tasks with a local scheduler using
//! asynchronous I/O to cooperate among tasks. This model is not always
//! available, however, and that's where these native implementations come into
//! play. The only dependencies of these modules are the normal system libraries
//! that you would find on the respective platform.
#![allow(non_snake_case_functions)]
use libc::c_int;
use libc;
use std::c_str::CString;
use std::os;
use std::rt::rtio;
use std::rt::rtio::{IoResult, IoError};
// Local re-exports
pub use self::file::FileDesc;
pub use self::process::Process;
mod helper_thread;
// Native I/O implementations
pub mod addrinfo;
pub mod net;
pub mod process;
mod util;
#[cfg(unix)]
#[path = "file_unix.rs"]
pub mod file;
#[cfg(windows)]
#[path = "file_windows.rs"]
pub mod file;
#[cfg(target_os = "macos")]
#[cfg(target_os = "ios")]
#[cfg(target_os = "freebsd")]
#[cfg(target_os = "dragonfly")]
#[cfg(target_os = "android")]
#[cfg(target_os = "linux")]
#[path = "timer_unix.rs"]
pub mod timer;
#[cfg(target_os = "windows")]
#[path = "timer_windows.rs"]
pub mod timer;
#[cfg(unix)]
#[path = "pipe_unix.rs"]
pub mod pipe;
#[cfg(windows)]
#[path = "pipe_windows.rs"]
pub mod pipe;
#[cfg(windows)]
#[path = "tty_windows.rs"]
mod tty;
#[cfg(unix)] #[path = "c_unix.rs"] mod c;
#[cfg(windows)] #[path = "c_windows.rs"] mod c;
/// Builds the `IoError` returned for operations the `native` runtime does
/// not implement, using the platform's "not implemented" error code.
fn unimpl() -> IoError {
    // Alias the platform-specific "call not implemented" constant.
    #[cfg(unix)] use libc::ENOSYS as ERROR;
    #[cfg(windows)] use libc::ERROR_CALL_NOT_IMPLEMENTED as ERROR;
    IoError {
        code: ERROR as uint,
        extra: 0,
        detail: Some("not yet supported by the `native` runtime, maybe try `green`.".to_string()),
    }
}
/// Captures the calling thread's current OS error (`errno`/`GetLastError`)
/// as an `IoError` with a human-readable description.
fn last_error() -> IoError {
    // Read errno exactly once: the original read it twice, so an intervening
    // libc call (inside `os::error_string`'s setup, or on another inlining
    // path) could make `code` and `detail` describe different errors.
    let errno = os::errno() as uint;
    IoError {
        code: errno,
        extra: 0,
        detail: Some(os::error_string(errno)),
    }
}
// unix has nonzero values as errors
/// Maps a libc-style return code to an `IoResult`: zero is success, anything
/// else captures the current OS error.
fn mkerr_libc(ret: libc::c_int) -> IoResult<()> {
    match ret {
        0 => Ok(()),
        _ => Err(last_error()),
    }
}
// windows has zero values as errors
/// Maps a win32 BOOL-style return code to an `IoResult`: zero is failure
/// (capture the current OS error), anything else is success.
#[cfg(windows)]
fn mkerr_winbool(ret: libc::c_int) -> IoResult<()> {
    match ret {
        0 => Err(last_error()),
        _ => Ok(()),
    }
}
#[cfg(windows)]
#[inline]
/// Invokes `f` repeatedly until it returns something other than -1 with
/// `WSAEINTR`, i.e. transparently retries winsock calls that were interrupted.
fn retry(f: || -> libc::c_int) -> libc::c_int {
    loop {
        match f() {
            // Interrupted call: loop and try again.
            -1 if os::errno() as int == libc::WSAEINTR as int => {}
            n => return n,
        }
    }
}
#[cfg(unix)]
#[inline]
/// Invokes `f` repeatedly until it returns something other than -1 with
/// `EINTR`, i.e. transparently retries syscalls interrupted by signals.
fn retry(f: || -> libc::c_int) -> libc::c_int {
    loop {
        match f() {
            // Interrupted system call: loop and try again.
            -1 if os::errno() as int == libc::EINTR as int => {}
            n => return n,
        }
    }
}
/// Drives `f` over the remainder of `data` (retrying on EINTR via `retry`)
/// until the whole buffer is processed, `f` reports 0 (EOF-style stop), or
/// `f` reports -1 (error). Returns the number of bytes processed, or the
/// negative error return from `f`.
fn keep_going(data: &[u8], f: |*const u8, uint| -> i64) -> i64 {
    let origamt = data.len();
    let mut data = data.as_ptr();
    let mut amt = origamt;
    while amt > 0 {
        let ret = retry(|| f(data, amt) as libc::c_int);
        if ret == 0 {
            // EOF-style result: stop early and report what was processed.
            break
        } else if ret != -1 {
            // Partial progress: advance the pointer past the handled bytes.
            amt -= ret as uint;
            data = unsafe { data.offset(ret as int) };
        } else {
            // Hard error (EINTR was already filtered by retry); propagate.
            return ret as i64;
        }
    }
    return (origamt - amt) as i64;
}
/// Implementation of rt::rtio's IoFactory trait to generate handles to the
/// native I/O functionality.
pub struct IoFactory {
    // Private unit field: forces construction through `IoFactory::new`, which
    // performs the required one-time network initialization.
    _cannot_construct_outside_of_this_module: ()
}
impl IoFactory {
    /// Creates the factory, running `net::init()` first so networking is
    /// initialized before any I/O handle is produced.
    pub fn new() -> IoFactory {
        net::init();
        IoFactory { _cannot_construct_outside_of_this_module: () }
    }
}
impl rtio::IoFactory for IoFactory {
// networking
fn tcp_connect(&mut self, addr: rtio::SocketAddr,
timeout: Option<u64>)
-> IoResult<Box<rtio::RtioTcpStream + Send>>
{
net::TcpStream::connect(addr, timeout).map(|s| {
box s as Box<rtio::RtioTcpStream + Send>
})
}
fn tcp_bind(&mut self, addr: rtio::SocketAddr)
-> IoResult<Box<rtio::RtioTcpListener + Send>> {
net::TcpListener::bind(addr).map(|s| {
box s as Box<rtio::RtioTcpListener + Send>
})
}
fn | (&mut self, addr: rtio::SocketAddr)
-> IoResult<Box<rtio::RtioUdpSocket + Send>> {
net::UdpSocket::bind(addr).map(|u| {
box u as Box<rtio::RtioUdpSocket + Send>
})
}
fn unix_bind(&mut self, path: &CString)
-> IoResult<Box<rtio::RtioUnixListener + Send>> {
pipe::UnixListener::bind(path).map(|s| {
box s as Box<rtio::RtioUnixListener + Send>
})
}
fn unix_connect(&mut self, path: &CString,
timeout: Option<u64>) -> IoResult<Box<rtio::RtioPipe + Send>> {
pipe::UnixStream::connect(path, timeout).map(|s| {
box s as Box<rtio::RtioPipe + Send>
})
}
fn get_host_addresses(&mut self, host: Option<&str>, servname: Option<&str>,
hint: Option<rtio::AddrinfoHint>)
-> IoResult<Vec<rtio::AddrinfoInfo>>
{
addrinfo::GetAddrInfoRequest::run(host, servname, hint)
}
// filesystem operations
fn fs_from_raw_fd(&mut self, fd: c_int, close: rtio::CloseBehavior)
-> Box<rtio::RtioFileStream + Send> {
let close = match close {
rtio::CloseSynchronously | rtio::CloseAsynchronously => true,
rtio::DontClose => false
};
box file::FileDesc::new(fd, close) as Box<rtio::RtioFileStream + Send>
}
fn fs_open(&mut self, path: &CString, fm: rtio::FileMode,
fa: rtio::FileAccess)
-> IoResult<Box<rtio::RtioFileStream + Send>>
{
file::open(path, fm, fa).map(|fd| box fd as Box<rtio::RtioFileStream + Send>)
}
fn fs_unlink(&mut self, path: &CString) -> IoResult<()> {
file::unlink(path)
}
fn fs_stat(&mut self, path: &CString) -> IoResult<rtio::FileStat> {
file::stat(path)
}
fn fs_mkdir(&mut self, path: &CString, mode: uint) -> IoResult<()> {
file::mkdir(path, mode)
}
fn fs_chmod(&mut self, path: &CString, mode: uint) -> IoResult<()> {
file::chmod(path, mode)
}
fn fs_rmdir(&mut self, path: &CString) -> IoResult<()> {
file::rmdir(path)
}
fn fs_rename(&mut self, path: &CString, to: &CString) -> IoResult<()> {
file::rename(path, to)
}
fn fs_readdir(&mut self, path: &CString, _flags: c_int) -> IoResult<Vec<CString>> {
file::readdir(path)
}
fn fs_lstat(&mut self, path: &CString) -> IoResult<rtio::FileStat> {
file::lstat(path)
}
fn fs_chown(&mut self, path: &CString, uid: int, gid: int) -> IoResult<()> {
file::chown(path, uid, gid)
}
fn fs_readlink(&mut self, path: &CString) -> IoResult<CString> {
file::readlink(path)
}
fn fs_symlink(&mut self, src: &CString, dst: &CString) -> IoResult<()> {
file::symlink(src, dst)
}
fn fs_link(&mut self, src: &CString, dst: &CString) -> IoResult<()> {
file::link(src, dst)
}
fn fs_utime(&mut self, src: &CString, atime: u64,
mtime: u64) -> IoResult<()> {
file::utime(src, atime, mtime)
}
// misc
fn timer_init(&mut self) -> IoResult<Box<rtio::RtioTimer + Send>> {
timer::Timer::new().map(|t| box t as Box<rtio::RtioTimer + Send>)
}
fn spawn(&mut self, cfg: rtio::ProcessConfig)
-> IoResult<(Box<rtio::RtioProcess + Send>,
Vec<Option<Box<rtio::RtioPipe + Send>>>)> {
process::Process::spawn(cfg).map(|(p, io)| {
(box p as Box<rtio::RtioProcess + Send>,
io.move_iter().map(|p| p.map(|p| {
box p as Box<rtio::RtioPipe + Send>
})).collect())
})
}
fn kill(&mut self, pid: libc::pid_t, signum: int) -> IoResult<()> {
process::Process::kill(pid, signum)
}
fn pipe_open(&mut self, fd: c_int) -> IoResult<Box<rtio::RtioPipe + Send>> {
Ok(box file::FileDesc::new(fd, true) as Box<rtio::RtioPipe + Send>)
}
#[cfg(unix)]
fn tty_open(&mut self, fd: c_int, _readable: bool)
-> IoResult<Box<rtio::RtioTTY + Send>> {
if unsafe { libc::isatty(fd) } != 0 {
Ok(box file::FileDesc::new(fd, true) as Box<rtio::RtioTTY + Send>)
} else {
Err(IoError {
code: libc::ENOTTY as uint,
extra: 0,
detail: None,
})
}
}
#[cfg(windows)]
fn tty_open(&mut self, fd: c_int, _readable: bool)
-> IoResult<Box<rtio::RtioTTY + Send>> {
if tty::is_tty(fd) {
Ok(box tty::WindowsTTY::new(fd) as Box<rtio::RtioTTY + Send>)
} else {
Err(IoError {
code: libc::ERROR_INVALID_HANDLE as uint,
extra: 0,
detail: None,
})
}
}
fn signal(&mut self, _signal: int, _cb: Box<rtio::Callback>)
-> IoResult<Box<rtio::RtioSignal + Send>> {
Err(unimpl())
}
}
| udp_bind | identifier_name |
mod.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Native thread-blocking I/O implementation
//!
//! This module contains the implementation of native thread-blocking
//! implementations of I/O on all platforms. This module is not intended to be
//! used directly, but rather the rust runtime will fall back to using it if
//! necessary.
//!
//! Rust code normally runs inside of green tasks with a local scheduler using
//! asynchronous I/O to cooperate among tasks. This model is not always
//! available, however, and that's where these native implementations come into
//! play. The only dependencies of these modules are the normal system libraries
//! that you would find on the respective platform.
#![allow(non_snake_case_functions)]
use libc::c_int;
use libc;
use std::c_str::CString;
use std::os;
use std::rt::rtio;
use std::rt::rtio::{IoResult, IoError};
// Local re-exports
pub use self::file::FileDesc;
pub use self::process::Process;
mod helper_thread;
// Native I/O implementations
pub mod addrinfo;
pub mod net;
pub mod process;
mod util;
#[cfg(unix)]
#[path = "file_unix.rs"]
pub mod file;
#[cfg(windows)]
#[path = "file_windows.rs"]
pub mod file;
#[cfg(target_os = "macos")]
#[cfg(target_os = "ios")]
#[cfg(target_os = "freebsd")]
#[cfg(target_os = "dragonfly")]
#[cfg(target_os = "android")]
#[cfg(target_os = "linux")]
#[path = "timer_unix.rs"]
pub mod timer;
#[cfg(target_os = "windows")]
#[path = "timer_windows.rs"]
pub mod timer;
#[cfg(unix)]
#[path = "pipe_unix.rs"]
pub mod pipe;
#[cfg(windows)]
#[path = "pipe_windows.rs"]
pub mod pipe;
#[cfg(windows)]
#[path = "tty_windows.rs"]
mod tty;
#[cfg(unix)] #[path = "c_unix.rs"] mod c;
#[cfg(windows)] #[path = "c_windows.rs"] mod c;
fn unimpl() -> IoError {
#[cfg(unix)] use libc::ENOSYS as ERROR;
#[cfg(windows)] use libc::ERROR_CALL_NOT_IMPLEMENTED as ERROR;
IoError {
code: ERROR as uint,
extra: 0,
detail: Some("not yet supported by the `native` runtime, maybe try `green`.".to_string()),
}
}
/// Captures the calling thread's current OS error (`errno`/`GetLastError`)
/// as an `IoError` with a human-readable description.
fn last_error() -> IoError {
    // Read errno exactly once so `code` and `detail` are guaranteed to
    // describe the same error (the original read it twice).
    let errno = os::errno() as uint;
    IoError {
        code: errno,
        extra: 0,
        detail: Some(os::error_string(errno)),
    }
}
// unix has nonzero values as errors
fn mkerr_libc(ret: libc::c_int) -> IoResult<()> {
if ret != 0 {
Err(last_error())
} else {
Ok(())
}
}
// windows has zero values as errors
#[cfg(windows)]
fn mkerr_winbool(ret: libc::c_int) -> IoResult<()> {
if ret == 0 {
Err(last_error())
} else {
Ok(())
}
}
#[cfg(windows)]
#[inline]
fn retry(f: || -> libc::c_int) -> libc::c_int {
loop {
match f() {
-1 if os::errno() as int == libc::WSAEINTR as int => {}
n => return n,
}
}
}
#[cfg(unix)]
#[inline]
fn retry(f: || -> libc::c_int) -> libc::c_int {
loop {
match f() {
-1 if os::errno() as int == libc::EINTR as int => |
n => return n,
}
}
}
fn keep_going(data: &[u8], f: |*const u8, uint| -> i64) -> i64 {
let origamt = data.len();
let mut data = data.as_ptr();
let mut amt = origamt;
while amt > 0 {
let ret = retry(|| f(data, amt) as libc::c_int);
if ret == 0 {
break
} else if ret != -1 {
amt -= ret as uint;
data = unsafe { data.offset(ret as int) };
} else {
return ret as i64;
}
}
return (origamt - amt) as i64;
}
/// Implementation of rt::rtio's IoFactory trait to generate handles to the
/// native I/O functionality.
pub struct IoFactory {
_cannot_construct_outside_of_this_module: ()
}
impl IoFactory {
pub fn new() -> IoFactory {
net::init();
IoFactory { _cannot_construct_outside_of_this_module: () }
}
}
impl rtio::IoFactory for IoFactory {
// networking
fn tcp_connect(&mut self, addr: rtio::SocketAddr,
timeout: Option<u64>)
-> IoResult<Box<rtio::RtioTcpStream + Send>>
{
net::TcpStream::connect(addr, timeout).map(|s| {
box s as Box<rtio::RtioTcpStream + Send>
})
}
fn tcp_bind(&mut self, addr: rtio::SocketAddr)
-> IoResult<Box<rtio::RtioTcpListener + Send>> {
net::TcpListener::bind(addr).map(|s| {
box s as Box<rtio::RtioTcpListener + Send>
})
}
fn udp_bind(&mut self, addr: rtio::SocketAddr)
-> IoResult<Box<rtio::RtioUdpSocket + Send>> {
net::UdpSocket::bind(addr).map(|u| {
box u as Box<rtio::RtioUdpSocket + Send>
})
}
fn unix_bind(&mut self, path: &CString)
-> IoResult<Box<rtio::RtioUnixListener + Send>> {
pipe::UnixListener::bind(path).map(|s| {
box s as Box<rtio::RtioUnixListener + Send>
})
}
fn unix_connect(&mut self, path: &CString,
timeout: Option<u64>) -> IoResult<Box<rtio::RtioPipe + Send>> {
pipe::UnixStream::connect(path, timeout).map(|s| {
box s as Box<rtio::RtioPipe + Send>
})
}
fn get_host_addresses(&mut self, host: Option<&str>, servname: Option<&str>,
hint: Option<rtio::AddrinfoHint>)
-> IoResult<Vec<rtio::AddrinfoInfo>>
{
addrinfo::GetAddrInfoRequest::run(host, servname, hint)
}
// filesystem operations
fn fs_from_raw_fd(&mut self, fd: c_int, close: rtio::CloseBehavior)
-> Box<rtio::RtioFileStream + Send> {
let close = match close {
rtio::CloseSynchronously | rtio::CloseAsynchronously => true,
rtio::DontClose => false
};
box file::FileDesc::new(fd, close) as Box<rtio::RtioFileStream + Send>
}
fn fs_open(&mut self, path: &CString, fm: rtio::FileMode,
fa: rtio::FileAccess)
-> IoResult<Box<rtio::RtioFileStream + Send>>
{
file::open(path, fm, fa).map(|fd| box fd as Box<rtio::RtioFileStream + Send>)
}
fn fs_unlink(&mut self, path: &CString) -> IoResult<()> {
file::unlink(path)
}
fn fs_stat(&mut self, path: &CString) -> IoResult<rtio::FileStat> {
file::stat(path)
}
fn fs_mkdir(&mut self, path: &CString, mode: uint) -> IoResult<()> {
file::mkdir(path, mode)
}
fn fs_chmod(&mut self, path: &CString, mode: uint) -> IoResult<()> {
file::chmod(path, mode)
}
fn fs_rmdir(&mut self, path: &CString) -> IoResult<()> {
file::rmdir(path)
}
fn fs_rename(&mut self, path: &CString, to: &CString) -> IoResult<()> {
file::rename(path, to)
}
fn fs_readdir(&mut self, path: &CString, _flags: c_int) -> IoResult<Vec<CString>> {
file::readdir(path)
}
fn fs_lstat(&mut self, path: &CString) -> IoResult<rtio::FileStat> {
file::lstat(path)
}
fn fs_chown(&mut self, path: &CString, uid: int, gid: int) -> IoResult<()> {
file::chown(path, uid, gid)
}
fn fs_readlink(&mut self, path: &CString) -> IoResult<CString> {
file::readlink(path)
}
fn fs_symlink(&mut self, src: &CString, dst: &CString) -> IoResult<()> {
file::symlink(src, dst)
}
fn fs_link(&mut self, src: &CString, dst: &CString) -> IoResult<()> {
file::link(src, dst)
}
fn fs_utime(&mut self, src: &CString, atime: u64,
mtime: u64) -> IoResult<()> {
file::utime(src, atime, mtime)
}
// misc
fn timer_init(&mut self) -> IoResult<Box<rtio::RtioTimer + Send>> {
timer::Timer::new().map(|t| box t as Box<rtio::RtioTimer + Send>)
}
fn spawn(&mut self, cfg: rtio::ProcessConfig)
-> IoResult<(Box<rtio::RtioProcess + Send>,
Vec<Option<Box<rtio::RtioPipe + Send>>>)> {
process::Process::spawn(cfg).map(|(p, io)| {
(box p as Box<rtio::RtioProcess + Send>,
io.move_iter().map(|p| p.map(|p| {
box p as Box<rtio::RtioPipe + Send>
})).collect())
})
}
fn kill(&mut self, pid: libc::pid_t, signum: int) -> IoResult<()> {
process::Process::kill(pid, signum)
}
fn pipe_open(&mut self, fd: c_int) -> IoResult<Box<rtio::RtioPipe + Send>> {
Ok(box file::FileDesc::new(fd, true) as Box<rtio::RtioPipe + Send>)
}
#[cfg(unix)]
fn tty_open(&mut self, fd: c_int, _readable: bool)
-> IoResult<Box<rtio::RtioTTY + Send>> {
if unsafe { libc::isatty(fd) } != 0 {
Ok(box file::FileDesc::new(fd, true) as Box<rtio::RtioTTY + Send>)
} else {
Err(IoError {
code: libc::ENOTTY as uint,
extra: 0,
detail: None,
})
}
}
#[cfg(windows)]
fn tty_open(&mut self, fd: c_int, _readable: bool)
-> IoResult<Box<rtio::RtioTTY + Send>> {
if tty::is_tty(fd) {
Ok(box tty::WindowsTTY::new(fd) as Box<rtio::RtioTTY + Send>)
} else {
Err(IoError {
code: libc::ERROR_INVALID_HANDLE as uint,
extra: 0,
detail: None,
})
}
}
fn signal(&mut self, _signal: int, _cb: Box<rtio::Callback>)
-> IoResult<Box<rtio::RtioSignal + Send>> {
Err(unimpl())
}
}
| {} | conditional_block |
mod.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Native thread-blocking I/O implementation
//!
//! This module contains the implementation of native thread-blocking
//! implementations of I/O on all platforms. This module is not intended to be
//! used directly, but rather the rust runtime will fall back to using it if
//! necessary.
//!
//! Rust code normally runs inside of green tasks with a local scheduler using
//! asynchronous I/O to cooperate among tasks. This model is not always
//! available, however, and that's where these native implementations come into
//! play. The only dependencies of these modules are the normal system libraries
//! that you would find on the respective platform.
#![allow(non_snake_case_functions)]
use libc::c_int;
use libc;
use std::c_str::CString;
use std::os;
use std::rt::rtio;
use std::rt::rtio::{IoResult, IoError};
// Local re-exports
pub use self::file::FileDesc;
pub use self::process::Process;
mod helper_thread;
// Native I/O implementations
pub mod addrinfo;
pub mod net; |
#[cfg(unix)]
#[path = "file_unix.rs"]
pub mod file;
#[cfg(windows)]
#[path = "file_windows.rs"]
pub mod file;
#[cfg(target_os = "macos")]
#[cfg(target_os = "ios")]
#[cfg(target_os = "freebsd")]
#[cfg(target_os = "dragonfly")]
#[cfg(target_os = "android")]
#[cfg(target_os = "linux")]
#[path = "timer_unix.rs"]
pub mod timer;
#[cfg(target_os = "windows")]
#[path = "timer_windows.rs"]
pub mod timer;
#[cfg(unix)]
#[path = "pipe_unix.rs"]
pub mod pipe;
#[cfg(windows)]
#[path = "pipe_windows.rs"]
pub mod pipe;
#[cfg(windows)]
#[path = "tty_windows.rs"]
mod tty;
#[cfg(unix)] #[path = "c_unix.rs"] mod c;
#[cfg(windows)] #[path = "c_windows.rs"] mod c;
fn unimpl() -> IoError {
#[cfg(unix)] use libc::ENOSYS as ERROR;
#[cfg(windows)] use libc::ERROR_CALL_NOT_IMPLEMENTED as ERROR;
IoError {
code: ERROR as uint,
extra: 0,
detail: Some("not yet supported by the `native` runtime, maybe try `green`.".to_string()),
}
}
/// Captures the calling thread's current OS error (`errno`/`GetLastError`)
/// as an `IoError` with a human-readable description.
fn last_error() -> IoError {
    // Read errno exactly once so `code` and `detail` are guaranteed to
    // describe the same error (the original read it twice).
    let errno = os::errno() as uint;
    IoError {
        code: errno,
        extra: 0,
        detail: Some(os::error_string(errno)),
    }
}
// unix has nonzero values as errors
fn mkerr_libc(ret: libc::c_int) -> IoResult<()> {
if ret != 0 {
Err(last_error())
} else {
Ok(())
}
}
// windows has zero values as errors
#[cfg(windows)]
fn mkerr_winbool(ret: libc::c_int) -> IoResult<()> {
if ret == 0 {
Err(last_error())
} else {
Ok(())
}
}
#[cfg(windows)]
#[inline]
fn retry(f: || -> libc::c_int) -> libc::c_int {
loop {
match f() {
-1 if os::errno() as int == libc::WSAEINTR as int => {}
n => return n,
}
}
}
#[cfg(unix)]
#[inline]
fn retry(f: || -> libc::c_int) -> libc::c_int {
loop {
match f() {
-1 if os::errno() as int == libc::EINTR as int => {}
n => return n,
}
}
}
fn keep_going(data: &[u8], f: |*const u8, uint| -> i64) -> i64 {
let origamt = data.len();
let mut data = data.as_ptr();
let mut amt = origamt;
while amt > 0 {
let ret = retry(|| f(data, amt) as libc::c_int);
if ret == 0 {
break
} else if ret != -1 {
amt -= ret as uint;
data = unsafe { data.offset(ret as int) };
} else {
return ret as i64;
}
}
return (origamt - amt) as i64;
}
/// Implementation of rt::rtio's IoFactory trait to generate handles to the
/// native I/O functionality.
pub struct IoFactory {
_cannot_construct_outside_of_this_module: ()
}
impl IoFactory {
pub fn new() -> IoFactory {
net::init();
IoFactory { _cannot_construct_outside_of_this_module: () }
}
}
impl rtio::IoFactory for IoFactory {
// networking
fn tcp_connect(&mut self, addr: rtio::SocketAddr,
timeout: Option<u64>)
-> IoResult<Box<rtio::RtioTcpStream + Send>>
{
net::TcpStream::connect(addr, timeout).map(|s| {
box s as Box<rtio::RtioTcpStream + Send>
})
}
fn tcp_bind(&mut self, addr: rtio::SocketAddr)
-> IoResult<Box<rtio::RtioTcpListener + Send>> {
net::TcpListener::bind(addr).map(|s| {
box s as Box<rtio::RtioTcpListener + Send>
})
}
fn udp_bind(&mut self, addr: rtio::SocketAddr)
-> IoResult<Box<rtio::RtioUdpSocket + Send>> {
net::UdpSocket::bind(addr).map(|u| {
box u as Box<rtio::RtioUdpSocket + Send>
})
}
fn unix_bind(&mut self, path: &CString)
-> IoResult<Box<rtio::RtioUnixListener + Send>> {
pipe::UnixListener::bind(path).map(|s| {
box s as Box<rtio::RtioUnixListener + Send>
})
}
fn unix_connect(&mut self, path: &CString,
timeout: Option<u64>) -> IoResult<Box<rtio::RtioPipe + Send>> {
pipe::UnixStream::connect(path, timeout).map(|s| {
box s as Box<rtio::RtioPipe + Send>
})
}
fn get_host_addresses(&mut self, host: Option<&str>, servname: Option<&str>,
hint: Option<rtio::AddrinfoHint>)
-> IoResult<Vec<rtio::AddrinfoInfo>>
{
addrinfo::GetAddrInfoRequest::run(host, servname, hint)
}
// filesystem operations
fn fs_from_raw_fd(&mut self, fd: c_int, close: rtio::CloseBehavior)
-> Box<rtio::RtioFileStream + Send> {
let close = match close {
rtio::CloseSynchronously | rtio::CloseAsynchronously => true,
rtio::DontClose => false
};
box file::FileDesc::new(fd, close) as Box<rtio::RtioFileStream + Send>
}
fn fs_open(&mut self, path: &CString, fm: rtio::FileMode,
fa: rtio::FileAccess)
-> IoResult<Box<rtio::RtioFileStream + Send>>
{
file::open(path, fm, fa).map(|fd| box fd as Box<rtio::RtioFileStream + Send>)
}
fn fs_unlink(&mut self, path: &CString) -> IoResult<()> {
file::unlink(path)
}
fn fs_stat(&mut self, path: &CString) -> IoResult<rtio::FileStat> {
file::stat(path)
}
fn fs_mkdir(&mut self, path: &CString, mode: uint) -> IoResult<()> {
file::mkdir(path, mode)
}
fn fs_chmod(&mut self, path: &CString, mode: uint) -> IoResult<()> {
file::chmod(path, mode)
}
fn fs_rmdir(&mut self, path: &CString) -> IoResult<()> {
file::rmdir(path)
}
fn fs_rename(&mut self, path: &CString, to: &CString) -> IoResult<()> {
file::rename(path, to)
}
fn fs_readdir(&mut self, path: &CString, _flags: c_int) -> IoResult<Vec<CString>> {
file::readdir(path)
}
fn fs_lstat(&mut self, path: &CString) -> IoResult<rtio::FileStat> {
file::lstat(path)
}
fn fs_chown(&mut self, path: &CString, uid: int, gid: int) -> IoResult<()> {
file::chown(path, uid, gid)
}
fn fs_readlink(&mut self, path: &CString) -> IoResult<CString> {
file::readlink(path)
}
fn fs_symlink(&mut self, src: &CString, dst: &CString) -> IoResult<()> {
file::symlink(src, dst)
}
fn fs_link(&mut self, src: &CString, dst: &CString) -> IoResult<()> {
file::link(src, dst)
}
fn fs_utime(&mut self, src: &CString, atime: u64,
mtime: u64) -> IoResult<()> {
file::utime(src, atime, mtime)
}
// misc
fn timer_init(&mut self) -> IoResult<Box<rtio::RtioTimer + Send>> {
timer::Timer::new().map(|t| box t as Box<rtio::RtioTimer + Send>)
}
fn spawn(&mut self, cfg: rtio::ProcessConfig)
-> IoResult<(Box<rtio::RtioProcess + Send>,
Vec<Option<Box<rtio::RtioPipe + Send>>>)> {
process::Process::spawn(cfg).map(|(p, io)| {
(box p as Box<rtio::RtioProcess + Send>,
io.move_iter().map(|p| p.map(|p| {
box p as Box<rtio::RtioPipe + Send>
})).collect())
})
}
fn kill(&mut self, pid: libc::pid_t, signum: int) -> IoResult<()> {
process::Process::kill(pid, signum)
}
fn pipe_open(&mut self, fd: c_int) -> IoResult<Box<rtio::RtioPipe + Send>> {
Ok(box file::FileDesc::new(fd, true) as Box<rtio::RtioPipe + Send>)
}
#[cfg(unix)]
fn tty_open(&mut self, fd: c_int, _readable: bool)
-> IoResult<Box<rtio::RtioTTY + Send>> {
if unsafe { libc::isatty(fd) } != 0 {
Ok(box file::FileDesc::new(fd, true) as Box<rtio::RtioTTY + Send>)
} else {
Err(IoError {
code: libc::ENOTTY as uint,
extra: 0,
detail: None,
})
}
}
#[cfg(windows)]
fn tty_open(&mut self, fd: c_int, _readable: bool)
-> IoResult<Box<rtio::RtioTTY + Send>> {
if tty::is_tty(fd) {
Ok(box tty::WindowsTTY::new(fd) as Box<rtio::RtioTTY + Send>)
} else {
Err(IoError {
code: libc::ERROR_INVALID_HANDLE as uint,
extra: 0,
detail: None,
})
}
}
fn signal(&mut self, _signal: int, _cb: Box<rtio::Callback>)
-> IoResult<Box<rtio::RtioSignal + Send>> {
Err(unimpl())
}
} | pub mod process;
mod util; | random_line_split |
testrpc.ts | // Deterministic accounts when called as 'ganache-cli -m soltsice' | /** The first five accounts generated with soltsice mnemonic on Ganache (TestRPC) */
export const testAddresses: string[] =
[
'0x11a9f77f4d2d9f298536f47ef14790b612e40a98',
'0x772167d4d36b92523e5284b6e39a20f7b7add641',
'0x64a91e5c14720ffda56a7a29fa8a891c4bf3ca9a',
'0x5c58888cd9f60ea6f1038dd91712dd3ec0c67e9e',
'0x5c8b0080304a9d6c3e86ca426f7923e1a29de7ab'
];
export const testPrivateKeys: string[] =
[
'3c66198bb60b1d2c95b4e37d4a9d414234c0d9a44d1f394617b48d25c6fceccd',
'b8fa0fb81a8dac0af622108370361ea7cba39c76d8d007dc9f4e7fbc54a24efa',
'dde5be99c44ba1e4aeba40e6690f7bfce1d0b0e0a44e9b5a191e42d1789d15bd',
'8502313126cea7c76ee0315fdf4b8d460a85c7cdc47f85e00249c4ac7dae36d7',
'e81574e55508dc185bf9b26b18920d73c441a012fd535eaf6fbcb21026d11ee3'
]; | random_line_split |
|
jquery.autocomplete.js | /*
* jQuery Autocomplete plugin 1.1
*
* Copyright (c) 2009 Jörn Zaefferer
*
* Dual licensed under the MIT and GPL licenses:
* http://www.opensource.org/licenses/mit-license.php
* http://www.gnu.org/licenses/gpl.html
*
* Revision: $Id: jquery.autocomplete.js 15 2009-08-22 10:30:27Z joern.zaefferer $
*/
;(function($) {
$.fn.extend({
autocomplete: function(urlOrData, options) {
var isUrl = typeof urlOrData == "string";
options = $.extend({}, $.Autocompleter.defaults, { | url: isUrl ? urlOrData : null,
data: isUrl ? null : urlOrData,
delay: isUrl ? $.Autocompleter.defaults.delay : 10,
max: options && !options.scroll ? 10 : 150
}, options);
// if highlight is set to false, replace it with a do-nothing function
options.highlight = options.highlight || function(value) { return value; };
// if the formatMatch option is not specified, then use formatItem for backwards compatibility
options.formatMatch = options.formatMatch || options.formatItem;
return this.each(function() {
new $.Autocompleter(this, options);
});
},
result: function(handler) {
return this.bind("result", handler);
},
search: function(handler) {
return this.trigger("search", [handler]);
},
flushCache: function() {
return this.trigger("flushCache");
},
setOptions: function(options){
return this.trigger("setOptions", [options]);
},
unautocomplete: function() {
return this.trigger("unautocomplete");
}
});
$.Autocompleter = function(input, options) {
var KEY = {
UP: 38,
DOWN: 40,
DEL: 46,
TAB: 9,
RETURN: 13,
ESC: 27,
COMMA: 188,
PAGEUP: 33,
PAGEDOWN: 34,
BACKSPACE: 8
};
// Create $ object for input element
var $input = $(input).attr("autocomplete", "off").addClass(options.inputClass);
var timeout;
var previousValue = "";
var cache = $.Autocompleter.Cache(options);
var hasFocus = 0;
var lastKeyPressCode;
var config = {
mouseDownOnSelect: false
};
var select = $.Autocompleter.Select(options, input, selectCurrent, config);
var blockSubmit;
// prevent form submit in opera when selecting with return key
$.browser.opera && $(input.form).bind("submit.autocomplete", function() {
if (blockSubmit) {
blockSubmit = false;
return false;
}
});
// only opera doesn't trigger keydown multiple times while pressed, others don't work with keypress at all
$input.bind(($.browser.opera ? "keypress" : "keydown") + ".autocomplete", function(event) {
// a keypress means the input has focus
// avoids issue where input had focus before the autocomplete was applied
hasFocus = 1;
// track last key pressed
lastKeyPressCode = event.keyCode;
switch(event.keyCode) {
case KEY.UP:
event.preventDefault();
if ( select.visible() ) {
select.prev();
} else {
onChange(0, true);
}
break;
case KEY.DOWN:
event.preventDefault();
if ( select.visible() ) {
select.next();
} else {
onChange(0, true);
}
break;
case KEY.PAGEUP:
event.preventDefault();
if ( select.visible() ) {
select.pageUp();
} else {
onChange(0, true);
}
break;
case KEY.PAGEDOWN:
event.preventDefault();
if ( select.visible() ) {
select.pageDown();
} else {
onChange(0, true);
}
break;
// matches also semicolon
case options.multiple && $.trim(options.multipleSeparator) == "," && KEY.COMMA:
case KEY.TAB:
case KEY.RETURN:
if( selectCurrent() ) {
// stop default to prevent a form submit, Opera needs special handling
event.preventDefault();
blockSubmit = true;
return false;
}
break;
case KEY.ESC:
select.hide();
break;
default:
clearTimeout(timeout);
timeout = setTimeout(onChange, options.delay);
break;
}
}).focus(function(){
// track whether the field has focus, we shouldn't process any
// results if the field no longer has focus
hasFocus++;
}).blur(function() {
hasFocus = 0;
if (!config.mouseDownOnSelect) {
hideResults();
}
}).click(function() {
// show select when clicking in a focused field
if ( hasFocus++ > 1 && !select.visible() ) {
onChange(0, true);
}
}).bind("search", function() {
// TODO why not just specifying both arguments?
var fn = (arguments.length > 1) ? arguments[1] : null;
function findValueCallback(q, data) {
var result;
if( data && data.length ) {
for (var i=0; i < data.length; i++) {
if( data[i].result.toLowerCase() == q.toLowerCase() ) {
result = data[i];
break;
}
}
}
if( typeof fn == "function" ) fn(result);
else $input.trigger("result", result && [result.data, result.value]);
}
$.each(trimWords($input.val()), function(i, value) {
request(value, findValueCallback, findValueCallback);
});
}).bind("flushCache", function() {
cache.flush();
}).bind("setOptions", function() {
$.extend(options, arguments[1]);
// if we've updated the data, repopulate
if ( "data" in arguments[1] )
cache.populate();
}).bind("unautocomplete", function() {
select.unbind();
$input.unbind();
$(input.form).unbind(".autocomplete");
});
function selectCurrent() {
var selected = select.selected();
if( !selected )
return false;
var v = selected.result;
previousValue = v;
if ( options.multiple ) {
var words = trimWords($input.val());
if ( words.length > 1 ) {
var seperator = options.multipleSeparator.length;
var cursorAt = $(input).selection().start;
var wordAt, progress = 0;
$.each(words, function(i, word) {
progress += word.length;
if (cursorAt <= progress) {
wordAt = i;
return false;
}
progress += seperator;
});
words[wordAt] = v;
// TODO this should set the cursor to the right position, but it gets overriden somewhere
//$.Autocompleter.Selection(input, progress + seperator, progress + seperator);
v = words.join( options.multipleSeparator );
}
v += options.multipleSeparator;
}
$input.val(v);
hideResultsNow();
$input.trigger("result", [selected.data, selected.value]);
return true;
}
function onChange(crap, skipPrevCheck) {
if( lastKeyPressCode == KEY.DEL ) {
select.hide();
return;
}
var currentValue = $input.val();
if ( !skipPrevCheck && currentValue == previousValue )
return;
previousValue = currentValue;
currentValue = lastWord(currentValue);
if ( currentValue.length >= options.minChars) {
$input.addClass(options.loadingClass);
if (!options.matchCase)
currentValue = currentValue.toLowerCase();
request(currentValue, receiveData, hideResultsNow);
} else {
stopLoading();
select.hide();
}
};
function trimWords(value) {
if (!value)
return [""];
if (!options.multiple)
return [$.trim(value)];
return $.map(value.split(options.multipleSeparator), function(word) {
return $.trim(value).length ? $.trim(word) : null;
});
}
function lastWord(value) {
if ( !options.multiple )
return value;
var words = trimWords(value);
if (words.length == 1)
return words[0];
var cursorAt = $(input).selection().start;
if (cursorAt == value.length) {
words = trimWords(value)
} else {
words = trimWords(value.replace(value.substring(cursorAt), ""));
}
return words[words.length - 1];
}
// fills in the input box w/the first match (assumed to be the best match)
// q: the term entered
// sValue: the first matching result
function autoFill(q, sValue){
// autofill in the complete box w/the first match as long as the user hasn't entered in more data
// if the last user key pressed was backspace, don't autofill
if( options.autoFill && (lastWord($input.val()).toLowerCase() == q.toLowerCase()) && lastKeyPressCode != KEY.BACKSPACE ) {
// fill in the value (keep the case the user has typed)
$input.val($input.val() + sValue.substring(lastWord(previousValue).length));
// select the portion of the value not typed by the user (so the next character will erase)
$(input).selection(previousValue.length, previousValue.length + sValue.length);
}
};
function hideResults() {
clearTimeout(timeout);
timeout = setTimeout(hideResultsNow, 200);
};
function hideResultsNow() {
var wasVisible = select.visible();
select.hide();
clearTimeout(timeout);
stopLoading();
if (options.mustMatch) {
// call search and run callback
$input.search(
function (result){
// if no value found, clear the input box
if( !result ) {
if (options.multiple) {
var words = trimWords($input.val()).slice(0, -1);
$input.val( words.join(options.multipleSeparator) + (words.length ? options.multipleSeparator : "") );
}
else {
$input.val( "" );
$input.trigger("result", null);
}
}
}
);
}
};
function receiveData(q, data) {
if ( data && data.length && hasFocus ) {
stopLoading();
select.display(data, q);
autoFill(q, data[0].value);
select.show();
} else {
hideResultsNow();
}
};
function request(term, success, failure) {
if (!options.matchCase)
term = term.toLowerCase();
var data = cache.load(term);
// recieve the cached data
if (data && data.length) {
success(term, data);
// if an AJAX url has been supplied, try loading the data now
} else if( (typeof options.url == "string") && (options.url.length > 0) ){
var extraParams = {
timestamp: +new Date()
};
$.each(options.extraParams, function(key, param) {
extraParams[key] = typeof param == "function" ? param() : param;
});
$.ajax({
// try to leverage ajaxQueue plugin to abort previous requests
mode: "abort",
// limit abortion to this input
port: "autocomplete" + input.name,
dataType: options.dataType,
url: options.url,
data: $.extend({
q: lastWord(term),
limit: options.max
}, extraParams),
success: function(data) {
var parsed = options.parse && options.parse(data) || parse(data);
cache.add(term, parsed);
success(term, parsed);
}
});
} else {
// if we have a failure, we need to empty the list -- this prevents the the [TAB] key from selecting the last successful match
select.emptyList();
failure(term);
}
};
function parse(data) {
var parsed = [];
var rows = data.split("\n");
for (var i=0; i < rows.length; i++) {
var row = $.trim(rows[i]);
if (row) {
row = row.split("|");
parsed[parsed.length] = {
data: row,
value: row[0],
result: options.formatResult && options.formatResult(row, row[0]) || row[0]
};
}
}
return parsed;
};
function stopLoading() {
$input.removeClass(options.loadingClass);
};
};
$.Autocompleter.defaults = {
inputClass: "ac_input",
resultsClass: "ac_results",
loadingClass: "ac_loading",
minChars: 1,
delay: 400,
matchCase: false,
matchSubset: true,
matchContains: false,
cacheLength: 10,
max: 100,
mustMatch: false,
extraParams: {},
selectFirst: true,
formatItem: function(row) { return row[0]; },
formatMatch: null,
autoFill: false,
width: 0,
multiple: false,
multipleSeparator: ", ",
highlight: function(value, term) {
return value.replace(new RegExp("(?![^&;]+;)(?!<[^<>]*)(" + term.replace(/([\^\$\(\)\[\]\{\}\*\.\+\?\|\\])/gi, "\\$1") + ")(?![^<>]*>)(?![^&;]+;)", "gi"), "<strong>$1</strong>");
},
scroll: true,
scrollHeight: 180
};
$.Autocompleter.Cache = function(options) {
var data = {};
var length = 0;
function matchSubset(s, sub) {
if (!options.matchCase)
s = s.toLowerCase();
var i = s.indexOf(sub);
if (options.matchContains == "word"){
i = s.toLowerCase().search("\\b" + sub.toLowerCase());
}
if (i == -1) return false;
return i == 0 || options.matchContains;
};
function add(q, value) {
if (length > options.cacheLength){
flush();
}
if (!data[q]){
length++;
}
data[q] = value;
}
function populate(){
if( !options.data ) return false;
// track the matches
var stMatchSets = {},
nullData = 0;
// no url was specified, we need to adjust the cache length to make sure it fits the local data store
if( !options.url ) options.cacheLength = 1;
// track all options for minChars = 0
stMatchSets[""] = [];
// loop through the array and create a lookup structure
for ( var i = 0, ol = options.data.length; i < ol; i++ ) {
var rawValue = options.data[i];
// if rawValue is a string, make an array otherwise just reference the array
rawValue = (typeof rawValue == "string") ? [rawValue] : rawValue;
var value = options.formatMatch(rawValue, i+1, options.data.length);
if ( value === false )
continue;
var firstChar = value.charAt(0).toLowerCase();
// if no lookup array for this character exists, look it up now
if( !stMatchSets[firstChar] )
stMatchSets[firstChar] = [];
// if the match is a string
var row = {
value: value,
data: rawValue,
result: options.formatResult && options.formatResult(rawValue) || value
};
// push the current match into the set list
stMatchSets[firstChar].push(row);
// keep track of minChars zero items
if ( nullData++ < options.max ) {
stMatchSets[""].push(row);
}
};
// add the data items to the cache
$.each(stMatchSets, function(i, value) {
// increase the cache size
options.cacheLength++;
// add to the cache
add(i, value);
});
}
// populate any existing data
setTimeout(populate, 25);
function flush(){
data = {};
length = 0;
}
return {
flush: flush,
add: add,
populate: populate,
load: function(q) {
if (!options.cacheLength || !length)
return null;
/*
* if dealing w/local data and matchContains than we must make sure
* to loop through all the data collections looking for matches
*/
if( !options.url && options.matchContains ){
// track all matches
var csub = [];
// loop through all the data grids for matches
for( var k in data ){
// don't search through the stMatchSets[""] (minChars: 0) cache
// this prevents duplicates
if( k.length > 0 ){
var c = data[k];
$.each(c, function(i, x) {
// if we've got a match, add it to the array
if (matchSubset(x.value, q)) {
csub.push(x);
}
});
}
}
return csub;
} else
// if the exact item exists, use it
if (data[q]){
return data[q];
} else
if (options.matchSubset) {
for (var i = q.length - 1; i >= options.minChars; i--) {
var c = data[q.substr(0, i)];
if (c) {
var csub = [];
$.each(c, function(i, x) {
if (matchSubset(x.value, q)) {
csub[csub.length] = x;
}
});
return csub;
}
}
}
return null;
}
};
};
$.Autocompleter.Select = function (options, input, select, config) {
var CLASSES = {
ACTIVE: "ac_over"
};
var listItems,
active = -1,
data,
term = "",
needsInit = true,
element,
list;
// Create results
function init() {
if (!needsInit)
return;
element = $("<div/>")
.hide()
.addClass(options.resultsClass)
.css("position", "absolute")
.appendTo(document.body);
list = $("<ul/>").appendTo(element).mouseover( function(event) {
if(target(event).nodeName && target(event).nodeName.toUpperCase() == 'LI') {
active = $("li", list).removeClass(CLASSES.ACTIVE).index(target(event));
$(target(event)).addClass(CLASSES.ACTIVE);
}
}).click(function(event) {
$(target(event)).addClass(CLASSES.ACTIVE);
select();
// TODO provide option to avoid setting focus again after selection? useful for cleanup-on-focus
input.focus();
return false;
}).mousedown(function() {
config.mouseDownOnSelect = true;
}).mouseup(function() {
config.mouseDownOnSelect = false;
});
if( options.width > 0 )
element.css("width", options.width);
needsInit = false;
}
function target(event) {
var element = event.target;
while(element && element.tagName != "LI")
element = element.parentNode;
// more fun with IE, sometimes event.target is empty, just ignore it then
if(!element)
return [];
return element;
}
function moveSelect(step) {
listItems.slice(active, active + 1).removeClass(CLASSES.ACTIVE);
movePosition(step);
var activeItem = listItems.slice(active, active + 1).addClass(CLASSES.ACTIVE);
if(options.scroll) {
var offset = 0;
listItems.slice(0, active).each(function() {
offset += this.offsetHeight;
});
if((offset + activeItem[0].offsetHeight - list.scrollTop()) > list[0].clientHeight) {
list.scrollTop(offset + activeItem[0].offsetHeight - list.innerHeight());
} else if(offset < list.scrollTop()) {
list.scrollTop(offset);
}
}
};
function movePosition(step) {
active += step;
if (active < 0) {
active = listItems.size() - 1;
} else if (active >= listItems.size()) {
active = 0;
}
}
function limitNumberOfItems(available) {
return options.max && options.max < available
? options.max
: available;
}
function fillList() {
list.empty();
var max = limitNumberOfItems(data.length);
for (var i=0; i < max; i++) {
if (!data[i])
continue;
var formatted = options.formatItem(data[i].data, i+1, max, data[i].value, term);
if ( formatted === false )
continue;
var li = $("<li/>").html( options.highlight(formatted, term) ).addClass(i%2 == 0 ? "ac_even" : "ac_odd").appendTo(list)[0];
$.data(li, "ac_data", data[i]);
}
listItems = list.find("li");
if ( options.selectFirst ) {
listItems.slice(0, 1).addClass(CLASSES.ACTIVE);
active = 0;
}
// apply bgiframe if available
if ( $.fn.bgiframe )
list.bgiframe();
}
return {
display: function(d, q) {
init();
data = d;
term = q;
fillList();
},
next: function() {
moveSelect(1);
},
prev: function() {
moveSelect(-1);
},
pageUp: function() {
if (active != 0 && active - 8 < 0) {
moveSelect( -active );
} else {
moveSelect(-8);
}
},
pageDown: function() {
if (active != listItems.size() - 1 && active + 8 > listItems.size()) {
moveSelect( listItems.size() - 1 - active );
} else {
moveSelect(8);
}
},
hide: function() {
element && element.hide();
listItems && listItems.removeClass(CLASSES.ACTIVE);
active = -1;
},
visible : function() {
return element && element.is(":visible");
},
current: function() {
return this.visible() && (listItems.filter("." + CLASSES.ACTIVE)[0] || options.selectFirst && listItems[0]);
},
show: function() {
var offset = $(input).offset();
element.css({
width: typeof options.width == "string" || options.width > 0 ? options.width : $(input).width(),
top: offset.top + input.offsetHeight,
left: offset.left
}).show();
if(options.scroll) {
list.scrollTop(0);
list.css({
maxHeight: options.scrollHeight,
overflow: 'auto'
});
if($.browser.msie && typeof document.body.style.maxHeight === "undefined") {
var listHeight = 0;
listItems.each(function() {
listHeight += this.offsetHeight;
});
var scrollbarsVisible = listHeight > options.scrollHeight;
list.css('height', scrollbarsVisible ? options.scrollHeight : listHeight );
if (!scrollbarsVisible) {
// IE doesn't recalculate width when scrollbar disappears
listItems.width( list.width() - parseInt(listItems.css("padding-left")) - parseInt(listItems.css("padding-right")) );
}
}
}
},
selected: function() {
var selected = listItems && listItems.filter("." + CLASSES.ACTIVE).removeClass(CLASSES.ACTIVE);
return selected && selected.length && $.data(selected[0], "ac_data");
},
emptyList: function (){
list && list.empty();
},
unbind: function() {
element && element.remove();
}
};
};
$.fn.selection = function(start, end) {
if (start !== undefined) {
return this.each(function() {
if( this.createTextRange ){
var selRange = this.createTextRange();
if (end === undefined || start == end) {
selRange.move("character", start);
selRange.select();
} else {
selRange.collapse(true);
selRange.moveStart("character", start);
selRange.moveEnd("character", end);
selRange.select();
}
} else if( this.setSelectionRange ){
this.setSelectionRange(start, end);
} else if( this.selectionStart ){
this.selectionStart = start;
this.selectionEnd = end;
}
});
}
var field = this[0];
if ( field.createTextRange ) {
var range = document.selection.createRange(),
orig = field.value,
teststring = "<->",
textLength = range.text.length;
range.text = teststring;
var caretAt = field.value.indexOf(teststring);
field.value = orig;
this.selection(caretAt, caretAt + textLength);
return {
start: caretAt,
end: caretAt + textLength
}
} else if( field.selectionStart !== undefined ){
return {
start: field.selectionStart,
end: field.selectionEnd
}
}
};
})(jQuery); | random_line_split |
|
jquery.autocomplete.js | /*
* jQuery Autocomplete plugin 1.1
*
* Copyright (c) 2009 Jörn Zaefferer
*
* Dual licensed under the MIT and GPL licenses:
* http://www.opensource.org/licenses/mit-license.php
* http://www.gnu.org/licenses/gpl.html
*
* Revision: $Id: jquery.autocomplete.js 15 2009-08-22 10:30:27Z joern.zaefferer $
*/
;(function($) {
$.fn.extend({
autocomplete: function(urlOrData, options) {
var isUrl = typeof urlOrData == "string";
options = $.extend({}, $.Autocompleter.defaults, {
url: isUrl ? urlOrData : null,
data: isUrl ? null : urlOrData,
delay: isUrl ? $.Autocompleter.defaults.delay : 10,
max: options && !options.scroll ? 10 : 150
}, options);
// if highlight is set to false, replace it with a do-nothing function
options.highlight = options.highlight || function(value) { return value; };
// if the formatMatch option is not specified, then use formatItem for backwards compatibility
options.formatMatch = options.formatMatch || options.formatItem;
return this.each(function() {
new $.Autocompleter(this, options);
});
},
result: function(handler) {
return this.bind("result", handler);
},
search: function(handler) {
return this.trigger("search", [handler]);
},
flushCache: function() {
return this.trigger("flushCache");
},
setOptions: function(options){
return this.trigger("setOptions", [options]);
},
unautocomplete: function() {
return this.trigger("unautocomplete");
}
});
$.Autocompleter = function(input, options) {
var KEY = {
UP: 38,
DOWN: 40,
DEL: 46,
TAB: 9,
RETURN: 13,
ESC: 27,
COMMA: 188,
PAGEUP: 33,
PAGEDOWN: 34,
BACKSPACE: 8
};
// Create $ object for input element
var $input = $(input).attr("autocomplete", "off").addClass(options.inputClass);
var timeout;
var previousValue = "";
var cache = $.Autocompleter.Cache(options);
var hasFocus = 0;
var lastKeyPressCode;
var config = {
mouseDownOnSelect: false
};
var select = $.Autocompleter.Select(options, input, selectCurrent, config);
var blockSubmit;
// prevent form submit in opera when selecting with return key
$.browser.opera && $(input.form).bind("submit.autocomplete", function() {
if (blockSubmit) {
blockSubmit = false;
return false;
}
});
// only opera doesn't trigger keydown multiple times while pressed, others don't work with keypress at all
$input.bind(($.browser.opera ? "keypress" : "keydown") + ".autocomplete", function(event) {
// a keypress means the input has focus
// avoids issue where input had focus before the autocomplete was applied
hasFocus = 1;
// track last key pressed
lastKeyPressCode = event.keyCode;
switch(event.keyCode) {
case KEY.UP:
event.preventDefault();
if ( select.visible() ) {
select.prev();
} else {
onChange(0, true);
}
break;
case KEY.DOWN:
event.preventDefault();
if ( select.visible() ) {
select.next();
} else {
onChange(0, true);
}
break;
case KEY.PAGEUP:
event.preventDefault();
if ( select.visible() ) {
select.pageUp();
} else {
onChange(0, true);
}
break;
case KEY.PAGEDOWN:
event.preventDefault();
if ( select.visible() ) {
select.pageDown();
} else {
onChange(0, true);
}
break;
// matches also semicolon
case options.multiple && $.trim(options.multipleSeparator) == "," && KEY.COMMA:
case KEY.TAB:
case KEY.RETURN:
if( selectCurrent() ) {
// stop default to prevent a form submit, Opera needs special handling
event.preventDefault();
blockSubmit = true;
return false;
}
break;
case KEY.ESC:
select.hide();
break;
default:
clearTimeout(timeout);
timeout = setTimeout(onChange, options.delay);
break;
}
}).focus(function(){
// track whether the field has focus, we shouldn't process any
// results if the field no longer has focus
hasFocus++;
}).blur(function() {
hasFocus = 0;
if (!config.mouseDownOnSelect) {
hideResults();
}
}).click(function() {
// show select when clicking in a focused field
if ( hasFocus++ > 1 && !select.visible() ) {
onChange(0, true);
}
}).bind("search", function() {
// TODO why not just specifying both arguments?
var fn = (arguments.length > 1) ? arguments[1] : null;
function findValueCallback(q, data) {
var result;
if( data && data.length ) {
for (var i=0; i < data.length; i++) {
if( data[i].result.toLowerCase() == q.toLowerCase() ) {
result = data[i];
break;
}
}
}
if( typeof fn == "function" ) fn(result);
else $input.trigger("result", result && [result.data, result.value]);
}
$.each(trimWords($input.val()), function(i, value) {
request(value, findValueCallback, findValueCallback);
});
}).bind("flushCache", function() {
cache.flush();
}).bind("setOptions", function() {
$.extend(options, arguments[1]);
// if we've updated the data, repopulate
if ( "data" in arguments[1] )
cache.populate();
}).bind("unautocomplete", function() {
select.unbind();
$input.unbind();
$(input.form).unbind(".autocomplete");
});
function selectCurrent() {
var selected = select.selected();
if( !selected )
return false;
var v = selected.result;
previousValue = v;
if ( options.multiple ) {
var words = trimWords($input.val());
if ( words.length > 1 ) {
var seperator = options.multipleSeparator.length;
var cursorAt = $(input).selection().start;
var wordAt, progress = 0;
$.each(words, function(i, word) {
progress += word.length;
if (cursorAt <= progress) {
wordAt = i;
return false;
}
progress += seperator;
});
words[wordAt] = v;
// TODO this should set the cursor to the right position, but it gets overriden somewhere
//$.Autocompleter.Selection(input, progress + seperator, progress + seperator);
v = words.join( options.multipleSeparator );
}
v += options.multipleSeparator;
}
$input.val(v);
hideResultsNow();
$input.trigger("result", [selected.data, selected.value]);
return true;
}
function onChange(crap, skipPrevCheck) {
if( lastKeyPressCode == KEY.DEL ) {
select.hide();
return;
}
var currentValue = $input.val();
if ( !skipPrevCheck && currentValue == previousValue )
return;
previousValue = currentValue;
currentValue = lastWord(currentValue);
if ( currentValue.length >= options.minChars) {
$input.addClass(options.loadingClass);
if (!options.matchCase)
currentValue = currentValue.toLowerCase();
request(currentValue, receiveData, hideResultsNow);
} else {
stopLoading();
select.hide();
}
};
function trimWords(value) {
if (!value)
return [""];
if (!options.multiple)
return [$.trim(value)];
return $.map(value.split(options.multipleSeparator), function(word) {
return $.trim(value).length ? $.trim(word) : null;
});
}
function lastWord(value) {
if ( !options.multiple )
return value;
var words = trimWords(value);
if (words.length == 1)
return words[0];
var cursorAt = $(input).selection().start;
if (cursorAt == value.length) {
words = trimWords(value)
} else {
words = trimWords(value.replace(value.substring(cursorAt), ""));
}
return words[words.length - 1];
}
// fills in the input box w/the first match (assumed to be the best match)
// q: the term entered
// sValue: the first matching result
function autoFill(q, sValue){
// autofill in the complete box w/the first match as long as the user hasn't entered in more data
// if the last user key pressed was backspace, don't autofill
if( options.autoFill && (lastWord($input.val()).toLowerCase() == q.toLowerCase()) && lastKeyPressCode != KEY.BACKSPACE ) {
// fill in the value (keep the case the user has typed)
$input.val($input.val() + sValue.substring(lastWord(previousValue).length));
// select the portion of the value not typed by the user (so the next character will erase)
$(input).selection(previousValue.length, previousValue.length + sValue.length);
}
};
function hideResults() {
clearTimeout(timeout);
timeout = setTimeout(hideResultsNow, 200);
};
function hideResultsNow() {
var wasVisible = select.visible();
select.hide();
clearTimeout(timeout);
stopLoading();
if (options.mustMatch) {
// call search and run callback
$input.search(
function (result){
// if no value found, clear the input box
if( !result ) {
if (options.multiple) {
var words = trimWords($input.val()).slice(0, -1);
$input.val( words.join(options.multipleSeparator) + (words.length ? options.multipleSeparator : "") );
}
else {
$input.val( "" );
$input.trigger("result", null);
}
}
}
);
}
};
function receiveData(q, data) {
if ( data && data.length && hasFocus ) {
stopLoading();
select.display(data, q);
autoFill(q, data[0].value);
select.show();
} else {
hideResultsNow();
}
};
function request(term, success, failure) {
if (!options.matchCase)
term = term.toLowerCase();
var data = cache.load(term);
// recieve the cached data
if (data && data.length) {
success(term, data);
// if an AJAX url has been supplied, try loading the data now
} else if( (typeof options.url == "string") && (options.url.length > 0) ){
var extraParams = {
timestamp: +new Date()
};
$.each(options.extraParams, function(key, param) {
extraParams[key] = typeof param == "function" ? param() : param;
});
$.ajax({
// try to leverage ajaxQueue plugin to abort previous requests
mode: "abort",
// limit abortion to this input
port: "autocomplete" + input.name,
dataType: options.dataType,
url: options.url,
data: $.extend({
q: lastWord(term),
limit: options.max
}, extraParams),
success: function(data) {
var parsed = options.parse && options.parse(data) || parse(data);
cache.add(term, parsed);
success(term, parsed);
}
});
} else {
// if we have a failure, we need to empty the list -- this prevents the the [TAB] key from selecting the last successful match
select.emptyList();
failure(term);
}
};
function parse(data) {
var parsed = [];
var rows = data.split("\n");
for (var i=0; i < rows.length; i++) {
var row = $.trim(rows[i]);
if (row) {
row = row.split("|");
parsed[parsed.length] = {
data: row,
value: row[0],
result: options.formatResult && options.formatResult(row, row[0]) || row[0]
};
}
}
return parsed;
};
function stopLoading() {
$input.removeClass(options.loadingClass);
};
};
$.Autocompleter.defaults = {
inputClass: "ac_input",
resultsClass: "ac_results",
loadingClass: "ac_loading",
minChars: 1,
delay: 400,
matchCase: false,
matchSubset: true,
matchContains: false,
cacheLength: 10,
max: 100,
mustMatch: false,
extraParams: {},
selectFirst: true,
formatItem: function(row) { return row[0]; },
formatMatch: null,
autoFill: false,
width: 0,
multiple: false,
multipleSeparator: ", ",
highlight: function(value, term) {
return value.replace(new RegExp("(?![^&;]+;)(?!<[^<>]*)(" + term.replace(/([\^\$\(\)\[\]\{\}\*\.\+\?\|\\])/gi, "\\$1") + ")(?![^<>]*>)(?![^&;]+;)", "gi"), "<strong>$1</strong>");
},
scroll: true,
scrollHeight: 180
};
$.Autocompleter.Cache = function(options) {
var data = {};
var length = 0;
function matchSubset(s, sub) {
if (!options.matchCase)
s = s.toLowerCase();
var i = s.indexOf(sub);
if (options.matchContains == "word"){
i = s.toLowerCase().search("\\b" + sub.toLowerCase());
}
if (i == -1) return false;
return i == 0 || options.matchContains;
};
function add(q, value) {
if (length > options.cacheLength){
flush();
}
if (!data[q]){
length++;
}
data[q] = value;
}
function populate(){
if( !options.data ) return false;
// track the matches
var stMatchSets = {},
nullData = 0;
// no url was specified, we need to adjust the cache length to make sure it fits the local data store
if( !options.url ) options.cacheLength = 1;
// track all options for minChars = 0
stMatchSets[""] = [];
// loop through the array and create a lookup structure
for ( var i = 0, ol = options.data.length; i < ol; i++ ) {
var rawValue = options.data[i];
// if rawValue is a string, make an array otherwise just reference the array
rawValue = (typeof rawValue == "string") ? [rawValue] : rawValue;
var value = options.formatMatch(rawValue, i+1, options.data.length);
if ( value === false )
continue;
var firstChar = value.charAt(0).toLowerCase();
// if no lookup array for this character exists, look it up now
if( !stMatchSets[firstChar] )
stMatchSets[firstChar] = [];
// if the match is a string
var row = {
value: value,
data: rawValue,
result: options.formatResult && options.formatResult(rawValue) || value
};
// push the current match into the set list
stMatchSets[firstChar].push(row);
// keep track of minChars zero items
if ( nullData++ < options.max ) {
stMatchSets[""].push(row);
}
};
// add the data items to the cache
$.each(stMatchSets, function(i, value) {
// increase the cache size
options.cacheLength++;
// add to the cache
add(i, value);
});
}
// populate any existing data
setTimeout(populate, 25);
function flush(){
data = {};
length = 0;
}
return {
flush: flush,
add: add,
populate: populate,
load: function(q) {
if (!options.cacheLength || !length)
return null;
/*
* if dealing w/local data and matchContains than we must make sure
* to loop through all the data collections looking for matches
*/
if( !options.url && options.matchContains ){
// track all matches
var csub = [];
// loop through all the data grids for matches
for( var k in data ){
// don't search through the stMatchSets[""] (minChars: 0) cache
// this prevents duplicates
if( k.length > 0 ){
var c = data[k];
$.each(c, function(i, x) {
// if we've got a match, add it to the array
if (matchSubset(x.value, q)) {
csub.push(x);
}
});
}
}
return csub;
} else
// if the exact item exists, use it
if (data[q]){
return data[q];
} else
if (options.matchSubset) {
for (var i = q.length - 1; i >= options.minChars; i--) {
var c = data[q.substr(0, i)];
if (c) {
var csub = [];
$.each(c, function(i, x) {
if (matchSubset(x.value, q)) {
csub[csub.length] = x;
}
});
return csub;
}
}
}
return null;
}
};
};
$.Autocompleter.Select = function (options, input, select, config) {
var CLASSES = {
ACTIVE: "ac_over"
};
var listItems,
active = -1,
data,
term = "",
needsInit = true,
element,
list;
// Create results
function init() {
if (!needsInit)
return;
element = $("<div/>")
.hide()
.addClass(options.resultsClass)
.css("position", "absolute")
.appendTo(document.body);
list = $("<ul/>").appendTo(element).mouseover( function(event) {
if(target(event).nodeName && target(event).nodeName.toUpperCase() == 'LI') {
active = $("li", list).removeClass(CLASSES.ACTIVE).index(target(event));
$(target(event)).addClass(CLASSES.ACTIVE);
}
}).click(function(event) {
$(target(event)).addClass(CLASSES.ACTIVE);
select();
// TODO provide option to avoid setting focus again after selection? useful for cleanup-on-focus
input.focus();
return false;
}).mousedown(function() {
config.mouseDownOnSelect = true;
}).mouseup(function() {
config.mouseDownOnSelect = false;
});
if( options.width > 0 )
element.css("width", options.width);
needsInit = false;
}
function target(event) {
var element = event.target;
while(element && element.tagName != "LI")
element = element.parentNode;
// more fun with IE, sometimes event.target is empty, just ignore it then
if(!element)
return [];
return element;
}
function moveSelect(step) {
listItems.slice(active, active + 1).removeClass(CLASSES.ACTIVE);
movePosition(step);
var activeItem = listItems.slice(active, active + 1).addClass(CLASSES.ACTIVE);
if(options.scroll) { | };
function movePosition(step) {
active += step;
if (active < 0) {
active = listItems.size() - 1;
} else if (active >= listItems.size()) {
active = 0;
}
}
function limitNumberOfItems(available) {
return options.max && options.max < available
? options.max
: available;
}
function fillList() {
list.empty();
var max = limitNumberOfItems(data.length);
for (var i=0; i < max; i++) {
if (!data[i])
continue;
var formatted = options.formatItem(data[i].data, i+1, max, data[i].value, term);
if ( formatted === false )
continue;
var li = $("<li/>").html( options.highlight(formatted, term) ).addClass(i%2 == 0 ? "ac_even" : "ac_odd").appendTo(list)[0];
$.data(li, "ac_data", data[i]);
}
listItems = list.find("li");
if ( options.selectFirst ) {
listItems.slice(0, 1).addClass(CLASSES.ACTIVE);
active = 0;
}
// apply bgiframe if available
if ( $.fn.bgiframe )
list.bgiframe();
}
return {
display: function(d, q) {
init();
data = d;
term = q;
fillList();
},
next: function() {
moveSelect(1);
},
prev: function() {
moveSelect(-1);
},
pageUp: function() {
if (active != 0 && active - 8 < 0) {
moveSelect( -active );
} else {
moveSelect(-8);
}
},
pageDown: function() {
if (active != listItems.size() - 1 && active + 8 > listItems.size()) {
moveSelect( listItems.size() - 1 - active );
} else {
moveSelect(8);
}
},
hide: function() {
element && element.hide();
listItems && listItems.removeClass(CLASSES.ACTIVE);
active = -1;
},
visible : function() {
return element && element.is(":visible");
},
current: function() {
return this.visible() && (listItems.filter("." + CLASSES.ACTIVE)[0] || options.selectFirst && listItems[0]);
},
show: function() {
var offset = $(input).offset();
element.css({
width: typeof options.width == "string" || options.width > 0 ? options.width : $(input).width(),
top: offset.top + input.offsetHeight,
left: offset.left
}).show();
if(options.scroll) {
list.scrollTop(0);
list.css({
maxHeight: options.scrollHeight,
overflow: 'auto'
});
if($.browser.msie && typeof document.body.style.maxHeight === "undefined") {
var listHeight = 0;
listItems.each(function() {
listHeight += this.offsetHeight;
});
var scrollbarsVisible = listHeight > options.scrollHeight;
list.css('height', scrollbarsVisible ? options.scrollHeight : listHeight );
if (!scrollbarsVisible) {
// IE doesn't recalculate width when scrollbar disappears
listItems.width( list.width() - parseInt(listItems.css("padding-left")) - parseInt(listItems.css("padding-right")) );
}
}
}
},
selected: function() {
var selected = listItems && listItems.filter("." + CLASSES.ACTIVE).removeClass(CLASSES.ACTIVE);
return selected && selected.length && $.data(selected[0], "ac_data");
},
emptyList: function (){
list && list.empty();
},
unbind: function() {
element && element.remove();
}
};
};
$.fn.selection = function(start, end) {
if (start !== undefined) {
return this.each(function() {
if( this.createTextRange ){
var selRange = this.createTextRange();
if (end === undefined || start == end) {
selRange.move("character", start);
selRange.select();
} else {
selRange.collapse(true);
selRange.moveStart("character", start);
selRange.moveEnd("character", end);
selRange.select();
}
} else if( this.setSelectionRange ){
this.setSelectionRange(start, end);
} else if( this.selectionStart ){
this.selectionStart = start;
this.selectionEnd = end;
}
});
}
var field = this[0];
if ( field.createTextRange ) {
var range = document.selection.createRange(),
orig = field.value,
teststring = "<->",
textLength = range.text.length;
range.text = teststring;
var caretAt = field.value.indexOf(teststring);
field.value = orig;
this.selection(caretAt, caretAt + textLength);
return {
start: caretAt,
end: caretAt + textLength
}
} else if( field.selectionStart !== undefined ){
return {
start: field.selectionStart,
end: field.selectionEnd
}
}
};
})(jQuery); |
var offset = 0;
listItems.slice(0, active).each(function() {
offset += this.offsetHeight;
});
if((offset + activeItem[0].offsetHeight - list.scrollTop()) > list[0].clientHeight) {
list.scrollTop(offset + activeItem[0].offsetHeight - list.innerHeight());
} else if(offset < list.scrollTop()) {
list.scrollTop(offset);
}
}
| conditional_block |
jquery.autocomplete.js | /*
* jQuery Autocomplete plugin 1.1
*
* Copyright (c) 2009 Jörn Zaefferer
*
* Dual licensed under the MIT and GPL licenses:
* http://www.opensource.org/licenses/mit-license.php
* http://www.gnu.org/licenses/gpl.html
*
* Revision: $Id: jquery.autocomplete.js 15 2009-08-22 10:30:27Z joern.zaefferer $
*/
;(function($) {
$.fn.extend({
autocomplete: function(urlOrData, options) {
var isUrl = typeof urlOrData == "string";
options = $.extend({}, $.Autocompleter.defaults, {
url: isUrl ? urlOrData : null,
data: isUrl ? null : urlOrData,
delay: isUrl ? $.Autocompleter.defaults.delay : 10,
max: options && !options.scroll ? 10 : 150
}, options);
// if highlight is set to false, replace it with a do-nothing function
options.highlight = options.highlight || function(value) { return value; };
// if the formatMatch option is not specified, then use formatItem for backwards compatibility
options.formatMatch = options.formatMatch || options.formatItem;
return this.each(function() {
new $.Autocompleter(this, options);
});
},
result: function(handler) {
return this.bind("result", handler);
},
search: function(handler) {
return this.trigger("search", [handler]);
},
flushCache: function() {
return this.trigger("flushCache");
},
setOptions: function(options){
return this.trigger("setOptions", [options]);
},
unautocomplete: function() {
return this.trigger("unautocomplete");
}
});
$.Autocompleter = function(input, options) {
var KEY = {
UP: 38,
DOWN: 40,
DEL: 46,
TAB: 9,
RETURN: 13,
ESC: 27,
COMMA: 188,
PAGEUP: 33,
PAGEDOWN: 34,
BACKSPACE: 8
};
// Create $ object for input element
var $input = $(input).attr("autocomplete", "off").addClass(options.inputClass);
var timeout;
var previousValue = "";
var cache = $.Autocompleter.Cache(options);
var hasFocus = 0;
var lastKeyPressCode;
var config = {
mouseDownOnSelect: false
};
var select = $.Autocompleter.Select(options, input, selectCurrent, config);
var blockSubmit;
// prevent form submit in opera when selecting with return key
$.browser.opera && $(input.form).bind("submit.autocomplete", function() {
if (blockSubmit) {
blockSubmit = false;
return false;
}
});
// only opera doesn't trigger keydown multiple times while pressed, others don't work with keypress at all
$input.bind(($.browser.opera ? "keypress" : "keydown") + ".autocomplete", function(event) {
// a keypress means the input has focus
// avoids issue where input had focus before the autocomplete was applied
hasFocus = 1;
// track last key pressed
lastKeyPressCode = event.keyCode;
switch(event.keyCode) {
case KEY.UP:
event.preventDefault();
if ( select.visible() ) {
select.prev();
} else {
onChange(0, true);
}
break;
case KEY.DOWN:
event.preventDefault();
if ( select.visible() ) {
select.next();
} else {
onChange(0, true);
}
break;
case KEY.PAGEUP:
event.preventDefault();
if ( select.visible() ) {
select.pageUp();
} else {
onChange(0, true);
}
break;
case KEY.PAGEDOWN:
event.preventDefault();
if ( select.visible() ) {
select.pageDown();
} else {
onChange(0, true);
}
break;
// matches also semicolon
case options.multiple && $.trim(options.multipleSeparator) == "," && KEY.COMMA:
case KEY.TAB:
case KEY.RETURN:
if( selectCurrent() ) {
// stop default to prevent a form submit, Opera needs special handling
event.preventDefault();
blockSubmit = true;
return false;
}
break;
case KEY.ESC:
select.hide();
break;
default:
clearTimeout(timeout);
timeout = setTimeout(onChange, options.delay);
break;
}
}).focus(function(){
// track whether the field has focus, we shouldn't process any
// results if the field no longer has focus
hasFocus++;
}).blur(function() {
hasFocus = 0;
if (!config.mouseDownOnSelect) {
hideResults();
}
}).click(function() {
// show select when clicking in a focused field
if ( hasFocus++ > 1 && !select.visible() ) {
onChange(0, true);
}
}).bind("search", function() {
// TODO why not just specifying both arguments?
var fn = (arguments.length > 1) ? arguments[1] : null;
function findValueCallback(q, data) {
var result;
if( data && data.length ) {
for (var i=0; i < data.length; i++) {
if( data[i].result.toLowerCase() == q.toLowerCase() ) {
result = data[i];
break;
}
}
}
if( typeof fn == "function" ) fn(result);
else $input.trigger("result", result && [result.data, result.value]);
}
$.each(trimWords($input.val()), function(i, value) {
request(value, findValueCallback, findValueCallback);
});
}).bind("flushCache", function() {
cache.flush();
}).bind("setOptions", function() {
$.extend(options, arguments[1]);
// if we've updated the data, repopulate
if ( "data" in arguments[1] )
cache.populate();
}).bind("unautocomplete", function() {
select.unbind();
$input.unbind();
$(input.form).unbind(".autocomplete");
});
function selectCurrent() {
var selected = select.selected();
if( !selected )
return false;
var v = selected.result;
previousValue = v;
if ( options.multiple ) {
var words = trimWords($input.val());
if ( words.length > 1 ) {
var seperator = options.multipleSeparator.length;
var cursorAt = $(input).selection().start;
var wordAt, progress = 0;
$.each(words, function(i, word) {
progress += word.length;
if (cursorAt <= progress) {
wordAt = i;
return false;
}
progress += seperator;
});
words[wordAt] = v;
// TODO this should set the cursor to the right position, but it gets overriden somewhere
//$.Autocompleter.Selection(input, progress + seperator, progress + seperator);
v = words.join( options.multipleSeparator );
}
v += options.multipleSeparator;
}
$input.val(v);
hideResultsNow();
$input.trigger("result", [selected.data, selected.value]);
return true;
}
function onChange(crap, skipPrevCheck) {
if( lastKeyPressCode == KEY.DEL ) {
select.hide();
return;
}
var currentValue = $input.val();
if ( !skipPrevCheck && currentValue == previousValue )
return;
previousValue = currentValue;
currentValue = lastWord(currentValue);
if ( currentValue.length >= options.minChars) {
$input.addClass(options.loadingClass);
if (!options.matchCase)
currentValue = currentValue.toLowerCase();
request(currentValue, receiveData, hideResultsNow);
} else {
stopLoading();
select.hide();
}
};
function trimWords(value) {
if (!value)
return [""];
if (!options.multiple)
return [$.trim(value)];
return $.map(value.split(options.multipleSeparator), function(word) {
return $.trim(value).length ? $.trim(word) : null;
});
}
function lastWord(value) {
if ( !options.multiple )
return value;
var words = trimWords(value);
if (words.length == 1)
return words[0];
var cursorAt = $(input).selection().start;
if (cursorAt == value.length) {
words = trimWords(value)
} else {
words = trimWords(value.replace(value.substring(cursorAt), ""));
}
return words[words.length - 1];
}
// fills in the input box w/the first match (assumed to be the best match)
// q: the term entered
// sValue: the first matching result
function autoFill(q, sValue){
// autofill in the complete box w/the first match as long as the user hasn't entered in more data
// if the last user key pressed was backspace, don't autofill
if( options.autoFill && (lastWord($input.val()).toLowerCase() == q.toLowerCase()) && lastKeyPressCode != KEY.BACKSPACE ) {
// fill in the value (keep the case the user has typed)
$input.val($input.val() + sValue.substring(lastWord(previousValue).length));
// select the portion of the value not typed by the user (so the next character will erase)
$(input).selection(previousValue.length, previousValue.length + sValue.length);
}
};
function hideResults() { |
function hideResultsNow() {
var wasVisible = select.visible();
select.hide();
clearTimeout(timeout);
stopLoading();
if (options.mustMatch) {
// call search and run callback
$input.search(
function (result){
// if no value found, clear the input box
if( !result ) {
if (options.multiple) {
var words = trimWords($input.val()).slice(0, -1);
$input.val( words.join(options.multipleSeparator) + (words.length ? options.multipleSeparator : "") );
}
else {
$input.val( "" );
$input.trigger("result", null);
}
}
}
);
}
};
function receiveData(q, data) {
if ( data && data.length && hasFocus ) {
stopLoading();
select.display(data, q);
autoFill(q, data[0].value);
select.show();
} else {
hideResultsNow();
}
};
function request(term, success, failure) {
if (!options.matchCase)
term = term.toLowerCase();
var data = cache.load(term);
// recieve the cached data
if (data && data.length) {
success(term, data);
// if an AJAX url has been supplied, try loading the data now
} else if( (typeof options.url == "string") && (options.url.length > 0) ){
var extraParams = {
timestamp: +new Date()
};
$.each(options.extraParams, function(key, param) {
extraParams[key] = typeof param == "function" ? param() : param;
});
$.ajax({
// try to leverage ajaxQueue plugin to abort previous requests
mode: "abort",
// limit abortion to this input
port: "autocomplete" + input.name,
dataType: options.dataType,
url: options.url,
data: $.extend({
q: lastWord(term),
limit: options.max
}, extraParams),
success: function(data) {
var parsed = options.parse && options.parse(data) || parse(data);
cache.add(term, parsed);
success(term, parsed);
}
});
} else {
// if we have a failure, we need to empty the list -- this prevents the the [TAB] key from selecting the last successful match
select.emptyList();
failure(term);
}
};
function parse(data) {
var parsed = [];
var rows = data.split("\n");
for (var i=0; i < rows.length; i++) {
var row = $.trim(rows[i]);
if (row) {
row = row.split("|");
parsed[parsed.length] = {
data: row,
value: row[0],
result: options.formatResult && options.formatResult(row, row[0]) || row[0]
};
}
}
return parsed;
};
function stopLoading() {
$input.removeClass(options.loadingClass);
};
};
$.Autocompleter.defaults = {
inputClass: "ac_input",
resultsClass: "ac_results",
loadingClass: "ac_loading",
minChars: 1,
delay: 400,
matchCase: false,
matchSubset: true,
matchContains: false,
cacheLength: 10,
max: 100,
mustMatch: false,
extraParams: {},
selectFirst: true,
formatItem: function(row) { return row[0]; },
formatMatch: null,
autoFill: false,
width: 0,
multiple: false,
multipleSeparator: ", ",
highlight: function(value, term) {
return value.replace(new RegExp("(?![^&;]+;)(?!<[^<>]*)(" + term.replace(/([\^\$\(\)\[\]\{\}\*\.\+\?\|\\])/gi, "\\$1") + ")(?![^<>]*>)(?![^&;]+;)", "gi"), "<strong>$1</strong>");
},
scroll: true,
scrollHeight: 180
};
$.Autocompleter.Cache = function(options) {
var data = {};
var length = 0;
function matchSubset(s, sub) {
if (!options.matchCase)
s = s.toLowerCase();
var i = s.indexOf(sub);
if (options.matchContains == "word"){
i = s.toLowerCase().search("\\b" + sub.toLowerCase());
}
if (i == -1) return false;
return i == 0 || options.matchContains;
};
function add(q, value) {
if (length > options.cacheLength){
flush();
}
if (!data[q]){
length++;
}
data[q] = value;
}
function populate(){
if( !options.data ) return false;
// track the matches
var stMatchSets = {},
nullData = 0;
// no url was specified, we need to adjust the cache length to make sure it fits the local data store
if( !options.url ) options.cacheLength = 1;
// track all options for minChars = 0
stMatchSets[""] = [];
// loop through the array and create a lookup structure
for ( var i = 0, ol = options.data.length; i < ol; i++ ) {
var rawValue = options.data[i];
// if rawValue is a string, make an array otherwise just reference the array
rawValue = (typeof rawValue == "string") ? [rawValue] : rawValue;
var value = options.formatMatch(rawValue, i+1, options.data.length);
if ( value === false )
continue;
var firstChar = value.charAt(0).toLowerCase();
// if no lookup array for this character exists, look it up now
if( !stMatchSets[firstChar] )
stMatchSets[firstChar] = [];
// if the match is a string
var row = {
value: value,
data: rawValue,
result: options.formatResult && options.formatResult(rawValue) || value
};
// push the current match into the set list
stMatchSets[firstChar].push(row);
// keep track of minChars zero items
if ( nullData++ < options.max ) {
stMatchSets[""].push(row);
}
};
// add the data items to the cache
$.each(stMatchSets, function(i, value) {
// increase the cache size
options.cacheLength++;
// add to the cache
add(i, value);
});
}
// populate any existing data
setTimeout(populate, 25);
function flush(){
data = {};
length = 0;
}
return {
flush: flush,
add: add,
populate: populate,
load: function(q) {
if (!options.cacheLength || !length)
return null;
/*
* if dealing w/local data and matchContains than we must make sure
* to loop through all the data collections looking for matches
*/
if( !options.url && options.matchContains ){
// track all matches
var csub = [];
// loop through all the data grids for matches
for( var k in data ){
// don't search through the stMatchSets[""] (minChars: 0) cache
// this prevents duplicates
if( k.length > 0 ){
var c = data[k];
$.each(c, function(i, x) {
// if we've got a match, add it to the array
if (matchSubset(x.value, q)) {
csub.push(x);
}
});
}
}
return csub;
} else
// if the exact item exists, use it
if (data[q]){
return data[q];
} else
if (options.matchSubset) {
for (var i = q.length - 1; i >= options.minChars; i--) {
var c = data[q.substr(0, i)];
if (c) {
var csub = [];
$.each(c, function(i, x) {
if (matchSubset(x.value, q)) {
csub[csub.length] = x;
}
});
return csub;
}
}
}
return null;
}
};
};
$.Autocompleter.Select = function (options, input, select, config) {
var CLASSES = {
ACTIVE: "ac_over"
};
var listItems,
active = -1,
data,
term = "",
needsInit = true,
element,
list;
// Create results
function init() {
if (!needsInit)
return;
element = $("<div/>")
.hide()
.addClass(options.resultsClass)
.css("position", "absolute")
.appendTo(document.body);
list = $("<ul/>").appendTo(element).mouseover( function(event) {
if(target(event).nodeName && target(event).nodeName.toUpperCase() == 'LI') {
active = $("li", list).removeClass(CLASSES.ACTIVE).index(target(event));
$(target(event)).addClass(CLASSES.ACTIVE);
}
}).click(function(event) {
$(target(event)).addClass(CLASSES.ACTIVE);
select();
// TODO provide option to avoid setting focus again after selection? useful for cleanup-on-focus
input.focus();
return false;
}).mousedown(function() {
config.mouseDownOnSelect = true;
}).mouseup(function() {
config.mouseDownOnSelect = false;
});
if( options.width > 0 )
element.css("width", options.width);
needsInit = false;
}
function target(event) {
var element = event.target;
while(element && element.tagName != "LI")
element = element.parentNode;
// more fun with IE, sometimes event.target is empty, just ignore it then
if(!element)
return [];
return element;
}
function moveSelect(step) {
listItems.slice(active, active + 1).removeClass(CLASSES.ACTIVE);
movePosition(step);
var activeItem = listItems.slice(active, active + 1).addClass(CLASSES.ACTIVE);
if(options.scroll) {
var offset = 0;
listItems.slice(0, active).each(function() {
offset += this.offsetHeight;
});
if((offset + activeItem[0].offsetHeight - list.scrollTop()) > list[0].clientHeight) {
list.scrollTop(offset + activeItem[0].offsetHeight - list.innerHeight());
} else if(offset < list.scrollTop()) {
list.scrollTop(offset);
}
}
};
function movePosition(step) {
active += step;
if (active < 0) {
active = listItems.size() - 1;
} else if (active >= listItems.size()) {
active = 0;
}
}
function limitNumberOfItems(available) {
return options.max && options.max < available
? options.max
: available;
}
function fillList() {
list.empty();
var max = limitNumberOfItems(data.length);
for (var i=0; i < max; i++) {
if (!data[i])
continue;
var formatted = options.formatItem(data[i].data, i+1, max, data[i].value, term);
if ( formatted === false )
continue;
var li = $("<li/>").html( options.highlight(formatted, term) ).addClass(i%2 == 0 ? "ac_even" : "ac_odd").appendTo(list)[0];
$.data(li, "ac_data", data[i]);
}
listItems = list.find("li");
if ( options.selectFirst ) {
listItems.slice(0, 1).addClass(CLASSES.ACTIVE);
active = 0;
}
// apply bgiframe if available
if ( $.fn.bgiframe )
list.bgiframe();
}
return {
display: function(d, q) {
init();
data = d;
term = q;
fillList();
},
next: function() {
moveSelect(1);
},
prev: function() {
moveSelect(-1);
},
pageUp: function() {
if (active != 0 && active - 8 < 0) {
moveSelect( -active );
} else {
moveSelect(-8);
}
},
pageDown: function() {
if (active != listItems.size() - 1 && active + 8 > listItems.size()) {
moveSelect( listItems.size() - 1 - active );
} else {
moveSelect(8);
}
},
hide: function() {
element && element.hide();
listItems && listItems.removeClass(CLASSES.ACTIVE);
active = -1;
},
visible : function() {
return element && element.is(":visible");
},
current: function() {
return this.visible() && (listItems.filter("." + CLASSES.ACTIVE)[0] || options.selectFirst && listItems[0]);
},
show: function() {
var offset = $(input).offset();
element.css({
width: typeof options.width == "string" || options.width > 0 ? options.width : $(input).width(),
top: offset.top + input.offsetHeight,
left: offset.left
}).show();
if(options.scroll) {
list.scrollTop(0);
list.css({
maxHeight: options.scrollHeight,
overflow: 'auto'
});
if($.browser.msie && typeof document.body.style.maxHeight === "undefined") {
var listHeight = 0;
listItems.each(function() {
listHeight += this.offsetHeight;
});
var scrollbarsVisible = listHeight > options.scrollHeight;
list.css('height', scrollbarsVisible ? options.scrollHeight : listHeight );
if (!scrollbarsVisible) {
// IE doesn't recalculate width when scrollbar disappears
listItems.width( list.width() - parseInt(listItems.css("padding-left")) - parseInt(listItems.css("padding-right")) );
}
}
}
},
selected: function() {
var selected = listItems && listItems.filter("." + CLASSES.ACTIVE).removeClass(CLASSES.ACTIVE);
return selected && selected.length && $.data(selected[0], "ac_data");
},
emptyList: function (){
list && list.empty();
},
unbind: function() {
element && element.remove();
}
};
};
$.fn.selection = function(start, end) {
if (start !== undefined) {
return this.each(function() {
if( this.createTextRange ){
var selRange = this.createTextRange();
if (end === undefined || start == end) {
selRange.move("character", start);
selRange.select();
} else {
selRange.collapse(true);
selRange.moveStart("character", start);
selRange.moveEnd("character", end);
selRange.select();
}
} else if( this.setSelectionRange ){
this.setSelectionRange(start, end);
} else if( this.selectionStart ){
this.selectionStart = start;
this.selectionEnd = end;
}
});
}
var field = this[0];
if ( field.createTextRange ) {
var range = document.selection.createRange(),
orig = field.value,
teststring = "<->",
textLength = range.text.length;
range.text = teststring;
var caretAt = field.value.indexOf(teststring);
field.value = orig;
this.selection(caretAt, caretAt + textLength);
return {
start: caretAt,
end: caretAt + textLength
}
} else if( field.selectionStart !== undefined ){
return {
start: field.selectionStart,
end: field.selectionEnd
}
}
};
})(jQuery); |
clearTimeout(timeout);
timeout = setTimeout(hideResultsNow, 200);
}; | identifier_body |
jquery.autocomplete.js | /*
* jQuery Autocomplete plugin 1.1
*
* Copyright (c) 2009 Jörn Zaefferer
*
* Dual licensed under the MIT and GPL licenses:
* http://www.opensource.org/licenses/mit-license.php
* http://www.gnu.org/licenses/gpl.html
*
* Revision: $Id: jquery.autocomplete.js 15 2009-08-22 10:30:27Z joern.zaefferer $
*/
;(function($) {
$.fn.extend({
autocomplete: function(urlOrData, options) {
var isUrl = typeof urlOrData == "string";
options = $.extend({}, $.Autocompleter.defaults, {
url: isUrl ? urlOrData : null,
data: isUrl ? null : urlOrData,
delay: isUrl ? $.Autocompleter.defaults.delay : 10,
max: options && !options.scroll ? 10 : 150
}, options);
// if highlight is set to false, replace it with a do-nothing function
options.highlight = options.highlight || function(value) { return value; };
// if the formatMatch option is not specified, then use formatItem for backwards compatibility
options.formatMatch = options.formatMatch || options.formatItem;
return this.each(function() {
new $.Autocompleter(this, options);
});
},
result: function(handler) {
return this.bind("result", handler);
},
search: function(handler) {
return this.trigger("search", [handler]);
},
flushCache: function() {
return this.trigger("flushCache");
},
setOptions: function(options){
return this.trigger("setOptions", [options]);
},
unautocomplete: function() {
return this.trigger("unautocomplete");
}
});
$.Autocompleter = function(input, options) {
var KEY = {
UP: 38,
DOWN: 40,
DEL: 46,
TAB: 9,
RETURN: 13,
ESC: 27,
COMMA: 188,
PAGEUP: 33,
PAGEDOWN: 34,
BACKSPACE: 8
};
// Create $ object for input element
var $input = $(input).attr("autocomplete", "off").addClass(options.inputClass);
var timeout;
var previousValue = "";
var cache = $.Autocompleter.Cache(options);
var hasFocus = 0;
var lastKeyPressCode;
var config = {
mouseDownOnSelect: false
};
var select = $.Autocompleter.Select(options, input, selectCurrent, config);
var blockSubmit;
// prevent form submit in opera when selecting with return key
$.browser.opera && $(input.form).bind("submit.autocomplete", function() {
if (blockSubmit) {
blockSubmit = false;
return false;
}
});
// only opera doesn't trigger keydown multiple times while pressed, others don't work with keypress at all
$input.bind(($.browser.opera ? "keypress" : "keydown") + ".autocomplete", function(event) {
// a keypress means the input has focus
// avoids issue where input had focus before the autocomplete was applied
hasFocus = 1;
// track last key pressed
lastKeyPressCode = event.keyCode;
switch(event.keyCode) {
case KEY.UP:
event.preventDefault();
if ( select.visible() ) {
select.prev();
} else {
onChange(0, true);
}
break;
case KEY.DOWN:
event.preventDefault();
if ( select.visible() ) {
select.next();
} else {
onChange(0, true);
}
break;
case KEY.PAGEUP:
event.preventDefault();
if ( select.visible() ) {
select.pageUp();
} else {
onChange(0, true);
}
break;
case KEY.PAGEDOWN:
event.preventDefault();
if ( select.visible() ) {
select.pageDown();
} else {
onChange(0, true);
}
break;
// matches also semicolon
case options.multiple && $.trim(options.multipleSeparator) == "," && KEY.COMMA:
case KEY.TAB:
case KEY.RETURN:
if( selectCurrent() ) {
// stop default to prevent a form submit, Opera needs special handling
event.preventDefault();
blockSubmit = true;
return false;
}
break;
case KEY.ESC:
select.hide();
break;
default:
clearTimeout(timeout);
timeout = setTimeout(onChange, options.delay);
break;
}
}).focus(function(){
// track whether the field has focus, we shouldn't process any
// results if the field no longer has focus
hasFocus++;
}).blur(function() {
hasFocus = 0;
if (!config.mouseDownOnSelect) {
hideResults();
}
}).click(function() {
// show select when clicking in a focused field
if ( hasFocus++ > 1 && !select.visible() ) {
onChange(0, true);
}
}).bind("search", function() {
// TODO why not just specifying both arguments?
var fn = (arguments.length > 1) ? arguments[1] : null;
function findValueCallback(q, data) {
var result;
if( data && data.length ) {
for (var i=0; i < data.length; i++) {
if( data[i].result.toLowerCase() == q.toLowerCase() ) {
result = data[i];
break;
}
}
}
if( typeof fn == "function" ) fn(result);
else $input.trigger("result", result && [result.data, result.value]);
}
$.each(trimWords($input.val()), function(i, value) {
request(value, findValueCallback, findValueCallback);
});
}).bind("flushCache", function() {
cache.flush();
}).bind("setOptions", function() {
$.extend(options, arguments[1]);
// if we've updated the data, repopulate
if ( "data" in arguments[1] )
cache.populate();
}).bind("unautocomplete", function() {
select.unbind();
$input.unbind();
$(input.form).unbind(".autocomplete");
});
function selectCurrent() {
var selected = select.selected();
if( !selected )
return false;
var v = selected.result;
previousValue = v;
if ( options.multiple ) {
var words = trimWords($input.val());
if ( words.length > 1 ) {
var seperator = options.multipleSeparator.length;
var cursorAt = $(input).selection().start;
var wordAt, progress = 0;
$.each(words, function(i, word) {
progress += word.length;
if (cursorAt <= progress) {
wordAt = i;
return false;
}
progress += seperator;
});
words[wordAt] = v;
// TODO this should set the cursor to the right position, but it gets overriden somewhere
//$.Autocompleter.Selection(input, progress + seperator, progress + seperator);
v = words.join( options.multipleSeparator );
}
v += options.multipleSeparator;
}
$input.val(v);
hideResultsNow();
$input.trigger("result", [selected.data, selected.value]);
return true;
}
function onChange(crap, skipPrevCheck) {
if( lastKeyPressCode == KEY.DEL ) {
select.hide();
return;
}
var currentValue = $input.val();
if ( !skipPrevCheck && currentValue == previousValue )
return;
previousValue = currentValue;
currentValue = lastWord(currentValue);
if ( currentValue.length >= options.minChars) {
$input.addClass(options.loadingClass);
if (!options.matchCase)
currentValue = currentValue.toLowerCase();
request(currentValue, receiveData, hideResultsNow);
} else {
stopLoading();
select.hide();
}
};
function trimWords(value) {
if (!value)
return [""];
if (!options.multiple)
return [$.trim(value)];
return $.map(value.split(options.multipleSeparator), function(word) {
return $.trim(value).length ? $.trim(word) : null;
});
}
function lastWord(value) {
if ( !options.multiple )
return value;
var words = trimWords(value);
if (words.length == 1)
return words[0];
var cursorAt = $(input).selection().start;
if (cursorAt == value.length) {
words = trimWords(value)
} else {
words = trimWords(value.replace(value.substring(cursorAt), ""));
}
return words[words.length - 1];
}
// fills in the input box w/the first match (assumed to be the best match)
// q: the term entered
// sValue: the first matching result
function autoFill(q, sValue){
// autofill in the complete box w/the first match as long as the user hasn't entered in more data
// if the last user key pressed was backspace, don't autofill
if( options.autoFill && (lastWord($input.val()).toLowerCase() == q.toLowerCase()) && lastKeyPressCode != KEY.BACKSPACE ) {
// fill in the value (keep the case the user has typed)
$input.val($input.val() + sValue.substring(lastWord(previousValue).length));
// select the portion of the value not typed by the user (so the next character will erase)
$(input).selection(previousValue.length, previousValue.length + sValue.length);
}
};
function hideResults() {
clearTimeout(timeout);
timeout = setTimeout(hideResultsNow, 200);
};
function hideResultsNow() {
var wasVisible = select.visible();
select.hide();
clearTimeout(timeout);
stopLoading();
if (options.mustMatch) {
// call search and run callback
$input.search(
function (result){
// if no value found, clear the input box
if( !result ) {
if (options.multiple) {
var words = trimWords($input.val()).slice(0, -1);
$input.val( words.join(options.multipleSeparator) + (words.length ? options.multipleSeparator : "") );
}
else {
$input.val( "" );
$input.trigger("result", null);
}
}
}
);
}
};
function receiveData(q, data) {
if ( data && data.length && hasFocus ) {
stopLoading();
select.display(data, q);
autoFill(q, data[0].value);
select.show();
} else {
hideResultsNow();
}
};
function request(term, success, failure) {
if (!options.matchCase)
term = term.toLowerCase();
var data = cache.load(term);
// recieve the cached data
if (data && data.length) {
success(term, data);
// if an AJAX url has been supplied, try loading the data now
} else if( (typeof options.url == "string") && (options.url.length > 0) ){
var extraParams = {
timestamp: +new Date()
};
$.each(options.extraParams, function(key, param) {
extraParams[key] = typeof param == "function" ? param() : param;
});
$.ajax({
// try to leverage ajaxQueue plugin to abort previous requests
mode: "abort",
// limit abortion to this input
port: "autocomplete" + input.name,
dataType: options.dataType,
url: options.url,
data: $.extend({
q: lastWord(term),
limit: options.max
}, extraParams),
success: function(data) {
var parsed = options.parse && options.parse(data) || parse(data);
cache.add(term, parsed);
success(term, parsed);
}
});
} else {
// if we have a failure, we need to empty the list -- this prevents the the [TAB] key from selecting the last successful match
select.emptyList();
failure(term);
}
};
function parse(data) {
var parsed = [];
var rows = data.split("\n");
for (var i=0; i < rows.length; i++) {
var row = $.trim(rows[i]);
if (row) {
row = row.split("|");
parsed[parsed.length] = {
data: row,
value: row[0],
result: options.formatResult && options.formatResult(row, row[0]) || row[0]
};
}
}
return parsed;
};
function stopLoading() {
$input.removeClass(options.loadingClass);
};
};
$.Autocompleter.defaults = {
inputClass: "ac_input",
resultsClass: "ac_results",
loadingClass: "ac_loading",
minChars: 1,
delay: 400,
matchCase: false,
matchSubset: true,
matchContains: false,
cacheLength: 10,
max: 100,
mustMatch: false,
extraParams: {},
selectFirst: true,
formatItem: function(row) { return row[0]; },
formatMatch: null,
autoFill: false,
width: 0,
multiple: false,
multipleSeparator: ", ",
highlight: function(value, term) {
return value.replace(new RegExp("(?![^&;]+;)(?!<[^<>]*)(" + term.replace(/([\^\$\(\)\[\]\{\}\*\.\+\?\|\\])/gi, "\\$1") + ")(?![^<>]*>)(?![^&;]+;)", "gi"), "<strong>$1</strong>");
},
scroll: true,
scrollHeight: 180
};
$.Autocompleter.Cache = function(options) {
var data = {};
var length = 0;
function matchSubset(s, sub) {
if (!options.matchCase)
s = s.toLowerCase();
var i = s.indexOf(sub);
if (options.matchContains == "word"){
i = s.toLowerCase().search("\\b" + sub.toLowerCase());
}
if (i == -1) return false;
return i == 0 || options.matchContains;
};
function add(q, value) {
if (length > options.cacheLength){
flush();
}
if (!data[q]){
length++;
}
data[q] = value;
}
function populate(){
if( !options.data ) return false;
// track the matches
var stMatchSets = {},
nullData = 0;
// no url was specified, we need to adjust the cache length to make sure it fits the local data store
if( !options.url ) options.cacheLength = 1;
// track all options for minChars = 0
stMatchSets[""] = [];
// loop through the array and create a lookup structure
for ( var i = 0, ol = options.data.length; i < ol; i++ ) {
var rawValue = options.data[i];
// if rawValue is a string, make an array otherwise just reference the array
rawValue = (typeof rawValue == "string") ? [rawValue] : rawValue;
var value = options.formatMatch(rawValue, i+1, options.data.length);
if ( value === false )
continue;
var firstChar = value.charAt(0).toLowerCase();
// if no lookup array for this character exists, look it up now
if( !stMatchSets[firstChar] )
stMatchSets[firstChar] = [];
// if the match is a string
var row = {
value: value,
data: rawValue,
result: options.formatResult && options.formatResult(rawValue) || value
};
// push the current match into the set list
stMatchSets[firstChar].push(row);
// keep track of minChars zero items
if ( nullData++ < options.max ) {
stMatchSets[""].push(row);
}
};
// add the data items to the cache
$.each(stMatchSets, function(i, value) {
// increase the cache size
options.cacheLength++;
// add to the cache
add(i, value);
});
}
// populate any existing data
setTimeout(populate, 25);
function flush(){
data = {};
length = 0;
}
return {
flush: flush,
add: add,
populate: populate,
load: function(q) {
if (!options.cacheLength || !length)
return null;
/*
* if dealing w/local data and matchContains than we must make sure
* to loop through all the data collections looking for matches
*/
if( !options.url && options.matchContains ){
// track all matches
var csub = [];
// loop through all the data grids for matches
for( var k in data ){
// don't search through the stMatchSets[""] (minChars: 0) cache
// this prevents duplicates
if( k.length > 0 ){
var c = data[k];
$.each(c, function(i, x) {
// if we've got a match, add it to the array
if (matchSubset(x.value, q)) {
csub.push(x);
}
});
}
}
return csub;
} else
// if the exact item exists, use it
if (data[q]){
return data[q];
} else
if (options.matchSubset) {
for (var i = q.length - 1; i >= options.minChars; i--) {
var c = data[q.substr(0, i)];
if (c) {
var csub = [];
$.each(c, function(i, x) {
if (matchSubset(x.value, q)) {
csub[csub.length] = x;
}
});
return csub;
}
}
}
return null;
}
};
};
$.Autocompleter.Select = function (options, input, select, config) {
var CLASSES = {
ACTIVE: "ac_over"
};
var listItems,
active = -1,
data,
term = "",
needsInit = true,
element,
list;
// Create results
function init() {
if (!needsInit)
return;
element = $("<div/>")
.hide()
.addClass(options.resultsClass)
.css("position", "absolute")
.appendTo(document.body);
list = $("<ul/>").appendTo(element).mouseover( function(event) {
if(target(event).nodeName && target(event).nodeName.toUpperCase() == 'LI') {
active = $("li", list).removeClass(CLASSES.ACTIVE).index(target(event));
$(target(event)).addClass(CLASSES.ACTIVE);
}
}).click(function(event) {
$(target(event)).addClass(CLASSES.ACTIVE);
select();
// TODO provide option to avoid setting focus again after selection? useful for cleanup-on-focus
input.focus();
return false;
}).mousedown(function() {
config.mouseDownOnSelect = true;
}).mouseup(function() {
config.mouseDownOnSelect = false;
});
if( options.width > 0 )
element.css("width", options.width);
needsInit = false;
}
function target(event) {
var element = event.target;
while(element && element.tagName != "LI")
element = element.parentNode;
// more fun with IE, sometimes event.target is empty, just ignore it then
if(!element)
return [];
return element;
}
function moveSelect(step) {
listItems.slice(active, active + 1).removeClass(CLASSES.ACTIVE);
movePosition(step);
var activeItem = listItems.slice(active, active + 1).addClass(CLASSES.ACTIVE);
if(options.scroll) {
var offset = 0;
listItems.slice(0, active).each(function() {
offset += this.offsetHeight;
});
if((offset + activeItem[0].offsetHeight - list.scrollTop()) > list[0].clientHeight) {
list.scrollTop(offset + activeItem[0].offsetHeight - list.innerHeight());
} else if(offset < list.scrollTop()) {
list.scrollTop(offset);
}
}
};
function m | step) {
active += step;
if (active < 0) {
active = listItems.size() - 1;
} else if (active >= listItems.size()) {
active = 0;
}
}
function limitNumberOfItems(available) {
return options.max && options.max < available
? options.max
: available;
}
function fillList() {
list.empty();
var max = limitNumberOfItems(data.length);
for (var i=0; i < max; i++) {
if (!data[i])
continue;
var formatted = options.formatItem(data[i].data, i+1, max, data[i].value, term);
if ( formatted === false )
continue;
var li = $("<li/>").html( options.highlight(formatted, term) ).addClass(i%2 == 0 ? "ac_even" : "ac_odd").appendTo(list)[0];
$.data(li, "ac_data", data[i]);
}
listItems = list.find("li");
if ( options.selectFirst ) {
listItems.slice(0, 1).addClass(CLASSES.ACTIVE);
active = 0;
}
// apply bgiframe if available
if ( $.fn.bgiframe )
list.bgiframe();
}
return {
display: function(d, q) {
init();
data = d;
term = q;
fillList();
},
next: function() {
moveSelect(1);
},
prev: function() {
moveSelect(-1);
},
pageUp: function() {
if (active != 0 && active - 8 < 0) {
moveSelect( -active );
} else {
moveSelect(-8);
}
},
pageDown: function() {
if (active != listItems.size() - 1 && active + 8 > listItems.size()) {
moveSelect( listItems.size() - 1 - active );
} else {
moveSelect(8);
}
},
hide: function() {
element && element.hide();
listItems && listItems.removeClass(CLASSES.ACTIVE);
active = -1;
},
visible : function() {
return element && element.is(":visible");
},
current: function() {
return this.visible() && (listItems.filter("." + CLASSES.ACTIVE)[0] || options.selectFirst && listItems[0]);
},
show: function() {
var offset = $(input).offset();
element.css({
width: typeof options.width == "string" || options.width > 0 ? options.width : $(input).width(),
top: offset.top + input.offsetHeight,
left: offset.left
}).show();
if(options.scroll) {
list.scrollTop(0);
list.css({
maxHeight: options.scrollHeight,
overflow: 'auto'
});
if($.browser.msie && typeof document.body.style.maxHeight === "undefined") {
var listHeight = 0;
listItems.each(function() {
listHeight += this.offsetHeight;
});
var scrollbarsVisible = listHeight > options.scrollHeight;
list.css('height', scrollbarsVisible ? options.scrollHeight : listHeight );
if (!scrollbarsVisible) {
// IE doesn't recalculate width when scrollbar disappears
listItems.width( list.width() - parseInt(listItems.css("padding-left")) - parseInt(listItems.css("padding-right")) );
}
}
}
},
selected: function() {
var selected = listItems && listItems.filter("." + CLASSES.ACTIVE).removeClass(CLASSES.ACTIVE);
return selected && selected.length && $.data(selected[0], "ac_data");
},
emptyList: function (){
list && list.empty();
},
unbind: function() {
element && element.remove();
}
};
};
$.fn.selection = function(start, end) {
if (start !== undefined) {
return this.each(function() {
if( this.createTextRange ){
var selRange = this.createTextRange();
if (end === undefined || start == end) {
selRange.move("character", start);
selRange.select();
} else {
selRange.collapse(true);
selRange.moveStart("character", start);
selRange.moveEnd("character", end);
selRange.select();
}
} else if( this.setSelectionRange ){
this.setSelectionRange(start, end);
} else if( this.selectionStart ){
this.selectionStart = start;
this.selectionEnd = end;
}
});
}
var field = this[0];
if ( field.createTextRange ) {
var range = document.selection.createRange(),
orig = field.value,
teststring = "<->",
textLength = range.text.length;
range.text = teststring;
var caretAt = field.value.indexOf(teststring);
field.value = orig;
this.selection(caretAt, caretAt + textLength);
return {
start: caretAt,
end: caretAt + textLength
}
} else if( field.selectionStart !== undefined ){
return {
start: field.selectionStart,
end: field.selectionEnd
}
}
};
})(jQuery); | ovePosition( | identifier_name |
test_matcher.py | # -*- coding: utf-8 -*-
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib'))
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib', 'pyscraper'))
from matcher import Matcher
import util as util
import unittest
class TestMatcher(unittest.TestCase):
@classmethod
def | (cls):
# This is required so that readScraper() can parse the XML instruction files
util.RCBHOME = os.path.join(os.path.dirname(__file__), '..', '..')
# Test matching against a result set
def test_getBestResultsWithRomanNumerals(self):
results = [{'SearchKey': ['Tekken 2']}, {'SearchKey': ['Tekken 3']}, {'SearchKey': ['Tekken IV']}]
gamename = 'Tekken II'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertEquals(x.get('SearchKey')[0], 'Tekken 2')
def test_getBestResultsWithApostropheAndYear(self):
results = [{'SearchKey': ['FIFA 98']}, {'SearchKey': ['FIFA 97']}, {'SearchKey': ['FIFA 2001']}]
gamename = 'FIFA \'98'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertTrue(x.get('SearchKey')[0] == 'FIFA 98',
"Expected to match title (was {0})".format(x.get('SearchKey')[0]))
def test_getBestResultsMatchingWithUnicode(self):
results = [{'SearchKey': [u'スーパー競輪']}]
gamename = u'スーパー競輪'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertTrue(x.get('SearchKey')[0] == u'スーパー競輪', "Expected matching unicode strings to match")
def test_getBestResultsNonMatchingWithUnicode(self):
results = [{'SearchKey': [u'スーパー競輪']}]
gamename = 'Super Test Game'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertIsNone(x, "Expected non-matching strings to not match, including unicode")
def test_getBestResultsWithBrackets(self):
results = [{'SearchKey': ['FIFA 98']}, {'SearchKey': ['FIFA 97']}, {'SearchKey': ['FIFA 2001']}]
gamename = 'FIFA \'98 (1998) [Electronic Arts]'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertEquals(x.get('SearchKey')[0], 'FIFA 98')
if __name__ == "__main__":
unittest.main() | setUpClass | identifier_name |
test_matcher.py | # -*- coding: utf-8 -*-
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib'))
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib', 'pyscraper'))
from matcher import Matcher
import util as util
import unittest
class TestMatcher(unittest.TestCase):
@classmethod
def setUpClass(cls):
# This is required so that readScraper() can parse the XML instruction files
util.RCBHOME = os.path.join(os.path.dirname(__file__), '..', '..')
# Test matching against a result set
def test_getBestResultsWithRomanNumerals(self):
results = [{'SearchKey': ['Tekken 2']}, {'SearchKey': ['Tekken 3']}, {'SearchKey': ['Tekken IV']}]
gamename = 'Tekken II'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertEquals(x.get('SearchKey')[0], 'Tekken 2')
def test_getBestResultsWithApostropheAndYear(self):
results = [{'SearchKey': ['FIFA 98']}, {'SearchKey': ['FIFA 97']}, {'SearchKey': ['FIFA 2001']}]
gamename = 'FIFA \'98'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertTrue(x.get('SearchKey')[0] == 'FIFA 98',
"Expected to match title (was {0})".format(x.get('SearchKey')[0]))
def test_getBestResultsMatchingWithUnicode(self):
results = [{'SearchKey': [u'スーパー競輪']}]
gamename = u'スーパー競輪'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertTrue(x.get('SearchKey')[0] == u'スーパー競輪', "Expected matching unicode strings to match")
def test_getBestResultsNonMatchingWithUnicode(self):
results = [{'SearchKey': [u'スーパー競輪']}]
gamename = 'Super Test Game'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertIsNone(x, "Expected non-matching strings to not match, including unicode")
def test_getBestResultsWithBrackets(self):
results = [{'SearchKey': ['FIFA 98']}, {'SearchKey': ['FIFA 97']}, {'SearchKey': ['FIFA 2001']}]
gamename = 'FIFA \'98 (1998) [Electronic Arts]'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertEquals(x.get('SearchKey')[0], 'FIFA 98')
if __name__ == "__main__":
unittest.main() | conditional_block |
||
test_matcher.py | # -*- coding: utf-8 -*-
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib'))
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib', 'pyscraper'))
from matcher import Matcher
import util as util
import unittest
class TestMatcher(unittest.TestCase):
@classmethod
def setUpClass(cls):
# This is required so that readScraper() can parse the XML instruction files
|
# Test matching against a result set
def test_getBestResultsWithRomanNumerals(self):
results = [{'SearchKey': ['Tekken 2']}, {'SearchKey': ['Tekken 3']}, {'SearchKey': ['Tekken IV']}]
gamename = 'Tekken II'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertEquals(x.get('SearchKey')[0], 'Tekken 2')
def test_getBestResultsWithApostropheAndYear(self):
results = [{'SearchKey': ['FIFA 98']}, {'SearchKey': ['FIFA 97']}, {'SearchKey': ['FIFA 2001']}]
gamename = 'FIFA \'98'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertTrue(x.get('SearchKey')[0] == 'FIFA 98',
"Expected to match title (was {0})".format(x.get('SearchKey')[0]))
def test_getBestResultsMatchingWithUnicode(self):
results = [{'SearchKey': [u'スーパー競輪']}]
gamename = u'スーパー競輪'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertTrue(x.get('SearchKey')[0] == u'スーパー競輪', "Expected matching unicode strings to match")
def test_getBestResultsNonMatchingWithUnicode(self):
results = [{'SearchKey': [u'スーパー競輪']}]
gamename = 'Super Test Game'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertIsNone(x, "Expected non-matching strings to not match, including unicode")
def test_getBestResultsWithBrackets(self):
results = [{'SearchKey': ['FIFA 98']}, {'SearchKey': ['FIFA 97']}, {'SearchKey': ['FIFA 2001']}]
gamename = 'FIFA \'98 (1998) [Electronic Arts]'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertEquals(x.get('SearchKey')[0], 'FIFA 98')
if __name__ == "__main__":
unittest.main() | util.RCBHOME = os.path.join(os.path.dirname(__file__), '..', '..') | identifier_body |
test_matcher.py | # -*- coding: utf-8 -*-
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib'))
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib', 'pyscraper'))
from matcher import Matcher
import util as util
import unittest
class TestMatcher(unittest.TestCase):
@classmethod
def setUpClass(cls):
# This is required so that readScraper() can parse the XML instruction files
util.RCBHOME = os.path.join(os.path.dirname(__file__), '..', '..')
# Test matching against a result set
def test_getBestResultsWithRomanNumerals(self):
results = [{'SearchKey': ['Tekken 2']}, {'SearchKey': ['Tekken 3']}, {'SearchKey': ['Tekken IV']}]
gamename = 'Tekken II'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertEquals(x.get('SearchKey')[0], 'Tekken 2')
def test_getBestResultsWithApostropheAndYear(self):
results = [{'SearchKey': ['FIFA 98']}, {'SearchKey': ['FIFA 97']}, {'SearchKey': ['FIFA 2001']}]
gamename = 'FIFA \'98'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertTrue(x.get('SearchKey')[0] == 'FIFA 98',
"Expected to match title (was {0})".format(x.get('SearchKey')[0]))
def test_getBestResultsMatchingWithUnicode(self):
results = [{'SearchKey': [u'スーパー競輪']}]
gamename = u'スーパー競輪'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertTrue(x.get('SearchKey')[0] == u'スーパー競輪', "Expected matching unicode strings to match")
def test_getBestResultsNonMatchingWithUnicode(self):
results = [{'SearchKey': [u'スーパー競輪']}]
gamename = 'Super Test Game'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertIsNone(x, "Expected non-matching strings to not match, including unicode")
def test_getBestResultsWithBrackets(self):
results = [{'SearchKey': ['FIFA 98']}, {'SearchKey': ['FIFA 97']}, {'SearchKey': ['FIFA 2001']}]
gamename = 'FIFA \'98 (1998) [Electronic Arts]'
| if __name__ == "__main__":
unittest.main() | m = Matcher()
x = m.getBestResults(results, gamename)
self.assertEquals(x.get('SearchKey')[0], 'FIFA 98')
| random_line_split |
test_sns_operations.py | #!/usr/bin/env python
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from mock import Mock, sentinel
import botocore.session
class TestSNSOperations(unittest.TestCase):
def | (self):
self.session = botocore.session.get_session()
self.sns = self.session.get_service('sns')
def test_subscribe_with_endpoint(self):
op = self.sns.get_operation('Subscribe')
params = op.build_parameters(topic_arn='topic_arn',
protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(params['Endpoint'], 'http://example.org')
def test_sns_pre_send_event(self):
op = self.sns.get_operation('Subscribe')
calls = []
self.session.register('before-call.sns.Subscribe',
lambda **kwargs: calls.append(kwargs))
endpoint = Mock()
endpoint.make_request.return_value = (sentinel.RESPONSE,
sentinel.PARSED)
op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(len(calls), 1)
kwargs = calls[0]
self.assertEqual(kwargs['operation'], op)
self.assertEqual(kwargs['endpoint'], endpoint)
self.assertEqual(kwargs['params']['TopicArn'], 'topic_arn')
def test_sns_post_send_event_is_invoked(self):
op = self.sns.get_operation('Subscribe')
calls = []
self.session.register('after-call.sns.Subscribe',
lambda **kwargs: calls.append(kwargs))
endpoint = Mock()
endpoint.make_request.return_value = (sentinel.RESPONSE,
sentinel.PARSED)
op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0]['operation'], op)
self.assertEqual(calls[0]['http_response'], sentinel.RESPONSE)
self.assertEqual(calls[0]['parsed'], sentinel.PARSED)
if __name__ == "__main__":
unittest.main()
| setUp | identifier_name |
test_sns_operations.py | #!/usr/bin/env python
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
# | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from mock import Mock, sentinel
import botocore.session
class TestSNSOperations(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.sns = self.session.get_service('sns')
def test_subscribe_with_endpoint(self):
op = self.sns.get_operation('Subscribe')
params = op.build_parameters(topic_arn='topic_arn',
protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(params['Endpoint'], 'http://example.org')
def test_sns_pre_send_event(self):
op = self.sns.get_operation('Subscribe')
calls = []
self.session.register('before-call.sns.Subscribe',
lambda **kwargs: calls.append(kwargs))
endpoint = Mock()
endpoint.make_request.return_value = (sentinel.RESPONSE,
sentinel.PARSED)
op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(len(calls), 1)
kwargs = calls[0]
self.assertEqual(kwargs['operation'], op)
self.assertEqual(kwargs['endpoint'], endpoint)
self.assertEqual(kwargs['params']['TopicArn'], 'topic_arn')
def test_sns_post_send_event_is_invoked(self):
op = self.sns.get_operation('Subscribe')
calls = []
self.session.register('after-call.sns.Subscribe',
lambda **kwargs: calls.append(kwargs))
endpoint = Mock()
endpoint.make_request.return_value = (sentinel.RESPONSE,
sentinel.PARSED)
op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0]['operation'], op)
self.assertEqual(calls[0]['http_response'], sentinel.RESPONSE)
self.assertEqual(calls[0]['parsed'], sentinel.PARSED)
if __name__ == "__main__":
unittest.main() | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | random_line_split |
test_sns_operations.py | #!/usr/bin/env python
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from mock import Mock, sentinel
import botocore.session
class TestSNSOperations(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.sns = self.session.get_service('sns')
def test_subscribe_with_endpoint(self):
op = self.sns.get_operation('Subscribe')
params = op.build_parameters(topic_arn='topic_arn',
protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(params['Endpoint'], 'http://example.org')
def test_sns_pre_send_event(self):
op = self.sns.get_operation('Subscribe')
calls = []
self.session.register('before-call.sns.Subscribe',
lambda **kwargs: calls.append(kwargs))
endpoint = Mock()
endpoint.make_request.return_value = (sentinel.RESPONSE,
sentinel.PARSED)
op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(len(calls), 1)
kwargs = calls[0]
self.assertEqual(kwargs['operation'], op)
self.assertEqual(kwargs['endpoint'], endpoint)
self.assertEqual(kwargs['params']['TopicArn'], 'topic_arn')
def test_sns_post_send_event_is_invoked(self):
op = self.sns.get_operation('Subscribe')
calls = []
self.session.register('after-call.sns.Subscribe',
lambda **kwargs: calls.append(kwargs))
endpoint = Mock()
endpoint.make_request.return_value = (sentinel.RESPONSE,
sentinel.PARSED)
op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0]['operation'], op)
self.assertEqual(calls[0]['http_response'], sentinel.RESPONSE)
self.assertEqual(calls[0]['parsed'], sentinel.PARSED)
if __name__ == "__main__":
| unittest.main() | conditional_block |
|
test_sns_operations.py | #!/usr/bin/env python
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from mock import Mock, sentinel
import botocore.session
class TestSNSOperations(unittest.TestCase):
def setUp(self):
|
def test_subscribe_with_endpoint(self):
op = self.sns.get_operation('Subscribe')
params = op.build_parameters(topic_arn='topic_arn',
protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(params['Endpoint'], 'http://example.org')
def test_sns_pre_send_event(self):
op = self.sns.get_operation('Subscribe')
calls = []
self.session.register('before-call.sns.Subscribe',
lambda **kwargs: calls.append(kwargs))
endpoint = Mock()
endpoint.make_request.return_value = (sentinel.RESPONSE,
sentinel.PARSED)
op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(len(calls), 1)
kwargs = calls[0]
self.assertEqual(kwargs['operation'], op)
self.assertEqual(kwargs['endpoint'], endpoint)
self.assertEqual(kwargs['params']['TopicArn'], 'topic_arn')
def test_sns_post_send_event_is_invoked(self):
op = self.sns.get_operation('Subscribe')
calls = []
self.session.register('after-call.sns.Subscribe',
lambda **kwargs: calls.append(kwargs))
endpoint = Mock()
endpoint.make_request.return_value = (sentinel.RESPONSE,
sentinel.PARSED)
op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0]['operation'], op)
self.assertEqual(calls[0]['http_response'], sentinel.RESPONSE)
self.assertEqual(calls[0]['parsed'], sentinel.PARSED)
if __name__ == "__main__":
unittest.main()
| self.session = botocore.session.get_session()
self.sns = self.session.get_service('sns') | identifier_body |
workspace.rs | use crate::core::{Target, Workspace};
use crate::ops::CompileOptions;
use crate::util::CargoResult;
use anyhow::bail;
use std::fmt::Write;
fn get_available_targets<'a>(
filter_fn: fn(&Target) -> bool,
ws: &'a Workspace<'_>,
options: &'a CompileOptions,
) -> CargoResult<Vec<&'a str>> {
let packages = options.spec.get_packages(ws)?;
let mut targets: Vec<_> = packages
.into_iter()
.flat_map(|pkg| {
pkg.manifest()
.targets()
.iter()
.filter(|target| filter_fn(target))
})
.map(Target::name)
.collect();
targets.sort();
Ok(targets)
}
fn print_available_targets(
filter_fn: fn(&Target) -> bool,
ws: &Workspace<'_>,
options: &CompileOptions,
option_name: &str,
plural_name: &str,
) -> CargoResult<()> {
let targets = get_available_targets(filter_fn, ws, options)?;
let mut output = String::new();
writeln!(output, "\"{}\" takes one argument.", option_name)?;
if targets.is_empty() {
writeln!(output, "No {} available.", plural_name)?;
} else {
writeln!(output, "Available {}:", plural_name)?;
for target in targets {
writeln!(output, " {}", target)?;
}
}
bail!("{}", output)
}
pub fn print_available_packages(ws: &Workspace<'_>) -> CargoResult<()> {
let packages = ws
.members()
.map(|pkg| pkg.name().as_str())
.collect::<Vec<_>>();
let mut output = "\"--package <SPEC>\" requires a SPEC format value, \
which can be any package ID specifier in the dependency graph.\n\
Run `cargo help pkgid` for more information about SPEC format.\n\n"
.to_string();
if packages.is_empty() {
// This would never happen.
// Just in case something regresses we covers it here.
writeln!(output, "No packages available.")?;
} else {
writeln!(output, "Possible packages/workspace members:")?;
for package in packages {
writeln!(output, " {}", package)?;
}
}
bail!("{}", output)
}
pub fn print_available_examples(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> |
pub fn print_available_binaries(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_bin, ws, options, "--bin", "binaries")
}
pub fn print_available_benches(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_bench, ws, options, "--bench", "benches")
}
pub fn print_available_tests(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_test, ws, options, "--test", "tests")
}
| {
print_available_targets(Target::is_example, ws, options, "--example", "examples")
} | identifier_body |
workspace.rs | use crate::core::{Target, Workspace};
use crate::ops::CompileOptions;
use crate::util::CargoResult;
use anyhow::bail;
use std::fmt::Write;
fn get_available_targets<'a>(
filter_fn: fn(&Target) -> bool,
ws: &'a Workspace<'_>,
options: &'a CompileOptions,
) -> CargoResult<Vec<&'a str>> {
let packages = options.spec.get_packages(ws)?;
let mut targets: Vec<_> = packages
.into_iter()
.flat_map(|pkg| {
pkg.manifest()
.targets()
.iter()
.filter(|target| filter_fn(target))
})
.map(Target::name)
.collect();
targets.sort();
Ok(targets)
}
fn | (
filter_fn: fn(&Target) -> bool,
ws: &Workspace<'_>,
options: &CompileOptions,
option_name: &str,
plural_name: &str,
) -> CargoResult<()> {
let targets = get_available_targets(filter_fn, ws, options)?;
let mut output = String::new();
writeln!(output, "\"{}\" takes one argument.", option_name)?;
if targets.is_empty() {
writeln!(output, "No {} available.", plural_name)?;
} else {
writeln!(output, "Available {}:", plural_name)?;
for target in targets {
writeln!(output, " {}", target)?;
}
}
bail!("{}", output)
}
pub fn print_available_packages(ws: &Workspace<'_>) -> CargoResult<()> {
let packages = ws
.members()
.map(|pkg| pkg.name().as_str())
.collect::<Vec<_>>();
let mut output = "\"--package <SPEC>\" requires a SPEC format value, \
which can be any package ID specifier in the dependency graph.\n\
Run `cargo help pkgid` for more information about SPEC format.\n\n"
.to_string();
if packages.is_empty() {
// This would never happen.
// Just in case something regresses we covers it here.
writeln!(output, "No packages available.")?;
} else {
writeln!(output, "Possible packages/workspace members:")?;
for package in packages {
writeln!(output, " {}", package)?;
}
}
bail!("{}", output)
}
pub fn print_available_examples(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_example, ws, options, "--example", "examples")
}
pub fn print_available_binaries(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_bin, ws, options, "--bin", "binaries")
}
pub fn print_available_benches(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_bench, ws, options, "--bench", "benches")
}
pub fn print_available_tests(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_test, ws, options, "--test", "tests")
}
| print_available_targets | identifier_name |
workspace.rs | use crate::core::{Target, Workspace};
use crate::ops::CompileOptions;
use crate::util::CargoResult;
use anyhow::bail;
use std::fmt::Write;
fn get_available_targets<'a>(
filter_fn: fn(&Target) -> bool,
ws: &'a Workspace<'_>,
options: &'a CompileOptions,
) -> CargoResult<Vec<&'a str>> {
let packages = options.spec.get_packages(ws)?;
let mut targets: Vec<_> = packages
.into_iter()
.flat_map(|pkg| {
pkg.manifest()
.targets()
.iter()
.filter(|target| filter_fn(target))
})
.map(Target::name)
.collect();
targets.sort();
Ok(targets)
}
fn print_available_targets(
filter_fn: fn(&Target) -> bool,
ws: &Workspace<'_>,
options: &CompileOptions,
option_name: &str,
plural_name: &str,
) -> CargoResult<()> {
let targets = get_available_targets(filter_fn, ws, options)?;
let mut output = String::new();
writeln!(output, "\"{}\" takes one argument.", option_name)?;
if targets.is_empty() {
writeln!(output, "No {} available.", plural_name)?;
} else {
writeln!(output, "Available {}:", plural_name)?;
for target in targets {
writeln!(output, " {}", target)?;
}
}
bail!("{}", output)
}
pub fn print_available_packages(ws: &Workspace<'_>) -> CargoResult<()> {
let packages = ws
.members()
.map(|pkg| pkg.name().as_str())
.collect::<Vec<_>>();
let mut output = "\"--package <SPEC>\" requires a SPEC format value, \
which can be any package ID specifier in the dependency graph.\n\
Run `cargo help pkgid` for more information about SPEC format.\n\n"
.to_string();
if packages.is_empty() {
// This would never happen.
// Just in case something regresses we covers it here.
writeln!(output, "No packages available.")?;
} else |
bail!("{}", output)
}
pub fn print_available_examples(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_example, ws, options, "--example", "examples")
}
pub fn print_available_binaries(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_bin, ws, options, "--bin", "binaries")
}
pub fn print_available_benches(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_bench, ws, options, "--bench", "benches")
}
pub fn print_available_tests(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_test, ws, options, "--test", "tests")
}
| {
writeln!(output, "Possible packages/workspace members:")?;
for package in packages {
writeln!(output, " {}", package)?;
}
} | conditional_block |
workspace.rs | use crate::core::{Target, Workspace};
use crate::ops::CompileOptions;
use crate::util::CargoResult;
use anyhow::bail;
use std::fmt::Write;
fn get_available_targets<'a>(
filter_fn: fn(&Target) -> bool,
ws: &'a Workspace<'_>,
options: &'a CompileOptions,
) -> CargoResult<Vec<&'a str>> {
let packages = options.spec.get_packages(ws)?;
let mut targets: Vec<_> = packages
.into_iter()
.flat_map(|pkg| {
pkg.manifest()
.targets()
.iter()
.filter(|target| filter_fn(target))
})
.map(Target::name)
.collect();
targets.sort();
Ok(targets)
}
fn print_available_targets(
filter_fn: fn(&Target) -> bool,
ws: &Workspace<'_>,
options: &CompileOptions,
option_name: &str,
plural_name: &str,
) -> CargoResult<()> {
let targets = get_available_targets(filter_fn, ws, options)?;
let mut output = String::new();
writeln!(output, "\"{}\" takes one argument.", option_name)?;
if targets.is_empty() {
writeln!(output, "No {} available.", plural_name)?;
} else {
writeln!(output, "Available {}:", plural_name)?;
for target in targets {
writeln!(output, " {}", target)?;
}
}
bail!("{}", output)
}
pub fn print_available_packages(ws: &Workspace<'_>) -> CargoResult<()> {
let packages = ws | which can be any package ID specifier in the dependency graph.\n\
Run `cargo help pkgid` for more information about SPEC format.\n\n"
.to_string();
if packages.is_empty() {
// This would never happen.
// Just in case something regresses we covers it here.
writeln!(output, "No packages available.")?;
} else {
writeln!(output, "Possible packages/workspace members:")?;
for package in packages {
writeln!(output, " {}", package)?;
}
}
bail!("{}", output)
}
pub fn print_available_examples(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_example, ws, options, "--example", "examples")
}
pub fn print_available_binaries(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_bin, ws, options, "--bin", "binaries")
}
pub fn print_available_benches(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_bench, ws, options, "--bench", "benches")
}
pub fn print_available_tests(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> {
print_available_targets(Target::is_test, ws, options, "--test", "tests")
} | .members()
.map(|pkg| pkg.name().as_str())
.collect::<Vec<_>>();
let mut output = "\"--package <SPEC>\" requires a SPEC format value, \ | random_line_split |
index.js | export { default as mdlUpgrade } from './utils/mdlUpgrade';
export { default as MDLComponent } from './utils/MDLComponent';
// components | CardMedia, CardText, CardMenu
} from './Card';
export { default as Checkbox } from './Checkbox';
export { default as DataTable, Table, TableHeader } from './DataTable';
export {
Dialog, DialogTitle, DialogContent,
DialogActions
} from './Dialog';
export { default as FABButton } from './FABButton';
export {
Footer, FooterSection, FooterDropDownSection,
FooterLinkList
} from './Footer';
export { default as Grid, Cell } from './Grid';
export { default as Icon } from './Icon';
export { default as IconButton } from './IconButton';
export { default as IconToggle } from './IconToggle';
export {
Layout, Header, Drawer,
HeaderRow, HeaderTabs, Spacer,
Navigation, Content
} from './Layout';
export {
List, ListItem,
ListItemAction, ListItemContent
} from './List';
export { default as Menu, MenuItem } from './Menu';
export { default as ProgressBar } from './ProgressBar';
export { default as Radio } from './Radio';
export { default as RadioGroup } from './RadioGroup';
export { default as Slider } from './Slider';
export { default as Snackbar } from './Snackbar';
export { default as Spinner } from './Spinner';
export { default as Switch } from './Switch';
export { Tabs, Tab, TabBar } from './Tabs';
export { default as Textfield } from './Textfield';
export { default as Tooltip } from './Tooltip'; | export { default as Badge } from './Badge';
export { default as Button } from './Button';
export {
Card, CardTitle, CardActions, | random_line_split |
factory.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from suds.sudsobject import Object as SudsObject
class _FactoryKeywords(object):
def set_wsdl_object_attribute(self, object, name, value):
"""Sets the attribute of a WSDL object.
Example:
| ${order search request}= | Create Wsdl Object | OrderSearchRequest | |
| Set Wsdl Object Attribute | ${order search request} | id | 4065 |
"""
self._assert_is_suds_object(object)
getattr(object, name)
setattr(object, name, value)
def get_wsdl_object_attribute(self, object, name):
"""Gets the attribute of a WSDL object.
Extendend variable syntax may be used to access attributes; however,
some WSDL objects may have attribute names that are illegal in Python,
necessitating this keyword.
Example:
| ${sale record}= | Call Soap Method | getLastSale | |
| ${price}= | Get Wsdl Object Attribute | ${sale record} | Price |
"""
self._assert_is_suds_object(object)
return getattr(object, name)
def create_wsdl_object(self, type, *name_value_pairs):
"""Creates a WSDL object of the specified `type`.
Requested `type` must be defined in the WSDL, in an import specified
by the WSDL, or with `Add Doctor Import`. `type` is case sensitive.
Example:
| ${contact}= | Create Wsdl Object | Contact | |
| Set Wsdl Object Attribute | ${contact} | Name | Kelly Newman |
Attribute values can be set by passing the attribute name and value in
pairs. This is equivalent to the two lines above:
| ${contact}= | Create Wsdl Object | Contact | Name | Kelly Newman |
"""
if len(name_value_pairs) % 2 != 0:
raise ValueError("Creating a WSDL object failed. There should be "
"an even number of name-value pairs.")
obj = self._client().factory.create(type)
for i in range(0, len(name_value_pairs), 2):
self.set_wsdl_object_attribute(obj, name_value_pairs[i], name_value_pairs[i + 1])
return obj
# private
def _assert_is_suds_object(self, object):
if not isinstance(object, SudsObject):
raise ValueError("Object must be a WSDL object (suds.sudsobject.Object).") | # Copyright 2012 Kevin Ormbrek
# | random_line_split |
|
factory.py | # Copyright 2012 Kevin Ormbrek
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from suds.sudsobject import Object as SudsObject
class _FactoryKeywords(object):
def set_wsdl_object_attribute(self, object, name, value):
"""Sets the attribute of a WSDL object.
Example:
| ${order search request}= | Create Wsdl Object | OrderSearchRequest | |
| Set Wsdl Object Attribute | ${order search request} | id | 4065 |
"""
self._assert_is_suds_object(object)
getattr(object, name)
setattr(object, name, value)
def get_wsdl_object_attribute(self, object, name):
"""Gets the attribute of a WSDL object.
Extendend variable syntax may be used to access attributes; however,
some WSDL objects may have attribute names that are illegal in Python,
necessitating this keyword.
Example:
| ${sale record}= | Call Soap Method | getLastSale | |
| ${price}= | Get Wsdl Object Attribute | ${sale record} | Price |
"""
self._assert_is_suds_object(object)
return getattr(object, name)
def create_wsdl_object(self, type, *name_value_pairs):
"""Creates a WSDL object of the specified `type`.
Requested `type` must be defined in the WSDL, in an import specified
by the WSDL, or with `Add Doctor Import`. `type` is case sensitive.
Example:
| ${contact}= | Create Wsdl Object | Contact | |
| Set Wsdl Object Attribute | ${contact} | Name | Kelly Newman |
Attribute values can be set by passing the attribute name and value in
pairs. This is equivalent to the two lines above:
| ${contact}= | Create Wsdl Object | Contact | Name | Kelly Newman |
"""
if len(name_value_pairs) % 2 != 0:
raise ValueError("Creating a WSDL object failed. There should be "
"an even number of name-value pairs.")
obj = self._client().factory.create(type)
for i in range(0, len(name_value_pairs), 2):
|
return obj
# private
def _assert_is_suds_object(self, object):
if not isinstance(object, SudsObject):
raise ValueError("Object must be a WSDL object (suds.sudsobject.Object).")
| self.set_wsdl_object_attribute(obj, name_value_pairs[i], name_value_pairs[i + 1]) | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.