file_name (large_string, length 4–140) | prefix (large_string, length 0–39k) | suffix (large_string, length 0–36.1k) | middle (large_string, length 0–29.4k) | fim_type (large_string, 4 classes)
---|---|---|---|---
Bootstrap.js | /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) |
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // identity function for calling harmony imports with the correct context
/******/ __webpack_require__.i = function(value) { return value; };
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, {
/******/ configurable: false,
/******/ enumerable: true,
/******/ get: getter
/******/ });
/******/ }
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = 4);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */,
/* 1 */,
/* 2 */
/***/ (function(module, exports) {
// removed by extract-text-webpack-plugin
/***/ }),
/* 3 */,
/* 4 */
/***/ (function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(2);
/***/ })
/******/ ]);
//# sourceMappingURL=Bootstrap.js.map | {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ } | identifier_body |
MailAddressToGroup.ts | import {create} from "../../common/utils/EntityUtils.js"
import {TypeRef, downcast} from "@tutao/tutanota-utils"
import type {TypeModel} from "../../common/EntityTypes.js"
export const MailAddressToGroupTypeRef: TypeRef<MailAddressToGroup> = new TypeRef("sys", "MailAddressToGroup")
export const _TypeModel: TypeModel = {
"name": "MailAddressToGroup",
"since": 1,
"type": "ELEMENT_TYPE",
"id": 204,
"rootId": "A3N5cwAAzA",
"versioned": false,
"encrypted": false,
"values": {
"_format": {
"id": 208,
"type": "Number",
"cardinality": "One",
"final": false,
"encrypted": false
},
"_id": {
"id": 206,
"type": "CustomId",
"cardinality": "One",
"final": true,
"encrypted": false
},
"_ownerGroup": {
"id": 1019,
"type": "GeneratedId",
"cardinality": "ZeroOrOne",
"final": true, | "encrypted": false
},
"_permissions": {
"id": 207,
"type": "GeneratedId",
"cardinality": "One",
"final": true,
"encrypted": false
}
},
"associations": {
"internalGroup": {
"id": 209,
"type": "ELEMENT_ASSOCIATION",
"cardinality": "ZeroOrOne",
"final": false,
"refType": "Group"
}
},
"app": "sys",
"version": "72"
}
export function createMailAddressToGroup(values?: Partial<MailAddressToGroup>): MailAddressToGroup {
return Object.assign(create(_TypeModel, MailAddressToGroupTypeRef), downcast<MailAddressToGroup>(values))
}
export type MailAddressToGroup = {
_type: TypeRef<MailAddressToGroup>;
_format: NumberString;
_id: Id;
_ownerGroup: null | Id;
_permissions: Id;
internalGroup: null | Id;
} | random_line_split |
|
MailAddressToGroup.ts | import {create} from "../../common/utils/EntityUtils.js"
import {TypeRef, downcast} from "@tutao/tutanota-utils"
import type {TypeModel} from "../../common/EntityTypes.js"
export const MailAddressToGroupTypeRef: TypeRef<MailAddressToGroup> = new TypeRef("sys", "MailAddressToGroup")
export const _TypeModel: TypeModel = {
"name": "MailAddressToGroup",
"since": 1,
"type": "ELEMENT_TYPE",
"id": 204,
"rootId": "A3N5cwAAzA",
"versioned": false,
"encrypted": false,
"values": {
"_format": {
"id": 208,
"type": "Number",
"cardinality": "One",
"final": false,
"encrypted": false
},
"_id": {
"id": 206,
"type": "CustomId",
"cardinality": "One",
"final": true,
"encrypted": false
},
"_ownerGroup": {
"id": 1019,
"type": "GeneratedId",
"cardinality": "ZeroOrOne",
"final": true,
"encrypted": false
},
"_permissions": {
"id": 207,
"type": "GeneratedId",
"cardinality": "One",
"final": true,
"encrypted": false
}
},
"associations": {
"internalGroup": {
"id": 209,
"type": "ELEMENT_ASSOCIATION",
"cardinality": "ZeroOrOne",
"final": false,
"refType": "Group"
}
},
"app": "sys",
"version": "72"
}
export function createMailAddressToGroup(values?: Partial<MailAddressToGroup>): MailAddressToGroup |
export type MailAddressToGroup = {
_type: TypeRef<MailAddressToGroup>;
_format: NumberString;
_id: Id;
_ownerGroup: null | Id;
_permissions: Id;
internalGroup: null | Id;
} | {
return Object.assign(create(_TypeModel, MailAddressToGroupTypeRef), downcast<MailAddressToGroup>(values))
} | identifier_body |
MailAddressToGroup.ts | import {create} from "../../common/utils/EntityUtils.js"
import {TypeRef, downcast} from "@tutao/tutanota-utils"
import type {TypeModel} from "../../common/EntityTypes.js"
export const MailAddressToGroupTypeRef: TypeRef<MailAddressToGroup> = new TypeRef("sys", "MailAddressToGroup")
export const _TypeModel: TypeModel = {
"name": "MailAddressToGroup",
"since": 1,
"type": "ELEMENT_TYPE",
"id": 204,
"rootId": "A3N5cwAAzA",
"versioned": false,
"encrypted": false,
"values": {
"_format": {
"id": 208,
"type": "Number",
"cardinality": "One",
"final": false,
"encrypted": false
},
"_id": {
"id": 206,
"type": "CustomId",
"cardinality": "One",
"final": true,
"encrypted": false
},
"_ownerGroup": {
"id": 1019,
"type": "GeneratedId",
"cardinality": "ZeroOrOne",
"final": true,
"encrypted": false
},
"_permissions": {
"id": 207,
"type": "GeneratedId",
"cardinality": "One",
"final": true,
"encrypted": false
}
},
"associations": {
"internalGroup": {
"id": 209,
"type": "ELEMENT_ASSOCIATION",
"cardinality": "ZeroOrOne",
"final": false,
"refType": "Group"
}
},
"app": "sys",
"version": "72"
}
export function | (values?: Partial<MailAddressToGroup>): MailAddressToGroup {
return Object.assign(create(_TypeModel, MailAddressToGroupTypeRef), downcast<MailAddressToGroup>(values))
}
export type MailAddressToGroup = {
_type: TypeRef<MailAddressToGroup>;
_format: NumberString;
_id: Id;
_ownerGroup: null | Id;
_permissions: Id;
internalGroup: null | Id;
} | createMailAddressToGroup | identifier_name |
objects.py | import _metagam3d
from _metagam3d import AxisAlignment, AlignmentType
from metagam3d.channels import blocking
from metagam3d.scripts import m3d_expr
from concurrence import Tasklet
class LoadError(Exception):
pass
class Object(_metagam3d.Object):
def __init__(self, objid):
_metagam3d.Object.__init__(self, objid)
self._params = {}
def param(self, paramid):
"Get parameter object for given parameter id"
try:
return self._params[paramid]
except KeyError:
pass
param = ObjectParam(self, paramid)
self._params[paramid] = param
return param
def load(self, filename, flags=0):
"Load and return new subobject from file"
objid = _metagam3d._loadObject(filename, self.id, flags)
if objid is None:
raise LoadError("Error loading %s" % filename)
return Object(objid)
def createText(self, axisAlignment=AxisAlignment.XY_PLANE, alignment=AlignmentType.CENTER_CENTER):
"Create text object"
return Object(_metagam3d._createText(self.id, axisAlignment, alignment))
def getParam(self, paramid, t):
return self.param(paramid).getValue(t)
def setParam(self, paramid, val):
if type(val) is not _metagam3d.DynamicValue:
if type(val) is not _metagam3d.Variant:
|
val = _metagam3d.DynamicValue(val)
self.param(paramid).setValue(val)
def setParam3(self, paramid, x, y, z):
self.setParam(paramid, _metagam3d.Vec3d(x, y, z))
def setParamExpr(self, paramid, expr, till=None):
self.param(paramid).setValue(m3d_expr(expr, till))
def assignMaterial(self, geodeName, ambient=0, diffuse=0, specular=0, emission=0, shininess=0):
_metagam3d._assignMaterial(self.id, geodeName, ambient, diffuse, specular, emission, shininess)
def createConsole(self, cols=80, rows=25, fontSize=1.0):
return Console(_metagam3d._createConsole(self.id, cols, rows, fontSize))
def createLine(self):
return Object(_metagam3d._createLine(self.id))
def destroyAfter(self, t):
Tasklet.new(self._destroyAfter)(t)
def _destroyAfter(self, t):
Tasklet.sleep(t)
self.destroy()
class Console(Object):
def println(self, elements):
line = _metagam3d.ConsoleLine()
for el in elements:
line.add(_metagam3d.ConsoleLineElement(el[0], el[1]))
_metagam3d._printConsole(self.id, line)
class ObjectParam(_metagam3d.ObjectParam):
def __init__(self, obj, paramid):
_metagam3d.ObjectParam.__init__(self, obj.id, paramid)
self._obj = obj
@property
def obj(self):
return self._obj
def load(filename, flags=0):
"Load root level object from file"
objid = _metagam3d._loadObject(filename, 0, flags)
if objid is None:
raise LoadError("Error loading %s" % filename)
return Object(objid)
def createText(axisAlignment=AxisAlignment.XY_PLANE, alignment=AlignmentType.CENTER_CENTER):
"Create text object"
return Object(_metagam3d._createText(0, axisAlignment, alignment))
def createConsole(cols=80, rows=25, fontSize=1.0):
return Console(_metagam3d._createConsole(0, cols, rows, fontSize))
def createLine():
return Object(_metagam3d._createLine(0))
| val = _metagam3d.Variant(val) | conditional_block |
objects.py | import _metagam3d
from _metagam3d import AxisAlignment, AlignmentType
from metagam3d.channels import blocking
from metagam3d.scripts import m3d_expr
from concurrence import Tasklet
class LoadError(Exception):
pass
class Object(_metagam3d.Object):
def __init__(self, objid):
_metagam3d.Object.__init__(self, objid)
self._params = {}
def param(self, paramid):
"Get parameter object for given parameter id"
try:
return self._params[paramid]
except KeyError:
pass
param = ObjectParam(self, paramid)
self._params[paramid] = param
return param
def load(self, filename, flags=0):
"Load and return new subobject from file"
objid = _metagam3d._loadObject(filename, self.id, flags)
if objid is None:
raise LoadError("Error loading %s" % filename)
return Object(objid)
def createText(self, axisAlignment=AxisAlignment.XY_PLANE, alignment=AlignmentType.CENTER_CENTER):
"Create text object"
return Object(_metagam3d._createText(self.id, axisAlignment, alignment))
def getParam(self, paramid, t):
return self.param(paramid).getValue(t)
def setParam(self, paramid, val):
if type(val) is not _metagam3d.DynamicValue:
if type(val) is not _metagam3d.Variant:
val = _metagam3d.Variant(val)
val = _metagam3d.DynamicValue(val)
self.param(paramid).setValue(val)
def setParam3(self, paramid, x, y, z):
self.setParam(paramid, _metagam3d.Vec3d(x, y, z))
def setParamExpr(self, paramid, expr, till=None):
self.param(paramid).setValue(m3d_expr(expr, till))
def assignMaterial(self, geodeName, ambient=0, diffuse=0, specular=0, emission=0, shininess=0):
_metagam3d._assignMaterial(self.id, geodeName, ambient, diffuse, specular, emission, shininess)
def createConsole(self, cols=80, rows=25, fontSize=1.0):
return Console(_metagam3d._createConsole(self.id, cols, rows, fontSize))
def createLine(self):
return Object(_metagam3d._createLine(self.id))
def destroyAfter(self, t):
Tasklet.new(self._destroyAfter)(t)
def _destroyAfter(self, t):
Tasklet.sleep(t)
self.destroy()
class Console(Object):
def println(self, elements):
line = _metagam3d.ConsoleLine()
for el in elements:
line.add(_metagam3d.ConsoleLineElement(el[0], el[1]))
_metagam3d._printConsole(self.id, line)
class ObjectParam(_metagam3d.ObjectParam):
def __init__(self, obj, paramid):
_metagam3d.ObjectParam.__init__(self, obj.id, paramid)
self._obj = obj
@property
def obj(self):
return self._obj
def load(filename, flags=0):
"Load root level object from file"
objid = _metagam3d._loadObject(filename, 0, flags)
if objid is None:
raise LoadError("Error loading %s" % filename)
return Object(objid)
def | (axisAlignment=AxisAlignment.XY_PLANE, alignment=AlignmentType.CENTER_CENTER):
"Create text object"
return Object(_metagam3d._createText(0, axisAlignment, alignment))
def createConsole(cols=80, rows=25, fontSize=1.0):
return Console(_metagam3d._createConsole(0, cols, rows, fontSize))
def createLine():
return Object(_metagam3d._createLine(0))
| createText | identifier_name |
objects.py | import _metagam3d
from _metagam3d import AxisAlignment, AlignmentType
from metagam3d.channels import blocking
from metagam3d.scripts import m3d_expr
from concurrence import Tasklet
class LoadError(Exception):
pass
class Object(_metagam3d.Object):
def __init__(self, objid):
_metagam3d.Object.__init__(self, objid)
self._params = {}
def param(self, paramid):
"Get parameter object for given parameter id"
try:
return self._params[paramid]
except KeyError:
pass
param = ObjectParam(self, paramid)
self._params[paramid] = param
return param
def load(self, filename, flags=0):
"Load and return new subobject from file"
objid = _metagam3d._loadObject(filename, self.id, flags)
if objid is None:
raise LoadError("Error loading %s" % filename)
return Object(objid)
def createText(self, axisAlignment=AxisAlignment.XY_PLANE, alignment=AlignmentType.CENTER_CENTER):
"Create text object"
return Object(_metagam3d._createText(self.id, axisAlignment, alignment))
| if type(val) is not _metagam3d.Variant:
val = _metagam3d.Variant(val)
val = _metagam3d.DynamicValue(val)
self.param(paramid).setValue(val)
def setParam3(self, paramid, x, y, z):
self.setParam(paramid, _metagam3d.Vec3d(x, y, z))
def setParamExpr(self, paramid, expr, till=None):
self.param(paramid).setValue(m3d_expr(expr, till))
def assignMaterial(self, geodeName, ambient=0, diffuse=0, specular=0, emission=0, shininess=0):
_metagam3d._assignMaterial(self.id, geodeName, ambient, diffuse, specular, emission, shininess)
def createConsole(self, cols=80, rows=25, fontSize=1.0):
return Console(_metagam3d._createConsole(self.id, cols, rows, fontSize))
def createLine(self):
return Object(_metagam3d._createLine(self.id))
def destroyAfter(self, t):
Tasklet.new(self._destroyAfter)(t)
def _destroyAfter(self, t):
Tasklet.sleep(t)
self.destroy()
class Console(Object):
def println(self, elements):
line = _metagam3d.ConsoleLine()
for el in elements:
line.add(_metagam3d.ConsoleLineElement(el[0], el[1]))
_metagam3d._printConsole(self.id, line)
class ObjectParam(_metagam3d.ObjectParam):
def __init__(self, obj, paramid):
_metagam3d.ObjectParam.__init__(self, obj.id, paramid)
self._obj = obj
@property
def obj(self):
return self._obj
def load(filename, flags=0):
"Load root level object from file"
objid = _metagam3d._loadObject(filename, 0, flags)
if objid is None:
raise LoadError("Error loading %s" % filename)
return Object(objid)
def createText(axisAlignment=AxisAlignment.XY_PLANE, alignment=AlignmentType.CENTER_CENTER):
"Create text object"
return Object(_metagam3d._createText(0, axisAlignment, alignment))
def createConsole(cols=80, rows=25, fontSize=1.0):
return Console(_metagam3d._createConsole(0, cols, rows, fontSize))
def createLine():
return Object(_metagam3d._createLine(0)) | def getParam(self, paramid, t):
return self.param(paramid).getValue(t)
def setParam(self, paramid, val):
if type(val) is not _metagam3d.DynamicValue: | random_line_split |
objects.py | import _metagam3d
from _metagam3d import AxisAlignment, AlignmentType
from metagam3d.channels import blocking
from metagam3d.scripts import m3d_expr
from concurrence import Tasklet
class LoadError(Exception):
pass
class Object(_metagam3d.Object):
def __init__(self, objid):
_metagam3d.Object.__init__(self, objid)
self._params = {}
def param(self, paramid):
"Get parameter object for given parameter id"
try:
return self._params[paramid]
except KeyError:
pass
param = ObjectParam(self, paramid)
self._params[paramid] = param
return param
def load(self, filename, flags=0):
"Load and return new subobject from file"
objid = _metagam3d._loadObject(filename, self.id, flags)
if objid is None:
raise LoadError("Error loading %s" % filename)
return Object(objid)
def createText(self, axisAlignment=AxisAlignment.XY_PLANE, alignment=AlignmentType.CENTER_CENTER):
"Create text object"
return Object(_metagam3d._createText(self.id, axisAlignment, alignment))
def getParam(self, paramid, t):
return self.param(paramid).getValue(t)
def setParam(self, paramid, val):
if type(val) is not _metagam3d.DynamicValue:
if type(val) is not _metagam3d.Variant:
val = _metagam3d.Variant(val)
val = _metagam3d.DynamicValue(val)
self.param(paramid).setValue(val)
def setParam3(self, paramid, x, y, z):
self.setParam(paramid, _metagam3d.Vec3d(x, y, z))
def setParamExpr(self, paramid, expr, till=None):
|
def assignMaterial(self, geodeName, ambient=0, diffuse=0, specular=0, emission=0, shininess=0):
_metagam3d._assignMaterial(self.id, geodeName, ambient, diffuse, specular, emission, shininess)
def createConsole(self, cols=80, rows=25, fontSize=1.0):
return Console(_metagam3d._createConsole(self.id, cols, rows, fontSize))
def createLine(self):
return Object(_metagam3d._createLine(self.id))
def destroyAfter(self, t):
Tasklet.new(self._destroyAfter)(t)
def _destroyAfter(self, t):
Tasklet.sleep(t)
self.destroy()
class Console(Object):
def println(self, elements):
line = _metagam3d.ConsoleLine()
for el in elements:
line.add(_metagam3d.ConsoleLineElement(el[0], el[1]))
_metagam3d._printConsole(self.id, line)
class ObjectParam(_metagam3d.ObjectParam):
def __init__(self, obj, paramid):
_metagam3d.ObjectParam.__init__(self, obj.id, paramid)
self._obj = obj
@property
def obj(self):
return self._obj
def load(filename, flags=0):
"Load root level object from file"
objid = _metagam3d._loadObject(filename, 0, flags)
if objid is None:
raise LoadError("Error loading %s" % filename)
return Object(objid)
def createText(axisAlignment=AxisAlignment.XY_PLANE, alignment=AlignmentType.CENTER_CENTER):
"Create text object"
return Object(_metagam3d._createText(0, axisAlignment, alignment))
def createConsole(cols=80, rows=25, fontSize=1.0):
return Console(_metagam3d._createConsole(0, cols, rows, fontSize))
def createLine():
return Object(_metagam3d._createLine(0))
| self.param(paramid).setValue(m3d_expr(expr, till)) | identifier_body |
AoFecharAspas.js | /* Copyright 2017 Assembleia Legislativa de Minas Gerais
*
* This file is part of Editor-Articulacao.
*
* Editor-Articulacao is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, version 3.
*
* Editor-Articulacao is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | *
* You should have received a copy of the GNU Lesser General Public License
* along with Editor-Articulacao. If not, see <http://www.gnu.org/licenses/>.
*/
import { TransformacaoDoProximo } from './Transformacao';
/**
 * When the user closes the quotation marks in the continuation of an article's caput,
 * a new article is created.
*/
class AoFecharAspas extends TransformacaoDoProximo {
constructor() {
super('"\n', '".\n');
}
get tipoTransformacao() {
return 'AoFecharAspas';
}
proximoTipo(editor, ctrl, contexto) {
return contexto.cursor.artigo && contexto.cursor.continuacao ? 'artigo' : null;
}
}
export default AoFecharAspas; | * GNU Lesser General Public License for more details. | random_line_split |
AoFecharAspas.js | /* Copyright 2017 Assembleia Legislativa de Minas Gerais
*
* This file is part of Editor-Articulacao.
*
* Editor-Articulacao is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, version 3.
*
* Editor-Articulacao is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Editor-Articulacao. If not, see <http://www.gnu.org/licenses/>.
*/
import { TransformacaoDoProximo } from './Transformacao';
/**
 * When the user closes the quotation marks in the continuation of an article's caput,
 * a new article is created.
*/
class AoFe | ends TransformacaoDoProximo {
constructor() {
super('"\n', '".\n');
}
get tipoTransformacao() {
return 'AoFecharAspas';
}
proximoTipo(editor, ctrl, contexto) {
return contexto.cursor.artigo && contexto.cursor.continuacao ? 'artigo' : null;
}
}
export default AoFecharAspas; | charAspas ext | identifier_name |
cli.py | #!/usr/bin/env python
# Copyright 2017, Major Hayden <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle all shell commands/arguments/options."""
import importlib
import os
import pkgutil
import sys
import click
context_settings = dict(auto_envvar_prefix='MonitorStack')
class Context(object):
"""Set up a context object that we can pass."""
def __init__(self):
"""Initialize class."""
self.verbose = False
self.home = os.getcwd()
def log(self, msg, *args):
"""Log a message to stderr."""
click.echo(msg, file=sys.stderr)
def vlog(self, msg, *args):
"""Log a message to stderr only if verbose is enabled."""
if self.verbose:
self.log(msg, *args)
pass_context = click.make_pass_decorator(Context, ensure=True)
class MonitorStackCLI(click.MultiCommand):
"""Create a complex command finder."""
@property
def cmd_folder(self):
"""Get the path to the plugin directory."""
return os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'plugins'
)
)
def list_commands(self, ctx):
"""Get a list of all available commands."""
rv = list()
for _, pkg_name, _ in pkgutil.iter_modules([self.cmd_folder]):
rv.append(pkg_name)
else:
return sorted(rv)
def get_command(self, ctx, name):
|
VALID_OUTPUT_FORMATS = [
'json',
'line',
'telegraf',
'rax-maas'
]
@click.command(cls=MonitorStackCLI, context_settings=context_settings)
@click.option(
'-f', '--format', 'output_format',
type=click.Choice(VALID_OUTPUT_FORMATS),
default='json',
help="Output format (valid options: {}".format(
', '.join(VALID_OUTPUT_FORMATS)
),
)
@click.option('-v', '--verbose', is_flag=True, help='Enables verbose mode.')
@pass_context
def cli(*args, **kwargs):
"""A complex command line interface."""
try:
args[0].verbose = kwargs.get('verbose', False)
except IndexError: # pragma: no cover
pass
@cli.resultcallback(replace=True)
def process_result(results, output_format, **kwargs):
"""Render the output into the proper format."""
module_name = 'monitorstack.common.formatters'
method_name = 'write_{}'.format(output_format.replace('-', '_'))
output_formatter = getattr(
importlib.import_module(module_name),
method_name
)
# Force the output formatter into a list
if not isinstance(results, list): # pragma: no cover
results = [results]
exit_code = 0
for result in results:
output_formatter(result)
if result['exit_code'] != 0:
exit_code = result['exit_code']
else:
sys.exit(exit_code)
if __name__ == '__main__': # pragma: no cover
topdir = os.path.normpath(
os.path.join(
os.path.abspath(
sys.argv[0]
),
os.pardir,
os.pardir
)
)
sys.path.insert(0, topdir)
cli()
| """Load a command and run it."""
for _, pkg_name, _ in pkgutil.iter_modules([self.cmd_folder]):
if pkg_name == name:
mod = importlib.import_module(
'monitorstack.plugins.{}'.format(name)
)
return getattr(mod, 'cli')
else:
raise SystemExit('Module "{}" Not Found.'.format(name)) | identifier_body |
cli.py | #!/usr/bin/env python
# Copyright 2017, Major Hayden <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle all shell commands/arguments/options."""
import importlib
import os
import pkgutil
import sys
import click
context_settings = dict(auto_envvar_prefix='MonitorStack')
class Context(object):
"""Set up a context object that we can pass."""
def __init__(self):
"""Initialize class."""
self.verbose = False
self.home = os.getcwd()
def log(self, msg, *args):
"""Log a message to stderr."""
click.echo(msg, file=sys.stderr)
def vlog(self, msg, *args):
"""Log a message to stderr only if verbose is enabled."""
if self.verbose:
self.log(msg, *args)
pass_context = click.make_pass_decorator(Context, ensure=True)
class MonitorStackCLI(click.MultiCommand):
"""Create a complex command finder."""
@property
def cmd_folder(self):
"""Get the path to the plugin directory."""
return os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'plugins'
)
)
def list_commands(self, ctx):
"""Get a list of all available commands."""
rv = list()
for _, pkg_name, _ in pkgutil.iter_modules([self.cmd_folder]):
rv.append(pkg_name)
else:
return sorted(rv)
def get_command(self, ctx, name):
"""Load a command and run it."""
for _, pkg_name, _ in pkgutil.iter_modules([self.cmd_folder]):
|
else:
raise SystemExit('Module "{}" Not Found.'.format(name))
VALID_OUTPUT_FORMATS = [
'json',
'line',
'telegraf',
'rax-maas'
]
@click.command(cls=MonitorStackCLI, context_settings=context_settings)
@click.option(
'-f', '--format', 'output_format',
type=click.Choice(VALID_OUTPUT_FORMATS),
default='json',
help="Output format (valid options: {}".format(
', '.join(VALID_OUTPUT_FORMATS)
),
)
@click.option('-v', '--verbose', is_flag=True, help='Enables verbose mode.')
@pass_context
def cli(*args, **kwargs):
"""A complex command line interface."""
try:
args[0].verbose = kwargs.get('verbose', False)
except IndexError: # pragma: no cover
pass
@cli.resultcallback(replace=True)
def process_result(results, output_format, **kwargs):
"""Render the output into the proper format."""
module_name = 'monitorstack.common.formatters'
method_name = 'write_{}'.format(output_format.replace('-', '_'))
output_formatter = getattr(
importlib.import_module(module_name),
method_name
)
# Force the output formatter into a list
if not isinstance(results, list): # pragma: no cover
results = [results]
exit_code = 0
for result in results:
output_formatter(result)
if result['exit_code'] != 0:
exit_code = result['exit_code']
else:
sys.exit(exit_code)
if __name__ == '__main__': # pragma: no cover
topdir = os.path.normpath(
os.path.join(
os.path.abspath(
sys.argv[0]
),
os.pardir,
os.pardir
)
)
sys.path.insert(0, topdir)
cli()
| if pkg_name == name:
mod = importlib.import_module(
'monitorstack.plugins.{}'.format(name)
)
return getattr(mod, 'cli') | conditional_block |
cli.py | #!/usr/bin/env python
# Copyright 2017, Major Hayden <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle all shell commands/arguments/options."""
import importlib |
import click
context_settings = dict(auto_envvar_prefix='MonitorStack')
class Context(object):
"""Set up a context object that we can pass."""
def __init__(self):
"""Initialize class."""
self.verbose = False
self.home = os.getcwd()
def log(self, msg, *args):
"""Log a message to stderr."""
click.echo(msg, file=sys.stderr)
def vlog(self, msg, *args):
"""Log a message to stderr only if verbose is enabled."""
if self.verbose:
self.log(msg, *args)
pass_context = click.make_pass_decorator(Context, ensure=True)
class MonitorStackCLI(click.MultiCommand):
"""Create a complex command finder."""
@property
def cmd_folder(self):
"""Get the path to the plugin directory."""
return os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'plugins'
)
)
def list_commands(self, ctx):
"""Get a list of all available commands."""
rv = list()
for _, pkg_name, _ in pkgutil.iter_modules([self.cmd_folder]):
rv.append(pkg_name)
else:
return sorted(rv)
def get_command(self, ctx, name):
"""Load a command and run it."""
for _, pkg_name, _ in pkgutil.iter_modules([self.cmd_folder]):
if pkg_name == name:
mod = importlib.import_module(
'monitorstack.plugins.{}'.format(name)
)
return getattr(mod, 'cli')
else:
raise SystemExit('Module "{}" Not Found.'.format(name))
VALID_OUTPUT_FORMATS = [
'json',
'line',
'telegraf',
'rax-maas'
]
@click.command(cls=MonitorStackCLI, context_settings=context_settings)
@click.option(
'-f', '--format', 'output_format',
type=click.Choice(VALID_OUTPUT_FORMATS),
default='json',
help="Output format (valid options: {}".format(
', '.join(VALID_OUTPUT_FORMATS)
),
)
@click.option('-v', '--verbose', is_flag=True, help='Enables verbose mode.')
@pass_context
def cli(*args, **kwargs):
"""A complex command line interface."""
try:
args[0].verbose = kwargs.get('verbose', False)
except IndexError: # pragma: no cover
pass
@cli.resultcallback(replace=True)
def process_result(results, output_format, **kwargs):
"""Render the output into the proper format."""
module_name = 'monitorstack.common.formatters'
method_name = 'write_{}'.format(output_format.replace('-', '_'))
output_formatter = getattr(
importlib.import_module(module_name),
method_name
)
# Force the output formatter into a list
if not isinstance(results, list): # pragma: no cover
results = [results]
exit_code = 0
for result in results:
output_formatter(result)
if result['exit_code'] != 0:
exit_code = result['exit_code']
else:
sys.exit(exit_code)
if __name__ == '__main__': # pragma: no cover
topdir = os.path.normpath(
os.path.join(
os.path.abspath(
sys.argv[0]
),
os.pardir,
os.pardir
)
)
sys.path.insert(0, topdir)
cli() | import os
import pkgutil
import sys | random_line_split |
cli.py | #!/usr/bin/env python
# Copyright 2017, Major Hayden <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle all shell commands/arguments/options."""
import importlib
import os
import pkgutil
import sys
import click
context_settings = dict(auto_envvar_prefix='MonitorStack')
class Context(object):
"""Set up a context object that we can pass."""
def __init__(self):
"""Initialize class."""
self.verbose = False
self.home = os.getcwd()
def log(self, msg, *args):
"""Log a message to stderr."""
click.echo(msg, file=sys.stderr)
def vlog(self, msg, *args):
"""Log a message to stderr only if verbose is enabled."""
if self.verbose:
self.log(msg, *args)
pass_context = click.make_pass_decorator(Context, ensure=True)
class MonitorStackCLI(click.MultiCommand):
"""Create a complex command finder."""
@property
def cmd_folder(self):
"""Get the path to the plugin directory."""
return os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'plugins'
)
)
def | (self, ctx):
"""Get a list of all available commands."""
rv = list()
for _, pkg_name, _ in pkgutil.iter_modules([self.cmd_folder]):
rv.append(pkg_name)
else:
return sorted(rv)
def get_command(self, ctx, name):
"""Load a command and run it."""
for _, pkg_name, _ in pkgutil.iter_modules([self.cmd_folder]):
if pkg_name == name:
mod = importlib.import_module(
'monitorstack.plugins.{}'.format(name)
)
return getattr(mod, 'cli')
else:
raise SystemExit('Module "{}" Not Found.'.format(name))
VALID_OUTPUT_FORMATS = [
'json',
'line',
'telegraf',
'rax-maas'
]
@click.command(cls=MonitorStackCLI, context_settings=context_settings)
@click.option(
'-f', '--format', 'output_format',
type=click.Choice(VALID_OUTPUT_FORMATS),
default='json',
help="Output format (valid options: {}".format(
', '.join(VALID_OUTPUT_FORMATS)
),
)
@click.option('-v', '--verbose', is_flag=True, help='Enables verbose mode.')
@pass_context
def cli(*args, **kwargs):
"""A complex command line interface."""
try:
args[0].verbose = kwargs.get('verbose', False)
except IndexError: # pragma: no cover
pass
@cli.resultcallback(replace=True)
def process_result(results, output_format, **kwargs):
"""Render the output into the proper format."""
module_name = 'monitorstack.common.formatters'
method_name = 'write_{}'.format(output_format.replace('-', '_'))
output_formatter = getattr(
importlib.import_module(module_name),
method_name
)
# Force the output formatter into a list
if not isinstance(results, list): # pragma: no cover
results = [results]
exit_code = 0
for result in results:
output_formatter(result)
if result['exit_code'] != 0:
exit_code = result['exit_code']
else:
sys.exit(exit_code)
if __name__ == '__main__': # pragma: no cover
topdir = os.path.normpath(
os.path.join(
os.path.abspath(
sys.argv[0]
),
os.pardir,
os.pardir
)
)
sys.path.insert(0, topdir)
cli()
| list_commands | identifier_name |
ezRPConfig.py | # Copyright (C) 2013-2015 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Module to share configuration information across modules.
This global object is used throughout to store and retrieve configuration.
This is to avoid passing gConfig as variables throughout.
All the configurations needed are added in ezReverseProxy.
'''
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
import sys
import os
import time
from gevent.queue import JoinableQueue
from ezbake.reverseproxy.thriftapi.ttypes import AuthorizationOperation
'''
We want addGreenlets() and kill() to access global members without an instance,
perhaps the simplest idea is to just make them simple functions outside the class,
not class methods. I tried @staticmethod decorator.
#class EzRPConfig(object):
'''
appName = 'EzBakeFrontend'
watches = {}
containerDir = os.path.abspath(os.path.join(os.path.abspath(__file__),os.pardir,os.pardir,os.pardir,os.pardir))
configurationChangeQueue = JoinableQueue()
run = True
clientService = None
zkMonitor = None
cfgGreenlet = None
wGreenlet = None
current_milli_time = lambda: int(round(time.time() * 1000))
if getattr(sys, 'frozen', False):
containerDir = os.path.abspath(os.path.join(os.path.dirname(sys.executable),os.pardir,os.pardir))
templateDir = os.path.join(containerDir,'app','templates')
nginx = os.path.join(containerDir,'app','nginx')
eznginxlibpath = os.path.join(containerDir,'libs')
workingDirectory = os.path.join(containerDir,'wd')
logDirectory = os.path.join(containerDir,'logs')
eznginxmoduleLogProp = os.path.join(logDirectory,'log4j.properties')
configDirectory = os.path.join(workingDirectory,'conf')
mainConfig = os.path.join(configDirectory,'nginx.conf') | confdDirectory = os.path.join(configDirectory,'conf.d')
manualDirectory = os.path.join(containerDir,'manual')
ezconfig_dir = os.path.join(containerDir, 'config')
htmlRootDir = os.path.join(containerDir, 'static_content')
favicon_file = os.path.join(htmlRootDir, 'ezbstatic', 'images', 'favicon.ico')
# external facing ssl files for nginx
ssl_cadir = os.path.join(ezconfig_dir,'ssl/user_ca_files')
ssl_keyfile = os.path.join(ezconfig_dir,'ssl/server/server.key')
ssl_certfile = os.path.join(ezconfig_dir,'ssl/server/server.crt')
ssl_server_certs = os.path.join(workingDirectory, 'ssl')
ssl_server_certs_dirs = [os.path.join(workingDirectory, 'ssl_a'), os.path.join(workingDirectory, 'ssl_b')]
ssl_cafile = os.path.join(containerDir,'wd','CAchain.pem')
# internal ssl files for thrift service w/in EzBake
ezEtc = os.path.join(containerDir,'etc')
ezcertdir = os.path.join(containerDir,'etc/ezbake/pki/cert/config/ssl')
ez_keyfile = os.path.join(ezcertdir,'application.priv')
ez_cafile = os.path.join(ezcertdir,'ezbakeca.crt')
ez_certfile = os.path.join(ezcertdir,'application.crt')
# Static content directory to serve per site static content
static_contents = os.path.join(containerDir,'ezbappstatic')
static_contents_dirs = [os.path.join(containerDir, 'sc_a'), os.path.join(containerDir, 'sc_b')]
mainConfigTemplate = os.path.join(templateDir,'nginx.conf')
mimeTemplate = os.path.join(templateDir,'mime.types')
mimeConfig = os.path.join(configDirectory,'mime.types')
nginxPidFile = os.path.join(workingDirectory,'nginx_%d.pid' % os.getpid())
shutdownFile = os.path.join(workingDirectory,'delete_this_file_to_shutdown_efe')
ezproxyciphers = "HIGH:!DSS:!aNULL@STRENGTH"
defaultEznginxOps = AuthorizationOperation.USER_INFO
# Restrict access to EzFrontend Thrift services to the following CN
ez_frontend_access = r'_Ez_Deployer|_Ez_EFEUI'
def addGreenlets(thriftService, kzMonitor, cfgChange, shutdown):
global clientService
global zkMonitor
global cfgGreenlet
global wGreenlet
clientService = thriftService
zkMonitor = kzMonitor
cfgGreenlet = cfgChange
wGreenlet = shutdown
def kill():
if clientService:
clientService.kill()
if zkMonitor:
zkMonitor.kill()
if cfgGreenlet:
cfgGreenlet.kill() | random_line_split |
|
ezRPConfig.py | # Copyright (C) 2013-2015 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Module to share configuration information across modules.
This global object is used throughout to store and retrieve configuration.
This is to avoid passing gConfig as variables throughout.
All the configurations needed are added in ezReverseProxy.
'''
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
import sys
import os
import time
from gevent.queue import JoinableQueue
from ezbake.reverseproxy.thriftapi.ttypes import AuthorizationOperation
'''
We want addGreenlets() and kill() to access global members without an instance,
perhaps the simplest idea is to just make them simple functions outside the class,
not class methods. I tried @staticmethod decorator.
#class EzRPConfig(object):
'''
appName = 'EzBakeFrontend'
watches = {}
containerDir = os.path.abspath(os.path.join(os.path.abspath(__file__),os.pardir,os.pardir,os.pardir,os.pardir))
configurationChangeQueue = JoinableQueue()
run = True
clientService = None
zkMonitor = None
cfgGreenlet = None
wGreenlet = None
current_milli_time = lambda: int(round(time.time() * 1000))
if getattr(sys, 'frozen', False):
containerDir = os.path.abspath(os.path.join(os.path.dirname(sys.executable),os.pardir,os.pardir))
templateDir = os.path.join(containerDir,'app','templates')
nginx = os.path.join(containerDir,'app','nginx')
eznginxlibpath = os.path.join(containerDir,'libs')
workingDirectory = os.path.join(containerDir,'wd')
logDirectory = os.path.join(containerDir,'logs')
eznginxmoduleLogProp = os.path.join(logDirectory,'log4j.properties')
configDirectory = os.path.join(workingDirectory,'conf')
mainConfig = os.path.join(configDirectory,'nginx.conf')
confdDirectory = os.path.join(configDirectory,'conf.d')
manualDirectory = os.path.join(containerDir,'manual')
ezconfig_dir = os.path.join(containerDir, 'config')
htmlRootDir = os.path.join(containerDir, 'static_content')
favicon_file = os.path.join(htmlRootDir, 'ezbstatic', 'images', 'favicon.ico')
# external facing ssl files for nginx
ssl_cadir = os.path.join(ezconfig_dir,'ssl/user_ca_files')
ssl_keyfile = os.path.join(ezconfig_dir,'ssl/server/server.key')
ssl_certfile = os.path.join(ezconfig_dir,'ssl/server/server.crt')
ssl_server_certs = os.path.join(workingDirectory, 'ssl')
ssl_server_certs_dirs = [os.path.join(workingDirectory, 'ssl_a'), os.path.join(workingDirectory, 'ssl_b')]
ssl_cafile = os.path.join(containerDir,'wd','CAchain.pem')
# internal ssl files for thrift service w/in EzBake
ezEtc = os.path.join(containerDir,'etc')
ezcertdir = os.path.join(containerDir,'etc/ezbake/pki/cert/config/ssl')
ez_keyfile = os.path.join(ezcertdir,'application.priv')
ez_cafile = os.path.join(ezcertdir,'ezbakeca.crt')
ez_certfile = os.path.join(ezcertdir,'application.crt')
# Static content directory to serve per site static content
static_contents = os.path.join(containerDir,'ezbappstatic')
static_contents_dirs = [os.path.join(containerDir, 'sc_a'), os.path.join(containerDir, 'sc_b')]
mainConfigTemplate = os.path.join(templateDir,'nginx.conf')
mimeTemplate = os.path.join(templateDir,'mime.types')
mimeConfig = os.path.join(configDirectory,'mime.types')
nginxPidFile = os.path.join(workingDirectory,'nginx_%d.pid' % os.getpid())
shutdownFile = os.path.join(workingDirectory,'delete_this_file_to_shutdown_efe')
ezproxyciphers = "HIGH:!DSS:!aNULL@STRENGTH"
defaultEznginxOps = AuthorizationOperation.USER_INFO
# Restrict access to EzFrontend Thrift services to the following CN
ez_frontend_access = r'_Ez_Deployer|_Ez_EFEUI'
def | (thriftService, kzMonitor, cfgChange, shutdown):
global clientService
global zkMonitor
global cfgGreenlet
global wGreenlet
clientService = thriftService
zkMonitor = kzMonitor
cfgGreenlet = cfgChange
wGreenlet = shutdown
def kill():
if clientService:
clientService.kill()
if zkMonitor:
zkMonitor.kill()
if cfgGreenlet:
cfgGreenlet.kill()
| addGreenlets | identifier_name |
ezRPConfig.py | # Copyright (C) 2013-2015 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Module to share configuration information across modules.
This global object is used throughout to store and retrieve configuration.
This is to avoid passing gConfig as variables throughout.
All the configurations needed are added in ezReverseProxy.
'''
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
import sys
import os
import time
from gevent.queue import JoinableQueue
from ezbake.reverseproxy.thriftapi.ttypes import AuthorizationOperation
'''
We want addGreenlets() and kill() to access global members without an instance,
perhaps the simplest idea is to just make them simple functions outside the class,
not class methods. I tried @staticmethod decorator.
#class EzRPConfig(object):
'''
appName = 'EzBakeFrontend'
watches = {}
containerDir = os.path.abspath(os.path.join(os.path.abspath(__file__),os.pardir,os.pardir,os.pardir,os.pardir))
configurationChangeQueue = JoinableQueue()
run = True
clientService = None
zkMonitor = None
cfgGreenlet = None
wGreenlet = None
current_milli_time = lambda: int(round(time.time() * 1000))
if getattr(sys, 'frozen', False):
containerDir = os.path.abspath(os.path.join(os.path.dirname(sys.executable),os.pardir,os.pardir))
templateDir = os.path.join(containerDir,'app','templates')
nginx = os.path.join(containerDir,'app','nginx')
eznginxlibpath = os.path.join(containerDir,'libs')
workingDirectory = os.path.join(containerDir,'wd')
logDirectory = os.path.join(containerDir,'logs')
eznginxmoduleLogProp = os.path.join(logDirectory,'log4j.properties')
configDirectory = os.path.join(workingDirectory,'conf')
mainConfig = os.path.join(configDirectory,'nginx.conf')
confdDirectory = os.path.join(configDirectory,'conf.d')
manualDirectory = os.path.join(containerDir,'manual')
ezconfig_dir = os.path.join(containerDir, 'config')
htmlRootDir = os.path.join(containerDir, 'static_content')
favicon_file = os.path.join(htmlRootDir, 'ezbstatic', 'images', 'favicon.ico')
# external facing ssl files for nginx
ssl_cadir = os.path.join(ezconfig_dir,'ssl/user_ca_files')
ssl_keyfile = os.path.join(ezconfig_dir,'ssl/server/server.key')
ssl_certfile = os.path.join(ezconfig_dir,'ssl/server/server.crt')
ssl_server_certs = os.path.join(workingDirectory, 'ssl')
ssl_server_certs_dirs = [os.path.join(workingDirectory, 'ssl_a'), os.path.join(workingDirectory, 'ssl_b')]
ssl_cafile = os.path.join(containerDir,'wd','CAchain.pem')
# internal ssl files for thrift service w/in EzBake
ezEtc = os.path.join(containerDir,'etc')
ezcertdir = os.path.join(containerDir,'etc/ezbake/pki/cert/config/ssl')
ez_keyfile = os.path.join(ezcertdir,'application.priv')
ez_cafile = os.path.join(ezcertdir,'ezbakeca.crt')
ez_certfile = os.path.join(ezcertdir,'application.crt')
# Static content directory to serve per site static content
static_contents = os.path.join(containerDir,'ezbappstatic')
static_contents_dirs = [os.path.join(containerDir, 'sc_a'), os.path.join(containerDir, 'sc_b')]
mainConfigTemplate = os.path.join(templateDir,'nginx.conf')
mimeTemplate = os.path.join(templateDir,'mime.types')
mimeConfig = os.path.join(configDirectory,'mime.types')
nginxPidFile = os.path.join(workingDirectory,'nginx_%d.pid' % os.getpid())
shutdownFile = os.path.join(workingDirectory,'delete_this_file_to_shutdown_efe')
ezproxyciphers = "HIGH:!DSS:!aNULL@STRENGTH"
defaultEznginxOps = AuthorizationOperation.USER_INFO
# Restrict access to EzFrontend Thrift services to the following CN
ez_frontend_access = r'_Ez_Deployer|_Ez_EFEUI'
def addGreenlets(thriftService, kzMonitor, cfgChange, shutdown):
|
def kill():
if clientService:
clientService.kill()
if zkMonitor:
zkMonitor.kill()
if cfgGreenlet:
cfgGreenlet.kill()
| global clientService
global zkMonitor
global cfgGreenlet
global wGreenlet
clientService = thriftService
zkMonitor = kzMonitor
cfgGreenlet = cfgChange
wGreenlet = shutdown | identifier_body |
ezRPConfig.py | # Copyright (C) 2013-2015 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Module to share configuration information across modules.
This global object is used throughout to store and retrieve configuration.
This is to avoid passing gConfig as variables throughout.
All the configurations needed are added in ezReverseProxy.
'''
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
import sys
import os
import time
from gevent.queue import JoinableQueue
from ezbake.reverseproxy.thriftapi.ttypes import AuthorizationOperation
'''
We want addGreenlets() and kill() to access global members without an instance,
perhaps the simplest idea is to just make them simple functions outside the class,
not class methods. I tried @staticmethod decorator.
#class EzRPConfig(object):
'''
appName = 'EzBakeFrontend'
watches = {}
containerDir = os.path.abspath(os.path.join(os.path.abspath(__file__),os.pardir,os.pardir,os.pardir,os.pardir))
configurationChangeQueue = JoinableQueue()
run = True
clientService = None
zkMonitor = None
cfgGreenlet = None
wGreenlet = None
current_milli_time = lambda: int(round(time.time() * 1000))
if getattr(sys, 'frozen', False):
containerDir = os.path.abspath(os.path.join(os.path.dirname(sys.executable),os.pardir,os.pardir))
templateDir = os.path.join(containerDir,'app','templates')
nginx = os.path.join(containerDir,'app','nginx')
eznginxlibpath = os.path.join(containerDir,'libs')
workingDirectory = os.path.join(containerDir,'wd')
logDirectory = os.path.join(containerDir,'logs')
eznginxmoduleLogProp = os.path.join(logDirectory,'log4j.properties')
configDirectory = os.path.join(workingDirectory,'conf')
mainConfig = os.path.join(configDirectory,'nginx.conf')
confdDirectory = os.path.join(configDirectory,'conf.d')
manualDirectory = os.path.join(containerDir,'manual')
ezconfig_dir = os.path.join(containerDir, 'config')
htmlRootDir = os.path.join(containerDir, 'static_content')
favicon_file = os.path.join(htmlRootDir, 'ezbstatic', 'images', 'favicon.ico')
# external facing ssl files for nginx
ssl_cadir = os.path.join(ezconfig_dir,'ssl/user_ca_files')
ssl_keyfile = os.path.join(ezconfig_dir,'ssl/server/server.key')
ssl_certfile = os.path.join(ezconfig_dir,'ssl/server/server.crt')
ssl_server_certs = os.path.join(workingDirectory, 'ssl')
ssl_server_certs_dirs = [os.path.join(workingDirectory, 'ssl_a'), os.path.join(workingDirectory, 'ssl_b')]
ssl_cafile = os.path.join(containerDir,'wd','CAchain.pem')
# internal ssl files for thrift service w/in EzBake
ezEtc = os.path.join(containerDir,'etc')
ezcertdir = os.path.join(containerDir,'etc/ezbake/pki/cert/config/ssl')
ez_keyfile = os.path.join(ezcertdir,'application.priv')
ez_cafile = os.path.join(ezcertdir,'ezbakeca.crt')
ez_certfile = os.path.join(ezcertdir,'application.crt')
# Static content directory to serve per site static content
static_contents = os.path.join(containerDir,'ezbappstatic')
static_contents_dirs = [os.path.join(containerDir, 'sc_a'), os.path.join(containerDir, 'sc_b')]
mainConfigTemplate = os.path.join(templateDir,'nginx.conf')
mimeTemplate = os.path.join(templateDir,'mime.types')
mimeConfig = os.path.join(configDirectory,'mime.types')
nginxPidFile = os.path.join(workingDirectory,'nginx_%d.pid' % os.getpid())
shutdownFile = os.path.join(workingDirectory,'delete_this_file_to_shutdown_efe')
ezproxyciphers = "HIGH:!DSS:!aNULL@STRENGTH"
defaultEznginxOps = AuthorizationOperation.USER_INFO
# Restrict access to EzFrontend Thrift services to the following CN
ez_frontend_access = r'_Ez_Deployer|_Ez_EFEUI'
def addGreenlets(thriftService, kzMonitor, cfgChange, shutdown):
global clientService
global zkMonitor
global cfgGreenlet
global wGreenlet
clientService = thriftService
zkMonitor = kzMonitor
cfgGreenlet = cfgChange
wGreenlet = shutdown
def kill():
if clientService:
clientService.kill()
if zkMonitor:
zkMonitor.kill()
if cfgGreenlet:
| cfgGreenlet.kill() | conditional_block |
|
hint.rs | use std::collections::HashMap;
pub type FieldIndex = usize;
/// Hints given when reading parcels.
#[derive(Clone, Debug, PartialEq)]
pub struct Hints {
pub current_field_index: Option<FieldIndex>,
/// The fields for which a length prefix
/// was already present earlier in the layout.
pub known_field_lengths: HashMap<FieldIndex, FieldLength>,
}
/// Information about the length of a field.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct FieldLength {
pub length: usize,
pub kind: LengthPrefixKind,
}
/// Specifies what kind of data the length prefix captures.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum LengthPrefixKind {
/// The length prefix stores the total number of bytes making up another field.
Bytes,
/// The length prefix stores the total number of elements inside another field.
Elements,
}
impl Default for Hints {
fn default() -> Self {
Hints {
current_field_index: None,
known_field_lengths: HashMap::new(),
}
}
}
impl Hints {
/// Gets the length of the field currently being
/// read, if known.
pub fn current_field_length(&self) -> Option<FieldLength> {
self.current_field_index.and_then(|index| self.known_field_lengths.get(&index)).cloned()
}
}
/// Helpers for the `protocol-derive` crate.
mod protocol_derive_helpers {
use super::*;
impl Hints {
// Sets hints indicating a new set of fields is beginning.
#[doc(hidden)]
pub fn begin_fields(&mut self) |
// Updates the hints to indicate a field was just read.
#[doc(hidden)]
pub fn next_field(&mut self) {
*self.current_field_index.as_mut()
.expect("cannot increment next field when not in a struct")+= 1;
}
// Sets the length of a variable-sized field by its 0-based index.
#[doc(hidden)]
pub fn set_field_length(&mut self,
field_index: FieldIndex,
length: usize,
kind: LengthPrefixKind) {
self.known_field_lengths.insert(field_index, FieldLength { kind, length });
}
}
}
| {
self.current_field_index = Some(0);
} | identifier_body |
hint.rs | use std::collections::HashMap;
pub type FieldIndex = usize;
/// Hints given when reading parcels.
#[derive(Clone, Debug, PartialEq)]
pub struct Hints {
pub current_field_index: Option<FieldIndex>,
/// The fields for which a length prefix
/// was already present earlier in the layout.
pub known_field_lengths: HashMap<FieldIndex, FieldLength>,
}
/// Information about the length of a field.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct FieldLength {
pub length: usize,
pub kind: LengthPrefixKind,
}
/// Specifies what kind of data the length prefix captures.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum LengthPrefixKind {
/// The length prefix stores the total number of bytes making up another field. |
impl Default for Hints {
fn default() -> Self {
Hints {
current_field_index: None,
known_field_lengths: HashMap::new(),
}
}
}
impl Hints {
/// Gets the length of the field currently being
/// read, if known.
pub fn current_field_length(&self) -> Option<FieldLength> {
self.current_field_index.and_then(|index| self.known_field_lengths.get(&index)).cloned()
}
}
/// Helpers for the `protocol-derive` crate.
mod protocol_derive_helpers {
use super::*;
impl Hints {
// Sets hints indicating a new set of fields is beginning.
#[doc(hidden)]
pub fn begin_fields(&mut self) {
self.current_field_index = Some(0);
}
// Updates the hints to indicate a field was just read.
#[doc(hidden)]
pub fn next_field(&mut self) {
*self.current_field_index.as_mut()
.expect("cannot increment next field when not in a struct")+= 1;
}
// Sets the length of a variable-sized field by its 0-based index.
#[doc(hidden)]
pub fn set_field_length(&mut self,
field_index: FieldIndex,
length: usize,
kind: LengthPrefixKind) {
self.known_field_lengths.insert(field_index, FieldLength { kind, length });
}
}
} | Bytes,
/// The length prefix stores the total number of elements inside another field.
Elements,
}
| random_line_split |
hint.rs | use std::collections::HashMap;
pub type FieldIndex = usize;
/// Hints given when reading parcels.
#[derive(Clone, Debug, PartialEq)]
pub struct Hints {
pub current_field_index: Option<FieldIndex>,
/// The fields for which a length prefix
/// was already present earlier in the layout.
pub known_field_lengths: HashMap<FieldIndex, FieldLength>,
}
/// Information about the length of a field.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct FieldLength {
pub length: usize,
pub kind: LengthPrefixKind,
}
/// Specifies what kind of data the length prefix captures.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum | {
/// The length prefix stores the total number of bytes making up another field.
Bytes,
/// The length prefix stores the total number of elements inside another field.
Elements,
}
impl Default for Hints {
fn default() -> Self {
Hints {
current_field_index: None,
known_field_lengths: HashMap::new(),
}
}
}
impl Hints {
/// Gets the length of the field currently being
/// read, if known.
pub fn current_field_length(&self) -> Option<FieldLength> {
self.current_field_index.and_then(|index| self.known_field_lengths.get(&index)).cloned()
}
}
/// Helpers for the `protocol-derive` crate.
mod protocol_derive_helpers {
use super::*;
impl Hints {
// Sets hints indicating a new set of fields are beginning.
#[doc(hidden)]
pub fn begin_fields(&mut self) {
self.current_field_index = Some(0);
}
// Updates the hints to indicate a field was just read.
#[doc(hidden)]
pub fn next_field(&mut self) {
*self.current_field_index.as_mut()
.expect("cannot increment next field when not in a struct") += 1;
}
// Sets the length of a variable-sized field by its 0-based index.
#[doc(hidden)]
pub fn set_field_length(&mut self,
field_index: FieldIndex,
length: usize,
kind: LengthPrefixKind) {
self.known_field_lengths.insert(field_index, FieldLength { kind, length });
}
}
}
| LengthPrefixKind | identifier_name |
nonuniform_random_number_generation.py | import sys
import random
import collections
import itertools
import bisect
# @include
def | (values, probabilities):
prefix_sum_of_probabilities = (
[0.0] + list(itertools.accumulate(probabilities)))
interval_idx = bisect.bisect(prefix_sum_of_probabilities,
random.random()) - 1
return values[interval_idx]
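# Illustrative walkthrough (not part of the original file): with
# values=[0, 1, 2] and probabilities=[0.5, 0.3, 0.2], the prefix sums are
# [0.0, 0.5, 0.8, 1.0]. A uniform draw of 0.62 bisects to index 2, so
# interval_idx = 1 and the function returns values[1]; each value is thus
# chosen with exactly its stated probability.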
# @exclude
def main():
n = int(sys.argv[1]) if len(sys.argv) == 2 else random.randint(1, 50)
T = [float(i) for i in range(n)]
P = []
full_prob = 1.0
for i in range(n - 1):
pi = random.uniform(0.0, full_prob)
P.append(pi)
full_prob -= pi
P.append(full_prob)
print(*T)
print(*P)
print(nonuniform_random_number_generation(T, P))
# Test. Perform the nonuniform random number generation for n * k_times
# times and calculate the distribution of each bucket.
k_times = 100000
counts = collections.Counter(
int(nonuniform_random_number_generation(T, P))
for _ in range(n * k_times))
for i in range(n):
print(counts[i] / (n * k_times), P[i])
assert abs(counts[i] / (n * k_times) - P[i]) < 0.01
if __name__ == '__main__':
main()
| nonuniform_random_number_generation | identifier_name |
nonuniform_random_number_generation.py | import sys
import random
import collections
import itertools
import bisect
# @include
def nonuniform_random_number_generation(values, probabilities):
prefix_sum_of_probabilities = (
[0.0] + list(itertools.accumulate(probabilities)))
interval_idx = bisect.bisect(prefix_sum_of_probabilities,
random.random()) - 1
return values[interval_idx]
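# Hypothetical usage sketch (not in the original file):
# nonuniform_random_number_generation(['a', 'b'], [0.9, 0.1]) returns 'a'
# roughly 90% of the time. Each call is O(n) to build the prefix sums plus
# O(log n) for the bisect; precomputing the sums once would amortize the
# O(n) step across repeated draws.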
# @exclude
def main():
|
if __name__ == '__main__':
main()
| n = int(sys.argv[1]) if len(sys.argv) == 2 else random.randint(1, 50)
T = [float(i) for i in range(n)]
P = []
full_prob = 1.0
for i in range(n - 1):
pi = random.uniform(0.0, full_prob)
P.append(pi)
full_prob -= pi
P.append(full_prob)
print(*T)
print(*P)
print(nonuniform_random_number_generation(T, P))
# Test. Perform the nonuniform random number generation for n * k_times
# times and calculate the distribution of each bucket.
k_times = 100000
counts = collections.Counter(
int(nonuniform_random_number_generation(T, P))
for _ in range(n * k_times))
for i in range(n):
print(counts[i] / (n * k_times), P[i])
assert abs(counts[i] / (n * k_times) - P[i]) < 0.01 | identifier_body |
nonuniform_random_number_generation.py | import sys
import random
import collections
import itertools
import bisect
# @include
def nonuniform_random_number_generation(values, probabilities):
prefix_sum_of_probabilities = (
[0.0] + list(itertools.accumulate(probabilities)))
interval_idx = bisect.bisect(prefix_sum_of_probabilities,
random.random()) - 1
return values[interval_idx]
# @exclude
def main():
n = int(sys.argv[1]) if len(sys.argv) == 2 else random.randint(1, 50)
T = [float(i) for i in range(n)]
P = []
full_prob = 1.0
for i in range(n - 1):
|
P.append(full_prob)
print(*T)
print(*P)
print(nonuniform_random_number_generation(T, P))
# Test. Perform the nonuniform random number generation for n * k_times
# times and calculate the distribution of each bucket.
k_times = 100000
counts = collections.Counter(
int(nonuniform_random_number_generation(T, P))
for _ in range(n * k_times))
for i in range(n):
print(counts[i] / (n * k_times), P[i])
assert abs(counts[i] / (n * k_times) - P[i]) < 0.01
if __name__ == '__main__':
main()
| pi = random.uniform(0.0, full_prob)
P.append(pi)
full_prob -= pi | conditional_block |
nonuniform_random_number_generation.py | import sys
import random
import collections
import itertools
import bisect
# @include
def nonuniform_random_number_generation(values, probabilities):
prefix_sum_of_probabilities = (
[0.0] + list(itertools.accumulate(probabilities)))
interval_idx = bisect.bisect(prefix_sum_of_probabilities,
random.random()) - 1
return values[interval_idx]
# @exclude
def main():
n = int(sys.argv[1]) if len(sys.argv) == 2 else random.randint(1, 50)
T = [float(i) for i in range(n)]
P = []
full_prob = 1.0
for i in range(n - 1):
pi = random.uniform(0.0, full_prob)
P.append(pi)
full_prob -= pi
P.append(full_prob)
print(*T)
print(*P)
print(nonuniform_random_number_generation(T, P))
# Test. Perform the nonuniform random number generation for n * k_times | k_times = 100000
counts = collections.Counter(
int(nonuniform_random_number_generation(T, P))
for _ in range(n * k_times))
for i in range(n):
print(counts[i] / (n * k_times), P[i])
assert abs(counts[i] / (n * k_times) - P[i]) < 0.01
if __name__ == '__main__':
main() | # times and calculate the distribution of each bucket. | random_line_split |
requireFiles_test.js | var assert = require('assert');
var fs = require('fs');
var requireFiles = require(__dirname + '/../lib/requireFiles');
var files = [
__dirname + '/moch/custom_test.txt',
__dirname + '/moch/json_test.json',
__dirname + '/moch/test.js'
];
describe('requireFiles testing', function(){
describe('Structure type', function(){
it('requireFiles should be a function', function(){
assert.equal(typeof requireFiles, 'function');
});
});
describe('Error', function(){
it('Should throw error when an engine isn\'t supported', function(){
assert.throws(function(){
requireFiles(files, {
'.js' : require,
'.json' : require
});
}, /there is no engine registered/);
});
it('Should not throw an error when everything is ok', function(){
assert.doesNotThrow(function(){
var result = requireFiles(files.slice(1, 3), { | });
describe('Custom engine', function(){
it('Custom engines should work', function(){
var result = requireFiles(files, {
'.js' : require,
'.json' : require,
'.txt' : function(path){
return fs.readFileSync(path).toString();
}
});
assert.equal(result[files[0]], 'worked\n');
assert.equal(result[files[1]].worked, true);
assert.equal(result[files[2]], 'worked');
});
});
}); | '.js' : require,
'.json' : require
});
});
}); | random_line_split |
red.js | /**
* Copyright 2013 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
var server = require("./server");
var nodes = require("./nodes");
var library = require("./library");
var comms = require("./comms");
var log = require("./log");
var util = require("./util");
var fs = require("fs");
var settings = require("./settings");
var credentials = require("./nodes/credentials");
var path = require('path');
process.env.NODE_RED_HOME = process.env.NODE_RED_HOME || path.resolve(__dirname+"/..");
var events = require("events");
var RED = {
init: function(httpServer,userSettings) {
userSettings.version = this.version();
settings.init(userSettings);
server.init(httpServer,settings);
library.init();
return server.app;
},
start: server.start,
stop: server.stop,
nodes: nodes,
library: library,
credentials: credentials,
events: events,
log: log,
comms: comms,
settings:settings,
util: util, | return p.version;
}
}
};
RED.__defineGetter__("app", function() { console.log("Deprecated use of RED.app - use RED.httpAdmin instead"); return server.app });
RED.__defineGetter__("httpAdmin", function() { return server.app });
RED.__defineGetter__("httpNode", function() { return server.nodeApp });
RED.__defineGetter__("server", function() { return server.server });
module.exports = RED; | version: function () {
var p = require(path.join(process.env.NODE_RED_HOME,"package.json"));
if (fs.existsSync(path.join(process.env.NODE_RED_HOME,".git"))) {
return p.version+".git";
} else { | random_line_split |
red.js | /**
* Copyright 2013 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either press or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
var server = require("./server");
var nodes = require("./nodes");
var library = require("./library");
var comms = require("./comms");
var log = require("./log");
var util = require("./util");
var fs = require("fs");
var settings = require("./settings");
var credentials = require("./nodes/credentials");
var path = require('path');
process.env.NODE_RED_HOME = process.env.NODE_RED_HOME || path.resolve(__dirname+"/..");
var events = require("events");
var RED = {
init: function(httpServer,userSettings) {
userSettings.version = this.version();
settings.init(userSettings);
server.init(httpServer,settings);
library.init();
return server.app;
},
start: server.start,
stop: server.stop,
nodes: nodes,
library: library,
credentials: credentials,
events: events,
log: log,
comms: comms,
settings:settings,
util: util,
version: function () {
var p = require(path.join(process.env.NODE_RED_HOME,"package.json"));
if (fs.existsSync(path.join(process.env.NODE_RED_HOME,".git"))) {
return p.version+".git";
} else |
}
};
RED.__defineGetter__("app", function() { console.log("Deprecated use of RED.app - use RED.httpAdmin instead"); return server.app });
RED.__defineGetter__("httpAdmin", function() { return server.app });
RED.__defineGetter__("httpNode", function() { return server.nodeApp });
RED.__defineGetter__("server", function() { return server.server });
module.exports = RED;
| {
return p.version;
} | conditional_block |
template.js | import defaults from './defaults.js';
import _ from './underscore.js';
import './templateSettings.js';
// When customizing `_.templateSettings`, if you don't want to define an
// interpolation, evaluation or escaping regex, we need one that is
// guaranteed not to match.
var noMatch = /(.)^/;
// Certain characters need to be escaped so that they can be put into a
// string literal.
var escapes = {
"'": "'",
'\\': '\\',
'\r': 'r',
'\n': 'n',
'\u2028': 'u2028',
'\u2029': 'u2029'
};
var escapeRegExp = /\\|'|\r|\n|\u2028|\u2029/g;
function escapeChar(match) {
return '\\' + escapes[match];
}
// In order to prevent third-party code injection through
// `_.templateSettings.variable`, we test it against the following regular
// expression. It is intentionally a bit more liberal than just matching valid
// identifiers, but still prevents possible loopholes through defaults or
// destructuring assignment.
var bareIdentifier = /^\s*(\w|\$)+\s*$/;
// JavaScript micro-templating, similar to John Resig's implementation.
// Underscore templating handles arbitrary delimiters, preserves whitespace,
// and correctly escapes quotes within interpolated code.
// NB: `oldSettings` only exists for backwards compatibility.
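// Hypothetical usage sketch (assumes the default ERB-style delimiters):
//
//   import template from './template.js';
//   var greet = template('Hello, <%= name %>!');
//   greet({name: 'Ada'}); // => 'Hello, Ada!'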
export default function template(text, settings, oldSettings) {
if (!settings && oldSettings) settings = oldSettings;
settings = defaults({}, settings, _.templateSettings);
// Combine delimiters into one regular expression via alternation.
var matcher = RegExp([
(settings.escape || noMatch).source,
(settings.interpolate || noMatch).source,
(settings.evaluate || noMatch).source
].join('|') + '|$', 'g'); |
// Compile the template source, escaping string literals appropriately.
var index = 0;
var source = "__p+='";
text.replace(matcher, function(match, escape, interpolate, evaluate, offset) {
source += text.slice(index, offset).replace(escapeRegExp, escapeChar);
index = offset + match.length;
if (escape) {
source += "'+\n((__t=(" + escape + "))==null?'':_.escape(__t))+\n'";
} else if (interpolate) {
source += "'+\n((__t=(" + interpolate + "))==null?'':__t)+\n'";
} else if (evaluate) {
source += "';\n" + evaluate + "\n__p+='";
}
// Adobe VMs need the match returned to produce the correct offset.
return match;
});
source += "';\n";
var argument = settings.variable;
if (argument) {
// Insure against third-party code injection. (CVE-2021-23358)
if (!bareIdentifier.test(argument)) throw new Error(
'variable is not a bare identifier: ' + argument
);
} else {
// If a variable is not specified, place data values in local scope.
source = 'with(obj||{}){\n' + source + '}\n';
argument = 'obj';
}
source = "var __t,__p='',__j=Array.prototype.join," +
"print=function(){__p+=__j.call(arguments,'');};\n" +
source + 'return __p;\n';
var render;
try {
render = new Function(argument, '_', source);
} catch (e) {
e.source = source;
throw e;
}
var template = function(data) {
return render.call(this, data, _);
};
// Provide the compiled source as a convenience for precompilation.
template.source = 'function(' + argument + '){\n' + source + '}';
return template;
} | random_line_split |
|
template.js | import defaults from './defaults.js';
import _ from './underscore.js';
import './templateSettings.js';
// When customizing `_.templateSettings`, if you don't want to define an
// interpolation, evaluation or escaping regex, we need one that is
// guaranteed not to match.
var noMatch = /(.)^/;
// Certain characters need to be escaped so that they can be put into a
// string literal.
var escapes = {
"'": "'",
'\\': '\\',
'\r': 'r',
'\n': 'n',
'\u2028': 'u2028',
'\u2029': 'u2029'
};
var escapeRegExp = /\\|'|\r|\n|\u2028|\u2029/g;
function | (match) {
return '\\' + escapes[match];
}
// In order to prevent third-party code injection through
// `_.templateSettings.variable`, we test it against the following regular
// expression. It is intentionally a bit more liberal than just matching valid
// identifiers, but still prevents possible loopholes through defaults or
// destructuring assignment.
var bareIdentifier = /^\s*(\w|\$)+\s*$/;
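// For illustration (not part of the original): {variable: 'data'} passes
// this test, so template('<%= data.x %>', {variable: 'data'})({x: 1})
// renders '1', while an injection payload such as '){}; attack(); ('
// contains non-word characters, fails the regex, and throws below.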
// JavaScript micro-templating, similar to John Resig's implementation.
// Underscore templating handles arbitrary delimiters, preserves whitespace,
// and correctly escapes quotes within interpolated code.
// NB: `oldSettings` only exists for backwards compatibility.
export default function template(text, settings, oldSettings) {
if (!settings && oldSettings) settings = oldSettings;
settings = defaults({}, settings, _.templateSettings);
// Combine delimiters into one regular expression via alternation.
var matcher = RegExp([
(settings.escape || noMatch).source,
(settings.interpolate || noMatch).source,
(settings.evaluate || noMatch).source
].join('|') + '|$', 'g');
// Compile the template source, escaping string literals appropriately.
var index = 0;
var source = "__p+='";
text.replace(matcher, function(match, escape, interpolate, evaluate, offset) {
source += text.slice(index, offset).replace(escapeRegExp, escapeChar);
index = offset + match.length;
if (escape) {
source += "'+\n((__t=(" + escape + "))==null?'':_.escape(__t))+\n'";
} else if (interpolate) {
source += "'+\n((__t=(" + interpolate + "))==null?'':__t)+\n'";
} else if (evaluate) {
source += "';\n" + evaluate + "\n__p+='";
}
// Adobe VMs need the match returned to produce the correct offset.
return match;
});
source += "';\n";
var argument = settings.variable;
if (argument) {
// Insure against third-party code injection. (CVE-2021-23358)
if (!bareIdentifier.test(argument)) throw new Error(
'variable is not a bare identifier: ' + argument
);
} else {
// If a variable is not specified, place data values in local scope.
source = 'with(obj||{}){\n' + source + '}\n';
argument = 'obj';
}
source = "var __t,__p='',__j=Array.prototype.join," +
"print=function(){__p+=__j.call(arguments,'');};\n" +
source + 'return __p;\n';
var render;
try {
render = new Function(argument, '_', source);
} catch (e) {
e.source = source;
throw e;
}
var template = function(data) {
return render.call(this, data, _);
};
// Provide the compiled source as a convenience for precompilation.
template.source = 'function(' + argument + '){\n' + source + '}';
return template;
}
| escapeChar | identifier_name |
template.js | import defaults from './defaults.js';
import _ from './underscore.js';
import './templateSettings.js';
// When customizing `_.templateSettings`, if you don't want to define an
// interpolation, evaluation or escaping regex, we need one that is
// guaranteed not to match.
var noMatch = /(.)^/;
// Certain characters need to be escaped so that they can be put into a
// string literal.
var escapes = {
"'": "'",
'\\': '\\',
'\r': 'r',
'\n': 'n',
'\u2028': 'u2028',
'\u2029': 'u2029'
};
var escapeRegExp = /\\|'|\r|\n|\u2028|\u2029/g;
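// For illustration (not in the original): compiling template('a\nb')
// runs the literal text through escapeRegExp/escapeChar, so the generated
// function source contains "a\\nb" rather than a raw newline.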
function escapeChar(match) |
// In order to prevent third-party code injection through
// `_.templateSettings.variable`, we test it against the following regular
// expression. It is intentionally a bit more liberal than just matching valid
// identifiers, but still prevents possible loopholes through defaults or
// destructuring assignment.
var bareIdentifier = /^\s*(\w|\$)+\s*$/;
// JavaScript micro-templating, similar to John Resig's implementation.
// Underscore templating handles arbitrary delimiters, preserves whitespace,
// and correctly escapes quotes within interpolated code.
// NB: `oldSettings` only exists for backwards compatibility.
export default function template(text, settings, oldSettings) {
if (!settings && oldSettings) settings = oldSettings;
settings = defaults({}, settings, _.templateSettings);
// Combine delimiters into one regular expression via alternation.
var matcher = RegExp([
(settings.escape || noMatch).source,
(settings.interpolate || noMatch).source,
(settings.evaluate || noMatch).source
].join('|') + '|$', 'g');
// Compile the template source, escaping string literals appropriately.
var index = 0;
var source = "__p+='";
text.replace(matcher, function(match, escape, interpolate, evaluate, offset) {
source += text.slice(index, offset).replace(escapeRegExp, escapeChar);
index = offset + match.length;
if (escape) {
source += "'+\n((__t=(" + escape + "))==null?'':_.escape(__t))+\n'";
} else if (interpolate) {
source += "'+\n((__t=(" + interpolate + "))==null?'':__t)+\n'";
} else if (evaluate) {
source += "';\n" + evaluate + "\n__p+='";
}
// Adobe VMs need the match returned to produce the correct offset.
return match;
});
source += "';\n";
var argument = settings.variable;
if (argument) {
// Insure against third-party code injection. (CVE-2021-23358)
if (!bareIdentifier.test(argument)) throw new Error(
'variable is not a bare identifier: ' + argument
);
} else {
// If a variable is not specified, place data values in local scope.
source = 'with(obj||{}){\n' + source + '}\n';
argument = 'obj';
}
source = "var __t,__p='',__j=Array.prototype.join," +
"print=function(){__p+=__j.call(arguments,'');};\n" +
source + 'return __p;\n';
var render;
try {
render = new Function(argument, '_', source);
} catch (e) {
e.source = source;
throw e;
}
var template = function(data) {
return render.call(this, data, _);
};
// Provide the compiled source as a convenience for precompilation.
template.source = 'function(' + argument + '){\n' + source + '}';
return template;
}
| {
return '\\' + escapes[match];
} | identifier_body |
managers.py | import datetime
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import Sum
from django.contrib.contenttypes.models import ContentType
class TrendingManager(models.Manager):
def trending(self, model, days=30, kind=""):
views = self.filter(
viewed_content_type=ContentType.objects.get_for_model(model),
views_on__gte=datetime.date.today() - datetime.timedelta(days=days),
kind=kind
).values(
"viewed_content_type",
"viewed_object_id",
"kind"
).annotate(
num_views=Sum("count")
).order_by("-num_views")
for d in views:
|
return views
| try:
d["object"] = ContentType.objects.get_for_id(
d["viewed_content_type"]
).get_object_for_this_type(
pk=d["viewed_object_id"]
)
except ObjectDoesNotExist:
d["object"] = None | conditional_block |
managers.py | import datetime
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import Sum
from django.contrib.contenttypes.models import ContentType
class TrendingManager(models.Manager):
| def trending(self, model, days=30, kind=""):
views = self.filter(
viewed_content_type=ContentType.objects.get_for_model(model),
views_on__gte=datetime.date.today() - datetime.timedelta(days=days),
kind=kind
).values(
"viewed_content_type",
"viewed_object_id",
"kind"
).annotate(
num_views=Sum("count")
).order_by("-num_views")
for d in views:
try:
d["object"] = ContentType.objects.get_for_id(
d["viewed_content_type"]
).get_object_for_this_type(
pk=d["viewed_object_id"]
)
except ObjectDoesNotExist:
d["object"] = None
return views | identifier_body |
|
managers.py | import datetime
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import Sum
from django.contrib.contenttypes.models import ContentType
class TrendingManager(models.Manager):
def trending(self, model, days=30, kind=""):
views = self.filter(
viewed_content_type=ContentType.objects.get_for_model(model),
views_on__gte=datetime.date.today() - datetime.timedelta(days=days),
kind=kind
).values(
"viewed_content_type", | num_views=Sum("count")
).order_by("-num_views")
for d in views:
try:
d["object"] = ContentType.objects.get_for_id(
d["viewed_content_type"]
).get_object_for_this_type(
pk=d["viewed_object_id"]
)
except ObjectDoesNotExist:
d["object"] = None
return views | "viewed_object_id",
"kind"
).annotate( | random_line_split |
managers.py | import datetime
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import Sum
from django.contrib.contenttypes.models import ContentType
class TrendingManager(models.Manager):
def | (self, model, days=30, kind=""):
views = self.filter(
viewed_content_type=ContentType.objects.get_for_model(model),
views_on__gte=datetime.date.today() - datetime.timedelta(days=days),
kind=kind
).values(
"viewed_content_type",
"viewed_object_id",
"kind"
).annotate(
num_views=Sum("count")
).order_by("-num_views")
for d in views:
try:
d["object"] = ContentType.objects.get_for_id(
d["viewed_content_type"]
).get_object_for_this_type(
pk=d["viewed_object_id"]
)
except ObjectDoesNotExist:
d["object"] = None
return views
| trending | identifier_name |
work-dashboard.component.ts | import { Component, OnInit } from '@angular/core';
import { CurrentWorkService } from '../current-work/current-work.service';
import { Work } from 'src/app/services/work/work';
import { Chapter } from 'src/app/services/chapter/chapter';
import { WorkReview } from 'src/app/services/review/work-review';
import { ReviewModalComponent } from '../../review-modal/review-modal.component';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
@Component({
selector: 'app-work-dashboard',
templateUrl: './work-dashboard.component.html',
styleUrls: ['./work-dashboard.component.css']
})
export class WorkDashboardComponent implements OnInit {
work: Work;
workChapters: Chapter[];
workReviews: WorkReview[];
openReview(review: WorkReview) {
const activeModal = this.modalService.open(ReviewModalComponent, { size: 'lg', scrollable: true, centered: true });
activeModal.componentInstance.review = review;
}
constructor(
private currentWork: CurrentWorkService,
private modalService: NgbModal,
) |
ngOnInit() {
this.currentWork.work
.subscribe(work => this.work = work);
this.currentWork.chapters
.subscribe(chapters => this.workChapters = chapters);
this.currentWork.reviews
.subscribe(reviews => this.workReviews = reviews);
}
}
| { } | identifier_body |
work-dashboard.component.ts | import { Component, OnInit } from '@angular/core';
import { CurrentWorkService } from '../current-work/current-work.service';
import { Work } from 'src/app/services/work/work';
import { Chapter } from 'src/app/services/chapter/chapter';
import { WorkReview } from 'src/app/services/review/work-review';
import { ReviewModalComponent } from '../../review-modal/review-modal.component';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
@Component({
selector: 'app-work-dashboard',
templateUrl: './work-dashboard.component.html',
styleUrls: ['./work-dashboard.component.css']
})
export class WorkDashboardComponent implements OnInit {
work: Work; | activeModal.componentInstance.review = review;
}
constructor(
private currentWork: CurrentWorkService,
private modalService: NgbModal,
) { }
ngOnInit() {
this.currentWork.work
.subscribe(work => this.work = work);
this.currentWork.chapters
.subscribe(chapters => this.workChapters = chapters);
this.currentWork.reviews
.subscribe(reviews => this.workReviews = reviews);
}
} | workChapters: Chapter[];
workReviews: WorkReview[];
openReview(review: WorkReview) {
const activeModal = this.modalService.open(ReviewModalComponent, { size: 'lg', scrollable: true, centered: true }); | random_line_split |
work-dashboard.component.ts | import { Component, OnInit } from '@angular/core';
import { CurrentWorkService } from '../current-work/current-work.service';
import { Work } from 'src/app/services/work/work';
import { Chapter } from 'src/app/services/chapter/chapter';
import { WorkReview } from 'src/app/services/review/work-review';
import { ReviewModalComponent } from '../../review-modal/review-modal.component';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
@Component({
selector: 'app-work-dashboard',
templateUrl: './work-dashboard.component.html',
styleUrls: ['./work-dashboard.component.css']
})
export class WorkDashboardComponent implements OnInit {
work: Work;
workChapters: Chapter[];
workReviews: WorkReview[];
openReview(review: WorkReview) {
const activeModal = this.modalService.open(ReviewModalComponent, { size: 'lg', scrollable: true, centered: true });
activeModal.componentInstance.review = review;
}
constructor(
private currentWork: CurrentWorkService,
private modalService: NgbModal,
) { }
| () {
this.currentWork.work
.subscribe(work => this.work = work);
this.currentWork.chapters
.subscribe(chapters => this.workChapters = chapters);
this.currentWork.reviews
.subscribe(reviews => this.workReviews = reviews);
}
}
| ngOnInit | identifier_name |
photo-page.ts | /**
* Created by tiwen.wang on 8/4/2015.
*/
import {Component,
View,
ElementRef,
EventEmitter,
Inject,
NgFor, NgIf} from 'angular2/angular2';
import { PhotoApi } from 'services/Apis';
@Component({
selector: 'photo-page',
properties: ['photoId', 'open'],
events: ['openChanged: open'],
viewBindings: [ PhotoApi ],
})
@View({
templateUrl: 'components/photo-page/photo-page.html',
directives: [ NgIf ]
})
export class PhotoPage {
elementRef: ElementRef;
id: string;
photo: {id: string};
_open: boolean;
openChanged = new EventEmitter();
photoLoading: boolean;
photoApi: PhotoApi;
constructor(elementRef: ElementRef, @Inject(PhotoApi) photoApi) {
this.elementRef = elementRef;
this.photoApi = photoApi;
}
set photoId(id) {
this.id = id;
}
get photoId() {
return this.id;
}
set open(open) {
this._open = open;
if(this._open && this.id) |
}
get open() {
return this._open;
}
onPhotoDialogClosed() {
this._open = false;
this.openChanged.next(false);
}
getPhoto(id) {
this.photoLoading = true;
this.photoApi.getPhoto(id).subscribe((photo) => {
this.photoLoading = false;
photo.ossKey = photo.oss_key;
this.photo = photo;
},
(err) => {this.photoLoading = false;}
);
}
} | {
delete this.photo;
var el = this.elementRef.nativeElement;
var pd = el.querySelector("#photoDialog");
pd.open();
this.getPhoto(this.id);
} | conditional_block |
photo-page.ts | /**
* Created by tiwen.wang on 8/4/2015.
*/
import {Component,
View,
ElementRef,
EventEmitter,
Inject,
NgFor, NgIf} from 'angular2/angular2';
import { PhotoApi } from 'services/Apis';
@Component({
selector: 'photo-page',
properties: ['photoId', 'open'],
events: ['openChanged: open'],
viewBindings: [ PhotoApi ],
})
@View({
templateUrl: 'components/photo-page/photo-page.html',
directives: [ NgIf ]
})
export class PhotoPage {
elementRef: ElementRef;
id: string;
photo: {id: string}; | _open: boolean;
openChanged = new EventEmitter();
photoLoading: boolean;
photoApi: PhotoApi;
constructor(elementRef: ElementRef, @Inject(PhotoApi) photoApi) {
this.elementRef = elementRef;
this.photoApi = photoApi;
}
set photoId(id) {
this.id = id;
}
get photoId() {
return this.id;
}
set open(open) {
this._open = open;
if(this._open && this.id) {
delete this.photo;
var el = this.elementRef.nativeElement;
var pd = el.querySelector("#photoDialog");
pd.open();
this.getPhoto(this.id);
}
}
get open() {
return this._open;
}
onPhotoDialogClosed() {
this._open = false;
this.openChanged.next(false);
}
getPhoto(id) {
this.photoLoading = true;
this.photoApi.getPhoto(id).subscribe((photo) => {
this.photoLoading = false;
photo.ossKey = photo.oss_key;
this.photo = photo;
},
(err) => {this.photoLoading = false;}
);
}
} | random_line_split |
|
photo-page.ts | /**
* Created by tiwen.wang on 8/4/2015.
*/
import {Component,
View,
ElementRef,
EventEmitter,
Inject,
NgFor, NgIf} from 'angular2/angular2';
import { PhotoApi } from 'services/Apis';
@Component({
selector: 'photo-page',
properties: ['photoId', 'open'],
events: ['openChanged: open'],
viewBindings: [ PhotoApi ],
})
@View({
templateUrl: 'components/photo-page/photo-page.html',
directives: [ NgIf ]
})
export class PhotoPage {
elementRef: ElementRef;
id: string;
photo: {id: string};
_open: boolean;
openChanged = new EventEmitter();
photoLoading: boolean;
photoApi: PhotoApi;
constructor(elementRef: ElementRef, @Inject(PhotoApi) photoApi) {
this.elementRef = elementRef;
this.photoApi = photoApi;
}
set | (id) {
this.id = id;
}
get photoId() {
return this.id;
}
set open(open) {
this._open = open;
if(this._open && this.id) {
delete this.photo;
var el = this.elementRef.nativeElement;
var pd = el.querySelector("#photoDialog");
pd.open();
this.getPhoto(this.id);
}
}
get open() {
return this._open;
}
onPhotoDialogClosed() {
this._open = false;
this.openChanged.next(false);
}
getPhoto(id) {
this.photoLoading = true;
this.photoApi.getPhoto(id).subscribe((photo) => {
this.photoLoading = false;
photo.ossKey = photo.oss_key;
this.photo = photo;
},
(err) => {this.photoLoading = false;}
);
}
} | photoId | identifier_name |
photo-page.ts | /**
* Created by tiwen.wang on 8/4/2015.
*/
import {Component,
View,
ElementRef,
EventEmitter,
Inject,
NgFor, NgIf} from 'angular2/angular2';
import { PhotoApi } from 'services/Apis';
@Component({
selector: 'photo-page',
properties: ['photoId', 'open'],
events: ['openChanged: open'],
viewBindings: [ PhotoApi ],
})
@View({
templateUrl: 'components/photo-page/photo-page.html',
directives: [ NgIf ]
})
export class PhotoPage {
elementRef: ElementRef;
id: string;
photo: {id: string};
_open: boolean;
openChanged = new EventEmitter();
photoLoading: boolean;
photoApi: PhotoApi;
constructor(elementRef: ElementRef, @Inject(PhotoApi) photoApi) |
set photoId(id) {
this.id = id;
}
get photoId() {
return this.id;
}
set open(open) {
this._open = open;
if(this._open && this.id) {
delete this.photo;
var el = this.elementRef.nativeElement;
var pd = el.querySelector("#photoDialog");
pd.open();
this.getPhoto(this.id);
}
}
get open() {
return this._open;
}
onPhotoDialogClosed() {
this._open = false;
this.openChanged.next(false);
}
getPhoto(id) {
this.photoLoading = true;
this.photoApi.getPhoto(id).subscribe((photo) => {
this.photoLoading = false;
photo.ossKey = photo.oss_key;
this.photo = photo;
},
(err) => {this.photoLoading = false;}
);
}
} | {
this.elementRef = elementRef;
this.photoApi = photoApi;
} | identifier_body |
refcounted.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A generic, safe mechanism by which DOM objects can be pinned and transferred
//! between tasks (or intra-task for asynchronous events). Akin to Gecko's
//! nsMainThreadPtrHandle, this uses thread-safe reference counting and ensures
//! that the actual SpiderMonkey GC integration occurs on the script task via
//! message passing. Ownership of a `Trusted<T>` object means the DOM object of
//! type T to which it points remains alive. Any other behaviour is undefined.
//! To guarantee the lifetime of a DOM object when performing asynchronous operations,
//! obtain a `Trusted<T>` from that object and pass it along with each operation.
//! A usable pointer to the original DOM object can be obtained on the script task
//! from a `Trusted<T>` via the `root` method.
//!
//! The implementation of Trusted<T> is as follows:
//! A hashtable resides in the script task, keyed on the pointer to the Rust DOM object.
//! The values in this hashtable are atomic reference counts. When a Trusted<T> object is
//! created or cloned, this count is increased. When a Trusted<T> is dropped, the count
//! decreases. If the count hits zero, a message is dispatched to the script task to remove
//! the entry from the hashmap if the count is still zero. The JS reflector for the DOM object
//! is rooted when a hashmap entry is first created, and unrooted when the hashmap entry
//! is removed.
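// Hypothetical usage sketch (names other than `Trusted` are assumed for
// illustration, not taken from this file):
//
//   let trusted = Trusted::new(cx, &*node, script_chan.clone());
//   worker_chan.send(trusted.clone()); // a clone may cross threads
//   // ... later, back on the script task:
//   let node = trusted.root(); // yields a rooted `Root<T>`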
use dom::bindings::js::Root;
use dom::bindings::utils::{Reflector, Reflectable};
use dom::bindings::trace::trace_reflector;
use script_task::{ScriptMsg, ScriptChan};
use js::jsapi::{JSContext, JSTracer};
use libc;
use std::cell::RefCell;
use std::collections::hash_map::HashMap;
use std::collections::hash_map::Entry::{Vacant, Occupied};
use std::marker::PhantomData;
use std::sync::{Arc, Mutex};
use core::nonzero::NonZero;
#[allow(missing_docs)] // FIXME
mod dummy { // Attributes don’t apply through the macro.
use std::rc::Rc;
use std::cell::RefCell;
use super::LiveDOMReferences;
thread_local!(pub static LIVE_REFERENCES: Rc<RefCell<Option<LiveDOMReferences>>> =
Rc::new(RefCell::new(None)));
}
pub use self::dummy::LIVE_REFERENCES;
/// A pointer to a Rust DOM object that needs to be destroyed.
pub struct TrustedReference(*const libc::c_void);
unsafe impl Send for TrustedReference {}
/// A safe wrapper around a raw pointer to a DOM object that can be
/// shared among tasks for use in asynchronous operations. The underlying
/// DOM object is guaranteed to live at least as long as the last outstanding
/// `Trusted<T>` instance.
#[allow_unrooted_interior]
pub struct Trusted<T: Reflectable> {
/// A pointer to the Rust DOM object of type T, but void to allow
/// sending `Trusted<T>` between tasks, regardless of T's sendability.
ptr: *const libc::c_void,
refcount: Arc<Mutex<usize>>,
script_chan: Box<ScriptChan + Send>,
owner_thread: *const libc::c_void,
phantom: PhantomData<T>,
}
unsafe impl<T: Reflectable> Send for Trusted<T> {}
impl<T: Reflectable> Trusted<T> {
/// Create a new `Trusted<T>` instance from an existing DOM pointer. The DOM object will
/// be prevented from being GCed for the duration of the resulting `Trusted<T>` object's
/// lifetime.
pub fn new(_cx: *mut JSContext, ptr: &T, script_chan: Box<ScriptChan + Send>) -> Trusted<T> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let refcount = live_references.addref(&*ptr as *const T);
Trusted {
ptr: &*ptr as *const T as *const libc::c_void,
refcount: refcount,
script_chan: script_chan.clone(),
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
phantom: PhantomData,
}
})
}
/// Obtain a usable DOM pointer from a pinned `Trusted<T>` value. Fails if used on
/// a different thread than the original value from which this `Trusted<T>` was
/// obtained.
pub fn root(&self) -> Root<T> {
assert!(LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
self.owner_thread == (&*live_references) as *const _ as *const libc::c_void
}));
unsafe {
Root::new(NonZero::new(self.ptr as *const T))
}
}
}
impl<T: Reflectable> Clone for Trusted<T> {
fn clone(&self) -> Trusted<T> {
{
let mut refcount = self.refcount.lock().unwrap();
*refcount += 1;
}
Trusted {
ptr: self.ptr,
refcount: self.refcount.clone(),
script_chan: self.script_chan.clone(),
owner_thread: self.owner_thread,
phantom: PhantomData,
}
}
}
impl<T: Reflectable> Drop for Trusted<T> {
fn drop(&mut self) {
let mut refcount = self.refcount.lock().unwrap();
assert!(*refcount > 0);
*refcount -= 1;
if *refcount == 0 {
// It's possible this send will fail if the script task
// has already exited. There's not much we can do at this
// point though.
let msg = ScriptMsg::RefcountCleanup(TrustedReference(self.ptr));
let _ = self.script_chan.send(msg);
}
}
}
/// The set of live, pinned DOM objects that are currently prevented
/// from being garbage collected due to outstanding references.
pub struct LiveDOMReferences {
// keyed on pointer to Rust DOM object
table: RefCell<HashMap<*const libc::c_void, Arc<Mutex<usize>>>>
}
impl LiveDOMReferences {
/// Set up the task-local data required for storing the outstanding DOM references.
pub fn initialize() {
LIVE_REFERENCES.with(|ref r| {
*r.borrow_mut() = Some(LiveDOMReferences {
table: RefCell::new(HashMap::new()),
})
});
}
fn addref<T: Reflectable>(&self, ptr: *const T) -> Arc<Mutex<usize>> {
let mut table = self.table.borrow_mut();
match table.entry(ptr as *const libc::c_void) {
Occupied(mut entry) => {
let refcount = entry.get_mut();
*refcount.lock().unwrap() += 1;
refcount.clone()
}
Vacant(entry) => {
let refcount = Arc::new(Mutex::new(1));
entry.insert(refcount.clone());
refcount
}
}
}
/// Unpin the given DOM object if its refcount is 0.
pub fn cleanup(raw_reflectable: TrustedReference) {
|
/// A JSTraceDataOp for tracing reflectors held in LIVE_REFERENCES
pub unsafe extern fn trace_refcounted_objects(tracer: *mut JSTracer, _data: *mut libc::c_void) {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let table = live_references.table.borrow();
for obj in table.keys() {
let reflectable = &*(*obj as *const Reflector);
trace_reflector(tracer, "LIVE_REFERENCES", reflectable);
}
});
}
| let TrustedReference(raw_reflectable) = raw_reflectable;
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let mut table = live_references.table.borrow_mut();
match table.entry(raw_reflectable) {
Occupied(entry) => {
if *entry.get().lock().unwrap() != 0 {
// there could have been a new reference taken since
// this message was dispatched.
return;
}
let _ = entry.remove();
}
Vacant(_) => {
// there could be a cleanup message dispatched, then a new
// pinned reference obtained and released before the message
// is processed, at which point there would be no matching
// hashtable entry.
info!("attempt to cleanup an unrecognized reflector");
}
}
})
}
} | identifier_body |
refcounted.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A generic, safe mechanism by which DOM objects can be pinned and transferred
//! between tasks (or intra-task for asynchronous events). Akin to Gecko's
//! nsMainThreadPtrHandle, this uses thread-safe reference counting and ensures
//! that the actual SpiderMonkey GC integration occurs on the script task via
//! message passing. Ownership of a `Trusted<T>` object means the DOM object of
//! type T to which it points remains alive. Any other behaviour is undefined.
//! To guarantee the lifetime of a DOM object when performing asynchronous operations,
//! obtain a `Trusted<T>` from that object and pass it along with each operation.
//! A usable pointer to the original DOM object can be obtained on the script task
//! from a `Trusted<T>` via the `root` method.
//!
//! The implementation of Trusted<T> is as follows:
//! A hashtable resides in the script task, keyed on the pointer to the Rust DOM object.
//! The values in this hashtable are atomic reference counts. When a Trusted<T> object is
//! created or cloned, this count is increased. When a Trusted<T> is dropped, the count
//! decreases. If the count hits zero, a message is dispatched to the script task to remove
//! the entry from the hashmap if the count is still zero. The JS reflector for the DOM object
//! is rooted when a hashmap entry is first created, and unrooted when the hashmap entry
//! is removed.
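// Sketch of the refcount lifecycle described above (illustrative only):
//
//   let a = Trusted::new(cx, &*obj, chan.clone()); // table entry created, count = 1
//   let b = a.clone(); // count = 2
//   drop(a); // count = 1, nothing sent
//   drop(b); // count = 0, a RefcountCleanup message is dispatched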
use dom::bindings::js::Root;
use dom::bindings::utils::{Reflector, Reflectable};
use dom::bindings::trace::trace_reflector;
use script_task::{ScriptMsg, ScriptChan};
use js::jsapi::{JSContext, JSTracer};
use libc;
use std::cell::RefCell;
use std::collections::hash_map::HashMap;
use std::collections::hash_map::Entry::{Vacant, Occupied};
use std::marker::PhantomData;
use std::sync::{Arc, Mutex};
use core::nonzero::NonZero;
#[allow(missing_docs)] // FIXME
mod dummy { // Attributes don’t apply through the macro. | }
pub use self::dummy::LIVE_REFERENCES;
/// A pointer to a Rust DOM object that needs to be destroyed.
pub struct TrustedReference(*const libc::c_void);
unsafe impl Send for TrustedReference {}
/// A safe wrapper around a raw pointer to a DOM object that can be
/// shared among tasks for use in asynchronous operations. The underlying
/// DOM object is guaranteed to live at least as long as the last outstanding
/// `Trusted<T>` instance.
#[allow_unrooted_interior]
pub struct Trusted<T: Reflectable> {
/// A pointer to the Rust DOM object of type T, but void to allow
/// sending `Trusted<T>` between tasks, regardless of T's sendability.
ptr: *const libc::c_void,
refcount: Arc<Mutex<usize>>,
script_chan: Box<ScriptChan + Send>,
owner_thread: *const libc::c_void,
phantom: PhantomData<T>,
}
unsafe impl<T: Reflectable> Send for Trusted<T> {}
impl<T: Reflectable> Trusted<T> {
/// Create a new `Trusted<T>` instance from an existing DOM pointer. The DOM object will
/// be prevented from being GCed for the duration of the resulting `Trusted<T>` object's
/// lifetime.
pub fn new(_cx: *mut JSContext, ptr: &T, script_chan: Box<ScriptChan + Send>) -> Trusted<T> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let refcount = live_references.addref(&*ptr as *const T);
Trusted {
ptr: &*ptr as *const T as *const libc::c_void,
refcount: refcount,
script_chan: script_chan.clone(),
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
phantom: PhantomData,
}
})
}
/// Obtain a usable DOM pointer from a pinned `Trusted<T>` value. Fails if used on
/// a different thread than the original value from which this `Trusted<T>` was
/// obtained.
pub fn root(&self) -> Root<T> {
assert!(LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
self.owner_thread == (&*live_references) as *const _ as *const libc::c_void
}));
unsafe {
Root::new(NonZero::new(self.ptr as *const T))
}
}
}
impl<T: Reflectable> Clone for Trusted<T> {
fn clone(&self) -> Trusted<T> {
{
let mut refcount = self.refcount.lock().unwrap();
*refcount += 1;
}
Trusted {
ptr: self.ptr,
refcount: self.refcount.clone(),
script_chan: self.script_chan.clone(),
owner_thread: self.owner_thread,
phantom: PhantomData,
}
}
}
impl<T: Reflectable> Drop for Trusted<T> {
fn drop(&mut self) {
let mut refcount = self.refcount.lock().unwrap();
assert!(*refcount > 0);
*refcount -= 1;
if *refcount == 0 {
// It's possible this send will fail if the script task
// has already exited. There's not much we can do at this
// point though.
let msg = ScriptMsg::RefcountCleanup(TrustedReference(self.ptr));
let _ = self.script_chan.send(msg);
}
}
}
/// The set of live, pinned DOM objects that are currently prevented
/// from being garbage collected due to outstanding references.
pub struct LiveDOMReferences {
// keyed on pointer to Rust DOM object
table: RefCell<HashMap<*const libc::c_void, Arc<Mutex<usize>>>>
}
impl LiveDOMReferences {
/// Set up the task-local data required for storing the outstanding DOM references.
pub fn initialize() {
LIVE_REFERENCES.with(|ref r| {
*r.borrow_mut() = Some(LiveDOMReferences {
table: RefCell::new(HashMap::new()),
})
});
}
fn addref<T: Reflectable>(&self, ptr: *const T) -> Arc<Mutex<usize>> {
let mut table = self.table.borrow_mut();
match table.entry(ptr as *const libc::c_void) {
Occupied(mut entry) => {
let refcount = entry.get_mut();
*refcount.lock().unwrap() += 1;
refcount.clone()
}
Vacant(entry) => {
let refcount = Arc::new(Mutex::new(1));
entry.insert(refcount.clone());
refcount
}
}
}
/// Unpin the given DOM object if its refcount is 0.
pub fn cleanup(raw_reflectable: TrustedReference) {
let TrustedReference(raw_reflectable) = raw_reflectable;
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let mut table = live_references.table.borrow_mut();
match table.entry(raw_reflectable) {
Occupied(entry) => {
if *entry.get().lock().unwrap() != 0 {
// there could have been a new reference taken since
// this message was dispatched.
return;
}
let _ = entry.remove();
}
Vacant(_) => {
// there could be a cleanup message dispatched, then a new
// pinned reference obtained and released before the message
// is processed, at which point there would be no matching
// hashtable entry.
info!("attempt to cleanup an unrecognized reflector");
}
}
})
}
}
/// A JSTraceDataOp for tracing reflectors held in LIVE_REFERENCES
pub unsafe extern fn trace_refcounted_objects(tracer: *mut JSTracer, _data: *mut libc::c_void) {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let table = live_references.table.borrow();
for obj in table.keys() {
let reflectable = &*(*obj as *const Reflector);
trace_reflector(tracer, "LIVE_REFERENCES", reflectable);
}
});
} | use std::rc::Rc;
use std::cell::RefCell;
use super::LiveDOMReferences;
thread_local!(pub static LIVE_REFERENCES: Rc<RefCell<Option<LiveDOMReferences>>> =
Rc::new(RefCell::new(None))); | random_line_split |
refcounted.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A generic, safe mechanism by which DOM objects can be pinned and transferred
//! between tasks (or intra-task for asynchronous events). Akin to Gecko's
//! nsMainThreadPtrHandle, this uses thread-safe reference counting and ensures
//! that the actual SpiderMonkey GC integration occurs on the script task via
//! message passing. Ownership of a `Trusted<T>` object means the DOM object of
//! type T to which it points remains alive. Any other behaviour is undefined.
//! To guarantee the lifetime of a DOM object when performing asynchronous operations,
//! obtain a `Trusted<T>` from that object and pass it along with each operation.
//! A usable pointer to the original DOM object can be obtained on the script task
//! from a `Trusted<T>` via the `root` method.
//!
//! The implementation of Trusted<T> is as follows:
//! A hashtable resides in the script task, keyed on the pointer to the Rust DOM object.
//! The values in this hashtable are atomic reference counts. When a Trusted<T> object is
//! created or cloned, this count is increased. When a Trusted<T> is dropped, the count
//! decreases. If the count hits zero, a message is dispatched to the script task to remove
//! the entry from the hashmap if the count is still zero. The JS reflector for the DOM object
//! is rooted when a hashmap entry is first created, and unrooted when the hashmap entry
//! is removed.
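// Illustrative note on the race that `cleanup` below must tolerate:
//
//   drop(last_trusted); // count -> 0, cleanup message queued
//   let revived = Trusted::new(cx, &*obj, chan.clone()); // count -> 1 again
//   // when the queued message arrives, cleanup re-checks the count,
//   // sees it is non-zero, and leaves the entry rooted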
use dom::bindings::js::Root;
use dom::bindings::utils::{Reflector, Reflectable};
use dom::bindings::trace::trace_reflector;
use script_task::{ScriptMsg, ScriptChan};
use js::jsapi::{JSContext, JSTracer};
use libc;
use std::cell::RefCell;
use std::collections::hash_map::HashMap;
use std::collections::hash_map::Entry::{Vacant, Occupied};
use std::marker::PhantomData;
use std::sync::{Arc, Mutex};
use core::nonzero::NonZero;
#[allow(missing_docs)] // FIXME
mod dummy { // Attributes don’t apply through the macro.
use std::rc::Rc;
use std::cell::RefCell;
use super::LiveDOMReferences;
thread_local!(pub static LIVE_REFERENCES: Rc<RefCell<Option<LiveDOMReferences>>> =
Rc::new(RefCell::new(None)));
}
pub use self::dummy::LIVE_REFERENCES;
/// A pointer to a Rust DOM object that needs to be destroyed.
pub struct Tr | const libc::c_void);
unsafe impl Send for TrustedReference {}
/// A safe wrapper around a raw pointer to a DOM object that can be
/// shared among tasks for use in asynchronous operations. The underlying
/// DOM object is guaranteed to live at least as long as the last outstanding
/// `Trusted<T>` instance.
#[allow_unrooted_interior]
pub struct Trusted<T: Reflectable> {
/// A pointer to the Rust DOM object of type T, but void to allow
/// sending `Trusted<T>` between tasks, regardless of T's sendability.
ptr: *const libc::c_void,
refcount: Arc<Mutex<usize>>,
script_chan: Box<ScriptChan + Send>,
owner_thread: *const libc::c_void,
phantom: PhantomData<T>,
}
unsafe impl<T: Reflectable> Send for Trusted<T> {}
impl<T: Reflectable> Trusted<T> {
/// Create a new `Trusted<T>` instance from an existing DOM pointer. The DOM object will
/// be prevented from being GCed for the duration of the resulting `Trusted<T>` object's
/// lifetime.
pub fn new(_cx: *mut JSContext, ptr: &T, script_chan: Box<ScriptChan + Send>) -> Trusted<T> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let refcount = live_references.addref(&*ptr as *const T);
Trusted {
ptr: &*ptr as *const T as *const libc::c_void,
refcount: refcount,
script_chan: script_chan.clone(),
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
phantom: PhantomData,
}
})
}
/// Obtain a usable DOM pointer from a pinned `Trusted<T>` value. Fails if used on
/// a different thread than the original value from which this `Trusted<T>` was
/// obtained.
pub fn root(&self) -> Root<T> {
assert!(LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
self.owner_thread == (&*live_references) as *const _ as *const libc::c_void
}));
unsafe {
Root::new(NonZero::new(self.ptr as *const T))
}
}
}
impl<T: Reflectable> Clone for Trusted<T> {
fn clone(&self) -> Trusted<T> {
{
let mut refcount = self.refcount.lock().unwrap();
*refcount += 1;
}
Trusted {
ptr: self.ptr,
refcount: self.refcount.clone(),
script_chan: self.script_chan.clone(),
owner_thread: self.owner_thread,
phantom: PhantomData,
}
}
}
impl<T: Reflectable> Drop for Trusted<T> {
fn drop(&mut self) {
let mut refcount = self.refcount.lock().unwrap();
assert!(*refcount > 0);
*refcount -= 1;
if *refcount == 0 {
// It's possible this send will fail if the script task
// has already exited. There's not much we can do at this
// point though.
let msg = ScriptMsg::RefcountCleanup(TrustedReference(self.ptr));
let _ = self.script_chan.send(msg);
}
}
}
/// The set of live, pinned DOM objects that are currently prevented
/// from being garbage collected due to outstanding references.
pub struct LiveDOMReferences {
// keyed on pointer to Rust DOM object
table: RefCell<HashMap<*const libc::c_void, Arc<Mutex<usize>>>>
}
impl LiveDOMReferences {
/// Set up the task-local data required for storing the outstanding DOM references.
pub fn initialize() {
LIVE_REFERENCES.with(|ref r| {
*r.borrow_mut() = Some(LiveDOMReferences {
table: RefCell::new(HashMap::new()),
})
});
}
fn addref<T: Reflectable>(&self, ptr: *const T) -> Arc<Mutex<usize>> {
let mut table = self.table.borrow_mut();
match table.entry(ptr as *const libc::c_void) {
Occupied(mut entry) => {
let refcount = entry.get_mut();
*refcount.lock().unwrap() += 1;
refcount.clone()
}
Vacant(entry) => {
let refcount = Arc::new(Mutex::new(1));
entry.insert(refcount.clone());
refcount
}
}
}
/// Unpin the given DOM object if its refcount is 0.
pub fn cleanup(raw_reflectable: TrustedReference) {
let TrustedReference(raw_reflectable) = raw_reflectable;
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let mut table = live_references.table.borrow_mut();
match table.entry(raw_reflectable) {
Occupied(entry) => {
if *entry.get().lock().unwrap() != 0 {
// there could have been a new reference taken since
// this message was dispatched.
return;
}
let _ = entry.remove();
}
Vacant(_) => {
// there could be a cleanup message dispatched, then a new
// pinned reference obtained and released before the message
// is processed, at which point there would be no matching
// hashtable entry.
info!("attempt to cleanup an unrecognized reflector");
}
}
})
}
}
/// A JSTraceDataOp for tracing reflectors held in LIVE_REFERENCES
pub unsafe extern fn trace_refcounted_objects(tracer: *mut JSTracer, _data: *mut libc::c_void) {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let table = live_references.table.borrow();
for obj in table.keys() {
let reflectable = &*(*obj as *const Reflector);
trace_reflector(tracer, "LIVE_REFERENCES", reflectable);
}
});
}
| ustedReference(* | identifier_name |
TestFileDoc.js | 'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
var _createClass = (function () { function defineProperties(target, props) | return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; desc = parent = getter = undefined; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) subClass.__proto__ = superClass; }
var _FileDocJs = require('./FileDoc.js');
var _FileDocJs2 = _interopRequireDefault(_FileDocJs);
/**
* Doc class for test code file.
*/
var TestFileDoc = (function (_FileDoc) {
function TestFileDoc() {
_classCallCheck(this, TestFileDoc);
_get(Object.getPrototypeOf(TestFileDoc.prototype), 'constructor', this).apply(this, arguments);
}
_inherits(TestFileDoc, _FileDoc);
_createClass(TestFileDoc, [{
key: '@kind',
/** set ``testFile`` to kind. */
value: function kind() {
this._value.kind = 'testFile';
}
}]);
return TestFileDoc;
})(_FileDocJs2['default']);
exports['default'] = TestFileDoc;
module.exports = exports['default']; | { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } | identifier_body |
TestFileDoc.js | 'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; desc = parent = getter = undefined; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) subClass.__proto__ = superClass; }
var _FileDocJs = require('./FileDoc.js');
var _FileDocJs2 = _interopRequireDefault(_FileDocJs);
/**
* Doc class for test code file.
*/
var TestFileDoc = (function (_FileDoc) {
function | () {
_classCallCheck(this, TestFileDoc);
_get(Object.getPrototypeOf(TestFileDoc.prototype), 'constructor', this).apply(this, arguments);
}
_inherits(TestFileDoc, _FileDoc);
_createClass(TestFileDoc, [{
key: '@kind',
/** set ``testFile`` to kind. */
value: function kind() {
this._value.kind = 'testFile';
}
}]);
return TestFileDoc;
})(_FileDocJs2['default']);
exports['default'] = TestFileDoc;
module.exports = exports['default']; | TestFileDoc | identifier_name |
TestFileDoc.js | 'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; desc = parent = getter = undefined; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) subClass.__proto__ = superClass; } |
var _FileDocJs2 = _interopRequireDefault(_FileDocJs);
/**
* Doc class for test code file.
*/
var TestFileDoc = (function (_FileDoc) {
function TestFileDoc() {
_classCallCheck(this, TestFileDoc);
_get(Object.getPrototypeOf(TestFileDoc.prototype), 'constructor', this).apply(this, arguments);
}
_inherits(TestFileDoc, _FileDoc);
_createClass(TestFileDoc, [{
key: '@kind',
/** set ``testFile`` to kind. */
value: function kind() {
this._value.kind = 'testFile';
}
}]);
return TestFileDoc;
})(_FileDocJs2['default']);
exports['default'] = TestFileDoc;
module.exports = exports['default']; |
var _FileDocJs = require('./FileDoc.js'); | random_line_split |
TestFileDoc.js | 'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; desc = parent = getter = undefined; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) | else { _x = parent; _x2 = property; _x3 = receiver; _again = true; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) subClass.__proto__ = superClass; }
var _FileDocJs = require('./FileDoc.js');
var _FileDocJs2 = _interopRequireDefault(_FileDocJs);
/**
* Doc class for test code file.
*/
var TestFileDoc = (function (_FileDoc) {
function TestFileDoc() {
_classCallCheck(this, TestFileDoc);
_get(Object.getPrototypeOf(TestFileDoc.prototype), 'constructor', this).apply(this, arguments);
}
_inherits(TestFileDoc, _FileDoc);
_createClass(TestFileDoc, [{
key: '@kind',
/** set ``testFile`` to kind. */
value: function kind() {
this._value.kind = 'testFile';
}
}]);
return TestFileDoc;
})(_FileDocJs2['default']);
exports['default'] = TestFileDoc;
module.exports = exports['default']; | { return undefined; } | conditional_block |
plugins.rs | use crate::{
config::{self},
prelude::*,
};
use neon::{prelude::*, result::Throw};
use std::str::FromStr;
use stencila::{
config::Config,
tokio::sync::MutexGuard,
};
use plugins::{self, Plugin, PluginInstallation, Plugins, PLUGINS};
/// Lock the global plugins store
pub fn lock(cx: &mut FunctionContext) -> NeonResult<MutexGuard<'static, Plugins>> {
match PLUGINS.try_lock() {
Ok(guard) => Ok(guard),
Err(error) => cx.throw_error(format!(
"When attempting to lock plugins: {}",
error.to_string()
)),
}
}
/// Get plugin schema
pub fn schema(cx: FunctionContext) -> JsResult<JsString> {
let schema = Plugin::schema();
to_json_or_throw(cx, schema)
}
/// List plugins
pub fn list(mut cx: FunctionContext) -> JsResult<JsString> |
/// Install a plugin
pub fn install(mut cx: FunctionContext) -> JsResult<JsString> {
let spec = &cx.argument::<JsString>(0)?.value(&mut cx);
let config = &config::lock(&mut cx)?;
let installs = &installations(&mut cx, 1, config)?;
let aliases = &config.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match RUNTIME.block_on(async { Plugin::install(spec, installs, aliases, plugins, None).await })
{
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Uninstall a plugin
pub fn uninstall(mut cx: FunctionContext) -> JsResult<JsString> {
let alias = &cx.argument::<JsString>(0)?.value(&mut cx);
let aliases = &config::lock(&mut cx)?.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match Plugin::uninstall(alias, aliases, plugins) {
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Upgrade a plugin
pub fn upgrade(mut cx: FunctionContext) -> JsResult<JsString> {
let spec = &cx.argument::<JsString>(0)?.value(&mut cx);
let config = &config::lock(&mut cx)?;
let installs = &config.plugins.installations;
let aliases = &config.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match RUNTIME.block_on(async { Plugin::upgrade(spec, installs, aliases, plugins).await }) {
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Refresh plugins
pub fn refresh(mut cx: FunctionContext) -> JsResult<JsString> {
let arg = cx.argument::<JsArray>(0)?.to_vec(&mut cx)?;
let list = arg
.iter()
.map(|item| {
item.to_string(&mut cx)
.expect("Unable to convert to string")
.value(&mut cx)
})
.collect();
let config = &config::lock(&mut cx)?;
let aliases = &config.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match RUNTIME.block_on(async { Plugin::refresh_list(list, aliases, plugins).await }) {
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Get the `installations` argument, falling back to the array in `config.plugins.installations`
pub fn installations(
cx: &mut FunctionContext,
position: i32,
config: &Config,
) -> Result<Vec<PluginInstallation>, Throw> {
let arg = cx.argument::<JsArray>(position)?.to_vec(cx)?;
if arg.is_empty() {
Ok(config.plugins.installations.clone())
} else {
let mut installations = Vec::new();
for value in arg {
let str = value.to_string(cx)?.value(cx);
let installation = match plugins::PluginInstallation::from_str(&str) {
Ok(value) => value,
Err(error) => return cx.throw_error(error.to_string()),
};
installations.push(installation)
}
Ok(installations)
}
}
| {
let aliases = &config::lock(&mut cx)?.plugins.aliases;
let plugins = &*lock(&mut cx)?;
to_json(cx, plugins.list_plugins(aliases))
} | identifier_body |
plugins.rs | use crate::{
config::{self},
prelude::*,
};
use neon::{prelude::*, result::Throw};
use std::str::FromStr;
use stencila::{
config::Config,
tokio::sync::MutexGuard,
};
use plugins::{self, Plugin, PluginInstallation, Plugins, PLUGINS};
/// Lock the global plugins store
pub fn lock(cx: &mut FunctionContext) -> NeonResult<MutexGuard<'static, Plugins>> {
match PLUGINS.try_lock() {
Ok(guard) => Ok(guard),
Err(error) => cx.throw_error(format!(
"When attempting to lock plugins: {}",
error.to_string()
)),
}
}
/// Get plugin schema
pub fn schema(cx: FunctionContext) -> JsResult<JsString> {
let schema = Plugin::schema();
to_json_or_throw(cx, schema)
}
/// List plugins
pub fn list(mut cx: FunctionContext) -> JsResult<JsString> {
let aliases = &config::lock(&mut cx)?.plugins.aliases;
let plugins = &*lock(&mut cx)?;
to_json(cx, plugins.list_plugins(aliases))
}
/// Install a plugin
pub fn install(mut cx: FunctionContext) -> JsResult<JsString> {
let spec = &cx.argument::<JsString>(0)?.value(&mut cx);
let config = &config::lock(&mut cx)?;
let installs = &installations(&mut cx, 1, config)?;
let aliases = &config.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match RUNTIME.block_on(async { Plugin::install(spec, installs, aliases, plugins, None).await })
{
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Uninstall a plugin
pub fn uninstall(mut cx: FunctionContext) -> JsResult<JsString> {
let alias = &cx.argument::<JsString>(0)?.value(&mut cx);
let aliases = &config::lock(&mut cx)?.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match Plugin::uninstall(alias, aliases, plugins) {
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Upgrade a plugin
pub fn upgrade(mut cx: FunctionContext) -> JsResult<JsString> {
let spec = &cx.argument::<JsString>(0)?.value(&mut cx);
let config = &config::lock(&mut cx)?;
let installs = &config.plugins.installations;
let aliases = &config.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match RUNTIME.block_on(async { Plugin::upgrade(spec, installs, aliases, plugins).await }) {
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Refresh plugins
pub fn refresh(mut cx: FunctionContext) -> JsResult<JsString> {
let arg = cx.argument::<JsArray>(0)?.to_vec(&mut cx)?;
let list = arg
.iter()
.map(|item| {
item.to_string(&mut cx)
.expect("Unable to convert to string")
.value(&mut cx)
})
.collect();
let config = &config::lock(&mut cx)?;
let aliases = &config.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match RUNTIME.block_on(async { Plugin::refresh_list(list, aliases, plugins).await }) {
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Get the `installations` argument, falling back to the array in `config.plugins.installations`
pub fn | (
cx: &mut FunctionContext,
position: i32,
config: &Config,
) -> Result<Vec<PluginInstallation>, Throw> {
let arg = cx.argument::<JsArray>(position)?.to_vec(cx)?;
if arg.is_empty() {
Ok(config.plugins.installations.clone())
} else {
let mut installations = Vec::new();
for value in arg {
let str = value.to_string(cx)?.value(cx);
let installation = match plugins::PluginInstallation::from_str(&str) {
Ok(value) => value,
Err(error) => return cx.throw_error(error.to_string()),
};
installations.push(installation)
}
Ok(installations)
}
}
| installations | identifier_name |
plugins.rs | use crate::{
config::{self},
prelude::*,
};
use neon::{prelude::*, result::Throw};
use std::str::FromStr;
use stencila::{
config::Config,
tokio::sync::MutexGuard,
};
use plugins::{self, Plugin, PluginInstallation, Plugins, PLUGINS};
/// Lock the global plugins store
pub fn lock(cx: &mut FunctionContext) -> NeonResult<MutexGuard<'static, Plugins>> {
match PLUGINS.try_lock() {
Ok(guard) => Ok(guard),
Err(error) => cx.throw_error(format!(
"When attempting to lock plugins: {}",
error.to_string()
)),
}
}
/// Get plugin schema
pub fn schema(cx: FunctionContext) -> JsResult<JsString> {
let schema = Plugin::schema();
to_json_or_throw(cx, schema)
}
/// List plugins
pub fn list(mut cx: FunctionContext) -> JsResult<JsString> {
let aliases = &config::lock(&mut cx)?.plugins.aliases;
let plugins = &*lock(&mut cx)?;
to_json(cx, plugins.list_plugins(aliases))
}
/// Install a plugin
pub fn install(mut cx: FunctionContext) -> JsResult<JsString> {
let spec = &cx.argument::<JsString>(0)?.value(&mut cx);
let config = &config::lock(&mut cx)?;
let installs = &installations(&mut cx, 1, config)?;
let aliases = &config.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match RUNTIME.block_on(async { Plugin::install(spec, installs, aliases, plugins, None).await }) | {
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Uninstall a plugin
pub fn uninstall(mut cx: FunctionContext) -> JsResult<JsString> {
let alias = &cx.argument::<JsString>(0)?.value(&mut cx);
let aliases = &config::lock(&mut cx)?.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match Plugin::uninstall(alias, aliases, plugins) {
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Upgrade a plugin
pub fn upgrade(mut cx: FunctionContext) -> JsResult<JsString> {
let spec = &cx.argument::<JsString>(0)?.value(&mut cx);
let config = &config::lock(&mut cx)?;
let installs = &config.plugins.installations;
let aliases = &config.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match RUNTIME.block_on(async { Plugin::upgrade(spec, installs, aliases, plugins).await }) {
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Refresh plugins
pub fn refresh(mut cx: FunctionContext) -> JsResult<JsString> {
let arg = cx.argument::<JsArray>(0)?.to_vec(&mut cx)?;
let list = arg
.iter()
.map(|item| {
item.to_string(&mut cx)
.expect("Unable to convert to string")
.value(&mut cx)
})
.collect();
let config = &config::lock(&mut cx)?;
let aliases = &config.plugins.aliases;
let plugins = &mut *lock(&mut cx)?;
match RUNTIME.block_on(async { Plugin::refresh_list(list, aliases, plugins).await }) {
Ok(_) => to_json(cx, plugins.list_plugins(aliases)),
Err(error) => cx.throw_error(error.to_string()),
}
}
/// Get the `installations` argument, falling back to the array in `config.plugins.installations`
pub fn installations(
cx: &mut FunctionContext,
position: i32,
config: &Config,
) -> Result<Vec<PluginInstallation>, Throw> {
let arg = cx.argument::<JsArray>(position)?.to_vec(cx)?;
if arg.is_empty() {
Ok(config.plugins.installations.clone())
} else {
let mut installations = Vec::new();
for value in arg {
let str = value.to_string(cx)?.value(cx);
let installation = match plugins::PluginInstallation::from_str(&str) {
Ok(value) => value,
Err(error) => return cx.throw_error(error.to_string()),
};
installations.push(installation)
}
Ok(installations)
}
} | random_line_split |
|
test-gun-modifier-base.ts | import { expect } from 'chai';
import 'mocha';
import {Vector2D} from '../../gun-tree-ts/vector';
import {Bullet} from '../../gun-tree-ts/bullet';
import {Effect} from '../../gun-tree-ts/effect';
import {Gun, AimingDirectionType, UnusableDirectionTypeError, MustContainSubGunsError}
from '../../gun-tree-ts/gun-base';
import {FiringProperty} from '../../gun-tree-ts/player';
import {Wait, Combine} from '../../gun-tree-ts/gun-contents/gun-primitive';
import {DebuggerGunTreePlayer} from '../../gun-tree-ts/debugger/debugger-player';
import {CombinedModifier}
from '../../gun-tree-ts/gun-contents/gun-modifier-base';
import {AddAngle}
from '../../gun-tree-ts/gun-contents/gun-modifier-angle';
import {AddSpeed}
from '../../gun-tree-ts/gun-contents/gun-modifier-speed';
| const property = new FiringProperty(player);
const angleOffset = 2;
const speedOffset = 3;
const gun = new CombinedModifier(
new AddAngle(angleOffset),
new AddSpeed(speedOffset)
);
gun.modifyProperty(property);
expect(property.getAngle()).to.equals(angleOffset);
expect(property.getSpeed()).to.equals(1 + speedOffset);
});
it('throw_exception_if_no_guns', () => {
// Check exception.
expect(() => {new CombinedModifier();}).to.throw(
MustContainSubGunsError, 'CombinedModifier needs sub modifiers');
});
}); | describe('CombinedModifier_gun_unit_test', () => {
it('modify_property', () => {
const player = new DebuggerGunTreePlayer(); | random_line_split |
contactus.js | $(document).on('click', '.contact-click', function () {
var name = $("input#name-contact").val();
var surname = $("input#surname-contact").val();
var email = $("input#email-contact").val();
var message = $("textarea#message-contact").val();
var phone = $("input#phone-contact").val();
var htmlBody = '<h2>'+ name + ' ' + surname +' has contacted us</h2>'+
'<h4><strong>This is their question: </strong></h4>'+
'<p>' + message + '</p>'+
'<h4><strong>If you wish to contact them, these are their details: </strong></h4>' +
'<p><strong>Email: </strong>' + email + '</p>'+
'<p><strong>Phone: </strong>' + phone + '</p>';
var jsonFile = {
'name': name, | };
if(name && surname && email && message && phone && htmlBody){
$.ajax({
type: "POST",
url: "controllers/contactus.php",
data: {
Contact: jsonFile
},
dataType: "json"
}).done(function (data) {
if (data.response == 'ok') {
$('#contactModal').modal('show');
$('#contactModal').on('hidden.bs.modal', function () {
document.location = "home";
});
}
else{
alert("Uppss! Ha habido un error durante el envío: Por favor intentelo de nuevo más tarde");
document.location = "contactus";
}
});
}
else{
alert('All form fields are required; please fill them in to continue');
}
}); | 'surname': surname,
'mail': email,
'message': htmlBody,
'phone': phone | random_line_split |
contactus.js | $(document).on('click', '.contact-click', function () {
var name = $("input#name-contact").val();
var surname = $("input#surname-contact").val();
var email = $("input#email-contact").val();
var message = $("textarea#message-contact").val();
var phone = $("input#phone-contact").val();
var htmlBody = '<h2>'+ name + ' ' + surname +' has contacted us</h2>'+
'<h4><strong>This is their question: </strong></h4>'+
'<p>' + message + '</p>'+
'<h4><strong>If you wish to contact them, these are their details: </strong></h4>' +
'<p><strong>Email: </strong>' + email + '</p>'+
'<p><strong>Phone: </strong>' + phone + '</p>';
var jsonFile = {
'name': name,
'surname': surname,
'mail': email,
'message': htmlBody,
'phone': phone
};
if(name && surname && email && message && phone && htmlBody){
$.ajax({
type: "POST",
url: "controllers/contactus.php",
data: {
Contact: jsonFile
},
dataType: "json"
}).done(function (data) {
if (data.response == 'ok') {
| else{
alert("Uppss! Ha habido un error durante el envío: Por favor intentelo de nuevo más tarde");
document.location = "contactus";
}
});
}
else{
alert('All form fields are required; please fill them in to continue');
}
}); | $('#contactModal').modal('show');
$('#contactModal').on('hidden.bs.modal', function () {
document.location = "home";
});
}
| conditional_block |
forms.py | # -*- coding: utf-8 -*-
import django
import sys
from itertools import chain
from django import forms
from django.conf import settings
from django.db.models.query import QuerySet
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.html import conditional_escape, escape
from django.utils.safestring import mark_safe
if sys.version_info[0] < 3:
iteritems = lambda d: iter(d.iteritems())
string_types = basestring,
str_ = unicode
else:
iteritems = lambda d: iter(d.items())
string_types = str,
str_ = str
STATIC_URL = getattr(settings, 'STATIC_URL', settings.MEDIA_URL)
class SortedCheckboxSelectMultiple(forms.CheckboxSelectMultiple):
class Media:
js = (
STATIC_URL + 'sortedm2m/widget.js',
STATIC_URL + 'sortedm2m/jquery-ui.js',
)
css = {'screen': (
STATIC_URL + 'sortedm2m/widget.css',
)}
def build_attrs(self, attrs=None, **kwargs):
attrs = super(SortedCheckboxSelectMultiple, self).\
build_attrs(attrs, **kwargs)
classes = attrs.setdefault('class', '').split()
classes.append('sortedm2m')
attrs['class'] = ' '.join(classes)
return attrs
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
has_id = attrs and 'id' in attrs
final_attrs = self.build_attrs(attrs, name=name)
# Normalize to strings
str_values = [force_text(v) for v in value]
selected = []
unselected = []
for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
# If an ID attribute was given, add a numeric index as a suffix,
# so that the checkboxes don't all have the same ID attribute.
if has_id:
final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
label_for = ' for="%s"' % conditional_escape(final_attrs['id'])
else:
label_for = ''
cb = forms.CheckboxInput(final_attrs, check_test=lambda value: value in str_values)
option_value = force_text(option_value)
rendered_cb = cb.render(name, option_value)
option_label = conditional_escape(force_text(option_label))
item = {'label_for': label_for, 'rendered_cb': rendered_cb, 'option_label': option_label, 'option_value': option_value}
if option_value in str_values:
|
else:
unselected.append(item)
# re-order `selected` array according str_values which is a set of `option_value`s in the order they should be shown on screen
ordered = []
for value in str_values:
for select in selected:
if value == select['option_value']:
ordered.append(select)
selected = ordered
html = render_to_string(
'sortedm2m/sorted_checkbox_select_multiple_widget.html',
{'selected': selected, 'unselected': unselected})
return mark_safe(html)
def value_from_datadict(self, data, files, name):
value = data.get(name, None)
if isinstance(value, string_types):
return [v for v in value.split(',') if v]
return value
if django.VERSION < (1, 7):
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = [force_text(value) for value in initial]
data_set = [force_text(value) for value in data]
return data_set != initial_set
class SortedMultipleChoiceField(forms.ModelMultipleChoiceField):
widget = SortedCheckboxSelectMultiple
def clean(self, value):
queryset = super(SortedMultipleChoiceField, self).clean(value)
if value is None or not isinstance(queryset, QuerySet):
return queryset
object_list = dict((
(str_(key), value)
for key, value in iteritems(queryset.in_bulk(value))))
return [object_list[str_(pk)] for pk in value]
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = [force_text(value) for value in self.prepare_value(initial)]
data_set = [force_text(value) for value in data]
return data_set != initial_set
| selected.append(item) | conditional_block |
forms.py | # -*- coding: utf-8 -*-
import django
import sys
from itertools import chain
from django import forms
from django.conf import settings
from django.db.models.query import QuerySet
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.html import conditional_escape, escape
from django.utils.safestring import mark_safe
if sys.version_info[0] < 3:
iteritems = lambda d: iter(d.iteritems())
string_types = basestring,
str_ = unicode
else:
iteritems = lambda d: iter(d.items())
string_types = str,
str_ = str
STATIC_URL = getattr(settings, 'STATIC_URL', settings.MEDIA_URL)
class SortedCheckboxSelectMultiple(forms.CheckboxSelectMultiple):
class Media:
js = (
STATIC_URL + 'sortedm2m/widget.js',
STATIC_URL + 'sortedm2m/jquery-ui.js',
)
css = {'screen': (
STATIC_URL + 'sortedm2m/widget.css',
)}
def build_attrs(self, attrs=None, **kwargs):
attrs = super(SortedCheckboxSelectMultiple, self).\
build_attrs(attrs, **kwargs)
classes = attrs.setdefault('class', '').split()
classes.append('sortedm2m')
attrs['class'] = ' '.join(classes)
return attrs
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
has_id = attrs and 'id' in attrs
final_attrs = self.build_attrs(attrs, name=name)
# Normalize to strings
str_values = [force_text(v) for v in value]
selected = []
unselected = []
for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
# If an ID attribute was given, add a numeric index as a suffix,
# so that the checkboxes don't all have the same ID attribute.
if has_id:
final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
label_for = ' for="%s"' % conditional_escape(final_attrs['id'])
else:
label_for = ''
cb = forms.CheckboxInput(final_attrs, check_test=lambda value: value in str_values)
option_value = force_text(option_value)
rendered_cb = cb.render(name, option_value)
option_label = conditional_escape(force_text(option_label))
item = {'label_for': label_for, 'rendered_cb': rendered_cb, 'option_label': option_label, 'option_value': option_value}
if option_value in str_values:
selected.append(item)
else:
unselected.append(item)
# re-order `selected` array according str_values which is a set of `option_value`s in the order they should be shown on screen
ordered = []
for value in str_values:
for select in selected:
if value == select['option_value']:
ordered.append(select)
selected = ordered
html = render_to_string(
'sortedm2m/sorted_checkbox_select_multiple_widget.html',
{'selected': selected, 'unselected': unselected})
return mark_safe(html)
def value_from_datadict(self, data, files, name):
value = data.get(name, None)
if isinstance(value, string_types):
return [v for v in value.split(',') if v]
return value
if django.VERSION < (1, 7):
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = [force_text(value) for value in initial]
data_set = [force_text(value) for value in data]
return data_set != initial_set
class SortedMultipleChoiceField(forms.ModelMultipleChoiceField):
widget = SortedCheckboxSelectMultiple
def clean(self, value):
queryset = super(SortedMultipleChoiceField, self).clean(value)
if value is None or not isinstance(queryset, QuerySet):
return queryset
object_list = dict((
(str_(key), value)
for key, value in iteritems(queryset.in_bulk(value))))
return [object_list[str_(pk)] for pk in value]
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True | return data_set != initial_set | initial_set = [force_text(value) for value in self.prepare_value(initial)]
data_set = [force_text(value) for value in data] | random_line_split |
forms.py | # -*- coding: utf-8 -*-
import django
import sys
from itertools import chain
from django import forms
from django.conf import settings
from django.db.models.query import QuerySet
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.html import conditional_escape, escape
from django.utils.safestring import mark_safe
if sys.version_info[0] < 3:
iteritems = lambda d: iter(d.iteritems())
string_types = basestring,
str_ = unicode
else:
iteritems = lambda d: iter(d.items())
string_types = str,
str_ = str
STATIC_URL = getattr(settings, 'STATIC_URL', settings.MEDIA_URL)
class SortedCheckboxSelectMultiple(forms.CheckboxSelectMultiple):
|
class SortedMultipleChoiceField(forms.ModelMultipleChoiceField):
widget = SortedCheckboxSelectMultiple
def clean(self, value):
queryset = super(SortedMultipleChoiceField, self).clean(value)
if value is None or not isinstance(queryset, QuerySet):
return queryset
object_list = dict((
(str_(key), value)
for key, value in iteritems(queryset.in_bulk(value))))
return [object_list[str_(pk)] for pk in value]
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = [force_text(value) for value in self.prepare_value(initial)]
data_set = [force_text(value) for value in data]
return data_set != initial_set
| class Media:
js = (
STATIC_URL + 'sortedm2m/widget.js',
STATIC_URL + 'sortedm2m/jquery-ui.js',
)
css = {'screen': (
STATIC_URL + 'sortedm2m/widget.css',
)}
def build_attrs(self, attrs=None, **kwargs):
attrs = super(SortedCheckboxSelectMultiple, self).\
build_attrs(attrs, **kwargs)
classes = attrs.setdefault('class', '').split()
classes.append('sortedm2m')
attrs['class'] = ' '.join(classes)
return attrs
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
has_id = attrs and 'id' in attrs
final_attrs = self.build_attrs(attrs, name=name)
# Normalize to strings
str_values = [force_text(v) for v in value]
selected = []
unselected = []
for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
# If an ID attribute was given, add a numeric index as a suffix,
# so that the checkboxes don't all have the same ID attribute.
if has_id:
final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
label_for = ' for="%s"' % conditional_escape(final_attrs['id'])
else:
label_for = ''
cb = forms.CheckboxInput(final_attrs, check_test=lambda value: value in str_values)
option_value = force_text(option_value)
rendered_cb = cb.render(name, option_value)
option_label = conditional_escape(force_text(option_label))
item = {'label_for': label_for, 'rendered_cb': rendered_cb, 'option_label': option_label, 'option_value': option_value}
if option_value in str_values:
selected.append(item)
else:
unselected.append(item)
# re-order `selected` array according str_values which is a set of `option_value`s in the order they should be shown on screen
ordered = []
for value in str_values:
for select in selected:
if value == select['option_value']:
ordered.append(select)
selected = ordered
html = render_to_string(
'sortedm2m/sorted_checkbox_select_multiple_widget.html',
{'selected': selected, 'unselected': unselected})
return mark_safe(html)
def value_from_datadict(self, data, files, name):
value = data.get(name, None)
if isinstance(value, string_types):
return [v for v in value.split(',') if v]
return value
if django.VERSION < (1, 7):
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = [force_text(value) for value in initial]
data_set = [force_text(value) for value in data]
return data_set != initial_set | identifier_body |
forms.py | # -*- coding: utf-8 -*-
import django
import sys
from itertools import chain
from django import forms
from django.conf import settings
from django.db.models.query import QuerySet
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.html import conditional_escape, escape
from django.utils.safestring import mark_safe
if sys.version_info[0] < 3:
iteritems = lambda d: iter(d.iteritems())
string_types = basestring,
str_ = unicode
else:
iteritems = lambda d: iter(d.items())
string_types = str,
str_ = str
STATIC_URL = getattr(settings, 'STATIC_URL', settings.MEDIA_URL)
class SortedCheckboxSelectMultiple(forms.CheckboxSelectMultiple):
class Media:
js = (
STATIC_URL + 'sortedm2m/widget.js',
STATIC_URL + 'sortedm2m/jquery-ui.js',
)
css = {'screen': (
STATIC_URL + 'sortedm2m/widget.css',
)}
def build_attrs(self, attrs=None, **kwargs):
attrs = super(SortedCheckboxSelectMultiple, self).\
build_attrs(attrs, **kwargs)
classes = attrs.setdefault('class', '').split()
classes.append('sortedm2m')
attrs['class'] = ' '.join(classes)
return attrs
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
has_id = attrs and 'id' in attrs
final_attrs = self.build_attrs(attrs, name=name)
# Normalize to strings
str_values = [force_text(v) for v in value]
selected = []
unselected = []
for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
# If an ID attribute was given, add a numeric index as a suffix,
# so that the checkboxes don't all have the same ID attribute.
if has_id:
final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
label_for = ' for="%s"' % conditional_escape(final_attrs['id'])
else:
label_for = ''
cb = forms.CheckboxInput(final_attrs, check_test=lambda value: value in str_values)
option_value = force_text(option_value)
rendered_cb = cb.render(name, option_value)
option_label = conditional_escape(force_text(option_label))
item = {'label_for': label_for, 'rendered_cb': rendered_cb, 'option_label': option_label, 'option_value': option_value}
if option_value in str_values:
selected.append(item)
else:
unselected.append(item)
# re-order `selected` array according str_values which is a set of `option_value`s in the order they should be shown on screen
ordered = []
for value in str_values:
for select in selected:
if value == select['option_value']:
ordered.append(select)
selected = ordered
html = render_to_string(
'sortedm2m/sorted_checkbox_select_multiple_widget.html',
{'selected': selected, 'unselected': unselected})
return mark_safe(html)
def value_from_datadict(self, data, files, name):
value = data.get(name, None)
if isinstance(value, string_types):
return [v for v in value.split(',') if v]
return value
if django.VERSION < (1, 7):
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = [force_text(value) for value in initial]
data_set = [force_text(value) for value in data]
return data_set != initial_set
class SortedMultipleChoiceField(forms.ModelMultipleChoiceField):
widget = SortedCheckboxSelectMultiple
def | (self, value):
queryset = super(SortedMultipleChoiceField, self).clean(value)
if value is None or not isinstance(queryset, QuerySet):
return queryset
object_list = dict((
(str_(key), value)
for key, value in iteritems(queryset.in_bulk(value))))
return [object_list[str_(pk)] for pk in value]
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = [force_text(value) for value in self.prepare_value(initial)]
data_set = [force_text(value) for value in data]
return data_set != initial_set
| clean | identifier_name |
mod.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! OS-specific functionality
#![stable(feature = "os", since = "1.0.0")] | #[cfg(windows)] pub use sys::ext as windows;
#[cfg(target_os = "android")] pub mod android;
#[cfg(target_os = "bitrig")] pub mod bitrig;
#[cfg(target_os = "dragonfly")] pub mod dragonfly;
#[cfg(target_os = "freebsd")] pub mod freebsd;
#[cfg(target_os = "ios")] pub mod ios;
#[cfg(target_os = "linux")] pub mod linux;
#[cfg(target_os = "redox")] pub mod redox;
#[cfg(target_os = "macos")] pub mod macos;
#[cfg(target_os = "nacl")] pub mod nacl;
#[cfg(target_os = "netbsd")] pub mod netbsd;
#[cfg(target_os = "openbsd")] pub mod openbsd;
pub mod raw; | #![allow(missing_docs, bad_style)]
#[cfg(unix)] pub use sys::ext as unix; | random_line_split |
utils.js | exports.pathResolver = function pathResolver(base, path) {
base = base.slice();
path = path.slice();
while (base.length && path[0] === '..') {
path.shift();
base.pop();
}
return base.concat(path);
};
exports.pathSpliter = function pathSpliter(path) {
var splitPath;
if (path instanceof Array) {
splitPath = path;
} else if (typeof path === 'string') {
if (path.match(/[/]|[.][.]/)) {
splitPath = path.split('/');
} else {
splitPath = path.split('.');
}
if (!splitPath[0] && !splitPath[1]) {
splitPath = ['.'];
}
var barsProp = splitPath.pop()
.split('@');
if (barsProp[0]) {
splitPath.push(barsProp[0]);
}
if (barsProp[1]) {
splitPath.push('@' + barsProp[1]);
}
} else {
throw 'bad argument: expected String | Array<String>.';
}
return splitPath;
};
function findPath(arg) {
if (arg) {
if (arg.type === 'insert') {
return arg.path;
} else if (
arg.type === 'operator' ||
arg.type === 'transform'
) {
for (var i = 0; i < arg.arguments.length; i++) { | }
}
return '';
}
exports.findPath = findPath; | var argI = findPath(arg.arguments[i]);
if (argI) {
return argI;
}
} | random_line_split |
utils.js | exports.pathResolver = function pathResolver(base, path) {
base = base.slice();
path = path.slice();
while (base.length && path[0] === '..') {
path.shift();
base.pop();
}
return base.concat(path);
};
exports.pathSpliter = function pathSpliter(path) {
var splitPath;
if (path instanceof Array) {
splitPath = path;
} else if (typeof path === 'string') {
if (path.match(/[/]|[.][.]/)) {
splitPath = path.split('/');
} else {
splitPath = path.split('.');
}
if (!splitPath[0] && !splitPath[1]) {
splitPath = ['.'];
}
var barsProp = splitPath.pop()
.split('@');
if (barsProp[0]) {
splitPath.push(barsProp[0]);
}
if (barsProp[1]) {
splitPath.push('@' + barsProp[1]);
}
} else {
throw 'bad argument: expected String | Array<String>.';
}
return splitPath;
};
function | (arg) {
if (arg) {
if (arg.type === 'insert') {
return arg.path;
} else if (
arg.type === 'operator' ||
arg.type === 'transform'
) {
for (var i = 0; i < arg.arguments.length; i++) {
var argI = findPath(arg.arguments[i]);
if (argI) {
return argI;
}
}
}
}
return '';
}
exports.findPath = findPath;
| findPath | identifier_name |
utils.js | exports.pathResolver = function pathResolver(base, path) {
base = base.slice();
path = path.slice();
while (base.length && path[0] === '..') {
path.shift();
base.pop();
}
return base.concat(path);
};
exports.pathSpliter = function pathSpliter(path) {
var splitPath;
if (path instanceof Array) {
splitPath = path;
} else if (typeof path === 'string') {
if (path.match(/[/]|[.][.]/)) {
splitPath = path.split('/');
} else {
splitPath = path.split('.');
}
if (!splitPath[0] && !splitPath[1]) {
splitPath = ['.'];
}
var barsProp = splitPath.pop()
.split('@');
if (barsProp[0]) {
splitPath.push(barsProp[0]);
}
if (barsProp[1]) {
splitPath.push('@' + barsProp[1]);
}
} else {
throw 'bad argument: expected String | Array<String>.';
}
return splitPath;
};
function findPath(arg) |
exports.findPath = findPath;
| {
if (arg) {
if (arg.type === 'insert') {
return arg.path;
} else if (
arg.type === 'operator' ||
arg.type === 'transform'
) {
for (var i = 0; i < arg.arguments.length; i++) {
var argI = findPath(arg.arguments[i]);
if (argI) {
return argI;
}
}
}
}
return '';
} | identifier_body |
utils.js | exports.pathResolver = function pathResolver(base, path) {
base = base.slice();
path = path.slice();
while (base.length && path[0] === '..') {
path.shift();
base.pop();
}
return base.concat(path);
};
exports.pathSpliter = function pathSpliter(path) {
var splitPath;
if (path instanceof Array) {
splitPath = path;
} else if (typeof path === 'string') {
if (path.match(/[/]|[.][.]/)) | else {
splitPath = path.split('.');
}
if (!splitPath[0] && !splitPath[1]) {
splitPath = ['.'];
}
var barsProp = splitPath.pop()
.split('@');
if (barsProp[0]) {
splitPath.push(barsProp[0]);
}
if (barsProp[1]) {
splitPath.push('@' + barsProp[1]);
}
} else {
throw 'bad argument: expected String | Array<String>.';
}
return splitPath;
};
function findPath(arg) {
if (arg) {
if (arg.type === 'insert') {
return arg.path;
} else if (
arg.type === 'operator' ||
arg.type === 'transform'
) {
for (var i = 0; i < arg.arguments.length; i++) {
var argI = findPath(arg.arguments[i]);
if (argI) {
return argI;
}
}
}
}
return '';
}
exports.findPath = findPath;
| {
splitPath = path.split('/');
} | conditional_block |
utils.ts | import {EventEmitter} from 'events';
import * as _isPlainObject from 'lodash.isplainobject';
export function drop(item: any, arr: Array<any>): void {
const index = arr.indexOf(item);
if (index > -1) {
arr.splice(index, 1);
}
}
export function readyErrorHandling(emitter: EventEmitter): void {
if (!(emitter instanceof EventEmitter)) throw new TypeError(`invalid parameter. [emitter: EventEmitter]: ${typeof emitter}`);
const eventName = 'error';
if (emitter.listenerCount(eventName) == 0) {
emitter.once(eventName, () => {});
}
}
export function normalizeArray<T>(value: T|Array<T>): Array<T> {
let result: Array<T>;
if (value == undefined) {
result = [];
} else if (!(value instanceof Array)) { | result = value;
}
return result;
}
export function isObject(value: any): value is Object {
return _isPlainObject(value);
} | result = [value];
} else { | random_line_split |
utils.ts |
import {EventEmitter} from 'events';
import * as _isPlainObject from 'lodash.isplainobject';
export function drop(item: any, arr: Array<any>): void |
export function readyErrorHandling(emitter: EventEmitter): void {
if (!(emitter instanceof EventEmitter)) throw new TypeError(`invalid parameter. [emitter: EventEmitter]: ${typeof emitter}`);
const eventName = 'error';
if (emitter.listenerCount(eventName) == 0) {
emitter.once(eventName, () => {});
}
}
export function normalizeArray<T>(value: T|Array<T>): Array<T> {
let result: Array<T>;
if (value == undefined) {
result = [];
} else if (!(value instanceof Array)) {
result = [value];
} else {
result = value;
}
return result;
}
export function isObject(value: any): value is Object {
return _isPlainObject(value);
}
| {
const index = arr.indexOf(item);
if (index > -1) {
arr.splice(index, 1);
}
} | identifier_body |
utils.ts |
import {EventEmitter} from 'events';
import * as _isPlainObject from 'lodash.isplainobject';
export function drop(item: any, arr: Array<any>): void {
const index = arr.indexOf(item);
if (index > -1) {
arr.splice(index, 1);
}
}
export function readyErrorHandling(emitter: EventEmitter): void {
if (!(emitter instanceof EventEmitter)) throw new TypeError(`invalid parameter. [emitter: EventEmitter]: ${typeof emitter}`);
const eventName = 'error';
if (emitter.listenerCount(eventName) == 0) {
emitter.once(eventName, () => {});
}
}
export function normalizeArray<T>(value: T|Array<T>): Array<T> {
let result: Array<T>;
if (value == undefined) {
result = [];
} else if (!(value instanceof Array)) {
result = [value];
} else {
result = value;
}
return result;
}
export function | (value: any): value is Object {
return _isPlainObject(value);
}
| isObject | identifier_name |
utils.ts |
import {EventEmitter} from 'events';
import * as _isPlainObject from 'lodash.isplainobject';
export function drop(item: any, arr: Array<any>): void {
const index = arr.indexOf(item);
if (index > -1) {
arr.splice(index, 1);
}
}
export function readyErrorHandling(emitter: EventEmitter): void {
if (!(emitter instanceof EventEmitter)) throw new TypeError(`invalid parameter. [emitter: EventEmitter]: ${typeof emitter}`);
const eventName = 'error';
if (emitter.listenerCount(eventName) == 0) |
}
export function normalizeArray<T>(value: T|Array<T>): Array<T> {
let result: Array<T>;
if (value == undefined) {
result = [];
} else if (!(value instanceof Array)) {
result = [value];
} else {
result = value;
}
return result;
}
export function isObject(value: any): value is Object {
return _isPlainObject(value);
}
| {
emitter.once(eventName, () => {});
} | conditional_block |
power-cycle-droplet.mock.ts | export const request = {
"headers": {
"Content-Type": "application/json",
},
"body": {
"type": "power_cycle",
},
};
export const response = {
"body": {
"action": {
"id": 72531856,
"status": "completed",
"type": "power_cycle",
"started_at": "2015-11-12T17:51:03Z",
"completed_at": "2015-11-12T17:51:14Z",
"resource_id": '1234',
"resource_type": "volume",
"region": {
"name": "New York 1",
"slug": "nyc1",
"sizes": [
"s-1vcpu-1gb",
"s-1vcpu-2gb",
"s-1vcpu-3gb",
"s-2vcpu-2gb",
"s-3vcpu-1gb", | "s-16vcpu-64gb",
"s-20vcpu-96gb",
"s-24vcpu-128gb",
"s-32vcpu-192gb"
],
"features": [
"private_networking",
"backups",
"ipv6",
"metadata"
],
"available": true
},
"region_slug": "nyc1"
}
},
"headers": {
"content-type": "application/json; charset=utf-8",
"status": 200,
"ratelimit-limit": 1200,
"ratelimit-remaining": 1137,
"ratelimit-reset": 1415984218
},
}; | "s-2vcpu-4gb",
"s-4vcpu-8gb",
"s-6vcpu-16gb",
"s-8vcpu-32gb",
"s-12vcpu-48gb", | random_line_split |
__init__.py | """Hierarchical Networks Package.
This package makes it possible to construct graph-like Node structures,
especially hierarchical networks.
The most important building block is the new Layer node, which works as a
horizontal version of flow. It encapsulates a list of Nodes, which are trained
and executed in parallel.
For example, we can take two Nodes with 100-dimensional input to
construct a layer with a 200-dimensional input. The first half of the input
data is automatically fed into the first Node, the second half into the second
Node.
Since one might also want to use Flows (i.e. vertical stacks of Nodes) in a
Layer, a wrapper class for Nodes is provided.
The FlowNode class wraps any Flow into a Node, which can then be used like any
other Node. Together with the Layer this allows you to combine Nodes both
horizontally and vertically. Thereby one can in principle realize
any feed-forward network topology.
For hierarchical networks one might want to route the different parts of the
data to different Nodes in a Layer in complicated ways. This is done by a
Switchboard that handles all the routing.
Defining the routing manually can be quite tedious, so one can derive subclasses
for special routing situations. One such subclass for 2d image data is provided.
It maps the data according to rectangular overlapping 2d input areas. One can
then feed the output into a Layer and each Node will get the correct input.
"""
from flownode import FlowNode
from layer import Layer, SameInputLayer, CloneLayer
from switchboard import (
Switchboard, SwitchboardException, MeanInverseSwitchboard,
ChannelSwitchboard,
Rectangular2dSwitchboard, Rectangular2dSwitchboardException,
DoubleRect2dSwitchboard, DoubleRect2dSwitchboardException,
DoubleRhomb2dSwitchboard, DoubleRhomb2dSwitchboardException
)
from htmlvisitor import (
HiNetHTMLVisitor, HiNetXHTMLVisitor, NewlineWriteFile, show_flow
)
from switchboard_factory import (
get_2d_image_switchboard, FactoryExtensionChannelSwitchboard,
FactoryRectangular2dSwitchboard, FactoryDoubleRect2dSwitchboard,
FactoryDoubleRhomb2dSwitchboard
)
__all__ = ['FlowNode', 'Layer', 'SameInputLayer', 'CloneLayer',
'Switchboard', 'SwitchboardException', 'ChannelSwitchboard',
'Rectangular2dSwitchboard', 'Rectangular2dSwitchboardException',
'DoubleRect2dSwitchboard', 'DoubleRect2dSwitchboardException',
'DoubleRhomb2dSwitchboard', 'DoubleRhomb2dSwitchboardException',
'HiNetHTMLVisitor', 'HiNetXHTMLVisitor', 'NewlineWriteFile',
'show_flow', 'get_2d_image_switchboard'
]
from mdp.utils import fixup_namespace
fixup_namespace(__name__, __all__,
('flownode', | 'switchboard_factory',
'utils',
'fixup_namespace'
)) | 'layer',
'switchboard',
'htmlvisitor', | random_line_split
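# Editorial sketch (not part of the package above): a minimal wiring of the
# pieces the docstring describes, assuming MDP is installed. The node types
# and dimensions below are illustrative only.
import mdp

# Two 100-dimensional nodes side by side form a Layer with 200-dimensional
# input: the first half of each sample feeds the first node, the second half
# feeds the second node.
layer = mdp.hinet.Layer([
    mdp.nodes.PCANode(input_dim=100, output_dim=10),
    mdp.nodes.PCANode(input_dim=100, output_dim=10),
])

# A Flow (a vertical stack of nodes) wrapped as a FlowNode composes with the
# horizontal Layer, so topologies can be combined in both directions.
net = mdp.Flow([layer, mdp.hinet.FlowNode(mdp.Flow([mdp.nodes.SFANode()]))])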
tests.py | from django.contrib.gis.db.models.functions import (
Area, Distance, Length, Perimeter, Transform,
)
from django.contrib.gis.geos import GEOSGeometry, LineString, Point
from django.contrib.gis.measure import D # alias for Distance
from django.db import connection
from django.db.models import F, Q
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from ..utils import no_oracle, oracle, postgis, spatialite
from .models import (
AustraliaCity, CensusZipcode, Interstate, SouthTexasCity, SouthTexasCityFt,
SouthTexasInterstate, SouthTexasZipcode,
)
class DistanceTest(TestCase):
fixtures = ['initial']
def setUp(self):
# A point we are testing distances with -- using a WGS84
# coordinate that'll be implicitly transformed to
# the coordinate system of the field, EPSG:32140 (Texas South Central
# w/units in meters)
self.stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)
# Another one for Australia
self.au_pnt = GEOSGeometry('POINT (150.791 -34.4919)', 4326)
def get_names(self, qs):
cities = [c.name for c in qs]
cities.sort()
return cities
def test_init(self):
"""
Test initialization of distance models.
"""
self.assertEqual(9, SouthTexasCity.objects.count())
self.assertEqual(9, SouthTexasCityFt.objects.count())
self.assertEqual(11, AustraliaCity.objects.count())
self.assertEqual(4, SouthTexasZipcode.objects.count())
self.assertEqual(4, CensusZipcode.objects.count())
self.assertEqual(1, Interstate.objects.count())
self.assertEqual(1, SouthTexasInterstate.objects.count())
@skipUnlessDBFeature("supports_dwithin_lookup")
def test_dwithin(self):
"""
Test the `dwithin` lookup type.
"""
# Distances -- all should be equal (except for the
# degree/meter pair in au_cities, that's somewhat
# approximate).
tx_dists = [(7000, 22965.83), D(km=7), D(mi=4.349)]
au_dists = [(0.5, 32000), D(km=32), D(mi=19.884)]
# Expected cities for Australia and Texas.
tx_cities = ['Downtown Houston', 'Southside Place']
au_cities = ['Mittagong', 'Shellharbour', 'Thirroul', 'Wollongong']
# Performing distance queries on two projected coordinate systems one
# with units in meters and the other in units of U.S. survey feet.
for dist in tx_dists:
if isinstance(dist, tuple):
dist1, dist2 = dist
else:
dist1 = dist2 = dist
qs1 = SouthTexasCity.objects.filter(point__dwithin=(self.stx_pnt, dist1))
qs2 = SouthTexasCityFt.objects.filter(point__dwithin=(self.stx_pnt, dist2))
for qs in qs1, qs2:
with self.subTest(dist=dist, qs=qs):
self.assertEqual(tx_cities, self.get_names(qs))
# Now performing the `dwithin` queries on a geodetic coordinate system.
for dist in au_dists:
with self.subTest(dist=dist):
if isinstance(dist, D) and not oracle:
type_error = True
else:
type_error = False
if isinstance(dist, tuple):
if oracle or spatialite:
# Result in meters
dist = dist[1]
else:
# Result in units of the field
dist = dist[0]
# Creating the query set.
qs = AustraliaCity.objects.order_by('name')
if type_error:
# A ValueError should be raised on PostGIS when trying to
# pass Distance objects into a DWithin query using a
# geodetic field.
with self.assertRaises(ValueError):
AustraliaCity.objects.filter(point__dwithin=(self.au_pnt, dist)).count()
else:
self.assertEqual(au_cities, self.get_names(qs.filter(point__dwithin=(self.au_pnt, dist))))
@skipUnlessDBFeature("supports_distances_lookups")
def test_distance_lookups(self):
"""
Test the `distance_lt`, `distance_gt`, `distance_lte`, and `distance_gte` lookup types.
"""
# Retrieving the cities within a 20km 'donut' w/a 7km radius 'hole'
# (thus, Houston and Southside Place will be excluded as tested in
# `test_dwithin` above).
for model in [SouthTexasCity, SouthTexasCityFt]:
qs = model.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(
point__distance_lte=(self.stx_pnt, D(km=20)),
)
cities = self.get_names(qs)
self.assertEqual(cities, ['Bellaire', 'Pearland', 'West University Place'])
# Doing a distance query using Polygons instead of a Point.
z = SouthTexasZipcode.objects.get(name='77005')
qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=275)))
self.assertEqual(['77025', '77401'], self.get_names(qs))
# If we add a little more distance 77002 should be included.
qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=300)))
self.assertEqual(['77002', '77025', '77401'], self.get_names(qs))
@skipUnlessDBFeature("supports_distances_lookups", "supports_distance_geodetic")
def test_geodetic_distance_lookups(self):
"""
Test distance lookups on geodetic coordinate systems.
"""
# Line is from Canberra to Sydney. Query is for all other cities within
# a 100km of that line (which should exclude only Hobart & Adelaide).
line = GEOSGeometry('LINESTRING(144.9630 -37.8143,151.2607 -33.8870)', 4326)
dist_qs = AustraliaCity.objects.filter(point__distance_lte=(line, D(km=100)))
expected_cities = [
'Batemans Bay', 'Canberra', 'Hillsdale',
'Melbourne', 'Mittagong', 'Shellharbour',
'Sydney', 'Thirroul', 'Wollongong',
]
if spatialite:
# SpatiaLite is less accurate and returns 102.8km for Batemans Bay.
expected_cities.pop(0)
self.assertEqual(expected_cities, self.get_names(dist_qs))
# Too many params (4 in this case) should raise a ValueError.
queryset = AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)', D(km=100), 'spheroid', '4'))
with self.assertRaises(ValueError):
len(queryset)
# Not enough params should raise a ValueError.
with self.assertRaises(ValueError):
len(AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)',)))
# Getting all cities w/in 550 miles of Hobart.
hobart = AustraliaCity.objects.get(name='Hobart')
qs = AustraliaCity.objects.exclude(name='Hobart').filter(point__distance_lte=(hobart.point, D(mi=550)))
cities = self.get_names(qs)
self.assertEqual(cities, ['Batemans Bay', 'Canberra', 'Melbourne'])
# Cities that are either really close or really far from Wollongong --
# and using different units of distance.
wollongong = AustraliaCity.objects.get(name='Wollongong')
d1, d2 = D(yd=19500), D(nm=400) # Yards (~17km) & Nautical miles.
        # Normal geodetic distance lookup (uses `distance_sphere` on PostGIS).
gq1 = Q(point__distance_lte=(wollongong.point, d1))
gq2 = Q(point__distance_gte=(wollongong.point, d2))
qs1 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq1 | gq2)
# Geodetic distance lookup but telling GeoDjango to use `distance_spheroid`
# instead (we should get the same results b/c accuracy variance won't matter
# in this test case).
querysets = [qs1]
if connection.features.has_DistanceSpheroid_function:
gq3 = Q(point__distance_lte=(wollongong.point, d1, 'spheroid'))
gq4 = Q(point__distance_gte=(wollongong.point, d2, 'spheroid'))
qs2 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq3 | gq4)
querysets.append(qs2)
for qs in querysets:
cities = self.get_names(qs)
self.assertEqual(cities, ['Adelaide', 'Hobart', 'Shellharbour', 'Thirroul'])
@skipUnlessDBFeature("supports_distances_lookups")
def test_distance_lookups_with_expression_rhs(self):
qs = SouthTexasCity.objects.filter(
point__distance_lte=(self.stx_pnt, F('radius')),
).order_by('name')
self.assertEqual(
self.get_names(qs),
['Bellaire', 'Downtown Houston', 'Southside Place', 'West University Place']
)
# With a combined expression
qs = SouthTexasCity.objects.filter(
point__distance_lte=(self.stx_pnt, F('radius') * 2),
).order_by('name')
self.assertEqual(len(qs), 5)
self.assertIn('Pearland', self.get_names(qs))
# With spheroid param
if connection.features.supports_distance_geodetic:
hobart = AustraliaCity.objects.get(name='Hobart')
qs = AustraliaCity.objects.filter(
point__distance_lte=(hobart.point, F('radius') * 70, 'spheroid'),
).order_by('name')
self.assertEqual(self.get_names(qs), ['Canberra', 'Hobart', 'Melbourne'])
'''
=============================
Distance functions on PostGIS
=============================
| Projected Geometry | Lon/lat Geometry | Geography (4326)
ST_Distance(geom1, geom2) | OK (meters) | :-( (degrees) | OK (meters)
ST_Distance(geom1, geom2, use_spheroid=False) | N/A | N/A | OK (meters), less accurate, quick
Distance_Sphere(geom1, geom2) | N/A | OK (meters) | N/A
Distance_Spheroid(geom1, geom2, spheroid) | N/A | OK (meters) | N/A
ST_Perimeter(geom1) | OK | :-( (degrees) | OK
================================
Distance functions on SpatiaLite
================================
| Projected Geometry | Lon/lat Geometry
ST_Distance(geom1, geom2) | OK (meters) | N/A
ST_Distance(geom1, geom2, use_ellipsoid=True) | N/A | OK (meters)
ST_Distance(geom1, geom2, use_ellipsoid=False) | N/A | OK (meters), less accurate, quick
Perimeter(geom1) | OK | :-( (degrees)
''' # NOQA
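
# A minimal sketch (not exercised by the tests) of how the table above maps
# onto the ORM; `pnt` is a placeholder point, everything else is the same
# API used in the tests below:
#
#   from django.contrib.gis.db.models.functions import Distance
#   from django.contrib.gis.geos import Point
#
#   pnt = Point(151.2607, -33.8870, srid=4326)
#   AustraliaCity.objects.annotate(
#       d_sphere=Distance('point', pnt),                   # sphere variant
#       d_spheroid=Distance('point', pnt, spheroid=True),  # spheroid variant
#   )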
class DistanceFunctionsTests(TestCase):
    fixtures = ['initial']

    @skipUnlessDBFeature("has_Area_function")
    def test_area(self):
        # Reference queries:
# SELECT ST_Area(poly) FROM distapp_southtexaszipcode;
area_sq_m = [5437908.90234375, 10183031.4389648, 11254471.0073242, 9881708.91772461]
# Tolerance has to be lower for Oracle
tol = 2
for i, z in enumerate(SouthTexasZipcode.objects.annotate(area=Area('poly')).order_by('name')):
self.assertAlmostEqual(area_sq_m[i], z.area.sq_m, tol)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_simple(self):
"""
Test a simple distance query, with projected coordinates and without
transformation.
"""
lagrange = GEOSGeometry('POINT(805066.295722839 4231496.29461335)', 32140)
houston = SouthTexasCity.objects.annotate(dist=Distance('point', lagrange)).order_by('id').first()
tol = 2 if oracle else 5
self.assertAlmostEqual(
houston.dist.m,
147075.069813,
tol
)
@skipUnlessDBFeature("has_Distance_function", "has_Transform_function")
def test_distance_projected(self):
"""
Test the `Distance` function on projected coordinate systems.
"""
# The point for La Grange, TX
lagrange = GEOSGeometry('POINT(-96.876369 29.905320)', 4326)
# Reference distances in feet and in meters. Got these values from
# using the provided raw SQL statements.
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 32140))
# FROM distapp_southtexascity;
m_distances = [147075.069813, 139630.198056, 140888.552826,
138809.684197, 158309.246259, 212183.594374,
70870.188967, 165337.758878, 139196.085105]
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 2278))
# FROM distapp_southtexascityft;
ft_distances = [482528.79154625, 458103.408123001, 462231.860397575,
455411.438904354, 519386.252102563, 696139.009211594,
232513.278304279, 542445.630586414, 456679.155883207]
# Testing using different variations of parameters and using models
# with different projected coordinate systems.
dist1 = SouthTexasCity.objects.annotate(distance=Distance('point', lagrange)).order_by('id')
dist2 = SouthTexasCityFt.objects.annotate(distance=Distance('point', lagrange)).order_by('id')
dist_qs = [dist1, dist2]
# Original query done on PostGIS, have to adjust AlmostEqual tolerance
# for Oracle.
tol = 2 if oracle else 5
# Ensuring expected distances are returned for each distance queryset.
for qs in dist_qs:
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(m_distances[i], c.distance.m, tol)
self.assertAlmostEqual(ft_distances[i], c.distance.survey_ft, tol)
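
    # `c.distance` above is a measure object; unit attributes such as `.m`
    # and `.survey_ft` convert the stored value, which is why one annotation
    # can be checked against both reference lists.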
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_geodetic(self):
"""
Test the `Distance` function on geodetic coordinate systems.
"""
# Testing geodetic distance calculation with a non-point geometry
# (a LineString of Wollongong and Shellharbour coords).
ls = LineString(((150.902, -34.4245), (150.87, -34.5789)), srid=4326)
# Reference query:
# SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326))
# FROM distapp_australiacity ORDER BY name;
distances = [1120954.92533513, 140575.720018241, 640396.662906304,
60580.9693849269, 972807.955955075, 568451.8357838,
40435.4335201384, 0, 68272.3896586844, 12375.0643697706, 0]
qs = AustraliaCity.objects.annotate(distance=Distance('point', ls)).order_by('name')
for city, distance in zip(qs, distances):
with self.subTest(city=city, distance=distance):
# Testing equivalence to within a meter (kilometer on SpatiaLite).
tol = -3 if spatialite else 0
self.assertAlmostEqual(distance, city.distance.m, tol)
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_geodetic_spheroid(self):
tol = 2 if oracle else 4
# Got the reference distances using the raw SQL statements:
# SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326),
# 'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
# SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326))
        # FROM distapp_australiacity WHERE (NOT (id = 11));
spheroid_distances = [
60504.0628957201, 77023.9489850262, 49154.8867574404,
90847.4358768573, 217402.811919332, 709599.234564757,
640011.483550888, 7772.00667991925, 1047861.78619339,
1165126.55236034,
]
sphere_distances = [
60580.9693849267, 77144.0435286473, 49199.4415344719,
90804.7533823494, 217713.384600405, 709134.127242793,
639828.157159169, 7786.82949717788, 1049204.06569028,
1162623.7238134,
]
# Testing with spheroid distances first.
hillsdale = AustraliaCity.objects.get(name='Hillsdale')
qs = AustraliaCity.objects.exclude(id=hillsdale.id).annotate(
distance=Distance('point', hillsdale.point, spheroid=True)
).order_by('id')
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(spheroid_distances[i], c.distance.m, tol)
if postgis or spatialite:
# PostGIS uses sphere-only distances by default, testing these as well.
qs = AustraliaCity.objects.exclude(id=hillsdale.id).annotate(
distance=Distance('point', hillsdale.point)
).order_by('id')
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(sphere_distances[i], c.distance.m, tol)
@skipIfDBFeature("supports_distance_geodetic")
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_raw_result(self):
distance = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=4326), Point(0, 1, srid=4326)),
).first().d
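        # With no geodetic support, the backend returns the raw value in the
        # units of the field -- one degree of latitude here.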
self.assertEqual(distance, 1)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_d_lookup(self):
qs = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=3857), Point(0, 1, srid=3857)),
).filter(d=D(m=1))
self.assertTrue(qs.exists())
@skipIfDBFeature("supports_distance_geodetic")
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_raw_result_d_lookup(self):
qs = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=4326), Point(0, 1, srid=4326)),
).filter(d=D(m=1))
msg = 'Distance measure is supplied, but units are unknown for result.'
with self.assertRaisesMessage(ValueError, msg):
list(qs)
@no_oracle # Oracle already handles geographic distance calculation.
@skipUnlessDBFeature("has_Distance_function", 'has_Transform_function')
def test_distance_transform(self):
"""
Test the `Distance` function used with `Transform` on a geographic field.
"""
        # We'll be using a Polygon (created by buffering the centroid
        # of 77005 to 100m) -- which isn't allowed in geographic distance
        # queries normally; however, our field has been transformed to
        # a non-geographic system.
z = SouthTexasZipcode.objects.get(name='77005')
# Reference query:
# SELECT ST_Distance(ST_Transform("distapp_censuszipcode"."poly", 32140),
# ST_GeomFromText('<buffer_wkt>', 32140))
# FROM "distapp_censuszipcode";
dists_m = [3553.30384972258, 1243.18391525602, 2186.15439472242]
# Having our buffer in the SRID of the transformation and of the field
# -- should get the same results. The first buffer has no need for
# transformation SQL because it is the same SRID as what was given
# to `transform()`. The second buffer will need to be transformed,
# however.
buf1 = z.poly.centroid.buffer(100)
buf2 = buf1.transform(4269, clone=True)
ref_zips = ['77002', '77025', '77401']
for buf in [buf1, buf2]:
qs = CensusZipcode.objects.exclude(name='77005').annotate(
distance=Distance(Transform('poly', 32140), buf)
).order_by('name')
self.assertEqual(ref_zips, sorted([c.name for c in qs]))
for i, z in enumerate(qs):
self.assertAlmostEqual(z.distance.m, dists_m[i], 5)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_order_by(self):
qs = SouthTexasCity.objects.annotate(distance=Distance('point', Point(3, 3, srid=32140))).order_by(
'distance'
).values_list('name', flat=True).filter(name__in=('San Antonio', 'Pearland'))
self.assertSequenceEqual(qs, ['San Antonio', 'Pearland'])
@skipUnlessDBFeature("has_Length_function")
def test_length(self):
"""
Test the `Length` function.
"""
# Reference query (should use `length_spheroid`).
        # SELECT ST_length_spheroid(ST_GeomFromText('<wkt>', 4326), 'SPHEROID["WGS 84",6378137,298.257223563,
        # AUTHORITY["EPSG","7030"]]');
len_m1 = 473504.769553813
len_m2 = 4617.668
if connection.features.supports_length_geodetic:
qs = Interstate.objects.annotate(length=Length('path'))
tol = 2 if oracle else 3
self.assertAlmostEqual(len_m1, qs[0].length.m, tol)
# TODO: test with spheroid argument (True and False)
else:
# Does not support geodetic coordinate systems.
with self.assertRaises(NotImplementedError):
list(Interstate.objects.annotate(length=Length('path')))
# Now doing length on a projected coordinate system.
i10 = SouthTexasInterstate.objects.annotate(length=Length('path')).get(name='I-10')
self.assertAlmostEqual(len_m2, i10.length.m, 2)
self.assertTrue(
SouthTexasInterstate.objects.annotate(length=Length('path')).filter(length__gt=4000).exists()
)
# Length with an explicit geometry value.
qs = Interstate.objects.annotate(length=Length(i10.path))
self.assertAlmostEqual(qs.first().length.m, len_m2, 2)
@skipUnlessDBFeature("has_Perimeter_function")
def test_perimeter(self):
"""
Test the `Perimeter` function.
"""
# Reference query:
# SELECT ST_Perimeter(distapp_southtexaszipcode.poly) FROM distapp_southtexaszipcode;
perim_m = [18404.3550889361, 15627.2108551001, 20632.5588368978, 17094.5996143697]
tol = 2 if oracle else 7
qs = SouthTexasZipcode.objects.annotate(perimeter=Perimeter('poly')).order_by('name')
for i, z in enumerate(qs):
self.assertAlmostEqual(perim_m[i], z.perimeter.m, tol)
# Running on points; should return 0.
qs = SouthTexasCity.objects.annotate(perim=Perimeter('point'))
for city in qs:
self.assertEqual(0, city.perim.m)
@skipUnlessDBFeature("has_Perimeter_function")
def test_perimeter_geodetic(self):
# Currently only Oracle supports calculating the perimeter on geodetic
# geometries (without being transformed).
qs1 = CensusZipcode.objects.annotate(perim=Perimeter('poly'))
if connection.features.supports_perimeter_geodetic:
self.assertAlmostEqual(qs1[0].perim.m, 18406.3818954314, 3)
else:
with self.assertRaises(NotImplementedError):
list(qs1)
# But should work fine when transformed to projected coordinates
qs2 = CensusZipcode.objects.annotate(perim=Perimeter(Transform('poly', 32140))).filter(name='77002')
self.assertAlmostEqual(qs2[0].perim.m, 18404.355, 3)
@skipUnlessDBFeature("supports_null_geometries", "has_Area_function", "has_Distance_function")
def test_measurement_null_fields(self):
"""
Test the measurement functions on fields with NULL values.
"""
# Creating SouthTexasZipcode w/NULL value.
SouthTexasZipcode.objects.create(name='78212')
# Performing distance/area queries against the NULL PolygonField,
# and ensuring the result of the operations is None.
htown = SouthTexasCity.objects.get(name='Downtown Houston')
z = SouthTexasZipcode.objects.annotate(
distance=Distance('poly', htown.point), area=Area('poly')
).get(name='78212')
self.assertIsNone(z.distance)
self.assertIsNone(z.area) | fixtures = ['initial']
@skipUnlessDBFeature("has_Area_function")
def test_area(self): | random_line_split |
tests.py | from django.contrib.gis.db.models.functions import (
Area, Distance, Length, Perimeter, Transform,
)
from django.contrib.gis.geos import GEOSGeometry, LineString, Point
from django.contrib.gis.measure import D # alias for Distance
from django.db import connection
from django.db.models import F, Q
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from ..utils import no_oracle, oracle, postgis, spatialite
from .models import (
AustraliaCity, CensusZipcode, Interstate, SouthTexasCity, SouthTexasCityFt,
SouthTexasInterstate, SouthTexasZipcode,
)
class DistanceTest(TestCase):
fixtures = ['initial']
def setUp(self):
# A point we are testing distances with -- using a WGS84
# coordinate that'll be implicitly transformed to that to
# the coordinate system of the field, EPSG:32140 (Texas South Central
# w/units in meters)
self.stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)
# Another one for Australia
self.au_pnt = GEOSGeometry('POINT (150.791 -34.4919)', 4326)
def get_names(self, qs):
cities = [c.name for c in qs]
cities.sort()
return cities
def test_init(self):
"""
Test initialization of distance models.
"""
self.assertEqual(9, SouthTexasCity.objects.count())
self.assertEqual(9, SouthTexasCityFt.objects.count())
self.assertEqual(11, AustraliaCity.objects.count())
self.assertEqual(4, SouthTexasZipcode.objects.count())
self.assertEqual(4, CensusZipcode.objects.count())
self.assertEqual(1, Interstate.objects.count())
self.assertEqual(1, SouthTexasInterstate.objects.count())
@skipUnlessDBFeature("supports_dwithin_lookup")
def test_dwithin(self):
"""
Test the `dwithin` lookup type.
"""
# Distances -- all should be equal (except for the
# degree/meter pair in au_cities, that's somewhat
# approximate).
tx_dists = [(7000, 22965.83), D(km=7), D(mi=4.349)]
au_dists = [(0.5, 32000), D(km=32), D(mi=19.884)]
# Expected cities for Australia and Texas.
tx_cities = ['Downtown Houston', 'Southside Place']
au_cities = ['Mittagong', 'Shellharbour', 'Thirroul', 'Wollongong']
# Performing distance queries on two projected coordinate systems one
# with units in meters and the other in units of U.S. survey feet.
for dist in tx_dists:
if isinstance(dist, tuple):
dist1, dist2 = dist
else:
dist1 = dist2 = dist
qs1 = SouthTexasCity.objects.filter(point__dwithin=(self.stx_pnt, dist1))
qs2 = SouthTexasCityFt.objects.filter(point__dwithin=(self.stx_pnt, dist2))
for qs in qs1, qs2:
with self.subTest(dist=dist, qs=qs):
self.assertEqual(tx_cities, self.get_names(qs))
# Now performing the `dwithin` queries on a geodetic coordinate system.
for dist in au_dists:
with self.subTest(dist=dist):
if isinstance(dist, D) and not oracle:
type_error = True
else:
type_error = False
if isinstance(dist, tuple):
if oracle or spatialite:
# Result in meters
dist = dist[1]
else:
# Result in units of the field
dist = dist[0]
# Creating the query set.
qs = AustraliaCity.objects.order_by('name')
if type_error:
# A ValueError should be raised on PostGIS when trying to
# pass Distance objects into a DWithin query using a
# geodetic field.
with self.assertRaises(ValueError):
AustraliaCity.objects.filter(point__dwithin=(self.au_pnt, dist)).count()
else:
self.assertEqual(au_cities, self.get_names(qs.filter(point__dwithin=(self.au_pnt, dist))))
@skipUnlessDBFeature("supports_distances_lookups")
def test_distance_lookups(self):
"""
Test the `distance_lt`, `distance_gt`, `distance_lte`, and `distance_gte` lookup types.
"""
# Retrieving the cities within a 20km 'donut' w/a 7km radius 'hole'
# (thus, Houston and Southside place will be excluded as tested in
# the `test02_dwithin` above).
for model in [SouthTexasCity, SouthTexasCityFt]:
qs = model.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(
point__distance_lte=(self.stx_pnt, D(km=20)),
)
cities = self.get_names(qs)
self.assertEqual(cities, ['Bellaire', 'Pearland', 'West University Place'])
# Doing a distance query using Polygons instead of a Point.
z = SouthTexasZipcode.objects.get(name='77005')
qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=275)))
self.assertEqual(['77025', '77401'], self.get_names(qs))
# If we add a little more distance 77002 should be included.
qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=300)))
self.assertEqual(['77002', '77025', '77401'], self.get_names(qs))
@skipUnlessDBFeature("supports_distances_lookups", "supports_distance_geodetic")
def test_geodetic_distance_lookups(self):
"""
Test distance lookups on geodetic coordinate systems.
"""
# Line is from Canberra to Sydney. Query is for all other cities within
# a 100km of that line (which should exclude only Hobart & Adelaide).
line = GEOSGeometry('LINESTRING(144.9630 -37.8143,151.2607 -33.8870)', 4326)
dist_qs = AustraliaCity.objects.filter(point__distance_lte=(line, D(km=100)))
expected_cities = [
'Batemans Bay', 'Canberra', 'Hillsdale',
'Melbourne', 'Mittagong', 'Shellharbour',
'Sydney', 'Thirroul', 'Wollongong',
]
if spatialite:
# SpatiaLite is less accurate and returns 102.8km for Batemans Bay.
expected_cities.pop(0)
self.assertEqual(expected_cities, self.get_names(dist_qs))
# Too many params (4 in this case) should raise a ValueError.
queryset = AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)', D(km=100), 'spheroid', '4'))
with self.assertRaises(ValueError):
len(queryset)
# Not enough params should raise a ValueError.
with self.assertRaises(ValueError):
len(AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)',)))
# Getting all cities w/in 550 miles of Hobart.
hobart = AustraliaCity.objects.get(name='Hobart')
qs = AustraliaCity.objects.exclude(name='Hobart').filter(point__distance_lte=(hobart.point, D(mi=550)))
cities = self.get_names(qs)
self.assertEqual(cities, ['Batemans Bay', 'Canberra', 'Melbourne'])
# Cities that are either really close or really far from Wollongong --
# and using different units of distance.
wollongong = AustraliaCity.objects.get(name='Wollongong')
d1, d2 = D(yd=19500), D(nm=400) # Yards (~17km) & Nautical miles.
# Normal geodetic distance lookup (uses `distance_sphere` on PostGIS.
gq1 = Q(point__distance_lte=(wollongong.point, d1))
gq2 = Q(point__distance_gte=(wollongong.point, d2))
qs1 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq1 | gq2)
# Geodetic distance lookup but telling GeoDjango to use `distance_spheroid`
# instead (we should get the same results b/c accuracy variance won't matter
# in this test case).
querysets = [qs1]
if connection.features.has_DistanceSpheroid_function:
|
for qs in querysets:
cities = self.get_names(qs)
self.assertEqual(cities, ['Adelaide', 'Hobart', 'Shellharbour', 'Thirroul'])
@skipUnlessDBFeature("supports_distances_lookups")
def test_distance_lookups_with_expression_rhs(self):
qs = SouthTexasCity.objects.filter(
point__distance_lte=(self.stx_pnt, F('radius')),
).order_by('name')
self.assertEqual(
self.get_names(qs),
['Bellaire', 'Downtown Houston', 'Southside Place', 'West University Place']
)
# With a combined expression
qs = SouthTexasCity.objects.filter(
point__distance_lte=(self.stx_pnt, F('radius') * 2),
).order_by('name')
self.assertEqual(len(qs), 5)
self.assertIn('Pearland', self.get_names(qs))
# With spheroid param
if connection.features.supports_distance_geodetic:
hobart = AustraliaCity.objects.get(name='Hobart')
qs = AustraliaCity.objects.filter(
point__distance_lte=(hobart.point, F('radius') * 70, 'spheroid'),
).order_by('name')
self.assertEqual(self.get_names(qs), ['Canberra', 'Hobart', 'Melbourne'])
'''
=============================
Distance functions on PostGIS
=============================
| Projected Geometry | Lon/lat Geometry | Geography (4326)
ST_Distance(geom1, geom2) | OK (meters) | :-( (degrees) | OK (meters)
ST_Distance(geom1, geom2, use_spheroid=False) | N/A | N/A | OK (meters), less accurate, quick
Distance_Sphere(geom1, geom2) | N/A | OK (meters) | N/A
Distance_Spheroid(geom1, geom2, spheroid) | N/A | OK (meters) | N/A
ST_Perimeter(geom1) | OK | :-( (degrees) | OK
================================
Distance functions on SpatiaLite
================================
| Projected Geometry | Lon/lat Geometry
ST_Distance(geom1, geom2) | OK (meters) | N/A
ST_Distance(geom1, geom2, use_ellipsoid=True) | N/A | OK (meters)
ST_Distance(geom1, geom2, use_ellipsoid=False) | N/A | OK (meters), less accurate, quick
Perimeter(geom1) | OK | :-( (degrees)
''' # NOQA
class DistanceFunctionsTests(TestCase):
fixtures = ['initial']
@skipUnlessDBFeature("has_Area_function")
def test_area(self):
# Reference queries:
# SELECT ST_Area(poly) FROM distapp_southtexaszipcode;
area_sq_m = [5437908.90234375, 10183031.4389648, 11254471.0073242, 9881708.91772461]
# Tolerance has to be lower for Oracle
tol = 2
for i, z in enumerate(SouthTexasZipcode.objects.annotate(area=Area('poly')).order_by('name')):
self.assertAlmostEqual(area_sq_m[i], z.area.sq_m, tol)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_simple(self):
"""
Test a simple distance query, with projected coordinates and without
transformation.
"""
lagrange = GEOSGeometry('POINT(805066.295722839 4231496.29461335)', 32140)
houston = SouthTexasCity.objects.annotate(dist=Distance('point', lagrange)).order_by('id').first()
tol = 2 if oracle else 5
self.assertAlmostEqual(
houston.dist.m,
147075.069813,
tol
)
@skipUnlessDBFeature("has_Distance_function", "has_Transform_function")
def test_distance_projected(self):
"""
Test the `Distance` function on projected coordinate systems.
"""
# The point for La Grange, TX
lagrange = GEOSGeometry('POINT(-96.876369 29.905320)', 4326)
# Reference distances in feet and in meters. Got these values from
# using the provided raw SQL statements.
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 32140))
# FROM distapp_southtexascity;
m_distances = [147075.069813, 139630.198056, 140888.552826,
138809.684197, 158309.246259, 212183.594374,
70870.188967, 165337.758878, 139196.085105]
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 2278))
# FROM distapp_southtexascityft;
ft_distances = [482528.79154625, 458103.408123001, 462231.860397575,
455411.438904354, 519386.252102563, 696139.009211594,
232513.278304279, 542445.630586414, 456679.155883207]
# Testing using different variations of parameters and using models
# with different projected coordinate systems.
dist1 = SouthTexasCity.objects.annotate(distance=Distance('point', lagrange)).order_by('id')
dist2 = SouthTexasCityFt.objects.annotate(distance=Distance('point', lagrange)).order_by('id')
dist_qs = [dist1, dist2]
# Original query done on PostGIS, have to adjust AlmostEqual tolerance
# for Oracle.
tol = 2 if oracle else 5
# Ensuring expected distances are returned for each distance queryset.
for qs in dist_qs:
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(m_distances[i], c.distance.m, tol)
self.assertAlmostEqual(ft_distances[i], c.distance.survey_ft, tol)
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_geodetic(self):
"""
Test the `Distance` function on geodetic coordinate systems.
"""
# Testing geodetic distance calculation with a non-point geometry
# (a LineString of Wollongong and Shellharbour coords).
ls = LineString(((150.902, -34.4245), (150.87, -34.5789)), srid=4326)
# Reference query:
# SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326))
# FROM distapp_australiacity ORDER BY name;
distances = [1120954.92533513, 140575.720018241, 640396.662906304,
60580.9693849269, 972807.955955075, 568451.8357838,
40435.4335201384, 0, 68272.3896586844, 12375.0643697706, 0]
qs = AustraliaCity.objects.annotate(distance=Distance('point', ls)).order_by('name')
for city, distance in zip(qs, distances):
with self.subTest(city=city, distance=distance):
# Testing equivalence to within a meter (kilometer on SpatiaLite).
tol = -3 if spatialite else 0
self.assertAlmostEqual(distance, city.distance.m, tol)
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_geodetic_spheroid(self):
tol = 2 if oracle else 4
# Got the reference distances using the raw SQL statements:
# SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326),
# 'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
# SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326))
# FROM distapp_australiacity WHERE (NOT (id = 11)); st_distance_sphere
spheroid_distances = [
60504.0628957201, 77023.9489850262, 49154.8867574404,
90847.4358768573, 217402.811919332, 709599.234564757,
640011.483550888, 7772.00667991925, 1047861.78619339,
1165126.55236034,
]
sphere_distances = [
60580.9693849267, 77144.0435286473, 49199.4415344719,
90804.7533823494, 217713.384600405, 709134.127242793,
639828.157159169, 7786.82949717788, 1049204.06569028,
1162623.7238134,
]
# Testing with spheroid distances first.
hillsdale = AustraliaCity.objects.get(name='Hillsdale')
qs = AustraliaCity.objects.exclude(id=hillsdale.id).annotate(
distance=Distance('point', hillsdale.point, spheroid=True)
).order_by('id')
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(spheroid_distances[i], c.distance.m, tol)
if postgis or spatialite:
# PostGIS uses sphere-only distances by default, testing these as well.
qs = AustraliaCity.objects.exclude(id=hillsdale.id).annotate(
distance=Distance('point', hillsdale.point)
).order_by('id')
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(sphere_distances[i], c.distance.m, tol)
@skipIfDBFeature("supports_distance_geodetic")
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_raw_result(self):
distance = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=4326), Point(0, 1, srid=4326)),
).first().d
self.assertEqual(distance, 1)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_d_lookup(self):
qs = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=3857), Point(0, 1, srid=3857)),
).filter(d=D(m=1))
self.assertTrue(qs.exists())
@skipIfDBFeature("supports_distance_geodetic")
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_raw_result_d_lookup(self):
qs = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=4326), Point(0, 1, srid=4326)),
).filter(d=D(m=1))
msg = 'Distance measure is supplied, but units are unknown for result.'
with self.assertRaisesMessage(ValueError, msg):
list(qs)
@no_oracle # Oracle already handles geographic distance calculation.
@skipUnlessDBFeature("has_Distance_function", 'has_Transform_function')
def test_distance_transform(self):
"""
Test the `Distance` function used with `Transform` on a geographic field.
"""
# We'll be using a Polygon (created by buffering the centroid
# of 77005 to 100m) -- which aren't allowed in geographic distance
# queries normally, however our field has been transformed to
# a non-geographic system.
z = SouthTexasZipcode.objects.get(name='77005')
# Reference query:
# SELECT ST_Distance(ST_Transform("distapp_censuszipcode"."poly", 32140),
# ST_GeomFromText('<buffer_wkt>', 32140))
# FROM "distapp_censuszipcode";
dists_m = [3553.30384972258, 1243.18391525602, 2186.15439472242]
# Having our buffer in the SRID of the transformation and of the field
# -- should get the same results. The first buffer has no need for
# transformation SQL because it is the same SRID as what was given
# to `transform()`. The second buffer will need to be transformed,
# however.
buf1 = z.poly.centroid.buffer(100)
buf2 = buf1.transform(4269, clone=True)
ref_zips = ['77002', '77025', '77401']
for buf in [buf1, buf2]:
qs = CensusZipcode.objects.exclude(name='77005').annotate(
distance=Distance(Transform('poly', 32140), buf)
).order_by('name')
self.assertEqual(ref_zips, sorted([c.name for c in qs]))
for i, z in enumerate(qs):
self.assertAlmostEqual(z.distance.m, dists_m[i], 5)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_order_by(self):
qs = SouthTexasCity.objects.annotate(distance=Distance('point', Point(3, 3, srid=32140))).order_by(
'distance'
).values_list('name', flat=True).filter(name__in=('San Antonio', 'Pearland'))
self.assertSequenceEqual(qs, ['San Antonio', 'Pearland'])
@skipUnlessDBFeature("has_Length_function")
def test_length(self):
"""
Test the `Length` function.
"""
# Reference query (should use `length_spheroid`).
# SELECT ST_length_spheroid(ST_GeomFromText('<wkt>', 4326) 'SPHEROID["WGS 84",6378137,298.257223563,
# AUTHORITY["EPSG","7030"]]');
len_m1 = 473504.769553813
len_m2 = 4617.668
if connection.features.supports_length_geodetic:
qs = Interstate.objects.annotate(length=Length('path'))
tol = 2 if oracle else 3
self.assertAlmostEqual(len_m1, qs[0].length.m, tol)
# TODO: test with spheroid argument (True and False)
else:
# Does not support geodetic coordinate systems.
with self.assertRaises(NotImplementedError):
list(Interstate.objects.annotate(length=Length('path')))
# Now doing length on a projected coordinate system.
i10 = SouthTexasInterstate.objects.annotate(length=Length('path')).get(name='I-10')
self.assertAlmostEqual(len_m2, i10.length.m, 2)
self.assertTrue(
SouthTexasInterstate.objects.annotate(length=Length('path')).filter(length__gt=4000).exists()
)
# Length with an explicit geometry value.
qs = Interstate.objects.annotate(length=Length(i10.path))
self.assertAlmostEqual(qs.first().length.m, len_m2, 2)
@skipUnlessDBFeature("has_Perimeter_function")
def test_perimeter(self):
"""
Test the `Perimeter` function.
"""
# Reference query:
# SELECT ST_Perimeter(distapp_southtexaszipcode.poly) FROM distapp_southtexaszipcode;
perim_m = [18404.3550889361, 15627.2108551001, 20632.5588368978, 17094.5996143697]
tol = 2 if oracle else 7
qs = SouthTexasZipcode.objects.annotate(perimeter=Perimeter('poly')).order_by('name')
for i, z in enumerate(qs):
self.assertAlmostEqual(perim_m[i], z.perimeter.m, tol)
# Running on points; should return 0.
qs = SouthTexasCity.objects.annotate(perim=Perimeter('point'))
for city in qs:
self.assertEqual(0, city.perim.m)
@skipUnlessDBFeature("has_Perimeter_function")
def test_perimeter_geodetic(self):
# Currently only Oracle supports calculating the perimeter on geodetic
# geometries (without being transformed).
qs1 = CensusZipcode.objects.annotate(perim=Perimeter('poly'))
if connection.features.supports_perimeter_geodetic:
self.assertAlmostEqual(qs1[0].perim.m, 18406.3818954314, 3)
else:
with self.assertRaises(NotImplementedError):
list(qs1)
# But should work fine when transformed to projected coordinates
qs2 = CensusZipcode.objects.annotate(perim=Perimeter(Transform('poly', 32140))).filter(name='77002')
self.assertAlmostEqual(qs2[0].perim.m, 18404.355, 3)
@skipUnlessDBFeature("supports_null_geometries", "has_Area_function", "has_Distance_function")
def test_measurement_null_fields(self):
"""
Test the measurement functions on fields with NULL values.
"""
# Creating SouthTexasZipcode w/NULL value.
SouthTexasZipcode.objects.create(name='78212')
# Performing distance/area queries against the NULL PolygonField,
# and ensuring the result of the operations is None.
htown = SouthTexasCity.objects.get(name='Downtown Houston')
z = SouthTexasZipcode.objects.annotate(
distance=Distance('poly', htown.point), area=Area('poly')
).get(name='78212')
self.assertIsNone(z.distance)
self.assertIsNone(z.area)
| gq3 = Q(point__distance_lte=(wollongong.point, d1, 'spheroid'))
gq4 = Q(point__distance_gte=(wollongong.point, d2, 'spheroid'))
qs2 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq3 | gq4)
querysets.append(qs2) | conditional_block |
tests.py | from django.contrib.gis.db.models.functions import (
Area, Distance, Length, Perimeter, Transform,
)
from django.contrib.gis.geos import GEOSGeometry, LineString, Point
from django.contrib.gis.measure import D # alias for Distance
from django.db import connection
from django.db.models import F, Q
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from ..utils import no_oracle, oracle, postgis, spatialite
from .models import (
AustraliaCity, CensusZipcode, Interstate, SouthTexasCity, SouthTexasCityFt,
SouthTexasInterstate, SouthTexasZipcode,
)
class DistanceTest(TestCase):
fixtures = ['initial']
def setUp(self):
# A point we are testing distances with -- using a WGS84
# coordinate that'll be implicitly transformed to that to
# the coordinate system of the field, EPSG:32140 (Texas South Central
# w/units in meters)
self.stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)
# Another one for Australia
self.au_pnt = GEOSGeometry('POINT (150.791 -34.4919)', 4326)
def get_names(self, qs):
cities = [c.name for c in qs]
cities.sort()
return cities
def test_init(self):
"""
Test initialization of distance models.
"""
self.assertEqual(9, SouthTexasCity.objects.count())
self.assertEqual(9, SouthTexasCityFt.objects.count())
self.assertEqual(11, AustraliaCity.objects.count())
self.assertEqual(4, SouthTexasZipcode.objects.count())
self.assertEqual(4, CensusZipcode.objects.count())
self.assertEqual(1, Interstate.objects.count())
self.assertEqual(1, SouthTexasInterstate.objects.count())
@skipUnlessDBFeature("supports_dwithin_lookup")
def test_dwithin(self):
"""
Test the `dwithin` lookup type.
"""
# Distances -- all should be equal (except for the
# degree/meter pair in au_cities, that's somewhat
# approximate).
tx_dists = [(7000, 22965.83), D(km=7), D(mi=4.349)]
au_dists = [(0.5, 32000), D(km=32), D(mi=19.884)]
# Expected cities for Australia and Texas.
tx_cities = ['Downtown Houston', 'Southside Place']
au_cities = ['Mittagong', 'Shellharbour', 'Thirroul', 'Wollongong']
# Performing distance queries on two projected coordinate systems one
# with units in meters and the other in units of U.S. survey feet.
for dist in tx_dists:
if isinstance(dist, tuple):
dist1, dist2 = dist
else:
dist1 = dist2 = dist
qs1 = SouthTexasCity.objects.filter(point__dwithin=(self.stx_pnt, dist1))
qs2 = SouthTexasCityFt.objects.filter(point__dwithin=(self.stx_pnt, dist2))
for qs in qs1, qs2:
with self.subTest(dist=dist, qs=qs):
self.assertEqual(tx_cities, self.get_names(qs))
# Now performing the `dwithin` queries on a geodetic coordinate system.
for dist in au_dists:
with self.subTest(dist=dist):
if isinstance(dist, D) and not oracle:
type_error = True
else:
type_error = False
if isinstance(dist, tuple):
if oracle or spatialite:
# Result in meters
dist = dist[1]
else:
# Result in units of the field
dist = dist[0]
# Creating the query set.
qs = AustraliaCity.objects.order_by('name')
if type_error:
# A ValueError should be raised on PostGIS when trying to
# pass Distance objects into a DWithin query using a
# geodetic field.
with self.assertRaises(ValueError):
AustraliaCity.objects.filter(point__dwithin=(self.au_pnt, dist)).count()
else:
self.assertEqual(au_cities, self.get_names(qs.filter(point__dwithin=(self.au_pnt, dist))))
@skipUnlessDBFeature("supports_distances_lookups")
def test_distance_lookups(self):
"""
Test the `distance_lt`, `distance_gt`, `distance_lte`, and `distance_gte` lookup types.
"""
# Retrieving the cities within a 20km 'donut' w/a 7km radius 'hole'
# (thus, Houston and Southside place will be excluded as tested in
# the `test02_dwithin` above).
for model in [SouthTexasCity, SouthTexasCityFt]:
qs = model.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(
point__distance_lte=(self.stx_pnt, D(km=20)),
)
cities = self.get_names(qs)
self.assertEqual(cities, ['Bellaire', 'Pearland', 'West University Place'])
# Doing a distance query using Polygons instead of a Point.
z = SouthTexasZipcode.objects.get(name='77005')
qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=275)))
self.assertEqual(['77025', '77401'], self.get_names(qs))
# If we add a little more distance 77002 should be included.
qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=300)))
self.assertEqual(['77002', '77025', '77401'], self.get_names(qs))
@skipUnlessDBFeature("supports_distances_lookups", "supports_distance_geodetic")
def test_geodetic_distance_lookups(self):
"""
Test distance lookups on geodetic coordinate systems.
"""
# Line is from Canberra to Sydney. Query is for all other cities within
# a 100km of that line (which should exclude only Hobart & Adelaide).
line = GEOSGeometry('LINESTRING(144.9630 -37.8143,151.2607 -33.8870)', 4326)
dist_qs = AustraliaCity.objects.filter(point__distance_lte=(line, D(km=100)))
expected_cities = [
'Batemans Bay', 'Canberra', 'Hillsdale',
'Melbourne', 'Mittagong', 'Shellharbour',
'Sydney', 'Thirroul', 'Wollongong',
]
if spatialite:
# SpatiaLite is less accurate and returns 102.8km for Batemans Bay.
expected_cities.pop(0)
self.assertEqual(expected_cities, self.get_names(dist_qs))
# Too many params (4 in this case) should raise a ValueError.
queryset = AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)', D(km=100), 'spheroid', '4'))
with self.assertRaises(ValueError):
len(queryset)
# Not enough params should raise a ValueError.
with self.assertRaises(ValueError):
len(AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)',)))
# Getting all cities w/in 550 miles of Hobart.
hobart = AustraliaCity.objects.get(name='Hobart')
qs = AustraliaCity.objects.exclude(name='Hobart').filter(point__distance_lte=(hobart.point, D(mi=550)))
cities = self.get_names(qs)
self.assertEqual(cities, ['Batemans Bay', 'Canberra', 'Melbourne'])
# Cities that are either really close or really far from Wollongong --
# and using different units of distance.
wollongong = AustraliaCity.objects.get(name='Wollongong')
d1, d2 = D(yd=19500), D(nm=400) # Yards (~17km) & Nautical miles.
# Normal geodetic distance lookup (uses `distance_sphere` on PostGIS.
gq1 = Q(point__distance_lte=(wollongong.point, d1))
gq2 = Q(point__distance_gte=(wollongong.point, d2))
qs1 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq1 | gq2)
# Geodetic distance lookup but telling GeoDjango to use `distance_spheroid`
# instead (we should get the same results b/c accuracy variance won't matter
# in this test case).
querysets = [qs1]
if connection.features.has_DistanceSpheroid_function:
gq3 = Q(point__distance_lte=(wollongong.point, d1, 'spheroid'))
gq4 = Q(point__distance_gte=(wollongong.point, d2, 'spheroid'))
qs2 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq3 | gq4)
querysets.append(qs2)
for qs in querysets:
cities = self.get_names(qs)
self.assertEqual(cities, ['Adelaide', 'Hobart', 'Shellharbour', 'Thirroul'])
@skipUnlessDBFeature("supports_distances_lookups")
def | (self):
qs = SouthTexasCity.objects.filter(
point__distance_lte=(self.stx_pnt, F('radius')),
).order_by('name')
self.assertEqual(
self.get_names(qs),
['Bellaire', 'Downtown Houston', 'Southside Place', 'West University Place']
)
# With a combined expression
qs = SouthTexasCity.objects.filter(
point__distance_lte=(self.stx_pnt, F('radius') * 2),
).order_by('name')
self.assertEqual(len(qs), 5)
self.assertIn('Pearland', self.get_names(qs))
# With spheroid param
if connection.features.supports_distance_geodetic:
hobart = AustraliaCity.objects.get(name='Hobart')
qs = AustraliaCity.objects.filter(
point__distance_lte=(hobart.point, F('radius') * 70, 'spheroid'),
).order_by('name')
self.assertEqual(self.get_names(qs), ['Canberra', 'Hobart', 'Melbourne'])
'''
=============================
Distance functions on PostGIS
=============================
| Projected Geometry | Lon/lat Geometry | Geography (4326)
ST_Distance(geom1, geom2) | OK (meters) | :-( (degrees) | OK (meters)
ST_Distance(geom1, geom2, use_spheroid=False) | N/A | N/A | OK (meters), less accurate, quick
Distance_Sphere(geom1, geom2) | N/A | OK (meters) | N/A
Distance_Spheroid(geom1, geom2, spheroid) | N/A | OK (meters) | N/A
ST_Perimeter(geom1) | OK | :-( (degrees) | OK
================================
Distance functions on SpatiaLite
================================
| Projected Geometry | Lon/lat Geometry
ST_Distance(geom1, geom2) | OK (meters) | N/A
ST_Distance(geom1, geom2, use_ellipsoid=True) | N/A | OK (meters)
ST_Distance(geom1, geom2, use_ellipsoid=False) | N/A | OK (meters), less accurate, quick
Perimeter(geom1) | OK | :-( (degrees)
''' # NOQA
class DistanceFunctionsTests(TestCase):
fixtures = ['initial']
@skipUnlessDBFeature("has_Area_function")
def test_area(self):
# Reference queries:
# SELECT ST_Area(poly) FROM distapp_southtexaszipcode;
area_sq_m = [5437908.90234375, 10183031.4389648, 11254471.0073242, 9881708.91772461]
# Tolerance has to be lower for Oracle
tol = 2
for i, z in enumerate(SouthTexasZipcode.objects.annotate(area=Area('poly')).order_by('name')):
self.assertAlmostEqual(area_sq_m[i], z.area.sq_m, tol)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_simple(self):
"""
Test a simple distance query, with projected coordinates and without
transformation.
"""
lagrange = GEOSGeometry('POINT(805066.295722839 4231496.29461335)', 32140)
houston = SouthTexasCity.objects.annotate(dist=Distance('point', lagrange)).order_by('id').first()
tol = 2 if oracle else 5
self.assertAlmostEqual(
houston.dist.m,
147075.069813,
tol
)
@skipUnlessDBFeature("has_Distance_function", "has_Transform_function")
def test_distance_projected(self):
"""
Test the `Distance` function on projected coordinate systems.
"""
# The point for La Grange, TX
lagrange = GEOSGeometry('POINT(-96.876369 29.905320)', 4326)
# Reference distances in feet and in meters. Got these values from
# using the provided raw SQL statements.
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 32140))
# FROM distapp_southtexascity;
m_distances = [147075.069813, 139630.198056, 140888.552826,
138809.684197, 158309.246259, 212183.594374,
70870.188967, 165337.758878, 139196.085105]
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 2278))
# FROM distapp_southtexascityft;
ft_distances = [482528.79154625, 458103.408123001, 462231.860397575,
455411.438904354, 519386.252102563, 696139.009211594,
232513.278304279, 542445.630586414, 456679.155883207]
# Testing using different variations of parameters and using models
# with different projected coordinate systems.
dist1 = SouthTexasCity.objects.annotate(distance=Distance('point', lagrange)).order_by('id')
dist2 = SouthTexasCityFt.objects.annotate(distance=Distance('point', lagrange)).order_by('id')
dist_qs = [dist1, dist2]
# Original query done on PostGIS, have to adjust AlmostEqual tolerance
# for Oracle.
tol = 2 if oracle else 5
# Ensuring expected distances are returned for each distance queryset.
for qs in dist_qs:
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(m_distances[i], c.distance.m, tol)
self.assertAlmostEqual(ft_distances[i], c.distance.survey_ft, tol)
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_geodetic(self):
"""
Test the `Distance` function on geodetic coordinate systems.
"""
# Testing geodetic distance calculation with a non-point geometry
# (a LineString of Wollongong and Shellharbour coords).
ls = LineString(((150.902, -34.4245), (150.87, -34.5789)), srid=4326)
# Reference query:
# SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326))
# FROM distapp_australiacity ORDER BY name;
distances = [1120954.92533513, 140575.720018241, 640396.662906304,
60580.9693849269, 972807.955955075, 568451.8357838,
40435.4335201384, 0, 68272.3896586844, 12375.0643697706, 0]
qs = AustraliaCity.objects.annotate(distance=Distance('point', ls)).order_by('name')
for city, distance in zip(qs, distances):
with self.subTest(city=city, distance=distance):
# Testing equivalence to within a meter (kilometer on SpatiaLite).
tol = -3 if spatialite else 0
self.assertAlmostEqual(distance, city.distance.m, tol)
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_geodetic_spheroid(self):
tol = 2 if oracle else 4
# Got the reference distances using the raw SQL statements:
# SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326),
# 'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
# SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326))
# FROM distapp_australiacity WHERE (NOT (id = 11)); st_distance_sphere
spheroid_distances = [
60504.0628957201, 77023.9489850262, 49154.8867574404,
90847.4358768573, 217402.811919332, 709599.234564757,
640011.483550888, 7772.00667991925, 1047861.78619339,
1165126.55236034,
]
sphere_distances = [
60580.9693849267, 77144.0435286473, 49199.4415344719,
90804.7533823494, 217713.384600405, 709134.127242793,
639828.157159169, 7786.82949717788, 1049204.06569028,
1162623.7238134,
]
# Testing with spheroid distances first.
hillsdale = AustraliaCity.objects.get(name='Hillsdale')
qs = AustraliaCity.objects.exclude(id=hillsdale.id).annotate(
distance=Distance('point', hillsdale.point, spheroid=True)
).order_by('id')
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(spheroid_distances[i], c.distance.m, tol)
if postgis or spatialite:
# PostGIS uses sphere-only distances by default, testing these as well.
qs = AustraliaCity.objects.exclude(id=hillsdale.id).annotate(
distance=Distance('point', hillsdale.point)
).order_by('id')
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(sphere_distances[i], c.distance.m, tol)
@skipIfDBFeature("supports_distance_geodetic")
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_raw_result(self):
distance = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=4326), Point(0, 1, srid=4326)),
).first().d
self.assertEqual(distance, 1)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_d_lookup(self):
qs = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=3857), Point(0, 1, srid=3857)),
).filter(d=D(m=1))
self.assertTrue(qs.exists())
@skipIfDBFeature("supports_distance_geodetic")
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_raw_result_d_lookup(self):
qs = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=4326), Point(0, 1, srid=4326)),
).filter(d=D(m=1))
msg = 'Distance measure is supplied, but units are unknown for result.'
with self.assertRaisesMessage(ValueError, msg):
list(qs)
@no_oracle # Oracle already handles geographic distance calculation.
@skipUnlessDBFeature("has_Distance_function", 'has_Transform_function')
def test_distance_transform(self):
"""
Test the `Distance` function used with `Transform` on a geographic field.
"""
# We'll be using a Polygon (created by buffering the centroid
# of 77005 to 100m) -- which aren't allowed in geographic distance
# queries normally, however our field has been transformed to
# a non-geographic system.
z = SouthTexasZipcode.objects.get(name='77005')
# Reference query:
# SELECT ST_Distance(ST_Transform("distapp_censuszipcode"."poly", 32140),
# ST_GeomFromText('<buffer_wkt>', 32140))
# FROM "distapp_censuszipcode";
dists_m = [3553.30384972258, 1243.18391525602, 2186.15439472242]
# Having our buffer in the SRID of the transformation and of the field
# -- should get the same results. The first buffer has no need for
# transformation SQL because it is the same SRID as what was given
# to `transform()`. The second buffer will need to be transformed,
# however.
buf1 = z.poly.centroid.buffer(100)
buf2 = buf1.transform(4269, clone=True)
ref_zips = ['77002', '77025', '77401']
for buf in [buf1, buf2]:
qs = CensusZipcode.objects.exclude(name='77005').annotate(
distance=Distance(Transform('poly', 32140), buf)
).order_by('name')
self.assertEqual(ref_zips, sorted([c.name for c in qs]))
for i, z in enumerate(qs):
self.assertAlmostEqual(z.distance.m, dists_m[i], 5)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_order_by(self):
qs = SouthTexasCity.objects.annotate(distance=Distance('point', Point(3, 3, srid=32140))).order_by(
'distance'
).values_list('name', flat=True).filter(name__in=('San Antonio', 'Pearland'))
self.assertSequenceEqual(qs, ['San Antonio', 'Pearland'])
@skipUnlessDBFeature("has_Length_function")
def test_length(self):
"""
Test the `Length` function.
"""
# Reference query (should use `length_spheroid`).
# SELECT ST_length_spheroid(ST_GeomFromText('<wkt>', 4326) 'SPHEROID["WGS 84",6378137,298.257223563,
# AUTHORITY["EPSG","7030"]]');
len_m1 = 473504.769553813
len_m2 = 4617.668
if connection.features.supports_length_geodetic:
qs = Interstate.objects.annotate(length=Length('path'))
tol = 2 if oracle else 3
self.assertAlmostEqual(len_m1, qs[0].length.m, tol)
# TODO: test with spheroid argument (True and False)
else:
# Does not support geodetic coordinate systems.
with self.assertRaises(NotImplementedError):
list(Interstate.objects.annotate(length=Length('path')))
# Now doing length on a projected coordinate system.
i10 = SouthTexasInterstate.objects.annotate(length=Length('path')).get(name='I-10')
self.assertAlmostEqual(len_m2, i10.length.m, 2)
self.assertTrue(
SouthTexasInterstate.objects.annotate(length=Length('path')).filter(length__gt=4000).exists()
)
# Length with an explicit geometry value.
qs = Interstate.objects.annotate(length=Length(i10.path))
self.assertAlmostEqual(qs.first().length.m, len_m2, 2)
@skipUnlessDBFeature("has_Perimeter_function")
def test_perimeter(self):
"""
Test the `Perimeter` function.
"""
# Reference query:
# SELECT ST_Perimeter(distapp_southtexaszipcode.poly) FROM distapp_southtexaszipcode;
perim_m = [18404.3550889361, 15627.2108551001, 20632.5588368978, 17094.5996143697]
tol = 2 if oracle else 7
qs = SouthTexasZipcode.objects.annotate(perimeter=Perimeter('poly')).order_by('name')
for i, z in enumerate(qs):
self.assertAlmostEqual(perim_m[i], z.perimeter.m, tol)
# Running on points; should return 0.
qs = SouthTexasCity.objects.annotate(perim=Perimeter('point'))
for city in qs:
self.assertEqual(0, city.perim.m)
@skipUnlessDBFeature("has_Perimeter_function")
def test_perimeter_geodetic(self):
# Currently only Oracle supports calculating the perimeter on geodetic
# geometries (without being transformed).
qs1 = CensusZipcode.objects.annotate(perim=Perimeter('poly'))
if connection.features.supports_perimeter_geodetic:
self.assertAlmostEqual(qs1[0].perim.m, 18406.3818954314, 3)
else:
with self.assertRaises(NotImplementedError):
list(qs1)
# But should work fine when transformed to projected coordinates
qs2 = CensusZipcode.objects.annotate(perim=Perimeter(Transform('poly', 32140))).filter(name='77002')
self.assertAlmostEqual(qs2[0].perim.m, 18404.355, 3)
@skipUnlessDBFeature("supports_null_geometries", "has_Area_function", "has_Distance_function")
def test_measurement_null_fields(self):
"""
Test the measurement functions on fields with NULL values.
"""
# Creating SouthTexasZipcode w/NULL value.
SouthTexasZipcode.objects.create(name='78212')
# Performing distance/area queries against the NULL PolygonField,
# and ensuring the result of the operations is None.
htown = SouthTexasCity.objects.get(name='Downtown Houston')
z = SouthTexasZipcode.objects.annotate(
distance=Distance('poly', htown.point), area=Area('poly')
).get(name='78212')
self.assertIsNone(z.distance)
self.assertIsNone(z.area)
| test_distance_lookups_with_expression_rhs | identifier_name |
tests.py | from django.contrib.gis.db.models.functions import (
Area, Distance, Length, Perimeter, Transform,
)
from django.contrib.gis.geos import GEOSGeometry, LineString, Point
from django.contrib.gis.measure import D # alias for Distance
from django.db import connection
from django.db.models import F, Q
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from ..utils import no_oracle, oracle, postgis, spatialite
from .models import (
AustraliaCity, CensusZipcode, Interstate, SouthTexasCity, SouthTexasCityFt,
SouthTexasInterstate, SouthTexasZipcode,
)
class DistanceTest(TestCase):
fixtures = ['initial']
def setUp(self):
# A point we are testing distances with -- using a WGS84
# coordinate that'll be implicitly transformed to that to
# the coordinate system of the field, EPSG:32140 (Texas South Central
# w/units in meters)
self.stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)
# Another one for Australia
self.au_pnt = GEOSGeometry('POINT (150.791 -34.4919)', 4326)
def get_names(self, qs):
cities = [c.name for c in qs]
cities.sort()
return cities
def test_init(self):
"""
Test initialization of distance models.
"""
self.assertEqual(9, SouthTexasCity.objects.count())
self.assertEqual(9, SouthTexasCityFt.objects.count())
self.assertEqual(11, AustraliaCity.objects.count())
self.assertEqual(4, SouthTexasZipcode.objects.count())
self.assertEqual(4, CensusZipcode.objects.count())
self.assertEqual(1, Interstate.objects.count())
self.assertEqual(1, SouthTexasInterstate.objects.count())
@skipUnlessDBFeature("supports_dwithin_lookup")
def test_dwithin(self):
"""
Test the `dwithin` lookup type.
"""
# Distances -- all should be equal (except for the
# degree/meter pair in au_cities, that's somewhat
# approximate).
tx_dists = [(7000, 22965.83), D(km=7), D(mi=4.349)]
au_dists = [(0.5, 32000), D(km=32), D(mi=19.884)]
# Expected cities for Australia and Texas.
tx_cities = ['Downtown Houston', 'Southside Place']
au_cities = ['Mittagong', 'Shellharbour', 'Thirroul', 'Wollongong']
# Performing distance queries on two projected coordinate systems one
# with units in meters and the other in units of U.S. survey feet.
for dist in tx_dists:
if isinstance(dist, tuple):
dist1, dist2 = dist
else:
dist1 = dist2 = dist
qs1 = SouthTexasCity.objects.filter(point__dwithin=(self.stx_pnt, dist1))
qs2 = SouthTexasCityFt.objects.filter(point__dwithin=(self.stx_pnt, dist2))
for qs in qs1, qs2:
with self.subTest(dist=dist, qs=qs):
self.assertEqual(tx_cities, self.get_names(qs))
# Now performing the `dwithin` queries on a geodetic coordinate system.
for dist in au_dists:
with self.subTest(dist=dist):
if isinstance(dist, D) and not oracle:
type_error = True
else:
type_error = False
if isinstance(dist, tuple):
if oracle or spatialite:
# Result in meters
dist = dist[1]
else:
# Result in units of the field
dist = dist[0]
# Creating the query set.
qs = AustraliaCity.objects.order_by('name')
if type_error:
# A ValueError should be raised on PostGIS when trying to
# pass Distance objects into a DWithin query using a
# geodetic field.
with self.assertRaises(ValueError):
AustraliaCity.objects.filter(point__dwithin=(self.au_pnt, dist)).count()
else:
self.assertEqual(au_cities, self.get_names(qs.filter(point__dwithin=(self.au_pnt, dist))))
@skipUnlessDBFeature("supports_distances_lookups")
def test_distance_lookups(self):
"""
Test the `distance_lt`, `distance_gt`, `distance_lte`, and `distance_gte` lookup types.
"""
# Retrieving the cities within a 20km 'donut' w/a 7km radius 'hole'
# (thus, Houston and Southside Place will be excluded as tested in
# the `test_dwithin` above).
for model in [SouthTexasCity, SouthTexasCityFt]:
qs = model.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(
point__distance_lte=(self.stx_pnt, D(km=20)),
)
cities = self.get_names(qs)
self.assertEqual(cities, ['Bellaire', 'Pearland', 'West University Place'])
# Doing a distance query using Polygons instead of a Point.
z = SouthTexasZipcode.objects.get(name='77005')
qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=275)))
self.assertEqual(['77025', '77401'], self.get_names(qs))
# If we add a little more distance 77002 should be included.
qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=300)))
self.assertEqual(['77002', '77025', '77401'], self.get_names(qs))
@skipUnlessDBFeature("supports_distances_lookups", "supports_distance_geodetic")
def test_geodetic_distance_lookups(self):
"""
Test distance lookups on geodetic coordinate systems.
"""
# Line is from Canberra to Sydney. Query is for all other cities within
# a 100km of that line (which should exclude only Hobart & Adelaide).
line = GEOSGeometry('LINESTRING(144.9630 -37.8143,151.2607 -33.8870)', 4326)
dist_qs = AustraliaCity.objects.filter(point__distance_lte=(line, D(km=100)))
expected_cities = [
'Batemans Bay', 'Canberra', 'Hillsdale',
'Melbourne', 'Mittagong', 'Shellharbour',
'Sydney', 'Thirroul', 'Wollongong',
]
if spatialite:
# SpatiaLite is less accurate and returns 102.8km for Batemans Bay.
expected_cities.pop(0)
self.assertEqual(expected_cities, self.get_names(dist_qs))
# Too many params (4 in this case) should raise a ValueError.
queryset = AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)', D(km=100), 'spheroid', '4'))
with self.assertRaises(ValueError):
len(queryset)
# Not enough params should raise a ValueError.
with self.assertRaises(ValueError):
len(AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)',)))
# Getting all cities w/in 550 miles of Hobart.
hobart = AustraliaCity.objects.get(name='Hobart')
qs = AustraliaCity.objects.exclude(name='Hobart').filter(point__distance_lte=(hobart.point, D(mi=550)))
cities = self.get_names(qs)
self.assertEqual(cities, ['Batemans Bay', 'Canberra', 'Melbourne'])
# Cities that are either really close or really far from Wollongong --
# and using different units of distance.
wollongong = AustraliaCity.objects.get(name='Wollongong')
d1, d2 = D(yd=19500), D(nm=400) # Yards (~17km) & Nautical miles.
# Normal geodetic distance lookup (uses `distance_sphere` on PostGIS).
gq1 = Q(point__distance_lte=(wollongong.point, d1))
gq2 = Q(point__distance_gte=(wollongong.point, d2))
qs1 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq1 | gq2)
# Geodetic distance lookup but telling GeoDjango to use `distance_spheroid`
# instead (we should get the same results b/c accuracy variance won't matter
# in this test case).
querysets = [qs1]
if connection.features.has_DistanceSpheroid_function:
gq3 = Q(point__distance_lte=(wollongong.point, d1, 'spheroid'))
gq4 = Q(point__distance_gte=(wollongong.point, d2, 'spheroid'))
qs2 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq3 | gq4)
querysets.append(qs2)
for qs in querysets:
cities = self.get_names(qs)
self.assertEqual(cities, ['Adelaide', 'Hobart', 'Shellharbour', 'Thirroul'])
@skipUnlessDBFeature("supports_distances_lookups")
def test_distance_lookups_with_expression_rhs(self):
qs = SouthTexasCity.objects.filter(
point__distance_lte=(self.stx_pnt, F('radius')),
).order_by('name')
self.assertEqual(
self.get_names(qs),
['Bellaire', 'Downtown Houston', 'Southside Place', 'West University Place']
)
# With a combined expression
qs = SouthTexasCity.objects.filter(
point__distance_lte=(self.stx_pnt, F('radius') * 2),
).order_by('name')
self.assertEqual(len(qs), 5)
self.assertIn('Pearland', self.get_names(qs))
# With spheroid param
if connection.features.supports_distance_geodetic:
hobart = AustraliaCity.objects.get(name='Hobart')
qs = AustraliaCity.objects.filter(
point__distance_lte=(hobart.point, F('radius') * 70, 'spheroid'),
).order_by('name')
self.assertEqual(self.get_names(qs), ['Canberra', 'Hobart', 'Melbourne'])
'''
=============================
Distance functions on PostGIS
=============================
| Projected Geometry | Lon/lat Geometry | Geography (4326)
ST_Distance(geom1, geom2) | OK (meters) | :-( (degrees) | OK (meters)
ST_Distance(geom1, geom2, use_spheroid=False) | N/A | N/A | OK (meters), less accurate, quick
Distance_Sphere(geom1, geom2) | N/A | OK (meters) | N/A
Distance_Spheroid(geom1, geom2, spheroid) | N/A | OK (meters) | N/A
ST_Perimeter(geom1) | OK | :-( (degrees) | OK
================================
Distance functions on SpatiaLite
================================
| Projected Geometry | Lon/lat Geometry
ST_Distance(geom1, geom2) | OK (meters) | N/A
ST_Distance(geom1, geom2, use_ellipsoid=True) | N/A | OK (meters)
ST_Distance(geom1, geom2, use_ellipsoid=False) | N/A | OK (meters), less accurate, quick
Perimeter(geom1) | OK | :-( (degrees)
''' # NOQA
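# --- Editor's note (illustrative sketch, not part of the original test suite) ---
# The tables above reduce to one practical choice on geodetic fields: pass
# spheroid=True to Distance/Length for the slower, more accurate spheroid
# formulas, or omit it for the sphere-based defaults. A minimal sketch, assuming
# the AustraliaCity model above (`pnt` is any hypothetical GEOS point):
#
# from django.contrib.gis.db.models.functions import Distance
# sphere_qs = AustraliaCity.objects.annotate(d=Distance('point', pnt))
# spheroid_qs = AustraliaCity.objects.annotate(d=Distance('point', pnt, spheroid=True))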
class DistanceFunctionsTests(TestCase):
fixtures = ['initial']
@skipUnlessDBFeature("has_Area_function")
def test_area(self):
# Reference queries:
# SELECT ST_Area(poly) FROM distapp_southtexaszipcode;
area_sq_m = [5437908.90234375, 10183031.4389648, 11254471.0073242, 9881708.91772461]
# Tolerance has to be lower for Oracle
tol = 2
for i, z in enumerate(SouthTexasZipcode.objects.annotate(area=Area('poly')).order_by('name')):
self.assertAlmostEqual(area_sq_m[i], z.area.sq_m, tol)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_simple(self):
"""
Test a simple distance query, with projected coordinates and without
transformation.
"""
lagrange = GEOSGeometry('POINT(805066.295722839 4231496.29461335)', 32140)
houston = SouthTexasCity.objects.annotate(dist=Distance('point', lagrange)).order_by('id').first()
tol = 2 if oracle else 5
self.assertAlmostEqual(
houston.dist.m,
147075.069813,
tol
)
@skipUnlessDBFeature("has_Distance_function", "has_Transform_function")
def test_distance_projected(self):
"""
Test the `Distance` function on projected coordinate systems.
"""
# The point for La Grange, TX
lagrange = GEOSGeometry('POINT(-96.876369 29.905320)', 4326)
# Reference distances in feet and in meters. Got these values from
# using the provided raw SQL statements.
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 32140))
# FROM distapp_southtexascity;
m_distances = [147075.069813, 139630.198056, 140888.552826,
138809.684197, 158309.246259, 212183.594374,
70870.188967, 165337.758878, 139196.085105]
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 2278))
# FROM distapp_southtexascityft;
ft_distances = [482528.79154625, 458103.408123001, 462231.860397575,
455411.438904354, 519386.252102563, 696139.009211594,
232513.278304279, 542445.630586414, 456679.155883207]
# Testing using different variations of parameters and using models
# with different projected coordinate systems.
dist1 = SouthTexasCity.objects.annotate(distance=Distance('point', lagrange)).order_by('id')
dist2 = SouthTexasCityFt.objects.annotate(distance=Distance('point', lagrange)).order_by('id')
dist_qs = [dist1, dist2]
# Original query done on PostGIS, have to adjust AlmostEqual tolerance
# for Oracle.
tol = 2 if oracle else 5
# Ensuring expected distances are returned for each distance queryset.
for qs in dist_qs:
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(m_distances[i], c.distance.m, tol)
self.assertAlmostEqual(ft_distances[i], c.distance.survey_ft, tol)
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_geodetic(self):
"""
Test the `Distance` function on geodetic coordinate systems.
"""
# Testing geodetic distance calculation with a non-point geometry
# (a LineString of Wollongong and Shellharbour coords).
ls = LineString(((150.902, -34.4245), (150.87, -34.5789)), srid=4326)
# Reference query:
# SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326))
# FROM distapp_australiacity ORDER BY name;
distances = [1120954.92533513, 140575.720018241, 640396.662906304,
60580.9693849269, 972807.955955075, 568451.8357838,
40435.4335201384, 0, 68272.3896586844, 12375.0643697706, 0]
qs = AustraliaCity.objects.annotate(distance=Distance('point', ls)).order_by('name')
for city, distance in zip(qs, distances):
with self.subTest(city=city, distance=distance):
# Testing equivalence to within a meter (kilometer on SpatiaLite).
tol = -3 if spatialite else 0
self.assertAlmostEqual(distance, city.distance.m, tol)
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_geodetic_spheroid(self):
tol = 2 if oracle else 4
# Got the reference distances using the raw SQL statements:
# SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326),
# 'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
# SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326))
# FROM distapp_australiacity WHERE (NOT (id = 11));
spheroid_distances = [
60504.0628957201, 77023.9489850262, 49154.8867574404,
90847.4358768573, 217402.811919332, 709599.234564757,
640011.483550888, 7772.00667991925, 1047861.78619339,
1165126.55236034,
]
sphere_distances = [
60580.9693849267, 77144.0435286473, 49199.4415344719,
90804.7533823494, 217713.384600405, 709134.127242793,
639828.157159169, 7786.82949717788, 1049204.06569028,
1162623.7238134,
]
# Testing with spheroid distances first.
hillsdale = AustraliaCity.objects.get(name='Hillsdale')
qs = AustraliaCity.objects.exclude(id=hillsdale.id).annotate(
distance=Distance('point', hillsdale.point, spheroid=True)
).order_by('id')
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(spheroid_distances[i], c.distance.m, tol)
if postgis or spatialite:
# PostGIS uses sphere-only distances by default, testing these as well.
qs = AustraliaCity.objects.exclude(id=hillsdale.id).annotate(
distance=Distance('point', hillsdale.point)
).order_by('id')
for i, c in enumerate(qs):
with self.subTest(c=c):
self.assertAlmostEqual(sphere_distances[i], c.distance.m, tol)
@skipIfDBFeature("supports_distance_geodetic")
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_raw_result(self):
distance = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=4326), Point(0, 1, srid=4326)),
).first().d
self.assertEqual(distance, 1)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_d_lookup(self):
qs = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=3857), Point(0, 1, srid=3857)),
).filter(d=D(m=1))
self.assertTrue(qs.exists())
@skipIfDBFeature("supports_distance_geodetic")
@skipUnlessDBFeature("has_Distance_function")
def test_distance_function_raw_result_d_lookup(self):
qs = Interstate.objects.annotate(
d=Distance(Point(0, 0, srid=4326), Point(0, 1, srid=4326)),
).filter(d=D(m=1))
msg = 'Distance measure is supplied, but units are unknown for result.'
with self.assertRaisesMessage(ValueError, msg):
list(qs)
@no_oracle # Oracle already handles geographic distance calculation.
@skipUnlessDBFeature("has_Distance_function", 'has_Transform_function')
def test_distance_transform(self):
"""
Test the `Distance` function used with `Transform` on a geographic field.
"""
# We'll be using a Polygon (created by buffering the centroid
# of 77005 to 100m) -- which aren't allowed in geographic distance
# queries normally, however our field has been transformed to
# a non-geographic system.
z = SouthTexasZipcode.objects.get(name='77005')
# Reference query:
# SELECT ST_Distance(ST_Transform("distapp_censuszipcode"."poly", 32140),
# ST_GeomFromText('<buffer_wkt>', 32140))
# FROM "distapp_censuszipcode";
dists_m = [3553.30384972258, 1243.18391525602, 2186.15439472242]
# Having our buffer in the SRID of the transformation and of the field
# -- should get the same results. The first buffer has no need for
# transformation SQL because it is the same SRID as what was given
# to `transform()`. The second buffer will need to be transformed,
# however.
buf1 = z.poly.centroid.buffer(100)
buf2 = buf1.transform(4269, clone=True)
ref_zips = ['77002', '77025', '77401']
for buf in [buf1, buf2]:
qs = CensusZipcode.objects.exclude(name='77005').annotate(
distance=Distance(Transform('poly', 32140), buf)
).order_by('name')
self.assertEqual(ref_zips, sorted([c.name for c in qs]))
for i, z in enumerate(qs):
self.assertAlmostEqual(z.distance.m, dists_m[i], 5)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_order_by(self):
qs = SouthTexasCity.objects.annotate(distance=Distance('point', Point(3, 3, srid=32140))).order_by(
'distance'
).values_list('name', flat=True).filter(name__in=('San Antonio', 'Pearland'))
self.assertSequenceEqual(qs, ['San Antonio', 'Pearland'])
@skipUnlessDBFeature("has_Length_function")
def test_length(self):
|
@skipUnlessDBFeature("has_Perimeter_function")
def test_perimeter(self):
"""
Test the `Perimeter` function.
"""
# Reference query:
# SELECT ST_Perimeter(distapp_southtexaszipcode.poly) FROM distapp_southtexaszipcode;
perim_m = [18404.3550889361, 15627.2108551001, 20632.5588368978, 17094.5996143697]
tol = 2 if oracle else 7
qs = SouthTexasZipcode.objects.annotate(perimeter=Perimeter('poly')).order_by('name')
for i, z in enumerate(qs):
self.assertAlmostEqual(perim_m[i], z.perimeter.m, tol)
# Running on points; should return 0.
qs = SouthTexasCity.objects.annotate(perim=Perimeter('point'))
for city in qs:
self.assertEqual(0, city.perim.m)
@skipUnlessDBFeature("has_Perimeter_function")
def test_perimeter_geodetic(self):
# Currently only Oracle supports calculating the perimeter on geodetic
# geometries (without being transformed).
qs1 = CensusZipcode.objects.annotate(perim=Perimeter('poly'))
if connection.features.supports_perimeter_geodetic:
self.assertAlmostEqual(qs1[0].perim.m, 18406.3818954314, 3)
else:
with self.assertRaises(NotImplementedError):
list(qs1)
# But should work fine when transformed to projected coordinates
qs2 = CensusZipcode.objects.annotate(perim=Perimeter(Transform('poly', 32140))).filter(name='77002')
self.assertAlmostEqual(qs2[0].perim.m, 18404.355, 3)
@skipUnlessDBFeature("supports_null_geometries", "has_Area_function", "has_Distance_function")
def test_measurement_null_fields(self):
"""
Test the measurement functions on fields with NULL values.
"""
# Creating SouthTexasZipcode w/NULL value.
SouthTexasZipcode.objects.create(name='78212')
# Performing distance/area queries against the NULL PolygonField,
# and ensuring the result of the operations is None.
htown = SouthTexasCity.objects.get(name='Downtown Houston')
z = SouthTexasZipcode.objects.annotate(
distance=Distance('poly', htown.point), area=Area('poly')
).get(name='78212')
self.assertIsNone(z.distance)
self.assertIsNone(z.area)
| """
Test the `Length` function.
"""
# Reference query (should use `length_spheroid`).
# SELECT ST_length_spheroid(ST_GeomFromText('<wkt>', 4326), 'SPHEROID["WGS 84",6378137,298.257223563,
# AUTHORITY["EPSG","7030"]]');
len_m1 = 473504.769553813
len_m2 = 4617.668
if connection.features.supports_length_geodetic:
qs = Interstate.objects.annotate(length=Length('path'))
tol = 2 if oracle else 3
self.assertAlmostEqual(len_m1, qs[0].length.m, tol)
# TODO: test with spheroid argument (True and False)
else:
# Does not support geodetic coordinate systems.
with self.assertRaises(NotImplementedError):
list(Interstate.objects.annotate(length=Length('path')))
# Now doing length on a projected coordinate system.
i10 = SouthTexasInterstate.objects.annotate(length=Length('path')).get(name='I-10')
self.assertAlmostEqual(len_m2, i10.length.m, 2)
self.assertTrue(
SouthTexasInterstate.objects.annotate(length=Length('path')).filter(length__gt=4000).exists()
)
# Length with an explicit geometry value.
qs = Interstate.objects.annotate(length=Length(i10.path))
self.assertAlmostEqual(qs.first().length.m, len_m2, 2) | identifier_body |
postcss.js | 'use strict';
exports.__esModule = true;
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _declaration = require('./declaration');
var _declaration2 = _interopRequireDefault(_declaration);
var _processor = require('./processor');
var _processor2 = _interopRequireDefault(_processor);
var _stringify = require('./stringify');
var _stringify2 = _interopRequireDefault(_stringify);
var _comment = require('./comment');
var _comment2 = _interopRequireDefault(_comment);
var _atRule = require('./at-rule');
var _atRule2 = _interopRequireDefault(_atRule);
var _vendor = require('./vendor');
var _vendor2 = _interopRequireDefault(_vendor);
var _parse = require('./parse');
var _parse2 = _interopRequireDefault(_parse);
var _list = require('./list');
var _list2 = _interopRequireDefault(_list);
var _rule = require('./rule');
var _rule2 = _interopRequireDefault(_rule);
var _root = require('./root');
var _root2 = _interopRequireDefault(_root);
var postcss = function postcss() {
for (var _len = arguments.length, plugins = Array(_len), _key = 0; _key < _len; _key++) {
plugins[_key] = arguments[_key];
}
if (plugins.length === 1 && Array.isArray(plugins[0])) |
return new _processor2['default'](plugins);
};
postcss.plugin = function (name, initializer) {
var creator = function creator() {
var transformer = initializer.apply(undefined, arguments);
transformer.postcssPlugin = name;
transformer.postcssVersion = new _processor2['default']().version;
return transformer;
};
creator.postcss = creator();
creator.process = function (css, opts) {
return postcss([creator(opts)]).process(css, opts);
};
return creator;
};
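// Editor's sketch (illustrative; not part of the PostCSS source): defining and
// running a plugin through the `postcss.plugin` helper above. The plugin name
// 'doubler' and its behaviour are hypothetical.
// var doubler = postcss.plugin('doubler', function () {
//     return function (css) {
//         css.walkDecls(function (decl) { decl.cloneAfter(); });
//     };
// });
// postcss([ doubler ]).process('a { color: black }').then(function (result) {
//     console.log(result.css);
// });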
postcss.stringify = _stringify2['default'];
postcss.vendor = _vendor2['default'];
postcss.parse = _parse2['default'];
postcss.list = _list2['default'];
postcss.comment = function (defaults) {
return new _comment2['default'](defaults);
};
postcss.atRule = function (defaults) {
return new _atRule2['default'](defaults);
};
postcss.decl = function (defaults) {
return new _declaration2['default'](defaults);
};
postcss.rule = function (defaults) {
return new _rule2['default'](defaults);
};
postcss.root = function (defaults) {
return new _root2['default'](defaults);
};
exports['default'] = postcss;
module.exports = exports['default']; | {
plugins = plugins[0];
} | conditional_block |
postcss.js | 'use strict';
exports.__esModule = true;
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _declaration = require('./declaration');
var _declaration2 = _interopRequireDefault(_declaration);
| var _stringify = require('./stringify');
var _stringify2 = _interopRequireDefault(_stringify);
var _comment = require('./comment');
var _comment2 = _interopRequireDefault(_comment);
var _atRule = require('./at-rule');
var _atRule2 = _interopRequireDefault(_atRule);
var _vendor = require('./vendor');
var _vendor2 = _interopRequireDefault(_vendor);
var _parse = require('./parse');
var _parse2 = _interopRequireDefault(_parse);
var _list = require('./list');
var _list2 = _interopRequireDefault(_list);
var _rule = require('./rule');
var _rule2 = _interopRequireDefault(_rule);
var _root = require('./root');
var _root2 = _interopRequireDefault(_root);
var postcss = function postcss() {
for (var _len = arguments.length, plugins = Array(_len), _key = 0; _key < _len; _key++) {
plugins[_key] = arguments[_key];
}
if (plugins.length === 1 && Array.isArray(plugins[0])) {
plugins = plugins[0];
}
return new _processor2['default'](plugins);
};
postcss.plugin = function (name, initializer) {
var creator = function creator() {
var transformer = initializer.apply(undefined, arguments);
transformer.postcssPlugin = name;
transformer.postcssVersion = new _processor2['default']().version;
return transformer;
};
creator.postcss = creator();
creator.process = function (css, opts) {
return postcss([creator(opts)]).process(css, opts);
};
return creator;
};
postcss.stringify = _stringify2['default'];
postcss.vendor = _vendor2['default'];
postcss.parse = _parse2['default'];
postcss.list = _list2['default'];
postcss.comment = function (defaults) {
return new _comment2['default'](defaults);
};
postcss.atRule = function (defaults) {
return new _atRule2['default'](defaults);
};
postcss.decl = function (defaults) {
return new _declaration2['default'](defaults);
};
postcss.rule = function (defaults) {
return new _rule2['default'](defaults);
};
postcss.root = function (defaults) {
return new _root2['default'](defaults);
};
exports['default'] = postcss;
module.exports = exports['default']; | var _processor = require('./processor');
var _processor2 = _interopRequireDefault(_processor);
| random_line_split |
postcss.js | 'use strict';
exports.__esModule = true;
function | (obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _declaration = require('./declaration');
var _declaration2 = _interopRequireDefault(_declaration);
var _processor = require('./processor');
var _processor2 = _interopRequireDefault(_processor);
var _stringify = require('./stringify');
var _stringify2 = _interopRequireDefault(_stringify);
var _comment = require('./comment');
var _comment2 = _interopRequireDefault(_comment);
var _atRule = require('./at-rule');
var _atRule2 = _interopRequireDefault(_atRule);
var _vendor = require('./vendor');
var _vendor2 = _interopRequireDefault(_vendor);
var _parse = require('./parse');
var _parse2 = _interopRequireDefault(_parse);
var _list = require('./list');
var _list2 = _interopRequireDefault(_list);
var _rule = require('./rule');
var _rule2 = _interopRequireDefault(_rule);
var _root = require('./root');
var _root2 = _interopRequireDefault(_root);
var postcss = function postcss() {
for (var _len = arguments.length, plugins = Array(_len), _key = 0; _key < _len; _key++) {
plugins[_key] = arguments[_key];
}
if (plugins.length === 1 && Array.isArray(plugins[0])) {
plugins = plugins[0];
}
return new _processor2['default'](plugins);
};
postcss.plugin = function (name, initializer) {
var creator = function creator() {
var transformer = initializer.apply(undefined, arguments);
transformer.postcssPlugin = name;
transformer.postcssVersion = new _processor2['default']().version;
return transformer;
};
creator.postcss = creator();
creator.process = function (css, opts) {
return postcss([creator(opts)]).process(css, opts);
};
return creator;
};
postcss.stringify = _stringify2['default'];
postcss.vendor = _vendor2['default'];
postcss.parse = _parse2['default'];
postcss.list = _list2['default'];
postcss.comment = function (defaults) {
return new _comment2['default'](defaults);
};
postcss.atRule = function (defaults) {
return new _atRule2['default'](defaults);
};
postcss.decl = function (defaults) {
return new _declaration2['default'](defaults);
};
postcss.rule = function (defaults) {
return new _rule2['default'](defaults);
};
postcss.root = function (defaults) {
return new _root2['default'](defaults);
};
exports['default'] = postcss;
module.exports = exports['default']; | _interopRequireDefault | identifier_name |
postcss.js | 'use strict';
exports.__esModule = true;
function _interopRequireDefault(obj) |
var _declaration = require('./declaration');
var _declaration2 = _interopRequireDefault(_declaration);
var _processor = require('./processor');
var _processor2 = _interopRequireDefault(_processor);
var _stringify = require('./stringify');
var _stringify2 = _interopRequireDefault(_stringify);
var _comment = require('./comment');
var _comment2 = _interopRequireDefault(_comment);
var _atRule = require('./at-rule');
var _atRule2 = _interopRequireDefault(_atRule);
var _vendor = require('./vendor');
var _vendor2 = _interopRequireDefault(_vendor);
var _parse = require('./parse');
var _parse2 = _interopRequireDefault(_parse);
var _list = require('./list');
var _list2 = _interopRequireDefault(_list);
var _rule = require('./rule');
var _rule2 = _interopRequireDefault(_rule);
var _root = require('./root');
var _root2 = _interopRequireDefault(_root);
var postcss = function postcss() {
for (var _len = arguments.length, plugins = Array(_len), _key = 0; _key < _len; _key++) {
plugins[_key] = arguments[_key];
}
if (plugins.length === 1 && Array.isArray(plugins[0])) {
plugins = plugins[0];
}
return new _processor2['default'](plugins);
};
postcss.plugin = function (name, initializer) {
var creator = function creator() {
var transformer = initializer.apply(undefined, arguments);
transformer.postcssPlugin = name;
transformer.postcssVersion = new _processor2['default']().version;
return transformer;
};
creator.postcss = creator();
creator.process = function (css, opts) {
return postcss([creator(opts)]).process(css, opts);
};
return creator;
};
postcss.stringify = _stringify2['default'];
postcss.vendor = _vendor2['default'];
postcss.parse = _parse2['default'];
postcss.list = _list2['default'];
postcss.comment = function (defaults) {
return new _comment2['default'](defaults);
};
postcss.atRule = function (defaults) {
return new _atRule2['default'](defaults);
};
postcss.decl = function (defaults) {
return new _declaration2['default'](defaults);
};
postcss.rule = function (defaults) {
return new _rule2['default'](defaults);
};
postcss.root = function (defaults) {
return new _root2['default'](defaults);
};
exports['default'] = postcss;
module.exports = exports['default']; | { return obj && obj.__esModule ? obj : { 'default': obj }; } | identifier_body |
persistent_list.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A persistent, thread-safe singly-linked list.
use std::mem;
use std::sync::Arc;
pub struct PersistentList<T> {
head: PersistentListLink<T>,
length: usize,
}
struct PersistentListEntry<T> {
value: T,
next: PersistentListLink<T>,
}
type PersistentListLink<T> = Option<Arc<PersistentListEntry<T>>>;
impl<T> PersistentList<T> where T: Send + Sync {
#[inline]
pub fn new() -> PersistentList<T> {
PersistentList {
head: None,
length: 0,
}
}
#[inline]
pub fn len(&self) -> usize {
self.length
}
#[inline]
pub fn front(&self) -> Option<&T> {
self.head.as_ref().map(|head| &head.value)
}
#[inline]
pub fn prepend_elem(&self, value: T) -> PersistentList<T> {
PersistentList {
head: Some(Arc::new(PersistentListEntry {
value: value,
next: self.head.clone(),
})),
length: self.length + 1,
}
}
#[inline]
pub fn iter<'a>(&'a self) -> PersistentListIterator<'a,T> {
// This could clone (and would not need the lifetime if it did), but then it would incur
// atomic operations on every call to `.next()`. Bad.
PersistentListIterator {
entry: self.head.as_ref().map(|head| &**head),
}
}
}
impl<T> Clone for PersistentList<T> where T: Send + Sync {
fn | (&self) -> PersistentList<T> {
// This establishes the persistent nature of this list: we can clone a list by just cloning
// its head.
PersistentList {
head: self.head.clone(),
length: self.length,
}
}
}
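// --- Editor's illustration (assumption: this demo module is not part of the
// original file). It shows why the O(1) `clone` above is sound: lists share
// their Arc-backed tails, so prepending never mutates an existing list.
#[cfg(test)]
mod sharing_demo {
use super::PersistentList;
#[test]
fn prepend_shares_tail() {
let a: PersistentList<i32> = PersistentList::new().prepend_elem(1).prepend_elem(2);
let b = a.prepend_elem(3); // `a` is untouched; `b` reuses `a`'s entries as its tail
assert_eq!(a.len(), 2);
assert_eq!(b.len(), 3);
assert_eq!(b.iter().cloned().collect::<Vec<_>>(), vec![3, 2, 1]);
}
}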
pub struct PersistentListIterator<'a,T> where T: 'a + Send + Sync {
entry: Option<&'a PersistentListEntry<T>>,
}
impl<'a,T> Iterator for PersistentListIterator<'a,T> where T: Send + Sync + 'static {
type Item = &'a T;
#[inline]
fn next(&mut self) -> Option<&'a T> {
let entry = match self.entry {
None => return None,
Some(entry) => {
// This `transmute` is necessary to ensure that the lifetimes of the next entry and
// this entry match up; the compiler doesn't know this, but we do because of the
// reference counting behavior of `Arc`.
unsafe {
mem::transmute::<&'a PersistentListEntry<T>,
&'static PersistentListEntry<T>>(entry)
}
}
};
let value = &entry.value;
self.entry = match entry.next {
None => None,
Some(ref entry) => Some(&**entry),
};
Some(value)
}
}
| clone | identifier_name |
persistent_list.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A persistent, thread-safe singly-linked list. |
use std::mem;
use std::sync::Arc;
pub struct PersistentList<T> {
head: PersistentListLink<T>,
length: usize,
}
struct PersistentListEntry<T> {
value: T,
next: PersistentListLink<T>,
}
type PersistentListLink<T> = Option<Arc<PersistentListEntry<T>>>;
impl<T> PersistentList<T> where T: Send + Sync {
#[inline]
pub fn new() -> PersistentList<T> {
PersistentList {
head: None,
length: 0,
}
}
#[inline]
pub fn len(&self) -> usize {
self.length
}
#[inline]
pub fn front(&self) -> Option<&T> {
self.head.as_ref().map(|head| &head.value)
}
#[inline]
pub fn prepend_elem(&self, value: T) -> PersistentList<T> {
PersistentList {
head: Some(Arc::new(PersistentListEntry {
value: value,
next: self.head.clone(),
})),
length: self.length + 1,
}
}
#[inline]
pub fn iter<'a>(&'a self) -> PersistentListIterator<'a,T> {
// This could clone (and would not need the lifetime if it did), but then it would incur
// atomic operations on every call to `.next()`. Bad.
PersistentListIterator {
entry: self.head.as_ref().map(|head| &**head),
}
}
}
impl<T> Clone for PersistentList<T> where T: Send + Sync {
fn clone(&self) -> PersistentList<T> {
// This establishes the persistent nature of this list: we can clone a list by just cloning
// its head.
PersistentList {
head: self.head.clone(),
length: self.length,
}
}
}
pub struct PersistentListIterator<'a,T> where T: 'a + Send + Sync {
entry: Option<&'a PersistentListEntry<T>>,
}
impl<'a,T> Iterator for PersistentListIterator<'a,T> where T: Send + Sync + 'static {
type Item = &'a T;
#[inline]
fn next(&mut self) -> Option<&'a T> {
let entry = match self.entry {
None => return None,
Some(entry) => {
// This `transmute` is necessary to ensure that the lifetimes of the next entry and
// this entry match up; the compiler doesn't know this, but we do because of the
// reference counting behavior of `Arc`.
unsafe {
mem::transmute::<&'a PersistentListEntry<T>,
&'static PersistentListEntry<T>>(entry)
}
}
};
let value = &entry.value;
self.entry = match entry.next {
None => None,
Some(ref entry) => Some(&**entry),
};
Some(value)
}
} | random_line_split |
|
map.rs | #![allow(dead_code)]
#[derive(Debug)] enum | { Apple, Carrot, Potato }
#[derive(Debug)] struct Peeled(Food);
#[derive(Debug)] struct Chopped(Food);
#[derive(Debug)] struct Cooked(Food);
// Peel the fruit. If there is no fruit, return `None`;
// otherwise, return the peeled fruit.
fn peel(food: Option<Food>) -> Option<Peeled> {
match food {
Some(food) => Some(Peeled(food)),
None => None,
}
}
// As above, we need to check that the fruit has been peeled before chopping it.
fn chop(peeled: Option<Peeled>) -> Option<Chopped> {
match peeled {
Some(Peeled(food)) => Some(Chopped(food)),
None => None,
}
}
// Similar to the checks above, but using `map()` instead of `match`.
fn cook(chopped: Option<Chopped>) -> Option<Cooked> {
chopped.map(|Chopped(food)| Cooked(food))
}
// An alternative implementation: we can chain `map()` calls to simplify the flow above.
fn process(food: Option<Food>) -> Option<Cooked> {
food.map(|f| Peeled(f))
.map(|Peeled(f)| Chopped(f))
.map(|Chopped(f)| Cooked(f))
}
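// Editor's aside (illustrative): `process` fuses the three explicit functions
// above into one pipeline. `Some(Food::Apple)` flows through as
// Some(Apple) -> Some(Peeled(Apple)) -> Some(Chopped(Apple)) -> Some(Cooked(Apple)),
// while a `None` input short-circuits every `map` without running any closure.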
// It is very important to check whether the food exists before trying to eat it!
fn eat(food: Option<Cooked>) {
match food {
Some(food) => println!("Mmm. I love {:?}", food),
None => println!("Oh no! It wasn't edible."),
}
}
fn main() {
let apple = Some(Food::Apple);
let carrot = Some(Food::Carrot);
let potato = None;
let cooked_apple = cook(chop(peel(apple)));
let cooked_carrot = cook(chop(peel(carrot)));
// Let's try the simpler-looking `process()` now.
let cooked_potato = process(potato);
eat(cooked_apple);
eat(cooked_carrot);
eat(cooked_potato);
}
| Food | identifier_name |
map.rs | #![allow(dead_code)]
#[derive(Debug)] enum Food { Apple, Carrot, Potato }
#[derive(Debug)] struct Peeled(Food);
#[derive(Debug)] struct Chopped(Food);
#[derive(Debug)] struct Cooked(Food);
// Peel the fruit. If there is no fruit, return `None`;
// otherwise, return the peeled fruit.
fn peel(food: Option<Food>) -> Option<Peeled> {
match food {
Some(food) => Some(Peeled(food)),
None => None,
}
}
| Some(Peeled(food)) => Some(Chopped(food)),
None => None,
}
}
// Similar to the checks above, but using `map()` instead of `match`.
fn cook(chopped: Option<Chopped>) -> Option<Cooked> {
chopped.map(|Chopped(food)| Cooked(food))
}
// An alternative implementation: we can chain `map()` calls to simplify the flow above.
fn process(food: Option<Food>) -> Option<Cooked> {
food.map(|f| Peeled(f))
.map(|Peeled(f)| Chopped(f))
.map(|Chopped(f)| Cooked(f))
}
// It is very important to check whether the food exists before trying to eat it!
fn eat(food: Option<Cooked>) {
match food {
Some(food) => println!("Mmm. I love {:?}", food),
None => println!("Oh no! It wasn't edible."),
}
}
fn main() {
let apple = Some(Food::Apple);
let carrot = Some(Food::Carrot);
let potato = None;
let cooked_apple = cook(chop(peel(apple)));
let cooked_carrot = cook(chop(peel(carrot)));
// Let's try the simpler-looking `process()` now.
let cooked_potato = process(potato);
eat(cooked_apple);
eat(cooked_carrot);
eat(cooked_potato);
} | // As above, we need to check that the fruit has been peeled before chopping it.
fn chop(peeled: Option<Peeled>) -> Option<Chopped> {
match peeled { | random_line_split |
exceptions.py | """Custom Exception Classes for Phylotyper Module
"""
class PhylotyperError(Exception):
"""Basic exception for errors raised by Phylotyper modules"""
def __init__(self, subtype, msg=None):
if msg is None:
|
super(PhylotyperError, self).__init__(msg)
self.subtype = subtype
class ValuesError(PhylotyperError):
"""Unknown subtype"""
def __init__(self, subtype, msg=None):
super(PhylotyperError, self).__init__(
subtype, msg="Unrecognized subtype {}".format(subtype))
class DatabaseError(PhylotyperError):
"""Missing data in Database"""
def __init__(self, subtype, data, msg=None):
m = "Database is missing data {} for {}".format(data, subtype)
super(PhylotyperError, self).__init__(subtype, m)
self.data = data | msg = "An error occurred for subtype {}".format(subtype) | conditional_block
exceptions.py | """Custom Exception Classes for Phylotyper Module
"""
class PhylotyperError(Exception):
"""Basic exception for errors raised by Phylotyper modules"""
def __init__(self, subtype, msg=None):
if msg is None: | msg = "An error occurred for subtype {}".format(subtype)
super(PhylotyperError, self).__init__(msg)
self.subtype = subtype
class ValuesError(PhylotyperError):
"""Unknown subtype"""
def __init__(self, subtype, msg=None):
super(PhylotyperError, self).__init__(
subtype, msg="Unrecognized subtype {}".format(subtype))
class DatabaseError(PhylotyperError):
"""Missing data in Database"""
def __init__(self, subtype, data, msg=None):
m = "Database is missing data {} for {}".format(data, subtype)
super(PhylotyperError, self).__init__(subtype, m)
self.data = data | random_line_split |
|
exceptions.py | """Custom Exception Classes for Phylotyper Module
"""
class PhylotyperError(Exception):
"""Basic exception for errors raised by Phylotyper modules"""
def __init__(self, subtype, msg=None):
if msg is None:
msg = "An error occurred for subtype {}".format(subtype)
super(PhylotyperError, self).__init__(msg)
self.subtype = subtype
class | (PhylotyperError):
"""Unknown subtype"""
def __init__(self, subtype, msg=None):
super(PhylotyperError, self).__init__(
subtype, msg="Unrecognized subtype {}".format(subtype))
class DatabaseError(PhylotyperError):
"""Missing data in Database"""
def __init__(self, subtype, data, msg=None):
m = "Database is missing data {} for {}".format(data, subtype)
super(PhylotyperError, self).__init__(subtype, m)
self.data = data | ValuesError | identifier_name |
exceptions.py | """Custom Exception Classes for Phylotyper Module
"""
class PhylotyperError(Exception):
"""Basic exception for errors raised by Phylotyper modules"""
def __init__(self, subtype, msg=None):
if msg is None:
msg = "An error occurred for subtype {}".format(subtype)
super(PhylotyperError, self).__init__(msg)
self.subtype = subtype
class ValuesError(PhylotyperError):
"""Unknown subtype"""
def __init__(self, subtype, msg=None):
|
class DatabaseError(PhylotyperError):
"""Missing data in Database"""
def __init__(self, subtype, data, msg=None):
m = "Database is missing data {} for {}".format(data, subtype)
super(PhylotyperError, self).__init__(subtype, m)
self.data = data | super(PhylotyperError, self).__init__(
subtype, msg="Unrecognized subtype {}".format(subtype)) | identifier_body |
py_group.py | # coding=UTF-8
import mysql.connector
import xlrd
import xlsxwriter
import os
from mysql.connector import errorcode
from datetime import datetime
# File name of the symbolicated Excel workbook
EXCEL_NAME = '20170223_4.0.1_feedback_result_py'
DB_NAME = 'zl_crash'
config = {
'user': 'root',
'password': '123456',
'host': '127.0.0.1',
'database': 'zl_crash',
}
class Report(object):
'''
Report class used to encapsulate the row data in EXCEL
'''
def __init__(self, report_id, exception_type, device_id, exception_symbols, os_version):
self.report_id = report_id
self.exception_type = exception_type
self.device_id = device_id
self.exception_symbols = exception_symbols
self.os_version = os_version
def main():
begin_time = datetime.now()
# Table name
table_name = 'report_' + begin_time.strftime("%Y_%m_%d_%H_%M_%S")
# Create the table
create_table_in_db(table_name)
# Insert the data
insert_symbolication_result_into_db(table_name)
# Group the data and export it
generate_grouped_exception(table_name)
end_time = datetime.now()
print('Elapsed time: ' + str(end_time - begin_time))
def create_table_in_db(table_name):
'''
Create a table in the database, named `table_name`
:param table_name: table_name
'''
SQLS = {}
SQLS['drop_report'] = (
"DROP TABLE IF EXISTS `" + table_name + "`")
SQLS['report'] = (
"CREATE TABLE `" + table_name + "` ( "
"`report_id` int(11) NOT NULL AUTO_INCREMENT, "
"`exception_type` varchar(255) DEFAULT NULL, "
"`device_id` varchar(255) DEFAULT NULL, "
"`exception_symbols` longtext, "
"`os_version` varchar(255) DEFAULT NULL, "
"PRIMARY KEY (`report_id`)"
") ENGINE=InnoDB DEFAULT CHARSET=utf8")
try:
conn = mysql.connector.connect(**config)
cursor = conn.cursor()
for name, sql in SQLS.items():
try:
print("Executing sql {}.".format(name))
cursor.execute(sql)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:
print('Table already exists.')
else:
print(err.msg)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def insert_symbolication_result_into_db(table_name):
'''
Insert the symbolicated results into the database
:param table_name: table_name in database
'''
try:
conn = mysql.connector.connect(**config)
# print('connected to db')
cursor = conn.cursor()
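# Note (editor's comment): MySQL connectors cannot parameterize identifiers
# such as table names, which is why `table_name` is concatenated into the SQL
# string below, while the row values go through `%s` placeholders and are
# escaped by the driver.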
insert_report = (
"INSERT INTO " + table_name + " "
"(exception_type, device_id, exception_symbols, os_version) "
"VALUES (%s, %s, %s, %s)")
work_book = xlrd.open_workbook(EXCEL_NAME + '.xlsx')
sheet = work_book.sheets()[0]
nrows = sheet.nrows
ncols = sheet.ncols
row_index = 1
for row_index in range(1, nrows):
data_row = sheet.row_values(row_index)
# assert col < ncols
device_id = data_row[0]
os_version = data_row[1]
exception_type = data_row[2]
exception_symbols = data_row[3]
if exception_symbols == '':
continue
data_report = (exception_type, dev | exception_symbols, os_version)
# insert report data
cursor.execute(insert_report, data_report)
conn.commit()
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def generate_grouped_exception(table_name):
'''
Group all exceptions by exception type, using the data in the database.
:param table_name: table_name in zl_crash database
'''
EXCEPTION_TYPE_COUNT = {}
EXCEPTION_MAPPING = {}
try:
conn = mysql.connector.connect(**config)
cursor = conn.cursor()
group_exception_type = (
"SELECT exception_type, COUNT(*) as nums "
"FROM " + table_name + " GROUP BY exception_type")
query_specific_exception = (
"SELECT * FROM " + table_name + " "
"WHERE exception_type = %s")
cursor.execute(group_exception_type)
for (exception_type, nums) in cursor:
EXCEPTION_TYPE_COUNT[exception_type] = nums
# print("exception_type:" + exception_type + ", nums:" + str(nums))
for exception_type in EXCEPTION_TYPE_COUNT.keys():
cursor.execute(query_specific_exception, (exception_type,))
exception_list = []
for (report_id, exception_type, device_id, exception_symbols, os_version) in cursor:
report = Report(report_id, exception_type, device_id, exception_symbols, os_version)
exception_list.append(report)
EXCEPTION_MAPPING[exception_type] = exception_list
write_grouped_exception_to_file(EXCEPTION_TYPE_COUNT, EXCEPTION_MAPPING)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def write_grouped_exception_to_file(count, mapping):
'''
Export the grouped exceptions to an Excel file
:param count: dict mapping exception_type -> count
:param mapping: dict mapping exception_type -> list of Report objects
'''
output_file_name = EXCEL_NAME + '_grouped.xlsx'
os.system('rm -rf ' + output_file_name)
workbook = xlsxwriter.Workbook(output_file_name)
worksheet = workbook.add_worksheet()
# Set the column widths
worksheet.set_column('A:A', 25)
worksheet.set_column('B:B', 10)
worksheet.set_column('C:C', 25)
worksheet.set_column('D:D', 40)
worksheet.set_column('E:E', 500)
# Bold format
bold = workbook.add_format({'font_size': 14,
'align': 'center',
'bold': True})
# Header row
worksheet.write('A1', 'exception_type', bold)
worksheet.write('B1', 'count', bold)
worksheet.write('C1', 'os_version', bold)
worksheet.write('D1', 'device_id', bold)
worksheet.write('E1', 'symbols', bold)
# Row/column write indices for the Excel sheet
row_index = 1
col_index = 0
colors = ('#A8BAAA', '#FFF6CF', '#DCCDAE', '#B49D7E',
'#816854', '#334D5C', '#45B29D', '#EFC94C')
count_index = 0
pattern = 0.5
for (type, num) in count.items():
bg_color = colors[count_index % len(colors)]
col_format = workbook.add_format({'pattern': pattern,
'bg_color': bg_color})
num_col_format = workbook.add_format({'pattern': pattern,
'bg_color': bg_color,
'bold': True,
'align': 'center'})
count_index += 1
list = mapping[type]
for i in range(num):
report_item = list[i]
if i == 0:
worksheet.write(row_index, col_index, report_item.exception_type, col_format)
col_index += 1
worksheet.write(row_index, col_index, num, num_col_format)
col_index += 1
else:
worksheet.write(row_index, col_index, '', col_format)
col_index += 1
worksheet.write(row_index, col_index, '', col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.os_version, col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.device_id, col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.exception_symbols, col_format)
# Reset the indices
row_index += 1
col_index = 0
# Close the workbook
workbook.close()
print("Exporting grouped data to " + output_file_name)
if __name__ == '__main__':
main()
| ice_id, | conditional_block |
py_group.py | # coding=UTF-8
import mysql.connector
import xlrd
import xlsxwriter
import os
from mysql.connector import errorcode
from datetime import datetime
# File name of the symbolicated Excel workbook
EXCEL_NAME = '20170223_4.0.1_feedback_result_py'
DB_NAME = 'zl_crash'
config = {
'user': 'root',
'password': '123456',
'host': '127.0.0.1',
'database': 'zl_crash',
}
class Report(object):
'''
Report class used to encapsulate the row data in EXCEL
'''
def __init__(self, report_id, exception_type, device_id, exception_symbols, os_version):
self.report_id = report_id
self.exception_type = exception_type
self.device_id = device_id
self.exception_symbols = exception_symbols
self.os_version = os_version
def main():
begin_time = datetime.now()
# Table name
table_name = 'report_' + begin_time.strftime("%Y_%m_%d_%H_%M_%S")
# Create the table
create_table_in_db(table_name)
# Insert the data
insert_symbolication_result_into_db(table_name)
# Group the data and export it
generate_grouped_exception(table_name)
end_time = datetime.now()
print('Elapsed time: ' + str(end_time - begin_time))
def create_table_in_db(table_name):
'''
Create a table in the database, named `table_name`
:param table_name: table_name
'''
SQLS = {}
SQLS['drop_report'] = (
"DROP TABLE IF EXISTS `" + table_name + "`")
SQLS['report'] = (
"CREATE TABLE `" + table_name + "` ( "
"`report_id` int(11) NOT NULL AUTO_INCREMENT, "
"`exception_type` varchar(255) DEFAULT NULL, "
"`device_id` varchar(255) DEFAULT NULL, "
"`exception_symbols` longtext, "
"`os_version` varchar(255) DEFAULT NULL, "
"PRIMARY KEY (`report_id`)"
") ENGINE=InnoDB DEFAULT CHARSET=utf8")
try:
conn = mysql.connector.connect(**config)
cursor = conn.cursor()
for name, sql in SQLS.items():
try:
print("Executing sql {}.".format(name))
cursor.execute(sql)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:
print('Table already exists.')
else:
print(err.msg)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password") | else:
print(err.msg)
finally:
cursor.close()
conn.close()
def insert_symbolication_result_into_db(table_name):
'''
Insert the symbolicated results into the database
:param table_name: table_name in database
'''
try:
conn = mysql.connector.connect(**config)
# print('connected to db')
cursor = conn.cursor()
insert_report = (
"INSERT INTO " + table_name + " "
"(exception_type, device_id, exception_symbols, os_version) "
"VALUES (%s, %s, %s, %s)")
work_book = xlrd.open_workbook(EXCEL_NAME + '.xlsx')
sheet = work_book.sheets()[0]
nrows = sheet.nrows
ncols = sheet.ncols
row_index = 1
for row_index in range(1, nrows):
data_row = sheet.row_values(row_index)
# assert col < ncols
device_id = data_row[0]
os_version = data_row[1]
exception_type = data_row[2]
exception_symbols = data_row[3]
if exception_symbols == '':
continue
data_report = (exception_type, device_id, exception_symbols, os_version)
# insert report data
cursor.execute(insert_report, data_report)
conn.commit()
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def generate_grouped_exception(table_name):
'''
Group all exceptions by exception type, using the data in the database.
:param table_name: table_name in zl_crash database
'''
EXCEPTION_TYPE_COUNT = {}
EXCEPTION_MAPPING = {}
try:
conn = mysql.connector.connect(**config)
cursor = conn.cursor()
group_exception_type = (
"SELECT exception_type, COUNT(*) as nums "
"FROM " + table_name + " GROUP BY exception_type")
query_specific_exception = (
"SELECT * FROM " + table_name + " "
"WHERE exception_type = %s")
cursor.execute(group_exception_type)
for (exception_type, nums) in cursor:
EXCEPTION_TYPE_COUNT[exception_type] = nums
# print("exception_type:" + exception_type + ", nums:" + str(nums))
for exception_type in EXCEPTION_TYPE_COUNT.keys():
cursor.execute(query_specific_exception, (exception_type,))
exception_list = []
for (report_id, exception_type, device_id, exception_symbols, os_version) in cursor:
report = Report(report_id, exception_type, device_id, exception_symbols, os_version)
exception_list.append(report)
EXCEPTION_MAPPING[exception_type] = exception_list
write_grouped_exception_to_file(EXCEPTION_TYPE_COUNT, EXCEPTION_MAPPING)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def write_grouped_exception_to_file(count, mapping):
'''
Export the grouped exceptions to an Excel file
:param count: dict mapping exception_type -> count
:param mapping: dict mapping exception_type -> list of Report objects
'''
output_file_name = EXCEL_NAME + '_grouped.xlsx'
os.system('rm -rf ' + output_file_name)
workbook = xlsxwriter.Workbook(output_file_name)
worksheet = workbook.add_worksheet()
# Set the column widths
worksheet.set_column('A:A', 25)
worksheet.set_column('B:B', 10)
worksheet.set_column('C:C', 25)
worksheet.set_column('D:D', 40)
worksheet.set_column('E:E', 500)
# Bold format
bold = workbook.add_format({'font_size': 14,
'align': 'center',
'bold': True})
# Header row
worksheet.write('A1', 'exception_type', bold)
worksheet.write('B1', 'count', bold)
worksheet.write('C1', 'os_version', bold)
worksheet.write('D1', 'device_id', bold)
worksheet.write('E1', 'symbols', bold)
# Row/column write indices for the Excel sheet
row_index = 1
col_index = 0
colors = ('#A8BAAA', '#FFF6CF', '#DCCDAE', '#B49D7E',
'#816854', '#334D5C', '#45B29D', '#EFC94C')
count_index = 0
pattern = 0.5
for (type, num) in count.items():
bg_color = colors[count_index % len(colors)]
col_format = workbook.add_format({'pattern': pattern,
'bg_color': bg_color})
num_col_format = workbook.add_format({'pattern': pattern,
'bg_color': bg_color,
'bold': True,
'align': 'center'})
count_index += 1
list = mapping[type]
for i in range(num):
report_item = list[i]
if i == 0:
worksheet.write(row_index, col_index, report_item.exception_type, col_format)
col_index += 1
worksheet.write(row_index, col_index, num, num_col_format)
col_index += 1
else:
worksheet.write(row_index, col_index, '', col_format)
col_index += 1
worksheet.write(row_index, col_index, '', col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.os_version, col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.device_id, col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.exception_symbols, col_format)
# Reset the indices
row_index += 1
col_index = 0
# Close the workbook
workbook.close()
print("Exporting grouped data to " + output_file_name)
if __name__ == '__main__':
main() | elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist") | random_line_split |
py_group.py | # coding=UTF-8
import mysql.connector
import xlrd
import xlsxwriter
import os
from mysql.connector import errorcode
from datetime import datetime
# File name of the symbolicated Excel workbook
EXCEL_NAME = '20170223_4.0.1_feedback_result_py'
DB_NAME = 'zl_crash'
config = {
'user': 'root',
'password': '123456',
'host': '127.0.0.1',
'database': 'zl_crash',
}
class Report(object):
'''
Report c | begin_time = datetime.now()
# Table name
table_name = 'report_' + begin_time.strftime("%Y_%m_%d_%H_%M_%S")
# Create the table
create_table_in_db(table_name)
# Insert the data
insert_symbolication_result_into_db(table_name)
# Group the data and export it
generate_grouped_exception(table_name)
end_time = datetime.now()
print('Elapsed time: ' + str(end_time - begin_time))
def create_table_in_db(table_name):
'''
Create a table in the database, named `table_name`
:param table_name: table_name
'''
SQLS = {}
SQLS['drop_report'] = (
"DROP TABLE IF EXISTS `" + table_name + "`")
SQLS['report'] = (
"CREATE TABLE `" + table_name + "` ( "
"`report_id` int(11) NOT NULL AUTO_INCREMENT, "
"`exception_type` varchar(255) DEFAULT NULL, "
"`device_id` varchar(255) DEFAULT NULL, "
"`exception_symbols` longtext, "
"`os_version` varchar(255) DEFAULT NULL, "
"PRIMARY KEY (`report_id`)"
") ENGINE=InnoDB DEFAULT CHARSET=utf8")
try:
conn = mysql.connector.connect(**config)
cursor = conn.cursor()
for name, sql in SQLS.items():
try:
print("Executing sql {}.".format(name))
cursor.execute(sql)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:
print('Table already exists.')
else:
print(err.msg)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def insert_symbolication_result_into_db(table_name):
'''
Insert the symbolicated results into the database
:param table_name: table_name in database
'''
try:
conn = mysql.connector.connect(**config)
# print('connected to db')
cursor = conn.cursor()
insert_report = (
"INSERT INTO " + table_name + " "
"(exception_type, device_id, exception_symbols, os_version) "
"VALUES (%s, %s, %s, %s)")
work_book = xlrd.open_workbook(EXCEL_NAME + '.xlsx')
sheet = work_book.sheets()[0]
nrows = sheet.nrows
ncols = sheet.ncols
row_index = 1
for row_index in range(1, nrows):
data_row = sheet.row_values(row_index)
# assert col < ncols
device_id = data_row[0]
os_version = data_row[1]
exception_type = data_row[2]
exception_symbols = data_row[3]
if exception_symbols == '':
continue
data_report = (exception_type, device_id, exception_symbols, os_version)
# insert report data
cursor.execute(insert_report, data_report)
conn.commit()
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def generate_grouped_exception(table_name):
'''
Group all exceptions by exception type, using the data in the database.
:param table_name: table_name in zl_crash database
'''
EXCEPTION_TYPE_COUNT = {}
EXCEPTION_MAPPING = {}
try:
conn = mysql.connector.connect(**config)
cursor = conn.cursor()
group_exception_type = (
"SELECT exception_type, COUNT(*) as nums "
"FROM " + table_name + " GROUP BY exception_type")
query_specific_exception = (
"SELECT * FROM " + table_name + " "
"WHERE exception_type = %s")
cursor.execute(group_exception_type)
for (exception_type, nums) in cursor:
EXCEPTION_TYPE_COUNT[exception_type] = nums
# print("exception_type:" + exception_type + ", nums:" + str(nums))
for exception_type in EXCEPTION_TYPE_COUNT.keys():
cursor.execute(query_specific_exception, (exception_type,))
exception_list = []
for (report_id, exception_type, device_id, exception_symbols, os_version) in cursor:
report = Report(report_id, exception_type, device_id, exception_symbols, os_version)
exception_list.append(report)
EXCEPTION_MAPPING[exception_type] = exception_list
write_grouped_exception_to_file(EXCEPTION_TYPE_COUNT, EXCEPTION_MAPPING)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def write_grouped_exception_to_file(count, mapping):
'''
Export the grouped exceptions to an Excel file
:param count: dict mapping exception_type -> count
:param mapping: dict mapping exception_type -> list of Report objects
'''
output_file_name = EXCEL_NAME + '_grouped.xlsx'
os.system('rm -rf ' + output_file_name)
workbook = xlsxwriter.Workbook(output_file_name)
worksheet = workbook.add_worksheet()
# Set the column widths
worksheet.set_column('A:A', 25)
worksheet.set_column('B:B', 10)
worksheet.set_column('C:C', 25)
worksheet.set_column('D:D', 40)
worksheet.set_column('E:E', 500)
# Bold format
bold = workbook.add_format({'font_size': 14,
'align': 'center',
'bold': True})
# Header row
worksheet.write('A1', 'exception_type', bold)
worksheet.write('B1', 'count', bold)
worksheet.write('C1', 'os_version', bold)
worksheet.write('D1', 'device_id', bold)
worksheet.write('E1', 'symbols', bold)
# Row/column write indices for the Excel sheet
row_index = 1
col_index = 0
colors = ('#A8BAAA', '#FFF6CF', '#DCCDAE', '#B49D7E',
'#816854', '#334D5C', '#45B29D', '#EFC94C')
count_index = 0
pattern = 0.5
for (type, num) in count.items():
bg_color = colors[count_index % len(colors)]
col_format = workbook.add_format({'pattern': pattern,
'bg_color': bg_color})
num_col_format = workbook.add_format({'pattern': pattern,
'bg_color': bg_color,
'bold': True,
'align': 'center'})
count_index += 1
list = mapping[type]
for i in range(num):
report_item = list[i]
if i == 0:
worksheet.write(row_index, col_index, report_item.exception_type, col_format)
col_index += 1
worksheet.write(row_index, col_index, num, num_col_format)
col_index += 1
else:
worksheet.write(row_index, col_index, '', col_format)
col_index += 1
worksheet.write(row_index, col_index, '', col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.os_version, col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.device_id, col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.exception_symbols, col_format)
# Reset the indices for the next row
row_index += 1
col_index = 0
# Close the workbook
workbook.close()
print("Exporting grouped data to " + output_file_name)
if __name__ == '__main__':
main()
| lass used to encapsulate the row data in EXCEL
'''
def __init__(self, report_id, exception_type, device_id, exception_symbols, os_version):
self.report_id = report_id
self.exception_type = exception_type
self.device_id = device_id
self.exception_symbols = exception_symbols
self.os_version = os_version
def main():
| identifier_body |
py_group.py | # coding=UTF-8
import mysql.connector
import xlrd
import xlsxwriter
import os
from mysql.connector import errorcode
from datetime import datetime
# Name of the symbolicated Excel file
EXCEL_NAME = '20170223_4.0.1_feedback_result_py'
DB_NAME = 'zl_crash'
config = {
'user': 'root',
'password': '123456',
'host': '127.0.0.1',
'database': 'zl_crash',
}
class Report(object):
'''
Report class used to encapsulate the row data in EXCEL
'''
def __init__(self, report_id, exception_type, device_id, exception_symbols, os_version):
self.report_id = report_id
self.exception_type = exception_type
self.device_id = device_id
self.exception_symbols = exception_symbols
self.os_version = os_version
def main():
begin_time = datetime.now()
# Table name
table_name = 'report_' + begin_time.strftime("%Y_%m_%d_%H_%M_%S")
# Create the table
create_table_in_db(table_name)
# Insert the data
insert_symbolication_result_into_db(table_name)
# Group the data and export it
generate_grouped_exception(table_name)
end_time = datetime.now()
print('Elapsed time: ' + str(end_time - begin_time))
def create_table_in_db(table_name):
'''
Create a tab | d named as `table_name`
:param table_name: name of the table to (re)create
'''
SQLS = {}
SQLS['drop_report'] = (
"DROP TABLE IF EXISTS `" + table_name + "`")
SQLS['report'] = (
"CREATE TABLE `" + table_name + "` ( "
"`report_id` int(11) NOT NULL AUTO_INCREMENT, "
"`exception_type` varchar(255) DEFAULT NULL, "
"`device_id` varchar(255) DEFAULT NULL, "
"`exception_symbols` longtext, "
"`os_version` varchar(255) DEFAULT NULL, "
"PRIMARY KEY (`report_id`)"
") ENGINE=InnoDB DEFAULT CHARSET=utf8")
try:
conn = mysql.connector.connect(**config)
cursor = conn.cursor()
for name, sql in SQLS.items():
try:
print("Executing sql {}.".format(name))
cursor.execute(sql)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:
print('Table already exists.')
else:
print(err.msg)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def insert_symbolication_result_into_db(table_name):
'''
Insert the symbolicated results into the database
:param table_name: name of the destination table in the database
'''
try:
conn = mysql.connector.connect(**config)
# print('connected to db')
cursor = conn.cursor()
insert_report = (
"INSERT INTO " + table_name + " "
"(exception_type, device_id, exception_symbols, os_version) "
"VALUES (%s, %s, %s, %s)")
work_book = xlrd.open_workbook(EXCEL_NAME + '.xlsx')
sheet = work_book.sheets()[0]
nrows = sheet.nrows
ncols = sheet.ncols
for row_index in range(1, nrows):
data_row = sheet.row_values(row_index)
# assert col < ncols
device_id = data_row[0]
os_version = data_row[1]
exception_type = data_row[2]
exception_symbols = data_row[3]
if exception_symbols == '':
continue
data_report = (exception_type, device_id, exception_symbols, os_version)
# insert report data
cursor.execute(insert_report, data_report)
conn.commit()
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def generate_grouped_exception(table_name):
'''
Group the exceptions stored in the database by exception type and export the grouped data.
:param table_name: name of the table in the zl_crash database
'''
EXCEPTION_TYPE_COUNT = {}
EXCEPTION_MAPPING = {}
try:
conn = mysql.connector.connect(**config)
cursor = conn.cursor()
group_exception_type = (
"SELECT exception_type, COUNT(*) as nums "
"FROM " + table_name + " GROUP BY exception_type")
query_specific_exception = (
"SELECT * FROM " + table_name + " "
"WHERE exception_type = %s")
cursor.execute(group_exception_type)
for (exception_type, nums) in cursor:
EXCEPTION_TYPE_COUNT[exception_type] = nums
# print("exception_type:" + exception_type + ", nums:" + str(nums))
for exception_type in EXCEPTION_TYPE_COUNT.keys():
cursor.execute(query_specific_exception, (exception_type,))
exception_list = []
for (report_id, exception_type, device_id, exception_symbols, os_version) in cursor:
report = Report(report_id, exception_type, device_id, exception_symbols, os_version)
exception_list.append(report)
EXCEPTION_MAPPING[exception_type] = exception_list
write_grouped_exception_to_file(EXCEPTION_TYPE_COUNT, EXCEPTION_MAPPING)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def write_grouped_exception_to_file(count, mapping):
'''
Export grouped exception to file
:param count: dict mapping exception_type to its occurrence count
:param mapping: dict mapping exception_type to the list of Report objects
'''
output_file_name = EXCEL_NAME + '_grouped.xlsx'
if os.path.exists(output_file_name):
os.remove(output_file_name)
workbook = xlsxwriter.Workbook(output_file_name)
worksheet = workbook.add_worksheet()
# Set the column widths
worksheet.set_column('A:A', 25)
worksheet.set_column('B:B', 10)
worksheet.set_column('C:C', 25)
worksheet.set_column('D:D', 40)
worksheet.set_column('E:E', 500)
# Bold header format
bold = workbook.add_format({'font_size': 14,
'align': 'center',
'bold': True})
# Header row
worksheet.write('A1', 'exception_type', bold)
worksheet.write('B1', 'count', bold)
worksheet.write('C1', 'os_version', bold)
worksheet.write('D1', 'device_id', bold)
worksheet.write('E1', 'symbols', bold)
# Indices tracking the current write position in the sheet
row_index = 1
col_index = 0
colors = ('#A8BAAA', '#FFF6CF', '#DCCDAE', '#B49D7E',
'#816854', '#334D5C', '#45B29D', '#EFC94C')
count_index = 0
pattern = 1  # solid fill; xlsxwriter pattern indices are integers
for (exc_type, num) in count.items():
bg_color = colors[count_index % len(colors)]
col_format = workbook.add_format({'pattern': pattern,
'bg_color': bg_color})
num_col_format = workbook.add_format({'pattern': pattern,
'bg_color': bg_color,
'bold': True,
'align': 'center'})
count_index += 1
report_list = mapping[exc_type]
for i in range(num):
report_item = report_list[i]
if i == 0:
worksheet.write(row_index, col_index, report_item.exception_type, col_format)
col_index += 1
worksheet.write(row_index, col_index, num, num_col_format)
col_index += 1
else:
worksheet.write(row_index, col_index, '', col_format)
col_index += 1
worksheet.write(row_index, col_index, '', col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.os_version, col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.device_id, col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.exception_symbols, col_format)
# Reset the indices for the next row
row_index += 1
col_index = 0
# Close the workbook
workbook.close()
print("Exporting grouped data to " + output_file_name)
if __name__ == '__main__':
main()
| le in database, an | identifier_name |
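# Illustration only (not part of py_group.py): generate_grouped_exception above
# runs one GROUP BY query plus one SELECT per exception type. A minimal sketch
# of the same grouping done in a single pass, assuming the module-level `config`
# dict and `Report` class; `fetch_grouped` is a hypothetical helper name.
import collections

import mysql.connector


def fetch_grouped(table_name):
    """Fetch all reports once and group them client-side by exception_type."""
    conn = mysql.connector.connect(**config)
    cursor = conn.cursor()
    try:
        cursor.execute("SELECT report_id, exception_type, device_id, "
                       "exception_symbols, os_version FROM " + table_name)
        mapping = collections.defaultdict(list)
        for (report_id, exception_type, device_id, exception_symbols, os_version) in cursor:
            mapping[exception_type].append(
                Report(report_id, exception_type, device_id, exception_symbols, os_version))
        count = {exc: len(reports) for exc, reports in mapping.items()}
        return count, mapping
    finally:
        cursor.close()
        conn.close()
# This trades N+1 queries for one, at the cost of holding all rows in memory at
# once -- acceptable here since the script already materializes every report.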
drawing.rs | /*
* Copyright (c) 2018 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use std::f64::consts::PI;
use gdk::{EventMask, RGBA};
use gtk::{
DrawingArea,
Inhibit,
prelude::BoxExt,
prelude::OrientableExt,
prelude::WidgetExt,
prelude::WidgetExtManual,
};
use gtk::Orientation::Vertical;
use rand::Rng;
use relm_derive::Msg;
use relm::{
DrawHandler,
Relm,
Widget,
interval,
};
use relm_derive::widget;
use self::Msg::*;
const SIZE: f64 = 15.0;
struct Circle {
x: f64,
y: f64,
color: RGBA,
vx: f64,
vy: f64,
}
impl Circle {
fn generate() -> Self { | x: gen.gen_range(20.0, 500.0),
y: gen.gen_range(20.0, 500.0),
color: RGBA::new(
gen.gen_range(0.0, 1.0),
gen.gen_range(0.0, 1.0),
gen.gen_range(0.0, 1.0),
1.0,
),
vx: gen.gen_range(1.0, 5.0),
vy: gen.gen_range(1.0, 5.0),
}
}
}
pub struct Model {
draw_handler: DrawHandler<DrawingArea>,
circles: Vec<Circle>,
cursor_pos: (f64, f64),
}
#[derive(Msg)]
pub enum Msg {
Generate,
Move,
MoveCursor((f64, f64)),
Quit,
UpdateDrawBuffer,
}
#[widget]
impl Widget for Win {
fn init_view(&mut self) {
self.model.draw_handler.init(&self.widgets.drawing_area);
self.widgets.drawing_area.add_events(EventMask::POINTER_MOTION_MASK);
}
fn model() -> Model {
Model {
draw_handler: DrawHandler::new().expect("draw handler"),
circles: vec![Circle::generate()],
cursor_pos: (-1000.0, -1000.0),
}
}
fn subscriptions(&mut self, relm: &Relm<Self>) {
interval(relm.stream(), 1000, || Generate);
interval(relm.stream(), 16, || Move);
}
fn update(&mut self, event: Msg) {
match event {
Generate => self.model.circles.push(Circle::generate()),
Move => {
let allocation = self.widgets.drawing_area.allocation();
for circle in &mut self.model.circles {
if (circle.x + circle.vx + SIZE / 2.0 < allocation.width() as f64)
&& (circle.x + circle.vx - SIZE / 2.0 > 0.0)
{
circle.x += circle.vx;
}
else {
circle.vx *= -1.0;
}
if (circle.y + circle.vy + SIZE / 2.0 < allocation.height() as f64)
&& (circle.y + circle.vy - SIZE / 2.0 > 0.0)
{
circle.y += circle.vy;
}
else {
circle.vy *= -1.0;
}
}
},
MoveCursor(pos) => self.model.cursor_pos = pos,
Quit => gtk::main_quit(),
UpdateDrawBuffer => {
let context = self.model.draw_handler.get_context().unwrap();
context.set_source_rgb(1.0, 1.0, 1.0);
context.paint().unwrap();
for circle in &self.model.circles {
context.set_source_rgb(
circle.color.red(),
circle.color.green(),
circle.color.blue(),
);
context.arc(circle.x, circle.y, SIZE, 0.0, 2.0 * PI);
context.fill().unwrap();
}
context.set_source_rgb(0.1, 0.2, 0.3);
context.rectangle(self.model.cursor_pos.0 - SIZE / 2.0, self.model.cursor_pos.1 - SIZE / 2.0, SIZE,
SIZE);
context.fill().unwrap();
},
}
}
view! {
gtk::Window {
gtk::Box {
orientation: Vertical,
#[name="drawing_area"]
gtk::DrawingArea {
child: {
expand: true,
},
draw(_, _) => (UpdateDrawBuffer, Inhibit(false)),
motion_notify_event(_, event) => (MoveCursor(event.position()), Inhibit(false))
},
},
delete_event(_, _) => (Quit, Inhibit(false)),
}
}
}
fn main() {
Win::run(()).unwrap();
} | let mut gen = rand::thread_rng();
Circle { | random_line_split |
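# Illustration only: the Move handler above advances each circle and reflects
# its velocity whenever the next step would leave the drawing area. The same
# wall-bounce update rule as a small Python sketch (names are illustrative;
# this mirrors, not replaces, the Rust code).
SIZE = 15.0

def step(circle, width, height):
    """Advance one frame, bouncing off the area borders."""
    if 0.0 < circle.x + circle.vx - SIZE / 2.0 and circle.x + circle.vx + SIZE / 2.0 < width:
        circle.x += circle.vx   # still inside: take the step
    else:
        circle.vx *= -1.0       # vertical wall hit: flip horizontal velocity
    if 0.0 < circle.y + circle.vy - SIZE / 2.0 and circle.y + circle.vy + SIZE / 2.0 < height:
        circle.y += circle.vy
    else:
        circle.vy *= -1.0       # horizontal wall hit: flip vertical velocity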
drawing.rs | /*
* Copyright (c) 2018 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use std::f64::consts::PI;
use gdk::{EventMask, RGBA};
use gtk::{
DrawingArea,
Inhibit,
prelude::BoxExt,
prelude::OrientableExt,
prelude::WidgetExt,
prelude::WidgetExtManual,
};
use gtk::Orientation::Vertical;
use rand::Rng;
use relm_derive::Msg;
use relm::{
DrawHandler,
Relm,
Widget,
interval,
};
use relm_derive::widget;
use self::Msg::*;
const SIZE: f64 = 15.0;
struct Circle {
x: f64,
y: f64,
color: RGBA,
vx: f64,
vy: f64,
}
impl Circle {
fn generate() -> Self {
let mut gen = rand::thread_rng();
Circle {
x: gen.gen_range(20.0, 500.0),
y: gen.gen_range(20.0, 500.0),
color: RGBA::new(
gen.gen_range(0.0, 1.0),
gen.gen_range(0.0, 1.0),
gen.gen_range(0.0, 1.0),
1.0,
),
vx: gen.gen_range(1.0, 5.0),
vy: gen.gen_range(1.0, 5.0),
}
}
}
pub struct Model {
draw_handler: DrawHandler<DrawingArea>,
circles: Vec<Circle>,
cursor_pos: (f64, f64),
}
#[derive(Msg)]
pub enum Msg {
Generate,
Move,
MoveCursor((f64, f64)),
Quit,
UpdateDrawBuffer,
}
#[widget]
impl Widget for Win {
fn init_view(&mut self) {
self.model.draw_handler.init(&self.widgets.drawing_area);
self.widgets.drawing_area.add_events(EventMask::POINTER_MOTION_MASK);
}
fn model() -> Model {
Model {
draw_handler: DrawHandler::new().expect("draw handler"),
circles: vec![Circle::generate()],
cursor_pos: (-1000.0, -1000.0),
}
}
fn subscriptions(&mut self, relm: &Relm<Self>) {
interval(relm.stream(), 1000, || Generate);
interval(relm.stream(), 16, || Move);
}
fn update(&mut self, event: Msg) {
match event {
Generate => self.model.circles.push(Circle::generate()),
Move => {
let allocation = self.widgets.drawing_area.allocation();
for circle in &mut self.model.circles {
if (circle.x + circle.vx + SIZE / 2.0 < allocation.width() as f64)
&& (circle.x + circle.vx - SIZE / 2.0 > 0.0)
{
circle.x += circle.vx;
}
else {
circle.vx *= -1.0;
}
if (circle.y + circle.vy + SIZE / 2.0 < allocation.height() as f64)
&& (circle.y + circle.vy - SIZE / 2.0 > 0.0)
{
circle.y += circle.vy;
}
else {
circle.vy *= -1.0;
}
}
},
MoveCursor(pos) => self.model.cursor_pos = pos,
Quit => gtk::main_quit(),
UpdateDrawBuffer => {
let context = self.model.draw_handler.get_context().unwrap();
context.set_source_rgb(1.0, 1.0, 1.0);
context.paint().unwrap();
for circle in &self.model.circles {
context.set_source_rgb(
circle.color.red(),
circle.color.green(),
circle.color.blue(),
);
context.arc(circle.x, circle.y, SIZE, 0.0, 2.0 * PI);
context.fill().unwrap();
}
context.set_source_rgb(0.1, 0.2, 0.3);
context.rectangle(self.model.cursor_pos.0 - SIZE / 2.0, self.model.cursor_pos.1 - SIZE / 2.0, SIZE,
SIZE);
context.fill().unwrap();
},
}
}
view! {
gtk::Window {
gtk::Box {
orientation: Vertical,
#[name="drawing_area"]
gtk::DrawingArea {
child: {
expand: true,
},
draw(_, _) => (UpdateDrawBuffer, Inhibit(false)),
motion_notify_event(_, event) => (MoveCursor(event.position()), Inhibit(false))
},
},
delete_event(_, _) => (Quit, Inhibit(false)),
}
}
}
fn | () {
Win::run(()).unwrap();
}
| main | identifier_name |
finally.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
The Finally trait provides a method, `finally` on
stack closures that emulates Java-style try/finally blocks.
# Example
```
(|| {
...
}).finally(|| {
always_run_this();
})
```
*/
use ops::Drop;
#[cfg(test)] use task::failing;
pub trait Finally<T> {
fn finally(&self, dtor: ||) -> T;
}
macro_rules! finally_fn {
($fnty:ty) => {
impl<T> Finally<T> for $fnty {
fn finally(&self, dtor: ||) -> T {
let _d = Finallyalizer {
dtor: dtor
};
(*self)()
}
}
}
}
impl<'a,T> Finally<T> for 'a || -> T {
fn finally(&self, dtor: ||) -> T {
let _d = Finallyalizer {
dtor: dtor
};
(*self)()
}
}
finally_fn!(extern "Rust" fn() -> T)
struct Finallyalizer<'a> {
dtor: 'a ||
}
#[unsafe_destructor]
impl<'a> Drop for Finallyalizer<'a> {
fn drop(&mut self) {
(self.dtor)();
}
}
#[test]
fn test_success() {
let mut i = 0;
(|| {
i = 10;
}).finally(|| {
assert!(!failing());
assert_eq!(i, 10);
i = 20;
});
assert_eq!(i, 20);
}
#[test]
#[should_fail]
fn | () {
let mut i = 0;
(|| {
i = 10;
fail!();
}).finally(|| {
assert!(failing());
assert_eq!(i, 10);
})
}
#[test]
fn test_retval() {
let closure: || -> int = || 10;
let i = closure.finally(|| { });
assert_eq!(i, 10);
}
#[test]
fn test_compact() {
fn do_some_fallible_work() {}
fn but_always_run_this_function() { }
do_some_fallible_work.finally(
but_always_run_this_function);
}
| test_fail | identifier_name |
finally.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
The Finally trait provides a method, `finally` on
stack closures that emulates Java-style try/finally blocks.
# Example
```
(|| {
...
}).finally(|| {
always_run_this();
})
```
*/
use ops::Drop;
#[cfg(test)] use task::failing;
pub trait Finally<T> {
fn finally(&self, dtor: ||) -> T;
}
macro_rules! finally_fn {
($fnty:ty) => {
impl<T> Finally<T> for $fnty {
fn finally(&self, dtor: ||) -> T {
let _d = Finallyalizer {
dtor: dtor
};
(*self)()
}
}
}
}
impl<'a,T> Finally<T> for 'a || -> T {
fn finally(&self, dtor: ||) -> T {
let _d = Finallyalizer {
dtor: dtor
};
(*self)()
}
}
finally_fn!(extern "Rust" fn() -> T)
struct Finallyalizer<'a> {
dtor: 'a ||
}
#[unsafe_destructor]
impl<'a> Drop for Finallyalizer<'a> {
fn drop(&mut self) |
}
#[test]
fn test_success() {
let mut i = 0;
(|| {
i = 10;
}).finally(|| {
assert!(!failing());
assert_eq!(i, 10);
i = 20;
});
assert_eq!(i, 20);
}
#[test]
#[should_fail]
fn test_fail() {
let mut i = 0;
(|| {
i = 10;
fail!();
}).finally(|| {
assert!(failing());
assert_eq!(i, 10);
})
}
#[test]
fn test_retval() {
let closure: || -> int = || 10;
let i = closure.finally(|| { });
assert_eq!(i, 10);
}
#[test]
fn test_compact() {
fn do_some_fallible_work() {}
fn but_always_run_this_function() { }
do_some_fallible_work.finally(
but_always_run_this_function);
}
| {
(self.dtor)();
} | identifier_body |
finally.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
The Finally trait provides a method, `finally` on
stack closures that emulates Java-style try/finally blocks.
# Example
```
(|| {
...
}).finally(|| {
always_run_this();
})
```
*/
use ops::Drop;
#[cfg(test)] use task::failing;
pub trait Finally<T> {
fn finally(&self, dtor: ||) -> T;
}
macro_rules! finally_fn {
($fnty:ty) => {
impl<T> Finally<T> for $fnty {
fn finally(&self, dtor: ||) -> T {
let _d = Finallyalizer {
dtor: dtor
};
(*self)() | }
impl<'a,T> Finally<T> for 'a || -> T {
fn finally(&self, dtor: ||) -> T {
let _d = Finallyalizer {
dtor: dtor
};
(*self)()
}
}
finally_fn!(extern "Rust" fn() -> T)
struct Finallyalizer<'a> {
dtor: 'a ||
}
#[unsafe_destructor]
impl<'a> Drop for Finallyalizer<'a> {
fn drop(&mut self) {
(self.dtor)();
}
}
#[test]
fn test_success() {
let mut i = 0;
(|| {
i = 10;
}).finally(|| {
assert!(!failing());
assert_eq!(i, 10);
i = 20;
});
assert_eq!(i, 20);
}
#[test]
#[should_fail]
fn test_fail() {
let mut i = 0;
(|| {
i = 10;
fail!();
}).finally(|| {
assert!(failing());
assert_eq!(i, 10);
})
}
#[test]
fn test_retval() {
let closure: || -> int = || 10;
let i = closure.finally(|| { });
assert_eq!(i, 10);
}
#[test]
fn test_compact() {
fn do_some_fallible_work() {}
fn but_always_run_this_function() { }
do_some_fallible_work.finally(
but_always_run_this_function);
} | }
}
} | random_line_split |
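# Illustration only: Finallyalizer above runs its closure from Drop, so the
# cleanup fires on both normal return and unwinding. A rough Python analogue
# of the same guard-object pattern (hypothetical names, not part of finally.rs):
class Finallyalizer(object):
    """Run `dtor` when the block exits, normally or via an exception."""
    def __init__(self, dtor):
        self.dtor = dtor

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.dtor()        # always runs, like Drop during unwinding
        return False       # do not swallow the exception

def demo():
    with Finallyalizer(lambda: print("always runs")):
        raise RuntimeError("cleanup still triggers")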
service.py | # Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import hashlib
import functools
import bleach
# BEGIN PATCH
import html5lib
from html5lib.serializer.htmlserializer import HTMLSerializer
def _serialize(domtree):
walker = html5lib.treewalkers.getTreeWalker('etree')
stream = walker(domtree)
serializer = HTMLSerializer(quote_attr_values=True,
omit_optional_tags=False,
alphabetical_attributes=True)
return serializer.render(stream)
bleach._serialize = _serialize
# END PATCH
from django.core.cache import cache
from django.utils.encoding import force_bytes
from markdown import Markdown
from .extensions.autolink import AutolinkExtension
from .extensions.automail import AutomailExtension
from .extensions.semi_sane_lists import SemiSaneListExtension
from .extensions.spaced_link import SpacedLinkExtension
from .extensions.strikethrough import StrikethroughExtension
from .extensions.wikilinks import WikiLinkExtension
from .extensions.emojify import EmojifyExtension
from .extensions.mentions import MentionsExtension
from .extensions.references import TaigaReferencesExtension
from .extensions.target_link import TargetBlankLinkExtension
# Bleach configuration
bleach.ALLOWED_TAGS += ["p", "table", "thead", "tbody", "th", "tr", "td", "h1",
"h2", "h3", "h4", "h5", "h6", "div", "pre", "span",
"hr", "dl", "dt", "dd", "sup", "img", "del", "br",
"ins"]
bleach.ALLOWED_STYLES.append("background")
bleach.ALLOWED_ATTRIBUTES["a"] = ["href", "title", "alt", "target"]
bleach.ALLOWED_ATTRIBUTES["img"] = ["alt", "src"]
bleach.ALLOWED_ATTRIBUTES["*"] = ["class", "style"]
def _make_extensions_list(project=None):
return [AutolinkExtension(),
AutomailExtension(),
SemiSaneListExtension(),
SpacedLinkExtension(),
StrikethroughExtension(),
WikiLinkExtension(project),
EmojifyExtension(),
MentionsExtension(),
TaigaReferencesExtension(project),
TargetBlankLinkExtension(),
"extra",
"codehilite",
"sane_lists",
"toc",
"nl2br"]
import diff_match_patch
def cache_by_sha(func):
@functools.wraps(func)
def _decorator(project, text):
sha1_hash = hashlib.sha1(force_bytes(text)).hexdigest()
key = "{}-{}".format(sha1_hash, project.id)
# Try to get it from the cache
cached = cache.get(key)
if cached is not None:
re | returned_value = func(project, text)
cache.set(key, returned_value, timeout=None)
return returned_value
return _decorator
def _get_markdown(project):
extensions = _make_extensions_list(project=project)
md = Markdown(extensions=extensions)
md.extracted_data = {"mentions": [], "references": []}
return md
@cache_by_sha
def render(project, text):
md = _get_markdown(project)
return bleach.clean(md.convert(text))
def render_and_extract(project, text):
md = _get_markdown(project)
result = bleach.clean(md.convert(text))
return (result, md.extracted_data)
class DiffMatchPatch(diff_match_patch.diff_match_patch):
def diff_pretty_html(self, diffs):
html = []
for (op, data) in diffs:
text = (data.replace("&", "&").replace("<", "<")
.replace(">", ">").replace("\n", "<br />"))
if op == self.DIFF_INSERT:
html.append("<ins style=\"background:#e6ffe6;\">%s</ins>" % text)
elif op == self.DIFF_DELETE:
html.append("<del style=\"background:#ffe6e6;\">%s</del>" % text)
elif op == self.DIFF_EQUAL:
html.append("<span>%s</span>" % text)
return "".join(html)
def get_diff_of_htmls(html1, html2):
diffutil = DiffMatchPatch()
diffs = diffutil.diff_main(html1, html2)
diffutil.diff_cleanupSemantic(diffs)
return diffutil.diff_pretty_html(diffs)
__all__ = ["render", "get_diff_of_htmls", "render_and_extract"]
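# Illustration only: cache_by_sha above keys the cache on sha1(text) plus
# project.id, so re-rendering unchanged text is always a cache hit. A
# self-contained sketch of the same keying scheme; `_store` and `FakeProject`
# are stand-ins, not part of this module or of Django.
import hashlib
import functools

_store = {}

def cache_by_sha_demo(func):
    @functools.wraps(func)
    def _decorator(project, text):
        key = "{}-{}".format(hashlib.sha1(text.encode("utf-8")).hexdigest(), project.id)
        if key not in _store:
            _store[key] = func(project, text)   # miss: compute and remember
        return _store[key]                      # hit: skip rendering entirely
    return _decorator

class FakeProject(object):
    id = 42

@cache_by_sha_demo
def render_demo(project, text):
    return text.upper()

render_demo(FakeProject(), "hello")  # miss
render_demo(FakeProject(), "hello")  # hit, served from _store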
| turn cached
| conditional_block |
service.py | # Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import hashlib
import functools
import bleach
# BEGIN PATCH
import html5lib
from html5lib.serializer.htmlserializer import HTMLSerializer
def _serialize(domtree):
wa | bleach._serialize = _serialize
# END PATCH
from django.core.cache import cache
from django.utils.encoding import force_bytes
from markdown import Markdown
from .extensions.autolink import AutolinkExtension
from .extensions.automail import AutomailExtension
from .extensions.semi_sane_lists import SemiSaneListExtension
from .extensions.spaced_link import SpacedLinkExtension
from .extensions.strikethrough import StrikethroughExtension
from .extensions.wikilinks import WikiLinkExtension
from .extensions.emojify import EmojifyExtension
from .extensions.mentions import MentionsExtension
from .extensions.references import TaigaReferencesExtension
from .extensions.target_link import TargetBlankLinkExtension
# Bleach configuration
bleach.ALLOWED_TAGS += ["p", "table", "thead", "tbody", "th", "tr", "td", "h1",
"h2", "h3", "h4", "h5", "h6", "div", "pre", "span",
"hr", "dl", "dt", "dd", "sup", "img", "del", "br",
"ins"]
bleach.ALLOWED_STYLES.append("background")
bleach.ALLOWED_ATTRIBUTES["a"] = ["href", "title", "alt", "target"]
bleach.ALLOWED_ATTRIBUTES["img"] = ["alt", "src"]
bleach.ALLOWED_ATTRIBUTES["*"] = ["class", "style"]
def _make_extensions_list(project=None):
return [AutolinkExtension(),
AutomailExtension(),
SemiSaneListExtension(),
SpacedLinkExtension(),
StrikethroughExtension(),
WikiLinkExtension(project),
EmojifyExtension(),
MentionsExtension(),
TaigaReferencesExtension(project),
TargetBlankLinkExtension(),
"extra",
"codehilite",
"sane_lists",
"toc",
"nl2br"]
import diff_match_patch
def cache_by_sha(func):
@functools.wraps(func)
def _decorator(project, text):
sha1_hash = hashlib.sha1(force_bytes(text)).hexdigest()
key = "{}-{}".format(sha1_hash, project.id)
# Try to get it from the cache
cached = cache.get(key)
if cached is not None:
return cached
returned_value = func(project, text)
cache.set(key, returned_value, timeout=None)
return returned_value
return _decorator
def _get_markdown(project):
extensions = _make_extensions_list(project=project)
md = Markdown(extensions=extensions)
md.extracted_data = {"mentions": [], "references": []}
return md
@cache_by_sha
def render(project, text):
md = _get_markdown(project)
return bleach.clean(md.convert(text))
def render_and_extract(project, text):
md = _get_markdown(project)
result = bleach.clean(md.convert(text))
return (result, md.extracted_data)
class DiffMatchPatch(diff_match_patch.diff_match_patch):
def diff_pretty_html(self, diffs):
html = []
for (op, data) in diffs:
text = (data.replace("&", "&").replace("<", "<")
.replace(">", ">").replace("\n", "<br />"))
if op == self.DIFF_INSERT:
html.append("<ins style=\"background:#e6ffe6;\">%s</ins>" % text)
elif op == self.DIFF_DELETE:
html.append("<del style=\"background:#ffe6e6;\">%s</del>" % text)
elif op == self.DIFF_EQUAL:
html.append("<span>%s</span>" % text)
return "".join(html)
def get_diff_of_htmls(html1, html2):
diffutil = DiffMatchPatch()
diffs = diffutil.diff_main(html1, html2)
diffutil.diff_cleanupSemantic(diffs)
return diffutil.diff_pretty_html(diffs)
__all__ = ["render", "get_diff_of_htmls", "render_and_extract"]
| lker = html5lib.treewalkers.getTreeWalker('etree')
stream = walker(domtree)
serializer = HTMLSerializer(quote_attr_values=True,
omit_optional_tags=False,
alphabetical_attributes=True)
return serializer.render(stream)
| identifier_body |
service.py | # Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import hashlib
import functools
import bleach
# BEGIN PATCH
import html5lib
from html5lib.serializer.htmlserializer import HTMLSerializer
def _serialize(domtree):
walker = html5lib.treewalkers.getTreeWalker('etree')
stream = walker(domtree)
serializer = HTMLSerializer(quote_attr_values=True,
omit_optional_tags=False,
alphabetical_attributes=True)
return serializer.render(stream)
bleach._serialize = _serialize
# END PATCH
from django.core.cache import cache
from django.utils.encoding import force_bytes
from markdown import Markdown
from .extensions.autolink import AutolinkExtension
from .extensions.automail import AutomailExtension
from .extensions.semi_sane_lists import SemiSaneListExtension
from .extensions.spaced_link import SpacedLinkExtension
from .extensions.strikethrough import StrikethroughExtension
from .extensions.wikilinks import WikiLinkExtension
from .extensions.emojify import EmojifyExtension
from .extensions.mentions import MentionsExtension
from .extensions.references import TaigaReferencesExtension
from .extensions.target_link import TargetBlankLinkExtension
# Bleach configuration
bleach.ALLOWED_TAGS += ["p", "table", "thead", "tbody", "th", "tr", "td", "h1",
"h2", "h3", "h4", "h5", "h6", "div", "pre", "span",
"hr", "dl", "dt", "dd", "sup", "img", "del", "br",
"ins"]
bleach.ALLOWED_STYLES.append("background")
bleach.ALLOWED_ATTRIBUTES["a"] = ["href", "title", "alt", "target"]
bleach.ALLOWED_ATTRIBUTES["img"] = ["alt", "src"]
bleach.ALLOWED_ATTRIBUTES["*"] = ["class", "style"]
def _make_extensions_list(project=None):
return [AutolinkExtension(),
AutomailExtension(),
SemiSaneListExtension(),
SpacedLinkExtension(),
StrikethroughExtension(),
WikiLinkExtension(project),
EmojifyExtension(),
MentionsExtension(),
TaigaReferencesExtension(project),
TargetBlankLinkExtension(),
"extra",
"codehilite",
"sane_lists",
"toc",
"nl2br"]
import diff_match_patch
def cache_by_sha(func):
@functools.wraps(func)
def _d | roject, text):
sha1_hash = hashlib.sha1(force_bytes(text)).hexdigest()
key = "{}-{}".format(sha1_hash, project.id)
# Try to get it from the cache
cached = cache.get(key)
if cached is not None:
return cached
returned_value = func(project, text)
cache.set(key, returned_value, timeout=None)
return returned_value
return _decorator
def _get_markdown(project):
extensions = _make_extensions_list(project=project)
md = Markdown(extensions=extensions)
md.extracted_data = {"mentions": [], "references": []}
return md
@cache_by_sha
def render(project, text):
md = _get_markdown(project)
return bleach.clean(md.convert(text))
def render_and_extract(project, text):
md = _get_markdown(project)
result = bleach.clean(md.convert(text))
return (result, md.extracted_data)
class DiffMatchPatch(diff_match_patch.diff_match_patch):
def diff_pretty_html(self, diffs):
html = []
for (op, data) in diffs:
text = (data.replace("&", "&").replace("<", "<")
.replace(">", ">").replace("\n", "<br />"))
if op == self.DIFF_INSERT:
html.append("<ins style=\"background:#e6ffe6;\">%s</ins>" % text)
elif op == self.DIFF_DELETE:
html.append("<del style=\"background:#ffe6e6;\">%s</del>" % text)
elif op == self.DIFF_EQUAL:
html.append("<span>%s</span>" % text)
return "".join(html)
def get_diff_of_htmls(html1, html2):
diffutil = DiffMatchPatch()
diffs = diffutil.diff_main(html1, html2)
diffutil.diff_cleanupSemantic(diffs)
return diffutil.diff_pretty_html(diffs)
__all__ = ["render", "get_diff_of_htmls", "render_and_extract"]
| ecorator(p | identifier_name |
service.py | # Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import hashlib
import functools
import bleach
# BEGIN PATCH
import html5lib
from html5lib.serializer.htmlserializer import HTMLSerializer
def _serialize(domtree):
walker = html5lib.treewalkers.getTreeWalker('etree')
stream = walker(domtree)
serializer = HTMLSerializer(quote_attr_values=True,
omit_optional_tags=False,
alphabetical_attributes=True)
return serializer.render(stream)
bleach._serialize = _serialize
# END PATCH
from django.core.cache import cache
from django.utils.encoding import force_bytes
from markdown import Markdown
from .extensions.autolink import AutolinkExtension
from .extensions.automail import AutomailExtension | from .extensions.spaced_link import SpacedLinkExtension
from .extensions.strikethrough import StrikethroughExtension
from .extensions.wikilinks import WikiLinkExtension
from .extensions.emojify import EmojifyExtension
from .extensions.mentions import MentionsExtension
from .extensions.references import TaigaReferencesExtension
from .extensions.target_link import TargetBlankLinkExtension
# Bleach configuration
bleach.ALLOWED_TAGS += ["p", "table", "thead", "tbody", "th", "tr", "td", "h1",
"h2", "h3", "h4", "h5", "h6", "div", "pre", "span",
"hr", "dl", "dt", "dd", "sup", "img", "del", "br",
"ins"]
bleach.ALLOWED_STYLES.append("background")
bleach.ALLOWED_ATTRIBUTES["a"] = ["href", "title", "alt", "target"]
bleach.ALLOWED_ATTRIBUTES["img"] = ["alt", "src"]
bleach.ALLOWED_ATTRIBUTES["*"] = ["class", "style"]
def _make_extensions_list(project=None):
return [AutolinkExtension(),
AutomailExtension(),
SemiSaneListExtension(),
SpacedLinkExtension(),
StrikethroughExtension(),
WikiLinkExtension(project),
EmojifyExtension(),
MentionsExtension(),
TaigaReferencesExtension(project),
TargetBlankLinkExtension(),
"extra",
"codehilite",
"sane_lists",
"toc",
"nl2br"]
import diff_match_patch
def cache_by_sha(func):
@functools.wraps(func)
def _decorator(project, text):
sha1_hash = hashlib.sha1(force_bytes(text)).hexdigest()
key = "{}-{}".format(sha1_hash, project.id)
# Try to get it from the cache
cached = cache.get(key)
if cached is not None:
return cached
returned_value = func(project, text)
cache.set(key, returned_value, timeout=None)
return returned_value
return _decorator
def _get_markdown(project):
extensions = _make_extensions_list(project=project)
md = Markdown(extensions=extensions)
md.extracted_data = {"mentions": [], "references": []}
return md
@cache_by_sha
def render(project, text):
md = _get_markdown(project)
return bleach.clean(md.convert(text))
def render_and_extract(project, text):
md = _get_markdown(project)
result = bleach.clean(md.convert(text))
return (result, md.extracted_data)
class DiffMatchPatch(diff_match_patch.diff_match_patch):
def diff_pretty_html(self, diffs):
html = []
for (op, data) in diffs:
text = (data.replace("&", "&").replace("<", "<")
.replace(">", ">").replace("\n", "<br />"))
if op == self.DIFF_INSERT:
html.append("<ins style=\"background:#e6ffe6;\">%s</ins>" % text)
elif op == self.DIFF_DELETE:
html.append("<del style=\"background:#ffe6e6;\">%s</del>" % text)
elif op == self.DIFF_EQUAL:
html.append("<span>%s</span>" % text)
return "".join(html)
def get_diff_of_htmls(html1, html2):
diffutil = DiffMatchPatch()
diffs = diffutil.diff_main(html1, html2)
diffutil.diff_cleanupSemantic(diffs)
return diffutil.diff_pretty_html(diffs)
__all__ = ["render", "get_diff_of_htmls", "render_and_extract"] | from .extensions.semi_sane_lists import SemiSaneListExtension | random_line_split |
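# Illustration only: a usage sketch for get_diff_of_htmls as defined above.
# The exact markup comes from diff-match-patch's semantic diff; the comment
# describing the output is indicative, not byte-exact.
old_html = "<p>hello world</p>"
new_html = "<p>hello brave world</p>"

html_diff = get_diff_of_htmls(old_html, new_html)
# Unchanged runs come back wrapped in <span>, insertions in
# <ins style="background:#e6ffe6;">, deletions in <del style="background:#ffe6e6;">,
# ready to drop into a change-history view.
print(html_diff)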
view_environment.py | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals
"""
Script to visualize the model coordination environments
"""
__author__ = "David Waroquiers"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "2.0"
__maintainer__ = "David Waroquiers"
__email__ = "[email protected]"
__date__ = "Feb 20, 2016"
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import AllCoordinationGeometries
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import SEPARATION_PLANE
from pymatgen.analysis.chemenv.utils.scripts_utils import visualize
from pymatgen.analysis.chemenv.utils.coordination_geometry_utils import Plane
import numpy as np
if __name__ == '__main__':
print('+-------------------------------------------------------+\n'
'| Development script of the ChemEnv utility of pymatgen |\n'
'| Visualization of the model coordination environments |\n'
'+-------------------------------------------------------+\n')
allcg = AllCoordinationGeometries()
vis = None
while True:
cg_symbol = raw_input('Enter symbol of the geometry you want to see, "l" to see the list '
'of existing geometries or "q" to quit : ')
if cg_symbol == 'q':
break
if cg_symbol == 'l':
print(allcg.pretty_print(maxcn=13, additional_info={'nb_hints': True}))
continue
try:
cg = allcg[cg_symbol]
except LookupError:
print('Wrong geometry, try again ...')
continue
print(cg.name)
for ipoint, point in enumerate(cg.points):
print('Point #{:d} : {} {} {}'.format(ipoint, repr(point[0]), repr(point[1]), repr(point[2])))
print('Algorithms used :')
for ialgo, algo in enumerate(cg.algorithms):
print('Algorithm #{:d} :'.format(ialgo))
print(algo)
print('')
# Visualize the separation plane of a given algorithm
sepplane = False
if any([algo.algorithm_type == SEPARATION_PLANE for algo in cg.algorithms]):
test = raw_input('Enter index of the algorithm for which you want to visualize the plane : ')
if test != '':
try:
ialgo = int(test)
algo = cg.algorithms[ialgo]
sepplane = True
except (ValueError, IndexError):
print('Unable to determine the algorithm/separation_plane you want '
'to visualize for this geometry. Continues without ...')
myfactor = 3.0
if vis is None:
vis = visualize(cg=cg, zoom=1.0, myfactor=myfactor)
else:
vis = visualize(cg=cg, vis=vis, myfactor=myfactor)
cg_points = [myfactor*np.array(pp) for pp in cg.points]
cg_central_site = myfactor*np.array(cg.central_site)
if sepplane: | if algo.minimum_number_of_points == 2:
pts.append(cg_central_site)
centre = cg_central_site
else:
centre = np.sum(pts, axis=0) / len(pts)
factor = 1.5
target_dist = max([np.dot(pp-centre, pp-centre) for pp in cg_points])
current_dist = np.dot(pts[0] - centre, pts[0] - centre)
factor = factor * target_dist / current_dist
plane = Plane.from_npoints(points=pts)
p1 = centre + factor * (pts[0] - centre)
perp = factor * np.cross(pts[0] - centre, plane.normal_vector)
p2 = centre + perp
p3 = centre - factor * (pts[0] - centre)
p4 = centre - perp
vis.add_faces([[p1, p2, p3, p4]], [1.0, 0.0, 0.0], opacity=0.5)
target_radius = 0.25
radius = 1.5 * target_radius
if algo.minimum_number_of_points == 2:
vis.add_partial_sphere(coords=cg_central_site, radius=radius,
color=[1.0, 0.0, 0.0], start=0, end=360,
opacity=0.5)
for pp in pts:
vis.add_partial_sphere(coords=pp, radius=radius,
color=[1.0, 0.0, 0.0], start=0, end=360,
opacity=0.5)
ps1 = [cg_points[ii] for ii in algo.point_groups[0]]
ps2 = [cg_points[ii] for ii in algo.point_groups[1]]
for pp in ps1:
vis.add_partial_sphere(coords=pp, radius=radius,
color=[0.0, 1.0, 0.0], start=0, end=360,
opacity=0.5)
for pp in ps2:
vis.add_partial_sphere(coords=pp, radius=radius,
color=[0.0, 0.0, 1.0], start=0, end=360,
opacity=0.5)
vis.show() | pts = [cg_points[ii] for ii in algo.plane_points] | random_line_split |
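# Illustration only: Plane.from_npoints above fits a plane through the
# separation-plane points, and the quad drawn is that plane scaled around the
# centroid. For intuition, a least-squares plane normal can be recovered with
# plain numpy via SVD; this sketches the underlying math and is not pymatgen's
# implementation.
import numpy as np

def fit_plane(points):
    """Return (centroid, unit normal) of the best-fit plane through 3D points."""
    pts = np.asarray(points, dtype=float)
    centroid = pts.mean(axis=0)
    # The right singular vector with the smallest singular value spans the
    # direction of least variance, i.e. the plane normal.
    _, _, vh = np.linalg.svd(pts - centroid)
    normal = vh[-1]
    return centroid, normal / np.linalg.norm(normal)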