file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
ObjectFactory.js
|
var inherit = require('./inherit'),
Sprite = require('../display/Sprite'),
Tilemap = require('../tilemap/Tilemap'),
Rectangle = require('../geom/Rectangle'),
BitmapText = require('../text/BitmapText');
/**
* The object factory makes it simple to create and add objects to a parent. One is added
* to a State's world and camera by default, but they can be used for any parent but they
* can only belong to a single state.
*
* @class ObjectFactory
* @extends Object
* @constructor
* @param state {State} The game state this factory belongs to
* @param parent {Container} The container to act as the parent for created objects
*/
var ObjectFactory = function(state, parent) {
this.state = state;
this.game = state.game;
this.parent = parent;
};
inherit(ObjectFactory, Object, {
/**
* Adds a generic object to the world or camera
*
* @method obj
* @param object {mixed} Any game object you want to add to the parent
* @return {mixed} Returns the added object
*/
obj: function(obj) {
return this.parent.addChild(obj);
},
/**
* Creates a new sprite and adds it to the game world
*
* @method sprite
* @param texture {String|Texture} The texture for the sprite, or the key for one in the cache
* @param [frame=null] {String|Number} A specific frame of a sprite sheet to use, either the index or string key
* depending on the type of the sheet when loaded.
* @param [physics=true] {Boolean} Should this sprite be added to the physics simulation?
* @return {Sprite} The sprite added
*/
sprite: function(tx, frame, physics) {
var spr,
game = this.game;
if(typeof tx === 'string') {
if(frame || frame === 0)
tx = game.cache.getTextures(tx)[frame];
else
tx = game.cache.getTexture(tx);
}
if(!tx)
|
spr = new Sprite(tx);
//if undefined, then default to true
if(physics || physics === undefined) {
spr.enablePhysics(this.state.physics);
//this.state.physics.addSprite(spr);
}
return this.parent.addChild(spr);
},
/**
* Creates a new AudioPlayer to play the sound passed in
*
* @method audio
* @param key {String} The unique cache key for the preloaded audio
* @param [settings] {Object} All the settings for the audio player (see AudioManager.add for all settings)
* @return {AudioPlayer} The player added
*/
audio: function(key, settings) {
return this.state.audio.add(key, settings);
},
/**
* Creates a new tilemap to add to the world
*
* @method tilemap
* @param key {String} The unique cache key for the preloaded tilemap data
* @param [constrain=true] {Boolean} Should the camera be constrained to this tilemap's size?
* @return {Tilemap} The tilemap added
*/
tilemap: function(key, constrain) {
var obj = this.game.cache.getTilemap(key) || {},
tilemap = new Tilemap(this.state, obj.data, obj.textures);
if(constrain) {
this.state.camera.constrain(new Rectangle(0, 0, tilemap.realSize.x, tilemap.realSize.y));
}
//force render of tilemap
tilemap.render(
-this.state.world.position.x,
-this.state.world.position.x,
this.game.width,
this.game.height
);
tilemap._cachekey = key;
return this.parent.addChild(tilemap);
},
/**
* Creates a new instance of BitmapText
*
* @method bitmaptext
* @param text {String} The text for the BitmapText to display
* @param font {String} The key for the bitmap font loaded into the cache
* @param interactive {Boolean} Can the item be interacted with by mouse (clicked, dragged, etc)
* @return {BitmapText} The bitmap text object added
*/
bitmaptext: function(text, font, style) {
if(typeof font === 'string')
font = this.game.cache.getBitmapFont(font);
return this.parent.addChild(new BitmapText(text, font, style));
}
});
module.exports = ObjectFactory;
|
{
tx = game.cache.getTexture('__default');
}
|
conditional_block
|
patsub.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2020 Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Our local modules
from trepan.processor.command import base_subcmd as Mbase_subcmd
class SetPatSub(Mbase_subcmd.DebuggerSubcommand):
|
if __name__ == "__main__":
from trepan.processor.command.set_subcmd import __demo_helper__ as Mhelper
Mhelper.demo_run(SetPatSub)
pass
|
"""**set patsub** *from-re* *replace-string*
Add a substitution pattern rule replacing *patsub* with
*replace-string* anywhere it is found in source file names. If a
substitution rule was previously set for *from-re*, the old rule is
replaced by the new one.
In the following example, suppose in a docker container /mnt/project is
the mount-point for /home/rocky/project. You are running the code
from the docker container, but debugging this from outside of that.
Example:
--------
set patsub ^/mmt/project /home/rocky/project
"""
in_list = True
max_args = 2
min_abbrev = len("pats")
min_args = 2
short_help = "Set pattern substitution rule"
def run(self, args):
self.proc.add_remap_pat(args[0], args[1])
pass
|
identifier_body
|
patsub.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2020 Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Our local modules
from trepan.processor.command import base_subcmd as Mbase_subcmd
|
*replace-string* anywhere it is found in source file names. If a
substitution rule was previously set for *from-re*, the old rule is
replaced by the new one.
In the following example, suppose in a docker container /mnt/project is
the mount-point for /home/rocky/project. You are running the code
from the docker container, but debugging this from outside of that.
Example:
--------
set patsub ^/mmt/project /home/rocky/project
"""
in_list = True
max_args = 2
min_abbrev = len("pats")
min_args = 2
short_help = "Set pattern substitution rule"
def run(self, args):
self.proc.add_remap_pat(args[0], args[1])
pass
if __name__ == "__main__":
from trepan.processor.command.set_subcmd import __demo_helper__ as Mhelper
Mhelper.demo_run(SetPatSub)
pass
|
class SetPatSub(Mbase_subcmd.DebuggerSubcommand):
"""**set patsub** *from-re* *replace-string*
Add a substitution pattern rule replacing *patsub* with
|
random_line_split
|
patsub.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2020 Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Our local modules
from trepan.processor.command import base_subcmd as Mbase_subcmd
class SetPatSub(Mbase_subcmd.DebuggerSubcommand):
"""**set patsub** *from-re* *replace-string*
Add a substitution pattern rule replacing *patsub* with
*replace-string* anywhere it is found in source file names. If a
substitution rule was previously set for *from-re*, the old rule is
replaced by the new one.
In the following example, suppose in a docker container /mnt/project is
the mount-point for /home/rocky/project. You are running the code
from the docker container, but debugging this from outside of that.
Example:
--------
set patsub ^/mmt/project /home/rocky/project
"""
in_list = True
max_args = 2
min_abbrev = len("pats")
min_args = 2
short_help = "Set pattern substitution rule"
def run(self, args):
self.proc.add_remap_pat(args[0], args[1])
pass
if __name__ == "__main__":
|
from trepan.processor.command.set_subcmd import __demo_helper__ as Mhelper
Mhelper.demo_run(SetPatSub)
pass
|
conditional_block
|
|
patsub.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2020 Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Our local modules
from trepan.processor.command import base_subcmd as Mbase_subcmd
class SetPatSub(Mbase_subcmd.DebuggerSubcommand):
"""**set patsub** *from-re* *replace-string*
Add a substitution pattern rule replacing *patsub* with
*replace-string* anywhere it is found in source file names. If a
substitution rule was previously set for *from-re*, the old rule is
replaced by the new one.
In the following example, suppose in a docker container /mnt/project is
the mount-point for /home/rocky/project. You are running the code
from the docker container, but debugging this from outside of that.
Example:
--------
set patsub ^/mmt/project /home/rocky/project
"""
in_list = True
max_args = 2
min_abbrev = len("pats")
min_args = 2
short_help = "Set pattern substitution rule"
def
|
(self, args):
self.proc.add_remap_pat(args[0], args[1])
pass
if __name__ == "__main__":
from trepan.processor.command.set_subcmd import __demo_helper__ as Mhelper
Mhelper.demo_run(SetPatSub)
pass
|
run
|
identifier_name
|
task.py
|
# -*- test-case-name: twisted.test.test_task,twisted.test.test_cooperator -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Scheduling utility methods and classes.
@author: Jp Calderone
"""
__metaclass__ = type
import time
from zope.interface import implements
from twisted.python import reflect
from twisted.python.failure import Failure
from twisted.internet import base, defer
from twisted.internet.interfaces import IReactorTime
class LoopingCall:
"""Call a function repeatedly.
If C{f} returns a deferred, rescheduling will not take place until the
deferred has fired. The result value is ignored.
@ivar f: The function to call.
@ivar a: A tuple of arguments to pass the function.
@ivar kw: A dictionary of keyword arguments to pass to the function.
@ivar clock: A provider of
L{twisted.internet.interfaces.IReactorTime}. The default is
L{twisted.internet.reactor}. Feel free to set this to
something else, but it probably ought to be set *before*
calling L{start}.
@type running: C{bool}
@ivar running: A flag which is C{True} while C{f} is scheduled to be called
(or is currently being called). It is set to C{True} when L{start} is
called and set to C{False} when L{stop} is called or if C{f} raises an
exception. In either case, it will be C{False} by the time the
C{Deferred} returned by L{start} fires its callback or errback.
@type _expectNextCallAt: C{float}
@ivar _expectNextCallAt: The time at which this instance most recently
scheduled itself to run.
@type _realLastTime: C{float}
@ivar _realLastTime: When counting skips, the time at which the skip
counter was last invoked.
@type _runAtStart: C{bool}
@ivar _runAtStart: A flag indicating whether the 'now' argument was passed
to L{LoopingCall.start}.
"""
call = None
running = False
deferred = None
interval = None
_expectNextCallAt = 0.0
_runAtStart = False
starttime = None
def __init__(self, f, *a, **kw):
self.f = f
self.a = a
self.kw = kw
from twisted.internet import reactor
self.clock = reactor
def withCount(cls, countCallable):
"""
An alternate constructor for L{LoopingCall} that makes available the
number of calls which should have occurred since it was last invoked.
Note that this number is an C{int} value; It represents the discrete
number of calls that should have been made. For example, if you are
using a looping call to display an animation with discrete frames, this
number would be the number of frames to advance.
The count is normally 1, but can be higher. For example, if the reactor
is blocked and takes too long to invoke the L{LoopingCall}, a Deferred
returned from a previous call is not fired before an interval has
elapsed, or if the callable itself blocks for longer than an interval,
preventing I{itself} from being called.
@param countCallable: A callable that will be invoked each time the
resulting LoopingCall is run, with an integer specifying the number
of calls that should have been invoked.
@type countCallable: 1-argument callable which takes an C{int}
@return: An instance of L{LoopingCall} with call counting enabled,
which provides the count as the first positional argument.
@rtype: L{LoopingCall}
@since: 9.0
"""
def counter():
now = self.clock.seconds()
lastTime = self._realLastTime
if lastTime is None:
lastTime = self.starttime
if self._runAtStart:
lastTime -= self.interval
self._realLastTime = now
lastInterval = self._intervalOf(lastTime)
thisInterval = self._intervalOf(now)
count = thisInterval - lastInterval
return countCallable(count)
self = cls(counter)
self._realLastTime = None
return self
withCount = classmethod(withCount)
def _intervalOf(self, t):
"""
Determine the number of intervals passed as of the given point in
time.
@param t: The specified time (from the start of the L{LoopingCall}) to
be measured in intervals
@return: The C{int} number of intervals which have passed as of the
given point in time.
"""
elapsedTime = t - self.starttime
intervalNum = int(elapsedTime / self.interval)
return intervalNum
def start(self, interval, now=True):
"""
Start running function every interval seconds.
@param interval: The number of seconds between calls. May be
less than one. Precision will depend on the underlying
platform, the available hardware, and the load on the system.
@param now: If True, run this call right now. Otherwise, wait
until the interval has elapsed before beginning.
@return: A Deferred whose callback will be invoked with
C{self} when C{self.stop} is called, or whose errback will be
invoked when the function raises an exception or returned a
deferred that has its errback invoked.
"""
assert not self.running, ("Tried to start an already running "
"LoopingCall.")
if interval < 0:
raise ValueError, "interval must be >= 0"
self.running = True
d = self.deferred = defer.Deferred()
self.starttime = self.clock.seconds()
self._expectNextCallAt = self.starttime
self.interval = interval
self._runAtStart = now
if now:
self()
else:
self._reschedule()
return d
def stop(self):
"""Stop running function.
"""
assert self.running, ("Tried to stop a LoopingCall that was "
"not running.")
self.running = False
if self.call is not None:
self.call.cancel()
self.call = None
d, self.deferred = self.deferred, None
d.callback(self)
def reset(self):
"""
Skip the next iteration and reset the timer.
@since: 11.1
"""
assert self.running, ("Tried to reset a LoopingCall that was "
"not running.")
if self.call is not None:
self.call.cancel()
self.call = None
self._expectNextCallAt = self.clock.seconds()
self._reschedule()
def __call__(self):
def cb(result):
if self.running:
self._reschedule()
else:
d, self.deferred = self.deferred, None
d.callback(self)
def eb(failure):
self.running = False
d, self.deferred = self.deferred, None
d.errback(failure)
self.call = None
d = defer.maybeDeferred(self.f, *self.a, **self.kw)
d.addCallback(cb)
d.addErrback(eb)
def _reschedule(self):
"""
Schedule the next iteration of this looping call.
"""
if self.interval == 0:
self.call = self.clock.callLater(0, self)
return
currentTime = self.clock.seconds()
# Find how long is left until the interval comes around again.
untilNextTime = (self._expectNextCallAt - currentTime) % self.interval
# Make sure it is in the future, in case more than one interval worth
# of time passed since the previous call was made.
nextTime = max(
self._expectNextCallAt + self.interval, currentTime + untilNextTime)
# If the interval falls on the current time exactly, skip it and
# schedule the call for the next interval.
if nextTime == currentTime:
nextTime += self.interval
self._expectNextCallAt = nextTime
self.call = self.clock.callLater(nextTime - currentTime, self)
def __repr__(self):
if hasattr(self.f, 'func_name'):
func = self.f.func_name
if hasattr(self.f, 'im_class'):
func = self.f.im_class.__name__ + '.' + func
else:
func = reflect.safe_repr(self.f)
return 'LoopingCall<%r>(%s, *%s, **%s)' % (
self.interval, func, reflect.safe_repr(self.a),
reflect.safe_repr(self.kw))
class SchedulerError(Exception):
"""
The operation could not be completed because the scheduler or one of its
tasks was in an invalid state. This exception should not be raised
directly, but is a superclass of various scheduler-state-related
exceptions.
"""
class SchedulerStopped(SchedulerError):
"""
The operation could not complete because the scheduler was stopped in
progress or was already stopped.
"""
class TaskFinished(SchedulerError):
"""
The operation could not complete because the task was already completed,
stopped, encountered an error or otherwise permanently stopped running.
"""
class TaskDone(TaskFinished):
"""
The operation could not complete because the task was already completed.
"""
class TaskStopped(TaskFinished):
"""
The operation could not complete because the task was stopped.
"""
class TaskFailed(TaskFinished):
"""
The operation could not complete because the task died with an unhandled
error.
"""
class NotPaused(SchedulerError):
"""
This exception is raised when a task is resumed which was not previously
paused.
"""
class _Timer(object):
MAX_SLICE = 0.01
def __init__(self):
self.end = time.time() + self.MAX_SLICE
def __call__(self):
return time.time() >= self.end
_EPSILON = 0.00000001
def _defaultScheduler(x):
from twisted.internet import reactor
return reactor.callLater(_EPSILON, x)
class CooperativeTask(object):
"""
A L{CooperativeTask} is a task object inside a L{Cooperator}, which can be
paused, resumed, and stopped. It can also have its completion (or
termination) monitored.
@see: L{CooperativeTask.cooperate}
@ivar _iterator: the iterator to iterate when this L{CooperativeTask} is
asked to do work.
@ivar _cooperator: the L{Cooperator} that this L{CooperativeTask}
participates in, which is used to re-insert it upon resume.
@ivar _deferreds: the list of L{defer.Deferred}s to fire when this task
completes, fails, or finishes.
@type _deferreds: L{list}
@type _cooperator: L{Cooperator}
@ivar _pauseCount: the number of times that this L{CooperativeTask} has
been paused; if 0, it is running.
@type _pauseCount: L{int}
@ivar _completionState: The completion-state of this L{CooperativeTask}.
C{None} if the task is not yet completed, an instance of L{TaskStopped}
if C{stop} was called to stop this task early, of L{TaskFailed} if the
application code in the iterator raised an exception which caused it to
terminate, and of L{TaskDone} if it terminated normally via raising
L{StopIteration}.
@type _completionState: L{TaskFinished}
"""
def __init__(self, iterator, cooperator):
"""
A private constructor: to create a new L{CooperativeTask}, see
L{Cooperator.cooperate}.
"""
self._iterator = iterator
self._cooperator = cooperator
self._deferreds = []
self._pauseCount = 0
self._completionState = None
self._completionResult = None
cooperator._addTask(self)
def whenDone(self):
"""
Get a L{defer.Deferred} notification of when this task is complete.
@return: a L{defer.Deferred} that fires with the C{iterator} that this
L{CooperativeTask} was created with when the iterator has been
exhausted (i.e. its C{next} method has raised L{StopIteration}), or
fails with the exception raised by C{next} if it raises some other
exception.
@rtype: L{defer.Deferred}
"""
d = defer.Deferred()
if self._completionState is None:
self._deferreds.append(d)
else:
|
return d
def pause(self):
"""
Pause this L{CooperativeTask}. Stop doing work until
L{CooperativeTask.resume} is called. If C{pause} is called more than
once, C{resume} must be called an equal number of times to resume this
task.
@raise TaskFinished: if this task has already finished or completed.
"""
self._checkFinish()
self._pauseCount += 1
if self._pauseCount == 1:
self._cooperator._removeTask(self)
def resume(self):
"""
Resume processing of a paused L{CooperativeTask}.
@raise NotPaused: if this L{CooperativeTask} is not paused.
"""
if self._pauseCount == 0:
raise NotPaused()
self._pauseCount -= 1
if self._pauseCount == 0 and self._completionState is None:
self._cooperator._addTask(self)
def _completeWith(self, completionState, deferredResult):
"""
@param completionState: a L{TaskFinished} exception or a subclass
thereof, indicating what exception should be raised when subsequent
operations are performed.
@param deferredResult: the result to fire all the deferreds with.
"""
self._completionState = completionState
self._completionResult = deferredResult
if not self._pauseCount:
self._cooperator._removeTask(self)
# The Deferreds need to be invoked after all this is completed, because
# a Deferred may want to manipulate other tasks in a Cooperator. For
# example, if you call "stop()" on a cooperator in a callback on a
# Deferred returned from whenDone(), this CooperativeTask must be gone
# from the Cooperator by that point so that _completeWith is not
# invoked reentrantly; that would cause these Deferreds to blow up with
# an AlreadyCalledError, or the _removeTask to fail with a ValueError.
for d in self._deferreds:
d.callback(deferredResult)
def stop(self):
"""
Stop further processing of this task.
@raise TaskFinished: if this L{CooperativeTask} has previously
completed, via C{stop}, completion, or failure.
"""
self._checkFinish()
self._completeWith(TaskStopped(), Failure(TaskStopped()))
def _checkFinish(self):
"""
If this task has been stopped, raise the appropriate subclass of
L{TaskFinished}.
"""
if self._completionState is not None:
raise self._completionState
def _oneWorkUnit(self):
"""
Perform one unit of work for this task, retrieving one item from its
iterator, stopping if there are no further items in the iterator, and
pausing if the result was a L{defer.Deferred}.
"""
try:
result = self._iterator.next()
except StopIteration:
self._completeWith(TaskDone(), self._iterator)
except:
self._completeWith(TaskFailed(), Failure())
else:
if isinstance(result, defer.Deferred):
self.pause()
def failLater(f):
self._completeWith(TaskFailed(), f)
result.addCallbacks(lambda result: self.resume(),
failLater)
class Cooperator(object):
"""
Cooperative task scheduler.
"""
def __init__(self,
terminationPredicateFactory=_Timer,
scheduler=_defaultScheduler,
started=True):
"""
Create a scheduler-like object to which iterators may be added.
@param terminationPredicateFactory: A no-argument callable which will
be invoked at the beginning of each step and should return a
no-argument callable which will return True when the step should be
terminated. The default factory is time-based and allows iterators to
run for 1/100th of a second at a time.
@param scheduler: A one-argument callable which takes a no-argument
callable and should invoke it at some future point. This will be used
to schedule each step of this Cooperator.
@param started: A boolean which indicates whether iterators should be
stepped as soon as they are added, or if they will be queued up until
L{Cooperator.start} is called.
"""
self._tasks = []
self._metarator = iter(())
self._terminationPredicateFactory = terminationPredicateFactory
self._scheduler = scheduler
self._delayedCall = None
self._stopped = False
self._started = started
def coiterate(self, iterator, doneDeferred=None):
"""
Add an iterator to the list of iterators this L{Cooperator} is
currently running.
@param doneDeferred: If specified, this will be the Deferred used as
the completion deferred. It is suggested that you use the default,
which creates a new Deferred for you.
@return: a Deferred that will fire when the iterator finishes.
"""
if doneDeferred is None:
doneDeferred = defer.Deferred()
CooperativeTask(iterator, self).whenDone().chainDeferred(doneDeferred)
return doneDeferred
def cooperate(self, iterator):
"""
Start running the given iterator as a long-running cooperative task, by
calling next() on it as a periodic timed event.
@param iterator: the iterator to invoke.
@return: a L{CooperativeTask} object representing this task.
"""
return CooperativeTask(iterator, self)
def _addTask(self, task):
"""
Add a L{CooperativeTask} object to this L{Cooperator}.
"""
if self._stopped:
self._tasks.append(task) # XXX silly, I know, but _completeWith
# does the inverse
task._completeWith(SchedulerStopped(), Failure(SchedulerStopped()))
else:
self._tasks.append(task)
self._reschedule()
def _removeTask(self, task):
"""
Remove a L{CooperativeTask} from this L{Cooperator}.
"""
self._tasks.remove(task)
# If no work left to do, cancel the delayed call:
if not self._tasks and self._delayedCall:
self._delayedCall.cancel()
self._delayedCall = None
def _tasksWhileNotStopped(self):
"""
Yield all L{CooperativeTask} objects in a loop as long as this
L{Cooperator}'s termination condition has not been met.
"""
terminator = self._terminationPredicateFactory()
while self._tasks:
for t in self._metarator:
yield t
if terminator():
return
self._metarator = iter(self._tasks)
def _tick(self):
"""
Run one scheduler tick.
"""
self._delayedCall = None
for taskObj in self._tasksWhileNotStopped():
taskObj._oneWorkUnit()
self._reschedule()
_mustScheduleOnStart = False
def _reschedule(self):
if not self._started:
self._mustScheduleOnStart = True
return
if self._delayedCall is None and self._tasks:
self._delayedCall = self._scheduler(self._tick)
def start(self):
"""
Begin scheduling steps.
"""
self._stopped = False
self._started = True
if self._mustScheduleOnStart:
del self._mustScheduleOnStart
self._reschedule()
def stop(self):
"""
Stop scheduling steps. Errback the completion Deferreds of all
iterators which have been added and forget about them.
"""
self._stopped = True
for taskObj in self._tasks:
taskObj._completeWith(SchedulerStopped(),
Failure(SchedulerStopped()))
self._tasks = []
if self._delayedCall is not None:
self._delayedCall.cancel()
self._delayedCall = None
_theCooperator = Cooperator()
def coiterate(iterator):
"""
Cooperatively iterate over the given iterator, dividing runtime between it
and all other iterators which have been passed to this function and not yet
exhausted.
"""
return _theCooperator.coiterate(iterator)
def cooperate(iterator):
"""
Start running the given iterator as a long-running cooperative task, by
calling next() on it as a periodic timed event.
@param iterator: the iterator to invoke.
@return: a L{CooperativeTask} object representing this task.
"""
return _theCooperator.cooperate(iterator)
class Clock:
"""
Provide a deterministic, easily-controlled implementation of
L{IReactorTime.callLater}. This is commonly useful for writing
deterministic unit tests for code which schedules events using this API.
"""
implements(IReactorTime)
rightNow = 0.0
def __init__(self):
self.calls = []
def seconds(self):
"""
Pretend to be time.time(). This is used internally when an operation
such as L{IDelayedCall.reset} needs to determine a a time value
relative to the current time.
@rtype: C{float}
@return: The time which should be considered the current time.
"""
return self.rightNow
def _sortCalls(self):
"""
Sort the pending calls according to the time they are scheduled.
"""
self.calls.sort(lambda a, b: cmp(a.getTime(), b.getTime()))
def callLater(self, when, what, *a, **kw):
"""
See L{twisted.internet.interfaces.IReactorTime.callLater}.
"""
dc = base.DelayedCall(self.seconds() + when,
what, a, kw,
self.calls.remove,
lambda c: None,
self.seconds)
self.calls.append(dc)
self._sortCalls()
return dc
def getDelayedCalls(self):
"""
See L{twisted.internet.interfaces.IReactorTime.getDelayedCalls}
"""
return self.calls
def advance(self, amount):
"""
Move time on this clock forward by the given amount and run whatever
pending calls should be run.
@type amount: C{float}
@param amount: The number of seconds which to advance this clock's
time.
"""
self.rightNow += amount
self._sortCalls()
while self.calls and self.calls[0].getTime() <= self.seconds():
call = self.calls.pop(0)
call.called = 1
call.func(*call.args, **call.kw)
self._sortCalls()
def pump(self, timings):
"""
Advance incrementally by the given set of times.
@type timings: iterable of C{float}
"""
for amount in timings:
self.advance(amount)
def deferLater(clock, delay, callable, *args, **kw):
"""
Call the given function after a certain period of time has passed.
@type clock: L{IReactorTime} provider
@param clock: The object which will be used to schedule the delayed
call.
@type delay: C{float} or C{int}
@param delay: The number of seconds to wait before calling the function.
@param callable: The object to call after the delay.
@param *args: The positional arguments to pass to C{callable}.
@param **kw: The keyword arguments to pass to C{callable}.
@rtype: L{defer.Deferred}
@return: A deferred that fires with the result of the callable when the
specified time has elapsed.
"""
def deferLaterCancel(deferred):
delayedCall.cancel()
d = defer.Deferred(deferLaterCancel)
d.addCallback(lambda ignored: callable(*args, **kw))
delayedCall = clock.callLater(delay, d.callback, None)
return d
__all__ = [
'LoopingCall',
'Clock',
'SchedulerStopped', 'Cooperator', 'coiterate',
'deferLater',
]
|
d.callback(self._completionResult)
|
conditional_block
|
task.py
|
# -*- test-case-name: twisted.test.test_task,twisted.test.test_cooperator -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Scheduling utility methods and classes.
@author: Jp Calderone
"""
__metaclass__ = type
import time
from zope.interface import implements
from twisted.python import reflect
from twisted.python.failure import Failure
from twisted.internet import base, defer
from twisted.internet.interfaces import IReactorTime
class LoopingCall:
    """Call a function repeatedly.
    If C{f} returns a deferred, rescheduling will not take place until the
    deferred has fired. The result value is ignored.
    @ivar f: The function to call.
    @ivar a: A tuple of arguments to pass the function.
    @ivar kw: A dictionary of keyword arguments to pass to the function.
    @ivar clock: A provider of
        L{twisted.internet.interfaces.IReactorTime}.  The default is
        L{twisted.internet.reactor}. Feel free to set this to
        something else, but it probably ought to be set *before*
        calling L{start}.
    @type running: C{bool}
    @ivar running: A flag which is C{True} while C{f} is scheduled to be called
        (or is currently being called). It is set to C{True} when L{start} is
        called and set to C{False} when L{stop} is called or if C{f} raises an
        exception. In either case, it will be C{False} by the time the
        C{Deferred} returned by L{start} fires its callback or errback.
    @type _expectNextCallAt: C{float}
    @ivar _expectNextCallAt: The time at which this instance most recently
        scheduled itself to run.
    @type _realLastTime: C{float}
    @ivar _realLastTime: When counting skips, the time at which the skip
        counter was last invoked.
    @type _runAtStart: C{bool}
    @ivar _runAtStart: A flag indicating whether the 'now' argument was passed
        to L{LoopingCall.start}.
    """
    # Class-level defaults; per-instance state is established by start().
    call = None               # the pending IDelayedCall, when scheduled
    running = False
    deferred = None           # fired when the loop stops or f fails
    interval = None
    _expectNextCallAt = 0.0
    _runAtStart = False
    starttime = None
    def __init__(self, f, *a, **kw):
        self.f = f
        self.a = a
        self.kw = kw
        # Imported lazily so merely importing this module does not require
        # an installed reactor.
        from twisted.internet import reactor
        self.clock = reactor
    def withCount(cls, countCallable):
        """
        An alternate constructor for L{LoopingCall} that makes available the
        number of calls which should have occurred since it was last invoked.
        Note that this number is an C{int} value; It represents the discrete
        number of calls that should have been made.  For example, if you are
        using a looping call to display an animation with discrete frames, this
        number would be the number of frames to advance.
        The count is normally 1, but can be higher. For example, if the reactor
        is blocked and takes too long to invoke the L{LoopingCall}, a Deferred
        returned from a previous call is not fired before an interval has
        elapsed, or if the callable itself blocks for longer than an interval,
        preventing I{itself} from being called.
        @param countCallable: A callable that will be invoked each time the
            resulting LoopingCall is run, with an integer specifying the number
            of calls that should have been invoked.
        @type countCallable: 1-argument callable which takes an C{int}
        @return: An instance of L{LoopingCall} with call counting enabled,
            which provides the count as the first positional argument.
        @rtype: L{LoopingCall}
        @since: 9.0
        """
        def counter():
            now = self.clock.seconds()
            lastTime = self._realLastTime
            if lastTime is None:
                # First invocation: measure from the loop's start time.
                lastTime = self.starttime
                if self._runAtStart:
                    # start(now=True) already ran once immediately; pretend
                    # that run happened one interval earlier so it counts.
                    lastTime -= self.interval
            self._realLastTime = now
            lastInterval = self._intervalOf(lastTime)
            thisInterval = self._intervalOf(now)
            count = thisInterval - lastInterval
            return countCallable(count)
        # 'self' is deliberately bound here, after cls(counter): counter is a
        # closure over this same LoopingCall instance.
        self = cls(counter)
        self._realLastTime = None
        return self
    withCount = classmethod(withCount)
    def _intervalOf(self, t):
        """
        Determine the number of intervals passed as of the given point in
        time.
        @param t: The specified time (from the start of the L{LoopingCall}) to
            be measured in intervals
        @return: The C{int} number of intervals which have passed as of the
            given point in time.
        """
        elapsedTime = t - self.starttime
        intervalNum = int(elapsedTime / self.interval)
        return intervalNum
    def start(self, interval, now=True):
        """
        Start running function every interval seconds.
        @param interval: The number of seconds between calls.  May be
        less than one.  Precision will depend on the underlying
        platform, the available hardware, and the load on the system.
        @param now: If True, run this call right now.  Otherwise, wait
        until the interval has elapsed before beginning.
        @return: A Deferred whose callback will be invoked with
        C{self} when C{self.stop} is called, or whose errback will be
        invoked when the function raises an exception or returned a
        deferred that has its errback invoked.
        """
        assert not self.running, ("Tried to start an already running "
                                  "LoopingCall.")
        if interval < 0:
            # NOTE: Python 2 raise syntax; this module predates py3 support.
            raise ValueError, "interval must be >= 0"
        self.running = True
        d = self.deferred = defer.Deferred()
        self.starttime = self.clock.seconds()
        self._expectNextCallAt = self.starttime
        self.interval = interval
        self._runAtStart = now
        if now:
            self()
        else:
            self._reschedule()
        return d
    def stop(self):
        """Stop running function.
        """
        assert self.running, ("Tried to stop a LoopingCall that was "
                              "not running.")
        self.running = False
        if self.call is not None:
            self.call.cancel()
            self.call = None
            # Only fire the completion Deferred when a call was actually
            # pending; otherwise __call__'s cb will fire it.
            d, self.deferred = self.deferred, None
            d.callback(self)
    def reset(self):
        """
        Skip the next iteration and reset the timer.
        @since: 11.1
        """
        assert self.running, ("Tried to reset a LoopingCall that was "
                              "not running.")
        if self.call is not None:
            self.call.cancel()
            self.call = None
            self._expectNextCallAt = self.clock.seconds()
            self._reschedule()
    def __call__(self):
        # Invoked by the clock each time the loop fires.
        def cb(result):
            if self.running:
                self._reschedule()
            else:
                # stop() was called while f (or its Deferred) was running.
                d, self.deferred = self.deferred, None
                d.callback(self)
        def eb(failure):
            self.running = False
            d, self.deferred = self.deferred, None
            d.errback(failure)
        self.call = None
        d = defer.maybeDeferred(self.f, *self.a, **self.kw)
        d.addCallback(cb)
        d.addErrback(eb)
    def _reschedule(self):
        """
        Schedule the next iteration of this looping call.
        """
        if self.interval == 0:
            # Zero interval: run again as soon as the reactor allows.
            self.call = self.clock.callLater(0, self)
            return
        currentTime = self.clock.seconds()
        # Find how long is left until the interval comes around again.
        untilNextTime = (self._expectNextCallAt - currentTime) % self.interval
        # Make sure it is in the future, in case more than one interval worth
        # of time passed since the previous call was made.
        nextTime = max(
            self._expectNextCallAt + self.interval, currentTime + untilNextTime)
        # If the interval falls on the current time exactly, skip it and
        # schedule the call for the next interval.
        if nextTime == currentTime:
            nextTime += self.interval
        self._expectNextCallAt = nextTime
        self.call = self.clock.callLater(nextTime - currentTime, self)
    def __repr__(self):
        # func_name/im_class are Python 2 function attributes.
        if hasattr(self.f, 'func_name'):
            func = self.f.func_name
            if hasattr(self.f, 'im_class'):
                func = self.f.im_class.__name__ + '.' + func
        else:
            func = reflect.safe_repr(self.f)
        return 'LoopingCall<%r>(%s, *%s, **%s)' % (
            self.interval, func, reflect.safe_repr(self.a),
            reflect.safe_repr(self.kw))
# Exception hierarchy for scheduler/task state errors.  SchedulerError is
# the abstract root; SchedulerStopped concerns the Cooperator as a whole,
# while TaskFinished and its subclasses (TaskDone, TaskStopped, TaskFailed)
# describe why an individual CooperativeTask can no longer run.  NotPaused
# guards against unbalanced resume() calls.
class SchedulerError(Exception):
    """
    The operation could not be completed because the scheduler or one of its
    tasks was in an invalid state.  This exception should not be raised
    directly, but is a superclass of various scheduler-state-related
    exceptions.
    """
class SchedulerStopped(SchedulerError):
    """
    The operation could not complete because the scheduler was stopped in
    progress or was already stopped.
    """
class TaskFinished(SchedulerError):
    """
    The operation could not complete because the task was already completed,
    stopped, encountered an error or otherwise permanently stopped running.
    """
class TaskDone(TaskFinished):
    """
    The operation could not complete because the task was already completed.
    """
class TaskStopped(TaskFinished):
    """
    The operation could not complete because the task was stopped.
    """
class TaskFailed(TaskFinished):
    """
    The operation could not complete because the task died with an unhandled
    error.
    """
class NotPaused(SchedulerError):
    """
    This exception is raised when a task is resumed which was not previously
    paused.
    """
class _Timer(object):
MAX_SLICE = 0.01
def __init__(self):
self.end = time.time() + self.MAX_SLICE
def __call__(self):
return time.time() >= self.end
# Delay used when scheduling a Cooperator tick on the real reactor: tiny
# but non-zero, so each tick is a distinct reactor event.
_EPSILON = 0.00000001
def _defaultScheduler(x):
    # Imported lazily so importing this module does not require a reactor.
    from twisted.internet import reactor
    return reactor.callLater(_EPSILON, x)
class CooperativeTask(object):
    """
    A L{CooperativeTask} is a task object inside a L{Cooperator}, which can be
    paused, resumed, and stopped.  It can also have its completion (or
    termination) monitored.
    @see: L{CooperativeTask.cooperate}
    @ivar _iterator: the iterator to iterate when this L{CooperativeTask} is
        asked to do work.
    @ivar _cooperator: the L{Cooperator} that this L{CooperativeTask}
        participates in, which is used to re-insert it upon resume.
    @ivar _deferreds: the list of L{defer.Deferred}s to fire when this task
        completes, fails, or finishes.
    @type _deferreds: L{list}
    @type _cooperator: L{Cooperator}
    @ivar _pauseCount: the number of times that this L{CooperativeTask} has
        been paused; if 0, it is running.
    @type _pauseCount: L{int}
    @ivar _completionState: The completion-state of this L{CooperativeTask}.
        C{None} if the task is not yet completed, an instance of L{TaskStopped}
        if C{stop} was called to stop this task early, of L{TaskFailed} if the
        application code in the iterator raised an exception which caused it to
        terminate, and of L{TaskDone} if it terminated normally via raising
        L{StopIteration}.
    @type _completionState: L{TaskFinished}
    """
    def __init__(self, iterator, cooperator):
        """
        A private constructor: to create a new L{CooperativeTask}, see
        L{Cooperator.cooperate}.
        """
        self._iterator = iterator
        self._cooperator = cooperator
        self._deferreds = []
        self._pauseCount = 0
        self._completionState = None
        self._completionResult = None
        # Registering may complete the task immediately if the cooperator
        # has already been stopped.
        cooperator._addTask(self)
    def whenDone(self):
        """
        Get a L{defer.Deferred} notification of when this task is complete.
        @return: a L{defer.Deferred} that fires with the C{iterator} that this
            L{CooperativeTask} was created with when the iterator has been
            exhausted (i.e. its C{next} method has raised L{StopIteration}), or
            fails with the exception raised by C{next} if it raises some other
            exception.
        @rtype: L{defer.Deferred}
        """
        d = defer.Deferred()
        if self._completionState is None:
            # Still running: remember the Deferred for _completeWith.
            self._deferreds.append(d)
        else:
            # Already finished: fire immediately with the stored result.
            d.callback(self._completionResult)
        return d
    def pause(self):
        """
        Pause this L{CooperativeTask}.  Stop doing work until
        L{CooperativeTask.resume} is called.  If C{pause} is called more than
        once, C{resume} must be called an equal number of times to resume this
        task.
        @raise TaskFinished: if this task has already finished or completed.
        """
        self._checkFinish()
        self._pauseCount += 1
        if self._pauseCount == 1:
            # Transition running -> paused: leave the cooperator's queue.
            self._cooperator._removeTask(self)
    def resume(self):
        """
        Resume processing of a paused L{CooperativeTask}.
        @raise NotPaused: if this L{CooperativeTask} is not paused.
        """
        if self._pauseCount == 0:
            raise NotPaused()
        self._pauseCount -= 1
        if self._pauseCount == 0 and self._completionState is None:
            # Transition paused -> running: rejoin the cooperator's queue.
            self._cooperator._addTask(self)
    def _completeWith(self, completionState, deferredResult):
        """
        @param completionState: a L{TaskFinished} exception or a subclass
            thereof, indicating what exception should be raised when subsequent
            operations are performed.
        @param deferredResult: the result to fire all the deferreds with.
        """
        self._completionState = completionState
        self._completionResult = deferredResult
        if not self._pauseCount:
            self._cooperator._removeTask(self)
        # The Deferreds need to be invoked after all this is completed, because
        # a Deferred may want to manipulate other tasks in a Cooperator.  For
        # example, if you call "stop()" on a cooperator in a callback on a
        # Deferred returned from whenDone(), this CooperativeTask must be gone
        # from the Cooperator by that point so that _completeWith is not
        # invoked reentrantly; that would cause these Deferreds to blow up with
        # an AlreadyCalledError, or the _removeTask to fail with a ValueError.
        for d in self._deferreds:
            d.callback(deferredResult)
    def stop(self):
        """
        Stop further processing of this task.
        @raise TaskFinished: if this L{CooperativeTask} has previously
            completed, via C{stop}, completion, or failure.
        """
        self._checkFinish()
        self._completeWith(TaskStopped(), Failure(TaskStopped()))
    def _checkFinish(self):
        """
        If this task has been stopped, raise the appropriate subclass of
        L{TaskFinished}.
        """
        if self._completionState is not None:
            raise self._completionState
    def _oneWorkUnit(self):
        """
        Perform one unit of work for this task, retrieving one item from its
        iterator, stopping if there are no further items in the iterator, and
        pausing if the result was a L{defer.Deferred}.
        """
        try:
            result = self._iterator.next()
        except StopIteration:
            self._completeWith(TaskDone(), self._iterator)
        except:
            # Bare except is deliberate: any application error from the
            # iterator becomes the task's failure result.
            self._completeWith(TaskFailed(), Failure())
        else:
            if isinstance(result, defer.Deferred):
                # Pause first, then resume from the Deferred's callback.
                self.pause()
                def failLater(f):
                    self._completeWith(TaskFailed(), f)
                result.addCallbacks(lambda result: self.resume(),
                                    failLater)
class Cooperator(object):
    """
    Cooperative task scheduler.
    """
    def __init__(self,
                 terminationPredicateFactory=_Timer,
                 scheduler=_defaultScheduler,
                 started=True):
        """
        Create a scheduler-like object to which iterators may be added.
        @param terminationPredicateFactory: A no-argument callable which will
        be invoked at the beginning of each step and should return a
        no-argument callable which will return True when the step should be
        terminated.  The default factory is time-based and allows iterators to
        run for 1/100th of a second at a time.
        @param scheduler: A one-argument callable which takes a no-argument
        callable and should invoke it at some future point.  This will be used
        to schedule each step of this Cooperator.
        @param started: A boolean which indicates whether iterators should be
        stepped as soon as they are added, or if they will be queued up until
        L{Cooperator.start} is called.
        """
        self._tasks = []
        # Iterator over _tasks used for round-robin scheduling; starts
        # exhausted so the first tick rebuilds it.
        self._metarator = iter(())
        self._terminationPredicateFactory = terminationPredicateFactory
        self._scheduler = scheduler
        self._delayedCall = None
        self._stopped = False
        self._started = started
    def coiterate(self, iterator, doneDeferred=None):
        """
        Add an iterator to the list of iterators this L{Cooperator} is
        currently running.
        @param doneDeferred: If specified, this will be the Deferred used as
            the completion deferred.  It is suggested that you use the default,
            which creates a new Deferred for you.
        @return: a Deferred that will fire when the iterator finishes.
        """
        if doneDeferred is None:
            doneDeferred = defer.Deferred()
        CooperativeTask(iterator, self).whenDone().chainDeferred(doneDeferred)
        return doneDeferred
    def cooperate(self, iterator):
        """
        Start running the given iterator as a long-running cooperative task, by
        calling next() on it as a periodic timed event.
        @param iterator: the iterator to invoke.
        @return: a L{CooperativeTask} object representing this task.
        """
        return CooperativeTask(iterator, self)
    def _addTask(self, task):
        """
        Add a L{CooperativeTask} object to this L{Cooperator}.
        """
        if self._stopped:
            self._tasks.append(task) # XXX silly, I know, but _completeWith
                                     # does the inverse
            task._completeWith(SchedulerStopped(), Failure(SchedulerStopped()))
        else:
            self._tasks.append(task)
            self._reschedule()
    def _removeTask(self, task):
        """
        Remove a L{CooperativeTask} from this L{Cooperator}.
        """
        self._tasks.remove(task)
        # If no work left to do, cancel the delayed call:
        if not self._tasks and self._delayedCall:
            self._delayedCall.cancel()
            self._delayedCall = None
    def _tasksWhileNotStopped(self):
        """
        Yield all L{CooperativeTask} objects in a loop as long as this
        L{Cooperator}'s termination condition has not been met.
        """
        terminator = self._terminationPredicateFactory()
        while self._tasks:
            # Continue the round-robin where the previous tick left off.
            for t in self._metarator:
                yield t
                if terminator():
                    return
            self._metarator = iter(self._tasks)
    def _tick(self):
        """
        Run one scheduler tick.
        """
        self._delayedCall = None
        for taskObj in self._tasksWhileNotStopped():
            taskObj._oneWorkUnit()
        self._reschedule()
    # Whether a tick must be scheduled as soon as start() is called.
    _mustScheduleOnStart = False
    def _reschedule(self):
        # Schedule the next tick, unless not started or one is pending.
        if not self._started:
            self._mustScheduleOnStart = True
            return
        if self._delayedCall is None and self._tasks:
            self._delayedCall = self._scheduler(self._tick)
    def start(self):
        """
        Begin scheduling steps.
        """
        self._stopped = False
        self._started = True
        if self._mustScheduleOnStart:
            # Restore the class-level default by deleting the instance flag.
            del self._mustScheduleOnStart
            self._reschedule()
    def stop(self):
        """
        Stop scheduling steps.  Errback the completion Deferreds of all
        iterators which have been added and forget about them.
        """
        self._stopped = True
        for taskObj in self._tasks:
            taskObj._completeWith(SchedulerStopped(),
                                  Failure(SchedulerStopped()))
        self._tasks = []
        if self._delayedCall is not None:
            self._delayedCall.cancel()
            self._delayedCall = None
# A single module-level Cooperator shared by the free functions below.
_theCooperator = Cooperator()
def coiterate(iterator):
    """
    Cooperatively iterate over the given iterator, dividing runtime between it
    and all other iterators which have been passed to this function and not yet
    exhausted.
    """
    return _theCooperator.coiterate(iterator)
def cooperate(iterator):
    """
    Start running the given iterator as a long-running cooperative task, by
    calling next() on it as a periodic timed event.
    @param iterator: the iterator to invoke.
    @return: a L{CooperativeTask} object representing this task.
    """
    # Delegates to the shared module-level Cooperator.
    return _theCooperator.cooperate(iterator)
class
|
:
"""
Provide a deterministic, easily-controlled implementation of
L{IReactorTime.callLater}. This is commonly useful for writing
deterministic unit tests for code which schedules events using this API.
"""
implements(IReactorTime)  # zope.interface declaration (Python 2 style)
rightNow = 0.0  # this clock's current notion of "now", in seconds
def __init__(self):
    # Pending delayed calls, kept sorted by scheduled time via _sortCalls().
    self.calls = []
def seconds(self):
    """
    Pretend to be time.time().  This is used internally when an operation
    such as L{IDelayedCall.reset} needs to determine a time value
    relative to the current time.
    @rtype: C{float}
    @return: The time which should be considered the current time.
    """
    return self.rightNow
def _sortCalls(self):
"""
Sort the pending calls according to the time they are scheduled.
"""
self.calls.sort(lambda a, b: cmp(a.getTime(), b.getTime()))
def callLater(self, when, what, *a, **kw):
    """
    See L{twisted.internet.interfaces.IReactorTime.callLater}.
    """
    # Positional arguments: absolute fire time, function, args, kwargs, a
    # cancel hook (removes the call from our queue), a reset hook (no-op
    # here), and a seconds() supplier -- presumably matching
    # base.DelayedCall's constructor; confirm against twisted.internet.base.
    dc = base.DelayedCall(self.seconds() + when,
                          what, a, kw,
                          self.calls.remove,
                          lambda c: None,
                          self.seconds)
    self.calls.append(dc)
    self._sortCalls()
    return dc
def getDelayedCalls(self):
    """
    See L{twisted.internet.interfaces.IReactorTime.getDelayedCalls}
    """
    # Returns the live internal list, not a copy.
    return self.calls
def advance(self, amount):
    """
    Move time on this clock forward by the given amount and run whatever
    pending calls should be run.
    @type amount: C{float}
    @param amount: The number of seconds which to advance this clock's
    time.
    """
    self.rightNow += amount
    self._sortCalls()
    while self.calls and self.calls[0].getTime() <= self.seconds():
        call = self.calls.pop(0)
        call.called = 1
        call.func(*call.args, **call.kw)
        # Re-sort after each invocation: the callback may have scheduled
        # or rescheduled calls.
        self._sortCalls()
def pump(self, timings):
    """
    Advance incrementally by the given set of times.
    @type timings: iterable of C{float}
    """
    for amount in timings:
        self.advance(amount)
def deferLater(clock, delay, callable, *args, **kw):
    """
    Call the given function after a certain period of time has passed.
    @type clock: L{IReactorTime} provider
    @param clock: The object which will be used to schedule the delayed
        call.
    @type delay: C{float} or C{int}
    @param delay: The number of seconds to wait before calling the function.
    @param callable: The object to call after the delay.
    @param *args: The positional arguments to pass to C{callable}.
    @param **kw: The keyword arguments to pass to C{callable}.
    @rtype: L{defer.Deferred}
    @return: A deferred that fires with the result of the callable when the
        specified time has elapsed.
    """
    def cancelScheduledCall(passthrough):
        # Cancelling the returned Deferred also cancels the pending
        # delayed call (closed over below).
        delayedCall.cancel()
    deferred = defer.Deferred(cancelScheduledCall)
    deferred.addCallback(lambda ignored: callable(*args, **kw))
    delayedCall = clock.callLater(delay, deferred.callback, None)
    return deferred
__all__ = [
'LoopingCall',
'Clock',
'SchedulerStopped', 'Cooperator', 'coiterate',
'deferLater',
]
|
Clock
|
identifier_name
|
task.py
|
# -*- test-case-name: twisted.test.test_task,twisted.test.test_cooperator -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Scheduling utility methods and classes.
@author: Jp Calderone
"""
__metaclass__ = type
import time
from zope.interface import implements
from twisted.python import reflect
from twisted.python.failure import Failure
from twisted.internet import base, defer
from twisted.internet.interfaces import IReactorTime
class LoopingCall:
"""Call a function repeatedly.
If C{f} returns a deferred, rescheduling will not take place until the
deferred has fired. The result value is ignored.
@ivar f: The function to call.
@ivar a: A tuple of arguments to pass the function.
@ivar kw: A dictionary of keyword arguments to pass to the function.
@ivar clock: A provider of
L{twisted.internet.interfaces.IReactorTime}. The default is
L{twisted.internet.reactor}. Feel free to set this to
something else, but it probably ought to be set *before*
calling L{start}.
@type running: C{bool}
@ivar running: A flag which is C{True} while C{f} is scheduled to be called
(or is currently being called). It is set to C{True} when L{start} is
called and set to C{False} when L{stop} is called or if C{f} raises an
exception. In either case, it will be C{False} by the time the
C{Deferred} returned by L{start} fires its callback or errback.
@type _expectNextCallAt: C{float}
@ivar _expectNextCallAt: The time at which this instance most recently
scheduled itself to run.
@type _realLastTime: C{float}
@ivar _realLastTime: When counting skips, the time at which the skip
counter was last invoked.
@type _runAtStart: C{bool}
@ivar _runAtStart: A flag indicating whether the 'now' argument was passed
to L{LoopingCall.start}.
"""
# Class-level defaults; per-instance state is established by start().
call = None                 # the pending IDelayedCall, when scheduled
running = False
deferred = None             # fired when the loop stops or f fails
interval = None
_expectNextCallAt = 0.0
_runAtStart = False
starttime = None
def __init__(self, f, *a, **kw):
    self.f = f
    self.a = a
    self.kw = kw
    # Imported lazily so importing this module does not require a reactor.
    from twisted.internet import reactor
    self.clock = reactor
def withCount(cls, countCallable):
    """
    An alternate constructor for L{LoopingCall} that makes available the
    number of calls which should have occurred since it was last invoked.
    Note that this number is an C{int} value; It represents the discrete
    number of calls that should have been made.  For example, if you are
    using a looping call to display an animation with discrete frames, this
    number would be the number of frames to advance.
    The count is normally 1, but can be higher. For example, if the reactor
    is blocked and takes too long to invoke the L{LoopingCall}, a Deferred
    returned from a previous call is not fired before an interval has
    elapsed, or if the callable itself blocks for longer than an interval,
    preventing I{itself} from being called.
    @param countCallable: A callable that will be invoked each time the
        resulting LoopingCall is run, with an integer specifying the number
        of calls that should have been invoked.
    @type countCallable: 1-argument callable which takes an C{int}
    @return: An instance of L{LoopingCall} with call counting enabled,
        which provides the count as the first positional argument.
    @rtype: L{LoopingCall}
    @since: 9.0
    """
    def counter():
        now = self.clock.seconds()
        lastTime = self._realLastTime
        if lastTime is None:
            # First invocation: measure from the loop's start time.
            lastTime = self.starttime
            if self._runAtStart:
                # start(now=True) already ran once immediately; pretend
                # that run happened one interval earlier so it counts.
                lastTime -= self.interval
        self._realLastTime = now
        lastInterval = self._intervalOf(lastTime)
        thisInterval = self._intervalOf(now)
        count = thisInterval - lastInterval
        return countCallable(count)
    # 'self' is bound after cls(counter): counter closes over this instance.
    self = cls(counter)
    self._realLastTime = None
    return self
withCount = classmethod(withCount)
def _intervalOf(self, t):
    """
    Determine the number of intervals passed as of the given point in
    time.
    @param t: The specified time (from the start of the L{LoopingCall}) to
        be measured in intervals
    @return: The C{int} number of intervals which have passed as of the
        given point in time.
    """
    elapsedTime = t - self.starttime
    intervalNum = int(elapsedTime / self.interval)
    return intervalNum
def start(self, interval, now=True):
    """
    Start running function every interval seconds.
    @param interval: The number of seconds between calls.  May be
    less than one.  Precision will depend on the underlying
    platform, the available hardware, and the load on the system.
    @param now: If True, run this call right now.  Otherwise, wait
    until the interval has elapsed before beginning.
    @return: A Deferred whose callback will be invoked with
    C{self} when C{self.stop} is called, or whose errback will be
    invoked when the function raises an exception or returned a
    deferred that has its errback invoked.
    """
    assert not self.running, ("Tried to start an already running "
                              "LoopingCall.")
    if interval < 0:
        # NOTE: Python 2 raise syntax; this module predates py3 support.
        raise ValueError, "interval must be >= 0"
    self.running = True
    d = self.deferred = defer.Deferred()
    self.starttime = self.clock.seconds()
    self._expectNextCallAt = self.starttime
    self.interval = interval
    self._runAtStart = now
    if now:
        self()
    else:
        self._reschedule()
    return d
def stop(self):
    """Stop running function.
    """
    assert self.running, ("Tried to stop a LoopingCall that was "
                          "not running.")
    self.running = False
    if self.call is not None:
        self.call.cancel()
        self.call = None
        # Only fire the completion Deferred when a call was pending;
        # otherwise __call__'s callback fires it.
        d, self.deferred = self.deferred, None
        d.callback(self)
def reset(self):
    """
    Skip the next iteration and reset the timer.
    @since: 11.1
    """
    assert self.running, ("Tried to reset a LoopingCall that was "
                          "not running.")
    if self.call is not None:
        self.call.cancel()
        self.call = None
        self._expectNextCallAt = self.clock.seconds()
        self._reschedule()
def __call__(self):
    # Invoked by the clock each time the loop fires.
    def cb(result):
        if self.running:
            self._reschedule()
        else:
            # stop() was called while f (or its Deferred) was running.
            d, self.deferred = self.deferred, None
            d.callback(self)
    def eb(failure):
        self.running = False
        d, self.deferred = self.deferred, None
        d.errback(failure)
    self.call = None
    d = defer.maybeDeferred(self.f, *self.a, **self.kw)
    d.addCallback(cb)
    d.addErrback(eb)
|
Schedule the next iteration of this looping call.
"""
if self.interval == 0:
self.call = self.clock.callLater(0, self)
return
currentTime = self.clock.seconds()
# Find how long is left until the interval comes around again.
untilNextTime = (self._expectNextCallAt - currentTime) % self.interval
# Make sure it is in the future, in case more than one interval worth
# of time passed since the previous call was made.
nextTime = max(
self._expectNextCallAt + self.interval, currentTime + untilNextTime)
# If the interval falls on the current time exactly, skip it and
# schedule the call for the next interval.
if nextTime == currentTime:
nextTime += self.interval
self._expectNextCallAt = nextTime
self.call = self.clock.callLater(nextTime - currentTime, self)
def __repr__(self):
    # func_name/im_class are Python 2 function attributes.
    if hasattr(self.f, 'func_name'):
        func = self.f.func_name
        if hasattr(self.f, 'im_class'):
            func = self.f.im_class.__name__ + '.' + func
    else:
        func = reflect.safe_repr(self.f)
    return 'LoopingCall<%r>(%s, *%s, **%s)' % (
        self.interval, func, reflect.safe_repr(self.a),
        reflect.safe_repr(self.kw))
# Exception hierarchy for scheduler/task state errors.  SchedulerError is
# the abstract root; SchedulerStopped concerns the Cooperator as a whole,
# while TaskFinished and its subclasses (TaskDone, TaskStopped, TaskFailed)
# describe why an individual CooperativeTask can no longer run.  NotPaused
# guards against unbalanced resume() calls.
class SchedulerError(Exception):
    """
    The operation could not be completed because the scheduler or one of its
    tasks was in an invalid state.  This exception should not be raised
    directly, but is a superclass of various scheduler-state-related
    exceptions.
    """
class SchedulerStopped(SchedulerError):
    """
    The operation could not complete because the scheduler was stopped in
    progress or was already stopped.
    """
class TaskFinished(SchedulerError):
    """
    The operation could not complete because the task was already completed,
    stopped, encountered an error or otherwise permanently stopped running.
    """
class TaskDone(TaskFinished):
    """
    The operation could not complete because the task was already completed.
    """
class TaskStopped(TaskFinished):
    """
    The operation could not complete because the task was stopped.
    """
class TaskFailed(TaskFinished):
    """
    The operation could not complete because the task died with an unhandled
    error.
    """
class NotPaused(SchedulerError):
    """
    This exception is raised when a task is resumed which was not previously
    paused.
    """
class _Timer(object):
MAX_SLICE = 0.01
def __init__(self):
self.end = time.time() + self.MAX_SLICE
def __call__(self):
return time.time() >= self.end
# Delay used when scheduling a Cooperator tick on the real reactor: tiny
# but non-zero, so each tick is a distinct reactor event.
_EPSILON = 0.00000001
def _defaultScheduler(x):
    # Imported lazily so importing this module does not require a reactor.
    from twisted.internet import reactor
    return reactor.callLater(_EPSILON, x)
class CooperativeTask(object):
    """
    A L{CooperativeTask} is a task object inside a L{Cooperator}, which can be
    paused, resumed, and stopped.  It can also have its completion (or
    termination) monitored.
    @see: L{CooperativeTask.cooperate}
    @ivar _iterator: the iterator to iterate when this L{CooperativeTask} is
        asked to do work.
    @ivar _cooperator: the L{Cooperator} that this L{CooperativeTask}
        participates in, which is used to re-insert it upon resume.
    @ivar _deferreds: the list of L{defer.Deferred}s to fire when this task
        completes, fails, or finishes.
    @type _deferreds: L{list}
    @type _cooperator: L{Cooperator}
    @ivar _pauseCount: the number of times that this L{CooperativeTask} has
        been paused; if 0, it is running.
    @type _pauseCount: L{int}
    @ivar _completionState: The completion-state of this L{CooperativeTask}.
        C{None} if the task is not yet completed, an instance of L{TaskStopped}
        if C{stop} was called to stop this task early, of L{TaskFailed} if the
        application code in the iterator raised an exception which caused it to
        terminate, and of L{TaskDone} if it terminated normally via raising
        L{StopIteration}.
    @type _completionState: L{TaskFinished}
    """
    def __init__(self, iterator, cooperator):
        """
        A private constructor: to create a new L{CooperativeTask}, see
        L{Cooperator.cooperate}.
        """
        self._iterator = iterator
        self._cooperator = cooperator
        self._deferreds = []
        self._pauseCount = 0
        self._completionState = None
        self._completionResult = None
        # Registering may complete the task immediately if the cooperator
        # has already been stopped.
        cooperator._addTask(self)
    def whenDone(self):
        """
        Get a L{defer.Deferred} notification of when this task is complete.
        @return: a L{defer.Deferred} that fires with the C{iterator} that this
            L{CooperativeTask} was created with when the iterator has been
            exhausted (i.e. its C{next} method has raised L{StopIteration}), or
            fails with the exception raised by C{next} if it raises some other
            exception.
        @rtype: L{defer.Deferred}
        """
        d = defer.Deferred()
        if self._completionState is None:
            # Still running: remember the Deferred for _completeWith.
            self._deferreds.append(d)
        else:
            # Already finished: fire immediately with the stored result.
            d.callback(self._completionResult)
        return d
    def pause(self):
        """
        Pause this L{CooperativeTask}.  Stop doing work until
        L{CooperativeTask.resume} is called.  If C{pause} is called more than
        once, C{resume} must be called an equal number of times to resume this
        task.
        @raise TaskFinished: if this task has already finished or completed.
        """
        self._checkFinish()
        self._pauseCount += 1
        if self._pauseCount == 1:
            # Transition running -> paused: leave the cooperator's queue.
            self._cooperator._removeTask(self)
    def resume(self):
        """
        Resume processing of a paused L{CooperativeTask}.
        @raise NotPaused: if this L{CooperativeTask} is not paused.
        """
        if self._pauseCount == 0:
            raise NotPaused()
        self._pauseCount -= 1
        if self._pauseCount == 0 and self._completionState is None:
            # Transition paused -> running: rejoin the cooperator's queue.
            self._cooperator._addTask(self)
    def _completeWith(self, completionState, deferredResult):
        """
        @param completionState: a L{TaskFinished} exception or a subclass
            thereof, indicating what exception should be raised when subsequent
            operations are performed.
        @param deferredResult: the result to fire all the deferreds with.
        """
        self._completionState = completionState
        self._completionResult = deferredResult
        if not self._pauseCount:
            self._cooperator._removeTask(self)
        # The Deferreds need to be invoked after all this is completed, because
        # a Deferred may want to manipulate other tasks in a Cooperator.  For
        # example, if you call "stop()" on a cooperator in a callback on a
        # Deferred returned from whenDone(), this CooperativeTask must be gone
        # from the Cooperator by that point so that _completeWith is not
        # invoked reentrantly; that would cause these Deferreds to blow up with
        # an AlreadyCalledError, or the _removeTask to fail with a ValueError.
        for d in self._deferreds:
            d.callback(deferredResult)
    def stop(self):
        """
        Stop further processing of this task.
        @raise TaskFinished: if this L{CooperativeTask} has previously
            completed, via C{stop}, completion, or failure.
        """
        self._checkFinish()
        self._completeWith(TaskStopped(), Failure(TaskStopped()))
    def _checkFinish(self):
        """
        If this task has been stopped, raise the appropriate subclass of
        L{TaskFinished}.
        """
        if self._completionState is not None:
            raise self._completionState
    def _oneWorkUnit(self):
        """
        Perform one unit of work for this task, retrieving one item from its
        iterator, stopping if there are no further items in the iterator, and
        pausing if the result was a L{defer.Deferred}.
        """
        try:
            result = self._iterator.next()
        except StopIteration:
            self._completeWith(TaskDone(), self._iterator)
        except:
            # Bare except is deliberate: any application error from the
            # iterator becomes the task's failure result.
            self._completeWith(TaskFailed(), Failure())
        else:
            if isinstance(result, defer.Deferred):
                # Pause first, then resume from the Deferred's callback.
                self.pause()
                def failLater(f):
                    self._completeWith(TaskFailed(), f)
                result.addCallbacks(lambda result: self.resume(),
                                    failLater)
class Cooperator(object):
"""
Cooperative task scheduler.
"""
def __init__(self,
terminationPredicateFactory=_Timer,
scheduler=_defaultScheduler,
started=True):
"""
Create a scheduler-like object to which iterators may be added.
@param terminationPredicateFactory: A no-argument callable which will
be invoked at the beginning of each step and should return a
no-argument callable which will return True when the step should be
terminated. The default factory is time-based and allows iterators to
run for 1/100th of a second at a time.
@param scheduler: A one-argument callable which takes a no-argument
callable and should invoke it at some future point. This will be used
to schedule each step of this Cooperator.
@param started: A boolean which indicates whether iterators should be
stepped as soon as they are added, or if they will be queued up until
L{Cooperator.start} is called.
"""
self._tasks = []
self._metarator = iter(())
self._terminationPredicateFactory = terminationPredicateFactory
self._scheduler = scheduler
self._delayedCall = None
self._stopped = False
self._started = started
def coiterate(self, iterator, doneDeferred=None):
"""
Add an iterator to the list of iterators this L{Cooperator} is
currently running.
@param doneDeferred: If specified, this will be the Deferred used as
the completion deferred. It is suggested that you use the default,
which creates a new Deferred for you.
@return: a Deferred that will fire when the iterator finishes.
"""
if doneDeferred is None:
doneDeferred = defer.Deferred()
CooperativeTask(iterator, self).whenDone().chainDeferred(doneDeferred)
return doneDeferred
def cooperate(self, iterator):
"""
Start running the given iterator as a long-running cooperative task, by
calling next() on it as a periodic timed event.
@param iterator: the iterator to invoke.
@return: a L{CooperativeTask} object representing this task.
"""
return CooperativeTask(iterator, self)
def _addTask(self, task):
"""
Add a L{CooperativeTask} object to this L{Cooperator}.
"""
if self._stopped:
self._tasks.append(task) # XXX silly, I know, but _completeWith
# does the inverse
task._completeWith(SchedulerStopped(), Failure(SchedulerStopped()))
else:
self._tasks.append(task)
self._reschedule()
def _removeTask(self, task):
"""
Remove a L{CooperativeTask} from this L{Cooperator}.
"""
self._tasks.remove(task)
# If no work left to do, cancel the delayed call:
if not self._tasks and self._delayedCall:
self._delayedCall.cancel()
self._delayedCall = None
def _tasksWhileNotStopped(self):
"""
Yield all L{CooperativeTask} objects in a loop as long as this
L{Cooperator}'s termination condition has not been met.
"""
terminator = self._terminationPredicateFactory()
while self._tasks:
for t in self._metarator:
yield t
if terminator():
return
self._metarator = iter(self._tasks)
def _tick(self):
"""
Run one scheduler tick.
"""
self._delayedCall = None
for taskObj in self._tasksWhileNotStopped():
taskObj._oneWorkUnit()
self._reschedule()
_mustScheduleOnStart = False
def _reschedule(self):
if not self._started:
self._mustScheduleOnStart = True
return
if self._delayedCall is None and self._tasks:
self._delayedCall = self._scheduler(self._tick)
def start(self):
"""
Begin scheduling steps.
"""
self._stopped = False
self._started = True
if self._mustScheduleOnStart:
del self._mustScheduleOnStart
self._reschedule()
def stop(self):
"""
Stop scheduling steps. Errback the completion Deferreds of all
iterators which have been added and forget about them.
"""
self._stopped = True
for taskObj in self._tasks:
taskObj._completeWith(SchedulerStopped(),
Failure(SchedulerStopped()))
self._tasks = []
if self._delayedCall is not None:
self._delayedCall.cancel()
self._delayedCall = None
_theCooperator = Cooperator()
def coiterate(iterator):
"""
Cooperatively iterate over the given iterator, dividing runtime between it
and all other iterators which have been passed to this function and not yet
exhausted.
"""
return _theCooperator.coiterate(iterator)
def cooperate(iterator):
"""
Start running the given iterator as a long-running cooperative task, by
calling next() on it as a periodic timed event.
@param iterator: the iterator to invoke.
@return: a L{CooperativeTask} object representing this task.
"""
return _theCooperator.cooperate(iterator)
class Clock:
"""
Provide a deterministic, easily-controlled implementation of
L{IReactorTime.callLater}. This is commonly useful for writing
deterministic unit tests for code which schedules events using this API.
"""
implements(IReactorTime)
rightNow = 0.0
def __init__(self):
self.calls = []
def seconds(self):
"""
Pretend to be time.time(). This is used internally when an operation
such as L{IDelayedCall.reset} needs to determine a a time value
relative to the current time.
@rtype: C{float}
@return: The time which should be considered the current time.
"""
return self.rightNow
def _sortCalls(self):
"""
Sort the pending calls according to the time they are scheduled.
"""
self.calls.sort(lambda a, b: cmp(a.getTime(), b.getTime()))
def callLater(self, when, what, *a, **kw):
"""
See L{twisted.internet.interfaces.IReactorTime.callLater}.
"""
dc = base.DelayedCall(self.seconds() + when,
what, a, kw,
self.calls.remove,
lambda c: None,
self.seconds)
self.calls.append(dc)
self._sortCalls()
return dc
def getDelayedCalls(self):
"""
See L{twisted.internet.interfaces.IReactorTime.getDelayedCalls}
"""
return self.calls
def advance(self, amount):
"""
Move time on this clock forward by the given amount and run whatever
pending calls should be run.
@type amount: C{float}
@param amount: The number of seconds which to advance this clock's
time.
"""
self.rightNow += amount
self._sortCalls()
while self.calls and self.calls[0].getTime() <= self.seconds():
call = self.calls.pop(0)
call.called = 1
call.func(*call.args, **call.kw)
self._sortCalls()
def pump(self, timings):
"""
Advance incrementally by the given set of times.
@type timings: iterable of C{float}
"""
for amount in timings:
self.advance(amount)
def deferLater(clock, delay, callable, *args, **kw):
"""
Call the given function after a certain period of time has passed.
@type clock: L{IReactorTime} provider
@param clock: The object which will be used to schedule the delayed
call.
@type delay: C{float} or C{int}
@param delay: The number of seconds to wait before calling the function.
@param callable: The object to call after the delay.
@param *args: The positional arguments to pass to C{callable}.
@param **kw: The keyword arguments to pass to C{callable}.
@rtype: L{defer.Deferred}
@return: A deferred that fires with the result of the callable when the
specified time has elapsed.
"""
def deferLaterCancel(deferred):
delayedCall.cancel()
d = defer.Deferred(deferLaterCancel)
d.addCallback(lambda ignored: callable(*args, **kw))
delayedCall = clock.callLater(delay, d.callback, None)
return d
__all__ = [
'LoopingCall',
'Clock',
'SchedulerStopped', 'Cooperator', 'coiterate',
'deferLater',
]
|
def _reschedule(self):
"""
|
random_line_split
|
task.py
|
# -*- test-case-name: twisted.test.test_task,twisted.test.test_cooperator -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Scheduling utility methods and classes.
@author: Jp Calderone
"""
__metaclass__ = type
import time
from zope.interface import implements
from twisted.python import reflect
from twisted.python.failure import Failure
from twisted.internet import base, defer
from twisted.internet.interfaces import IReactorTime
class LoopingCall:
"""Call a function repeatedly.
If C{f} returns a deferred, rescheduling will not take place until the
deferred has fired. The result value is ignored.
@ivar f: The function to call.
@ivar a: A tuple of arguments to pass the function.
@ivar kw: A dictionary of keyword arguments to pass to the function.
@ivar clock: A provider of
L{twisted.internet.interfaces.IReactorTime}. The default is
L{twisted.internet.reactor}. Feel free to set this to
something else, but it probably ought to be set *before*
calling L{start}.
@type running: C{bool}
@ivar running: A flag which is C{True} while C{f} is scheduled to be called
(or is currently being called). It is set to C{True} when L{start} is
called and set to C{False} when L{stop} is called or if C{f} raises an
exception. In either case, it will be C{False} by the time the
C{Deferred} returned by L{start} fires its callback or errback.
@type _expectNextCallAt: C{float}
@ivar _expectNextCallAt: The time at which this instance most recently
scheduled itself to run.
@type _realLastTime: C{float}
@ivar _realLastTime: When counting skips, the time at which the skip
counter was last invoked.
@type _runAtStart: C{bool}
@ivar _runAtStart: A flag indicating whether the 'now' argument was passed
to L{LoopingCall.start}.
"""
call = None
running = False
deferred = None
interval = None
_expectNextCallAt = 0.0
_runAtStart = False
starttime = None
def __init__(self, f, *a, **kw):
self.f = f
self.a = a
self.kw = kw
from twisted.internet import reactor
self.clock = reactor
def withCount(cls, countCallable):
"""
An alternate constructor for L{LoopingCall} that makes available the
number of calls which should have occurred since it was last invoked.
Note that this number is an C{int} value; It represents the discrete
number of calls that should have been made. For example, if you are
using a looping call to display an animation with discrete frames, this
number would be the number of frames to advance.
The count is normally 1, but can be higher. For example, if the reactor
is blocked and takes too long to invoke the L{LoopingCall}, a Deferred
returned from a previous call is not fired before an interval has
elapsed, or if the callable itself blocks for longer than an interval,
preventing I{itself} from being called.
@param countCallable: A callable that will be invoked each time the
resulting LoopingCall is run, with an integer specifying the number
of calls that should have been invoked.
@type countCallable: 1-argument callable which takes an C{int}
@return: An instance of L{LoopingCall} with call counting enabled,
which provides the count as the first positional argument.
@rtype: L{LoopingCall}
@since: 9.0
"""
def counter():
now = self.clock.seconds()
lastTime = self._realLastTime
if lastTime is None:
lastTime = self.starttime
if self._runAtStart:
lastTime -= self.interval
self._realLastTime = now
lastInterval = self._intervalOf(lastTime)
thisInterval = self._intervalOf(now)
count = thisInterval - lastInterval
return countCallable(count)
self = cls(counter)
self._realLastTime = None
return self
withCount = classmethod(withCount)
def _intervalOf(self, t):
"""
Determine the number of intervals passed as of the given point in
time.
@param t: The specified time (from the start of the L{LoopingCall}) to
be measured in intervals
@return: The C{int} number of intervals which have passed as of the
given point in time.
"""
elapsedTime = t - self.starttime
intervalNum = int(elapsedTime / self.interval)
return intervalNum
def start(self, interval, now=True):
"""
Start running function every interval seconds.
@param interval: The number of seconds between calls. May be
less than one. Precision will depend on the underlying
platform, the available hardware, and the load on the system.
@param now: If True, run this call right now. Otherwise, wait
until the interval has elapsed before beginning.
@return: A Deferred whose callback will be invoked with
C{self} when C{self.stop} is called, or whose errback will be
invoked when the function raises an exception or returned a
deferred that has its errback invoked.
"""
assert not self.running, ("Tried to start an already running "
"LoopingCall.")
if interval < 0:
raise ValueError, "interval must be >= 0"
self.running = True
d = self.deferred = defer.Deferred()
self.starttime = self.clock.seconds()
self._expectNextCallAt = self.starttime
self.interval = interval
self._runAtStart = now
if now:
self()
else:
self._reschedule()
return d
def stop(self):
"""Stop running function.
"""
assert self.running, ("Tried to stop a LoopingCall that was "
"not running.")
self.running = False
if self.call is not None:
self.call.cancel()
self.call = None
d, self.deferred = self.deferred, None
d.callback(self)
def reset(self):
"""
Skip the next iteration and reset the timer.
@since: 11.1
"""
assert self.running, ("Tried to reset a LoopingCall that was "
"not running.")
if self.call is not None:
self.call.cancel()
self.call = None
self._expectNextCallAt = self.clock.seconds()
self._reschedule()
def __call__(self):
def cb(result):
if self.running:
self._reschedule()
else:
d, self.deferred = self.deferred, None
d.callback(self)
def eb(failure):
self.running = False
d, self.deferred = self.deferred, None
d.errback(failure)
self.call = None
d = defer.maybeDeferred(self.f, *self.a, **self.kw)
d.addCallback(cb)
d.addErrback(eb)
def _reschedule(self):
"""
Schedule the next iteration of this looping call.
"""
if self.interval == 0:
self.call = self.clock.callLater(0, self)
return
currentTime = self.clock.seconds()
# Find how long is left until the interval comes around again.
untilNextTime = (self._expectNextCallAt - currentTime) % self.interval
# Make sure it is in the future, in case more than one interval worth
# of time passed since the previous call was made.
nextTime = max(
self._expectNextCallAt + self.interval, currentTime + untilNextTime)
# If the interval falls on the current time exactly, skip it and
# schedule the call for the next interval.
if nextTime == currentTime:
nextTime += self.interval
self._expectNextCallAt = nextTime
self.call = self.clock.callLater(nextTime - currentTime, self)
def __repr__(self):
if hasattr(self.f, 'func_name'):
func = self.f.func_name
if hasattr(self.f, 'im_class'):
func = self.f.im_class.__name__ + '.' + func
else:
func = reflect.safe_repr(self.f)
return 'LoopingCall<%r>(%s, *%s, **%s)' % (
self.interval, func, reflect.safe_repr(self.a),
reflect.safe_repr(self.kw))
class SchedulerError(Exception):
"""
The operation could not be completed because the scheduler or one of its
tasks was in an invalid state. This exception should not be raised
directly, but is a superclass of various scheduler-state-related
exceptions.
"""
class SchedulerStopped(SchedulerError):
|
class TaskFinished(SchedulerError):
"""
The operation could not complete because the task was already completed,
stopped, encountered an error or otherwise permanently stopped running.
"""
class TaskDone(TaskFinished):
"""
The operation could not complete because the task was already completed.
"""
class TaskStopped(TaskFinished):
"""
The operation could not complete because the task was stopped.
"""
class TaskFailed(TaskFinished):
"""
The operation could not complete because the task died with an unhandled
error.
"""
class NotPaused(SchedulerError):
"""
This exception is raised when a task is resumed which was not previously
paused.
"""
class _Timer(object):
MAX_SLICE = 0.01
def __init__(self):
self.end = time.time() + self.MAX_SLICE
def __call__(self):
return time.time() >= self.end
_EPSILON = 0.00000001
def _defaultScheduler(x):
from twisted.internet import reactor
return reactor.callLater(_EPSILON, x)
class CooperativeTask(object):
"""
A L{CooperativeTask} is a task object inside a L{Cooperator}, which can be
paused, resumed, and stopped. It can also have its completion (or
termination) monitored.
@see: L{CooperativeTask.cooperate}
@ivar _iterator: the iterator to iterate when this L{CooperativeTask} is
asked to do work.
@ivar _cooperator: the L{Cooperator} that this L{CooperativeTask}
participates in, which is used to re-insert it upon resume.
@ivar _deferreds: the list of L{defer.Deferred}s to fire when this task
completes, fails, or finishes.
@type _deferreds: L{list}
@type _cooperator: L{Cooperator}
@ivar _pauseCount: the number of times that this L{CooperativeTask} has
been paused; if 0, it is running.
@type _pauseCount: L{int}
@ivar _completionState: The completion-state of this L{CooperativeTask}.
C{None} if the task is not yet completed, an instance of L{TaskStopped}
if C{stop} was called to stop this task early, of L{TaskFailed} if the
application code in the iterator raised an exception which caused it to
terminate, and of L{TaskDone} if it terminated normally via raising
L{StopIteration}.
@type _completionState: L{TaskFinished}
"""
def __init__(self, iterator, cooperator):
"""
A private constructor: to create a new L{CooperativeTask}, see
L{Cooperator.cooperate}.
"""
self._iterator = iterator
self._cooperator = cooperator
self._deferreds = []
self._pauseCount = 0
self._completionState = None
self._completionResult = None
cooperator._addTask(self)
def whenDone(self):
"""
Get a L{defer.Deferred} notification of when this task is complete.
@return: a L{defer.Deferred} that fires with the C{iterator} that this
L{CooperativeTask} was created with when the iterator has been
exhausted (i.e. its C{next} method has raised L{StopIteration}), or
fails with the exception raised by C{next} if it raises some other
exception.
@rtype: L{defer.Deferred}
"""
d = defer.Deferred()
if self._completionState is None:
self._deferreds.append(d)
else:
d.callback(self._completionResult)
return d
def pause(self):
"""
Pause this L{CooperativeTask}. Stop doing work until
L{CooperativeTask.resume} is called. If C{pause} is called more than
once, C{resume} must be called an equal number of times to resume this
task.
@raise TaskFinished: if this task has already finished or completed.
"""
self._checkFinish()
self._pauseCount += 1
if self._pauseCount == 1:
self._cooperator._removeTask(self)
def resume(self):
"""
Resume processing of a paused L{CooperativeTask}.
@raise NotPaused: if this L{CooperativeTask} is not paused.
"""
if self._pauseCount == 0:
raise NotPaused()
self._pauseCount -= 1
if self._pauseCount == 0 and self._completionState is None:
self._cooperator._addTask(self)
def _completeWith(self, completionState, deferredResult):
"""
@param completionState: a L{TaskFinished} exception or a subclass
thereof, indicating what exception should be raised when subsequent
operations are performed.
@param deferredResult: the result to fire all the deferreds with.
"""
self._completionState = completionState
self._completionResult = deferredResult
if not self._pauseCount:
self._cooperator._removeTask(self)
# The Deferreds need to be invoked after all this is completed, because
# a Deferred may want to manipulate other tasks in a Cooperator. For
# example, if you call "stop()" on a cooperator in a callback on a
# Deferred returned from whenDone(), this CooperativeTask must be gone
# from the Cooperator by that point so that _completeWith is not
# invoked reentrantly; that would cause these Deferreds to blow up with
# an AlreadyCalledError, or the _removeTask to fail with a ValueError.
for d in self._deferreds:
d.callback(deferredResult)
def stop(self):
"""
Stop further processing of this task.
@raise TaskFinished: if this L{CooperativeTask} has previously
completed, via C{stop}, completion, or failure.
"""
self._checkFinish()
self._completeWith(TaskStopped(), Failure(TaskStopped()))
def _checkFinish(self):
"""
If this task has been stopped, raise the appropriate subclass of
L{TaskFinished}.
"""
if self._completionState is not None:
raise self._completionState
def _oneWorkUnit(self):
"""
Perform one unit of work for this task, retrieving one item from its
iterator, stopping if there are no further items in the iterator, and
pausing if the result was a L{defer.Deferred}.
"""
try:
result = self._iterator.next()
except StopIteration:
self._completeWith(TaskDone(), self._iterator)
except:
self._completeWith(TaskFailed(), Failure())
else:
if isinstance(result, defer.Deferred):
self.pause()
def failLater(f):
self._completeWith(TaskFailed(), f)
result.addCallbacks(lambda result: self.resume(),
failLater)
class Cooperator(object):
"""
Cooperative task scheduler.
"""
def __init__(self,
terminationPredicateFactory=_Timer,
scheduler=_defaultScheduler,
started=True):
"""
Create a scheduler-like object to which iterators may be added.
@param terminationPredicateFactory: A no-argument callable which will
be invoked at the beginning of each step and should return a
no-argument callable which will return True when the step should be
terminated. The default factory is time-based and allows iterators to
run for 1/100th of a second at a time.
@param scheduler: A one-argument callable which takes a no-argument
callable and should invoke it at some future point. This will be used
to schedule each step of this Cooperator.
@param started: A boolean which indicates whether iterators should be
stepped as soon as they are added, or if they will be queued up until
L{Cooperator.start} is called.
"""
self._tasks = []
self._metarator = iter(())
self._terminationPredicateFactory = terminationPredicateFactory
self._scheduler = scheduler
self._delayedCall = None
self._stopped = False
self._started = started
def coiterate(self, iterator, doneDeferred=None):
"""
Add an iterator to the list of iterators this L{Cooperator} is
currently running.
@param doneDeferred: If specified, this will be the Deferred used as
the completion deferred. It is suggested that you use the default,
which creates a new Deferred for you.
@return: a Deferred that will fire when the iterator finishes.
"""
if doneDeferred is None:
doneDeferred = defer.Deferred()
CooperativeTask(iterator, self).whenDone().chainDeferred(doneDeferred)
return doneDeferred
def cooperate(self, iterator):
"""
Start running the given iterator as a long-running cooperative task, by
calling next() on it as a periodic timed event.
@param iterator: the iterator to invoke.
@return: a L{CooperativeTask} object representing this task.
"""
return CooperativeTask(iterator, self)
def _addTask(self, task):
"""
Add a L{CooperativeTask} object to this L{Cooperator}.
"""
if self._stopped:
self._tasks.append(task) # XXX silly, I know, but _completeWith
# does the inverse
task._completeWith(SchedulerStopped(), Failure(SchedulerStopped()))
else:
self._tasks.append(task)
self._reschedule()
def _removeTask(self, task):
"""
Remove a L{CooperativeTask} from this L{Cooperator}.
"""
self._tasks.remove(task)
# If no work left to do, cancel the delayed call:
if not self._tasks and self._delayedCall:
self._delayedCall.cancel()
self._delayedCall = None
def _tasksWhileNotStopped(self):
"""
Yield all L{CooperativeTask} objects in a loop as long as this
L{Cooperator}'s termination condition has not been met.
"""
terminator = self._terminationPredicateFactory()
while self._tasks:
for t in self._metarator:
yield t
if terminator():
return
self._metarator = iter(self._tasks)
def _tick(self):
"""
Run one scheduler tick.
"""
self._delayedCall = None
for taskObj in self._tasksWhileNotStopped():
taskObj._oneWorkUnit()
self._reschedule()
_mustScheduleOnStart = False
def _reschedule(self):
if not self._started:
self._mustScheduleOnStart = True
return
if self._delayedCall is None and self._tasks:
self._delayedCall = self._scheduler(self._tick)
def start(self):
"""
Begin scheduling steps.
"""
self._stopped = False
self._started = True
if self._mustScheduleOnStart:
del self._mustScheduleOnStart
self._reschedule()
def stop(self):
"""
Stop scheduling steps. Errback the completion Deferreds of all
iterators which have been added and forget about them.
"""
self._stopped = True
for taskObj in self._tasks:
taskObj._completeWith(SchedulerStopped(),
Failure(SchedulerStopped()))
self._tasks = []
if self._delayedCall is not None:
self._delayedCall.cancel()
self._delayedCall = None
_theCooperator = Cooperator()
def coiterate(iterator):
"""
Cooperatively iterate over the given iterator, dividing runtime between it
and all other iterators which have been passed to this function and not yet
exhausted.
"""
return _theCooperator.coiterate(iterator)
def cooperate(iterator):
"""
Start running the given iterator as a long-running cooperative task, by
calling next() on it as a periodic timed event.
@param iterator: the iterator to invoke.
@return: a L{CooperativeTask} object representing this task.
"""
return _theCooperator.cooperate(iterator)
class Clock:
"""
Provide a deterministic, easily-controlled implementation of
L{IReactorTime.callLater}. This is commonly useful for writing
deterministic unit tests for code which schedules events using this API.
"""
implements(IReactorTime)
rightNow = 0.0
def __init__(self):
self.calls = []
def seconds(self):
"""
Pretend to be time.time(). This is used internally when an operation
such as L{IDelayedCall.reset} needs to determine a a time value
relative to the current time.
@rtype: C{float}
@return: The time which should be considered the current time.
"""
return self.rightNow
def _sortCalls(self):
"""
Sort the pending calls according to the time they are scheduled.
"""
self.calls.sort(lambda a, b: cmp(a.getTime(), b.getTime()))
def callLater(self, when, what, *a, **kw):
"""
See L{twisted.internet.interfaces.IReactorTime.callLater}.
"""
dc = base.DelayedCall(self.seconds() + when,
what, a, kw,
self.calls.remove,
lambda c: None,
self.seconds)
self.calls.append(dc)
self._sortCalls()
return dc
def getDelayedCalls(self):
"""
See L{twisted.internet.interfaces.IReactorTime.getDelayedCalls}
"""
return self.calls
def advance(self, amount):
"""
Move time on this clock forward by the given amount and run whatever
pending calls should be run.
@type amount: C{float}
@param amount: The number of seconds which to advance this clock's
time.
"""
self.rightNow += amount
self._sortCalls()
while self.calls and self.calls[0].getTime() <= self.seconds():
call = self.calls.pop(0)
call.called = 1
call.func(*call.args, **call.kw)
self._sortCalls()
def pump(self, timings):
"""
Advance incrementally by the given set of times.
@type timings: iterable of C{float}
"""
for amount in timings:
self.advance(amount)
def deferLater(clock, delay, callable, *args, **kw):
"""
Call the given function after a certain period of time has passed.
@type clock: L{IReactorTime} provider
@param clock: The object which will be used to schedule the delayed
call.
@type delay: C{float} or C{int}
@param delay: The number of seconds to wait before calling the function.
@param callable: The object to call after the delay.
@param *args: The positional arguments to pass to C{callable}.
@param **kw: The keyword arguments to pass to C{callable}.
@rtype: L{defer.Deferred}
@return: A deferred that fires with the result of the callable when the
specified time has elapsed.
"""
def deferLaterCancel(deferred):
delayedCall.cancel()
d = defer.Deferred(deferLaterCancel)
d.addCallback(lambda ignored: callable(*args, **kw))
delayedCall = clock.callLater(delay, d.callback, None)
return d
__all__ = [
'LoopingCall',
'Clock',
'SchedulerStopped', 'Cooperator', 'coiterate',
'deferLater',
]
|
"""
The operation could not complete because the scheduler was stopped in
progress or was already stopped.
"""
|
identifier_body
|
application_module.ts
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ApplicationInitStatus} from './application_init';
import {ApplicationRef} from './application_ref';
import {APP_ID_RANDOM_PROVIDER} from './application_tokens';
import {IterableDiffers, KeyValueDiffers, defaultIterableDiffers, defaultKeyValueDiffers} from './change_detection/change_detection';
import {Inject, Optional, SkipSelf} from './di/metadata';
import {LOCALE_ID} from './i18n/tokens';
import {Compiler} from './linker/compiler';
import {NgModule} from './metadata';
export function _iterableDiffersFactory() {
return defaultIterableDiffers;
}
export function _keyValueDiffersFactory() {
return defaultKeyValueDiffers;
}
export function _localeFactory(locale?: string): string {
return locale || 'en-US';
}
/**
* This module includes the providers of @angular/core that are needed
* to bootstrap components via `ApplicationRef`.
*
* @experimental
*/
@NgModule({
providers: [
ApplicationRef,
ApplicationInitStatus,
Compiler,
APP_ID_RANDOM_PROVIDER,
{provide: IterableDiffers, useFactory: _iterableDiffersFactory},
{provide: KeyValueDiffers, useFactory: _keyValueDiffersFactory},
{
provide: LOCALE_ID,
useFactory: _localeFactory,
deps: [[new Inject(LOCALE_ID), new Optional(), new SkipSelf()]]
},
]
})
export class ApplicationModule {
// Inject ApplicationRef to make it eager...
constructor(appRef: ApplicationRef)
|
}
|
{}
|
identifier_body
|
application_module.ts
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ApplicationInitStatus} from './application_init';
import {ApplicationRef} from './application_ref';
import {APP_ID_RANDOM_PROVIDER} from './application_tokens';
import {IterableDiffers, KeyValueDiffers, defaultIterableDiffers, defaultKeyValueDiffers} from './change_detection/change_detection';
import {Inject, Optional, SkipSelf} from './di/metadata';
import {LOCALE_ID} from './i18n/tokens';
import {Compiler} from './linker/compiler';
import {NgModule} from './metadata';
export function _iterableDiffersFactory() {
return defaultIterableDiffers;
}
export function _keyValueDiffersFactory() {
return defaultKeyValueDiffers;
}
export function _localeFactory(locale?: string): string {
return locale || 'en-US';
}
/**
* This module includes the providers of @angular/core that are needed
* to bootstrap components via `ApplicationRef`.
*
* @experimental
*/
@NgModule({
providers: [
ApplicationRef,
ApplicationInitStatus,
Compiler,
APP_ID_RANDOM_PROVIDER,
{provide: IterableDiffers, useFactory: _iterableDiffersFactory},
{provide: KeyValueDiffers, useFactory: _keyValueDiffersFactory},
{
provide: LOCALE_ID,
useFactory: _localeFactory,
deps: [[new Inject(LOCALE_ID), new Optional(), new SkipSelf()]]
},
]
})
export class ApplicationModule {
// Inject ApplicationRef to make it eager...
|
(appRef: ApplicationRef) {}
}
|
constructor
|
identifier_name
|
application_module.ts
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ApplicationInitStatus} from './application_init';
import {ApplicationRef} from './application_ref';
import {APP_ID_RANDOM_PROVIDER} from './application_tokens';
import {IterableDiffers, KeyValueDiffers, defaultIterableDiffers, defaultKeyValueDiffers} from './change_detection/change_detection';
import {Inject, Optional, SkipSelf} from './di/metadata';
import {LOCALE_ID} from './i18n/tokens';
import {Compiler} from './linker/compiler';
import {NgModule} from './metadata';
export function _iterableDiffersFactory() {
|
}
export function _keyValueDiffersFactory() {
return defaultKeyValueDiffers;
}
export function _localeFactory(locale?: string): string {
return locale || 'en-US';
}
/**
* This module includes the providers of @angular/core that are needed
* to bootstrap components via `ApplicationRef`.
*
* @experimental
*/
@NgModule({
providers: [
ApplicationRef,
ApplicationInitStatus,
Compiler,
APP_ID_RANDOM_PROVIDER,
{provide: IterableDiffers, useFactory: _iterableDiffersFactory},
{provide: KeyValueDiffers, useFactory: _keyValueDiffersFactory},
{
provide: LOCALE_ID,
useFactory: _localeFactory,
deps: [[new Inject(LOCALE_ID), new Optional(), new SkipSelf()]]
},
]
})
export class ApplicationModule {
// Inject ApplicationRef to make it eager...
constructor(appRef: ApplicationRef) {}
}
|
return defaultIterableDiffers;
|
random_line_split
|
kendo.culture.pa-IN.js
|
/*
* Kendo UI v2014.3.1119 (http://www.telerik.com/kendo-ui)
* Copyright 2014 Telerik AD. All rights reserved.
*
* Kendo UI commercial licenses may be obtained at
* http://www.telerik.com/purchase/license-agreement/kendo-ui-complete
* If you do not own a commercial license, this file shall be governed by the trial license terms.
*/
(function(f, define){
define([], f);
})(function(){
(function( window, undefined ) {
var kendo = window.kendo || (window.kendo = { cultures: {} });
kendo.cultures["pa-IN"] = {
name: "pa-IN",
numberFormat: {
pattern: ["-n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3,2],
percent: {
pattern: ["-n %","n %"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3,2],
symbol: "%"
},
currency: {
pattern: ["$ -n","$ n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3,2],
symbol: "ਰੁ"
}
},
calendars: {
standard: {
days: {
names: ["ਐਤਵਾਰ","ਸੋਮਵਾਰ","ਮੰਗਲਵਾਰ","ਬੁੱਧਵਾਰ","ਵੀਰਵਾਰ","ਸ਼ੁੱਕਰਵਾਰ","ਸ਼ਨਿੱਚਰਵਾਰ"],
namesAbbr: ["ਐਤ.","ਸੋਮ.","ਮੰਗਲ.","ਬੁੱਧ.","ਵੀਰ.","ਸ਼ੁਕਰ.","ਸ਼ਨਿੱਚਰ."],
namesShort: ["ਐ","ਸ","ਮ","ਬ","ਵ","ਸ਼","ਸ਼"]
},
months: {
names: ["ਜਨਵਰੀ","ਫ਼ਰਵਰੀ","ਮਾਰਚ","ਅਪ੍ਰੈਲ","ਮਈ","ਜੂਨ","ਜੁਲਾਈ","ਅਗਸਤ","ਸਤੰਬਰ","ਅਕਤੂਬਰ","ਨਵੰਬਰ","ਦਸੰਬਰ",""],
namesAbbr: ["ਜਨਵਰੀ","ਫ਼ਰਵਰੀ","ਮਾਰਚ","ਅਪ੍ਰੈਲ","ਮਈ","ਜੂਨ","ਜੁਲਾਈ","ਅਗਸਤ","ਸਤੰਬਰ","ਅਕਤੂਬਰ","ਨਵੰਬਰ","ਦਸੰਬਰ",""]
},
AM: ["ਸਵੇਰ","ਸਵੇਰ","ਸਵੇਰ"],
PM: ["ਸ਼ਾਮ","ਸ਼ਾਮ","ਸ਼ਾਮ"],
patterns: {
d: "dd-MM-yy",
|
M: "dd MMMM",
s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss",
t: "tt hh:mm",
T: "tt hh:mm:ss",
u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
y: "MMMM, yyyy",
Y: "MMMM, yyyy"
},
"/": "-",
":": ":",
firstDay: 1
}
}
}
})(this);
return window.kendo;
}, typeof define == 'function' && define.amd ? define : function(_, f){ f(); });
|
D: "dd MMMM yyyy dddd",
F: "dd MMMM yyyy dddd tt hh:mm:ss",
g: "dd-MM-yy tt hh:mm",
G: "dd-MM-yy tt hh:mm:ss",
m: "dd MMMM",
|
random_line_split
|
CacheKill.py
|
from plugins.external.sergio_proxy.plugins.plugin import Plugin
class CacheKill(Plugin):
name = "CacheKill Plugin"
optname = "cachekill"
desc = "Kills page caching by modifying headers."
implements = ["handleHeader","connectionMade"]
has_opts = True
bad_headers = ['if-none-match','if-modified-since']
def add_options(self,options):
options.add_argument("--preserve-cookies",action="store_true",
help="Preserve cookies (will allow caching in some situations).")
def handleHeader(self,request,key,value):
'''Handles all response headers'''
request.client.headers['Expires'] = "0"
request.client.headers['Cache-Control'] = "no-cache"
def connectionMade(self,request):
'''Handles outgoing request'''
request.headers['Pragma'] = 'no-cache'
for h in self.bad_headers:
|
if h in request.headers:
request.headers[h] = ""
|
random_line_split
|
|
CacheKill.py
|
from plugins.external.sergio_proxy.plugins.plugin import Plugin
class CacheKill(Plugin):
name = "CacheKill Plugin"
optname = "cachekill"
desc = "Kills page caching by modifying headers."
implements = ["handleHeader","connectionMade"]
has_opts = True
bad_headers = ['if-none-match','if-modified-since']
def
|
(self,options):
options.add_argument("--preserve-cookies",action="store_true",
help="Preserve cookies (will allow caching in some situations).")
def handleHeader(self,request,key,value):
'''Handles all response headers'''
request.client.headers['Expires'] = "0"
request.client.headers['Cache-Control'] = "no-cache"
def connectionMade(self,request):
'''Handles outgoing request'''
request.headers['Pragma'] = 'no-cache'
for h in self.bad_headers:
if h in request.headers:
request.headers[h] = ""
|
add_options
|
identifier_name
|
CacheKill.py
|
from plugins.external.sergio_proxy.plugins.plugin import Plugin
class CacheKill(Plugin):
name = "CacheKill Plugin"
optname = "cachekill"
desc = "Kills page caching by modifying headers."
implements = ["handleHeader","connectionMade"]
has_opts = True
bad_headers = ['if-none-match','if-modified-since']
def add_options(self,options):
options.add_argument("--preserve-cookies",action="store_true",
help="Preserve cookies (will allow caching in some situations).")
def handleHeader(self,request,key,value):
|
def connectionMade(self,request):
'''Handles outgoing request'''
request.headers['Pragma'] = 'no-cache'
for h in self.bad_headers:
if h in request.headers:
request.headers[h] = ""
|
'''Handles all response headers'''
request.client.headers['Expires'] = "0"
request.client.headers['Cache-Control'] = "no-cache"
|
identifier_body
|
CacheKill.py
|
from plugins.external.sergio_proxy.plugins.plugin import Plugin
class CacheKill(Plugin):
name = "CacheKill Plugin"
optname = "cachekill"
desc = "Kills page caching by modifying headers."
implements = ["handleHeader","connectionMade"]
has_opts = True
bad_headers = ['if-none-match','if-modified-since']
def add_options(self,options):
options.add_argument("--preserve-cookies",action="store_true",
help="Preserve cookies (will allow caching in some situations).")
def handleHeader(self,request,key,value):
'''Handles all response headers'''
request.client.headers['Expires'] = "0"
request.client.headers['Cache-Control'] = "no-cache"
def connectionMade(self,request):
'''Handles outgoing request'''
request.headers['Pragma'] = 'no-cache'
for h in self.bad_headers:
if h in request.headers:
|
request.headers[h] = ""
|
conditional_block
|
|
unifi.py
|
"""
Support for Unifi WAP controllers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.unifi/
"""
import logging
import urllib
from homeassistant.components.device_tracker import DOMAIN
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers import validate_config
# Unifi package doesn't list urllib3 as a requirement
REQUIREMENTS = ['urllib3', 'unifi==1.2.5']
_LOGGER = logging.getLogger(__name__)
CONF_PORT = 'port'
CONF_SITE_ID = 'site_id'
def get_scanner(hass, config):
"""Setup Unifi device_tracker."""
from unifi.controller import Controller
if not validate_config(config, {DOMAIN: [CONF_USERNAME,
CONF_PASSWORD]},
_LOGGER):
_LOGGER.error('Invalid configuration')
return False
this_config = config[DOMAIN]
host = this_config.get(CONF_HOST, 'localhost')
username = this_config.get(CONF_USERNAME)
password = this_config.get(CONF_PASSWORD)
site_id = this_config.get(CONF_SITE_ID, 'default')
try:
port = int(this_config.get(CONF_PORT, 8443))
except ValueError:
_LOGGER.error('Invalid port (must be numeric like 8443)')
return False
try:
ctrl = Controller(host, username, password, port, 'v4', site_id)
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to connect to unifi: %s', ex)
return False
return UnifiScanner(ctrl)
class UnifiScanner(object):
"""Provide device_tracker support from Unifi WAP client data."""
def
|
(self, controller):
"""Initialize the scanner."""
self._controller = controller
self._update()
def _update(self):
"""Get the clients from the device."""
try:
clients = self._controller.get_clients()
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to scan clients: %s', ex)
clients = []
self._clients = {client['mac']: client for client in clients}
def scan_devices(self):
"""Scan for devices."""
self._update()
return self._clients.keys()
def get_device_name(self, mac):
"""Return the name (if known) of the device.
If a name has been set in Unifi, then return that, else
return the hostname if it has been detected.
"""
client = self._clients.get(mac, {})
name = client.get('name') or client.get('hostname')
_LOGGER.debug('Device %s name %s', mac, name)
return name
|
__init__
|
identifier_name
|
unifi.py
|
"""
Support for Unifi WAP controllers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.unifi/
"""
import logging
import urllib
from homeassistant.components.device_tracker import DOMAIN
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers import validate_config
# Unifi package doesn't list urllib3 as a requirement
REQUIREMENTS = ['urllib3', 'unifi==1.2.5']
_LOGGER = logging.getLogger(__name__)
CONF_PORT = 'port'
CONF_SITE_ID = 'site_id'
def get_scanner(hass, config):
"""Setup Unifi device_tracker."""
from unifi.controller import Controller
if not validate_config(config, {DOMAIN: [CONF_USERNAME,
CONF_PASSWORD]},
_LOGGER):
_LOGGER.error('Invalid configuration')
return False
this_config = config[DOMAIN]
host = this_config.get(CONF_HOST, 'localhost')
username = this_config.get(CONF_USERNAME)
password = this_config.get(CONF_PASSWORD)
site_id = this_config.get(CONF_SITE_ID, 'default')
try:
port = int(this_config.get(CONF_PORT, 8443))
except ValueError:
_LOGGER.error('Invalid port (must be numeric like 8443)')
return False
try:
ctrl = Controller(host, username, password, port, 'v4', site_id)
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to connect to unifi: %s', ex)
return False
return UnifiScanner(ctrl)
class UnifiScanner(object):
"""Provide device_tracker support from Unifi WAP client data."""
def __init__(self, controller):
"""Initialize the scanner."""
self._controller = controller
self._update()
def _update(self):
|
def scan_devices(self):
"""Scan for devices."""
self._update()
return self._clients.keys()
def get_device_name(self, mac):
"""Return the name (if known) of the device.
If a name has been set in Unifi, then return that, else
return the hostname if it has been detected.
"""
client = self._clients.get(mac, {})
name = client.get('name') or client.get('hostname')
_LOGGER.debug('Device %s name %s', mac, name)
return name
|
"""Get the clients from the device."""
try:
clients = self._controller.get_clients()
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to scan clients: %s', ex)
clients = []
self._clients = {client['mac']: client for client in clients}
|
identifier_body
|
unifi.py
|
"""
Support for Unifi WAP controllers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.unifi/
"""
import logging
import urllib
from homeassistant.components.device_tracker import DOMAIN
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers import validate_config
# Unifi package doesn't list urllib3 as a requirement
REQUIREMENTS = ['urllib3', 'unifi==1.2.5']
_LOGGER = logging.getLogger(__name__)
CONF_PORT = 'port'
CONF_SITE_ID = 'site_id'
def get_scanner(hass, config):
"""Setup Unifi device_tracker."""
from unifi.controller import Controller
if not validate_config(config, {DOMAIN: [CONF_USERNAME,
CONF_PASSWORD]},
_LOGGER):
|
this_config = config[DOMAIN]
host = this_config.get(CONF_HOST, 'localhost')
username = this_config.get(CONF_USERNAME)
password = this_config.get(CONF_PASSWORD)
site_id = this_config.get(CONF_SITE_ID, 'default')
try:
port = int(this_config.get(CONF_PORT, 8443))
except ValueError:
_LOGGER.error('Invalid port (must be numeric like 8443)')
return False
try:
ctrl = Controller(host, username, password, port, 'v4', site_id)
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to connect to unifi: %s', ex)
return False
return UnifiScanner(ctrl)
class UnifiScanner(object):
"""Provide device_tracker support from Unifi WAP client data."""
def __init__(self, controller):
"""Initialize the scanner."""
self._controller = controller
self._update()
def _update(self):
"""Get the clients from the device."""
try:
clients = self._controller.get_clients()
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to scan clients: %s', ex)
clients = []
self._clients = {client['mac']: client for client in clients}
def scan_devices(self):
"""Scan for devices."""
self._update()
return self._clients.keys()
def get_device_name(self, mac):
"""Return the name (if known) of the device.
If a name has been set in Unifi, then return that, else
return the hostname if it has been detected.
"""
client = self._clients.get(mac, {})
name = client.get('name') or client.get('hostname')
_LOGGER.debug('Device %s name %s', mac, name)
return name
|
_LOGGER.error('Invalid configuration')
return False
|
conditional_block
|
unifi.py
|
"""
Support for Unifi WAP controllers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.unifi/
"""
import logging
import urllib
from homeassistant.components.device_tracker import DOMAIN
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers import validate_config
# Unifi package doesn't list urllib3 as a requirement
REQUIREMENTS = ['urllib3', 'unifi==1.2.5']
_LOGGER = logging.getLogger(__name__)
CONF_PORT = 'port'
CONF_SITE_ID = 'site_id'
def get_scanner(hass, config):
"""Setup Unifi device_tracker."""
from unifi.controller import Controller
if not validate_config(config, {DOMAIN: [CONF_USERNAME,
CONF_PASSWORD]},
_LOGGER):
_LOGGER.error('Invalid configuration')
return False
this_config = config[DOMAIN]
host = this_config.get(CONF_HOST, 'localhost')
username = this_config.get(CONF_USERNAME)
password = this_config.get(CONF_PASSWORD)
site_id = this_config.get(CONF_SITE_ID, 'default')
try:
port = int(this_config.get(CONF_PORT, 8443))
except ValueError:
_LOGGER.error('Invalid port (must be numeric like 8443)')
return False
try:
ctrl = Controller(host, username, password, port, 'v4', site_id)
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to connect to unifi: %s', ex)
return False
return UnifiScanner(ctrl)
class UnifiScanner(object):
"""Provide device_tracker support from Unifi WAP client data."""
def __init__(self, controller):
"""Initialize the scanner."""
self._controller = controller
self._update()
def _update(self):
"""Get the clients from the device."""
try:
clients = self._controller.get_clients()
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to scan clients: %s', ex)
clients = []
self._clients = {client['mac']: client for client in clients}
def scan_devices(self):
"""Scan for devices."""
self._update()
return self._clients.keys()
def get_device_name(self, mac):
"""Return the name (if known) of the device.
If a name has been set in Unifi, then return that, else
return the hostname if it has been detected.
"""
client = self._clients.get(mac, {})
name = client.get('name') or client.get('hostname')
|
_LOGGER.debug('Device %s name %s', mac, name)
return name
|
random_line_split
|
|
callbacks.ts
|
class CallbacksTest {
constructor(private opts: qq.CallbackOptions) {
}
testCallbacks() {
const opts = this.opts;
interface CustomType {
myTypeOfClass: string;
}
opts.onAutoRetry = (id, name, attemptNumber) => {};
opts.onCancel = (id, name) => {};
|
opts.onDelete = (id) => {};
opts.onDeleteComplete = (id, xhr, isError) => {};
opts.onError = (id, name, errorReason, xhr) => {};
opts.onManualRetry = (id, name) => {
return true;
};
opts.onPasteReceived = (blob) => {};
opts.onProgress = (id, name, uploadedBytes, totalBytes) => {};
opts.onResume = (id: number, name: string, chunkData: CustomType) => {};
opts.onSessionRequestComplete = (response: CustomType[], success: boolean, xhrOrXdr: XMLHttpRequest) => {};
opts.onStatusChange = (id, oldStatus, newStatus) => {};
opts.onSubmit = (id, name) => {};
opts.onSubmitDelete = (id) => {};
opts.onSubmitted = (id, name) => {};
opts.onTotalProgress = (totalUploadedBytes, totalBytes) => {};
opts.onUpload = (id, name) => {};
opts.onUploadChunk = (id, name, chunkData) => {};
opts.onUploadChunkSuccess = (id: number, chunkData: qq.ChunkData, responseJSON: CustomType, xhr: XMLHttpRequest) => {};
opts.onValidate = (data, buttonContainer) => {};
opts.onValidateBatch = (fileOrBlobDataArray, buttonContaine) => {};
}
}
|
opts.onComplete = (id: number, name: string, responseJSON: CustomType, xhr: XMLHttpRequest) => {};
opts.onAllComplete = (succeeded, failed) => {};
|
random_line_split
|
callbacks.ts
|
class CallbacksTest {
|
(private opts: qq.CallbackOptions) {
}
testCallbacks() {
const opts = this.opts;
interface CustomType {
myTypeOfClass: string;
}
opts.onAutoRetry = (id, name, attemptNumber) => {};
opts.onCancel = (id, name) => {};
opts.onComplete = (id: number, name: string, responseJSON: CustomType, xhr: XMLHttpRequest) => {};
opts.onAllComplete = (succeeded, failed) => {};
opts.onDelete = (id) => {};
opts.onDeleteComplete = (id, xhr, isError) => {};
opts.onError = (id, name, errorReason, xhr) => {};
opts.onManualRetry = (id, name) => {
return true;
};
opts.onPasteReceived = (blob) => {};
opts.onProgress = (id, name, uploadedBytes, totalBytes) => {};
opts.onResume = (id: number, name: string, chunkData: CustomType) => {};
opts.onSessionRequestComplete = (response: CustomType[], success: boolean, xhrOrXdr: XMLHttpRequest) => {};
opts.onStatusChange = (id, oldStatus, newStatus) => {};
opts.onSubmit = (id, name) => {};
opts.onSubmitDelete = (id) => {};
opts.onSubmitted = (id, name) => {};
opts.onTotalProgress = (totalUploadedBytes, totalBytes) => {};
opts.onUpload = (id, name) => {};
opts.onUploadChunk = (id, name, chunkData) => {};
opts.onUploadChunkSuccess = (id: number, chunkData: qq.ChunkData, responseJSON: CustomType, xhr: XMLHttpRequest) => {};
opts.onValidate = (data, buttonContainer) => {};
opts.onValidateBatch = (fileOrBlobDataArray, buttonContaine) => {};
}
}
|
constructor
|
identifier_name
|
connected-position-strategy.ts
|
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Direction} from '@angular/cdk/bidi';
import {Platform} from '@angular/cdk/platform';
import {CdkScrollable, ViewportRuler} from '@angular/cdk/scrolling';
import {ElementRef} from '@angular/core';
import {Observable} from 'rxjs';
import {OverlayContainer} from '../overlay-container';
import {OverlayReference} from '../overlay-reference';
import {
ConnectedOverlayPositionChange,
ConnectionPositionPair,
OriginConnectionPosition,
OverlayConnectionPosition,
} from './connected-position';
import {FlexibleConnectedPositionStrategy} from './flexible-connected-position-strategy';
import {PositionStrategy} from './position-strategy';
/**
* A strategy for positioning overlays. Using this strategy, an overlay is given an
* implicit position relative to some origin element. The relative position is defined in terms of
* a point on the origin element that is connected to a point on the overlay element. For example,
* a basic dropdown is connecting the bottom-left corner of the origin to the top-left corner
* of the overlay.
* @deprecated Use `FlexibleConnectedPositionStrategy` instead.
* @breaking-change 8.0.0
*/
export class ConnectedPositionStrategy implements PositionStrategy {
/**
* Reference to the underlying position strategy to which all the API calls are proxied.
* @docs-private
*/
_positionStrategy: FlexibleConnectedPositionStrategy;
/** The overlay to which this strategy is attached. */
private _overlayRef: OverlayReference;
private _direction: Direction | null;
/** Ordered list of preferred positions, from most to least desirable. */
_preferredPositions: ConnectionPositionPair[] = [];
/** Emits an event when the connection point changes. */
readonly onPositionChange: Observable<ConnectedOverlayPositionChange>;
constructor(
originPos: OriginConnectionPosition, overlayPos: OverlayConnectionPosition,
connectedTo: ElementRef<HTMLElement>, viewportRuler: ViewportRuler, document: Document,
platform: Platform, overlayContainer: OverlayContainer) {
// Since the `ConnectedPositionStrategy` is deprecated and we don't want to maintain
// the extra logic, we create an instance of the positioning strategy that has some
// defaults that make it behave as the old position strategy and to which we'll
// proxy all of the API calls.
this._positionStrategy = new FlexibleConnectedPositionStrategy(
connectedTo, viewportRuler, document, platform, overlayContainer)
.withFlexibleDimensions(false)
.withPush(false)
.withViewportMargin(0);
this.withFallbackPosition(originPos, overlayPos);
this.onPositionChange = this._positionStrategy.positionChanges;
}
/** Ordered list of preferred positions, from most to least desirable. */
get positions(): ConnectionPositionPair[] {
return this._preferredPositions;
}
/** Attach this position strategy to an overlay. */
attach(overlayRef: OverlayReference): void {
this._overlayRef = overlayRef;
this._positionStrategy.attach(overlayRef);
if (this._direction) {
overlayRef.setDirection(this._direction);
this._direction = null;
}
}
/** Disposes all resources used by the position strategy. */
dispose() {
this._positionStrategy.dispose();
}
/** @docs-private */
detach() {
this._positionStrategy.detach();
}
/**
* Updates the position of the overlay element, using whichever preferred position relative
* to the origin fits on-screen.
* @docs-private
*/
apply(): void {
this._positionStrategy.apply();
}
/**
* Re-positions the overlay element with the trigger in its last calculated position,
* even if a position higher in the "preferred positions" list would now fit. This
* allows one to re-align the panel without changing the orientation of the panel.
*/
recalculateLastPosition(): void {
this._positionStrategy.reapplyLastPosition();
}
/**
* Sets the list of Scrollable containers that host the origin element so that
* on reposition we can evaluate if it or the overlay has been clipped or outside view. Every
* Scrollable must be an ancestor element of the strategy's origin element.
*/
withScrollableContainers(scrollables: CdkScrollable[]) {
this._positionStrategy.withScrollableContainers(scrollables);
}
/**
* Adds a new preferred fallback position.
* @param originPos
* @param overlayPos
*/
withFallbackPosition(
originPos: OriginConnectionPosition,
overlayPos: OverlayConnectionPosition,
offsetX?: number,
offsetY?: number): this {
const position = new ConnectionPositionPair(originPos, overlayPos, offsetX, offsetY);
this._preferredPositions.push(position);
this._positionStrategy.withPositions(this._preferredPositions);
return this;
}
/**
* Sets the layout direction so the overlay's position can be adjusted to match.
* @param dir New layout direction.
*/
withDirection(dir: 'ltr' | 'rtl'): this {
// Since the direction might be declared before the strategy is attached,
// we save the value in a temporary property and we'll transfer it to the
// overlay ref on attachment.
if (this._overlayRef) {
this._overlayRef.setDirection(dir);
} else
|
return this;
}
/**
* Sets an offset for the overlay's connection point on the x-axis
* @param offset New offset in the X axis.
*/
withOffsetX(offset: number): this {
this._positionStrategy.withDefaultOffsetX(offset);
return this;
}
/**
* Sets an offset for the overlay's connection point on the y-axis
* @param offset New offset in the Y axis.
*/
withOffsetY(offset: number): this {
this._positionStrategy.withDefaultOffsetY(offset);
return this;
}
/**
* Sets whether the overlay's position should be locked in after it is positioned
* initially. When an overlay is locked in, it won't attempt to reposition itself
* when the position is re-applied (e.g. when the user scrolls away).
* @param isLocked Whether the overlay should locked in.
*/
withLockedPosition(isLocked: boolean): this {
this._positionStrategy.withLockedPosition(isLocked);
return this;
}
/**
* Overwrites the current set of positions with an array of new ones.
* @param positions Position pairs to be set on the strategy.
*/
withPositions(positions: ConnectionPositionPair[]): this {
this._preferredPositions = positions.slice();
this._positionStrategy.withPositions(this._preferredPositions);
return this;
}
/**
* Sets the origin element, relative to which to position the overlay.
* @param origin Reference to the new origin element.
*/
setOrigin(origin: ElementRef): this {
this._positionStrategy.setOrigin(origin);
return this;
}
}
|
{
this._direction = dir;
}
|
conditional_block
|
connected-position-strategy.ts
|
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Direction} from '@angular/cdk/bidi';
import {Platform} from '@angular/cdk/platform';
import {CdkScrollable, ViewportRuler} from '@angular/cdk/scrolling';
import {ElementRef} from '@angular/core';
import {Observable} from 'rxjs';
import {OverlayContainer} from '../overlay-container';
import {OverlayReference} from '../overlay-reference';
import {
ConnectedOverlayPositionChange,
ConnectionPositionPair,
OriginConnectionPosition,
OverlayConnectionPosition,
} from './connected-position';
import {FlexibleConnectedPositionStrategy} from './flexible-connected-position-strategy';
import {PositionStrategy} from './position-strategy';
/**
* A strategy for positioning overlays. Using this strategy, an overlay is given an
* implicit position relative to some origin element. The relative position is defined in terms of
* a point on the origin element that is connected to a point on the overlay element. For example,
* a basic dropdown is connecting the bottom-left corner of the origin to the top-left corner
* of the overlay.
* @deprecated Use `FlexibleConnectedPositionStrategy` instead.
* @breaking-change 8.0.0
*/
export class ConnectedPositionStrategy implements PositionStrategy {
/**
* Reference to the underlying position strategy to which all the API calls are proxied.
* @docs-private
*/
_positionStrategy: FlexibleConnectedPositionStrategy;
/** The overlay to which this strategy is attached. */
private _overlayRef: OverlayReference;
private _direction: Direction | null;
/** Ordered list of preferred positions, from most to least desirable. */
_preferredPositions: ConnectionPositionPair[] = [];
/** Emits an event when the connection point changes. */
readonly onPositionChange: Observable<ConnectedOverlayPositionChange>;
constructor(
originPos: OriginConnectionPosition, overlayPos: OverlayConnectionPosition,
connectedTo: ElementRef<HTMLElement>, viewportRuler: ViewportRuler, document: Document,
platform: Platform, overlayContainer: OverlayContainer) {
// Since the `ConnectedPositionStrategy` is deprecated and we don't want to maintain
// the extra logic, we create an instance of the positioning strategy that has some
// defaults that make it behave as the old position strategy and to which we'll
// proxy all of the API calls.
this._positionStrategy = new FlexibleConnectedPositionStrategy(
connectedTo, viewportRuler, document, platform, overlayContainer)
.withFlexibleDimensions(false)
.withPush(false)
.withViewportMargin(0);
this.withFallbackPosition(originPos, overlayPos);
this.onPositionChange = this._positionStrategy.positionChanges;
}
/** Ordered list of preferred positions, from most to least desirable. */
get positions(): ConnectionPositionPair[] {
return this._preferredPositions;
}
/** Attach this position strategy to an overlay. */
attach(overlayRef: OverlayReference): void {
this._overlayRef = overlayRef;
this._positionStrategy.attach(overlayRef);
if (this._direction) {
overlayRef.setDirection(this._direction);
this._direction = null;
}
}
/** Disposes all resources used by the position strategy. */
dispose() {
this._positionStrategy.dispose();
}
/** @docs-private */
detach() {
this._positionStrategy.detach();
}
/**
* Updates the position of the overlay element, using whichever preferred position relative
* to the origin fits on-screen.
* @docs-private
*/
apply(): void {
this._positionStrategy.apply();
}
/**
* Re-positions the overlay element with the trigger in its last calculated position,
* even if a position higher in the "preferred positions" list would now fit. This
* allows one to re-align the panel without changing the orientation of the panel.
*/
recalculateLastPosition(): void {
this._positionStrategy.reapplyLastPosition();
}
/**
* Sets the list of Scrollable containers that host the origin element so that
* on reposition we can evaluate if it or the overlay has been clipped or outside view. Every
* Scrollable must be an ancestor element of the strategy's origin element.
*/
withScrollableContainers(scrollables: CdkScrollable[]) {
this._positionStrategy.withScrollableContainers(scrollables);
}
/**
* Adds a new preferred fallback position.
* @param originPos
* @param overlayPos
*/
withFallbackPosition(
originPos: OriginConnectionPosition,
overlayPos: OverlayConnectionPosition,
offsetX?: number,
offsetY?: number): this {
const position = new ConnectionPositionPair(originPos, overlayPos, offsetX, offsetY);
this._preferredPositions.push(position);
this._positionStrategy.withPositions(this._preferredPositions);
return this;
}
/**
* Sets the layout direction so the overlay's position can be adjusted to match.
* @param dir New layout direction.
*/
withDirection(dir: 'ltr' | 'rtl'): this {
// Since the direction might be declared before the strategy is attached,
// we save the value in a temporary property and we'll transfer it to the
// overlay ref on attachment.
if (this._overlayRef) {
this._overlayRef.setDirection(dir);
} else {
this._direction = dir;
}
return this;
}
/**
* Sets an offset for the overlay's connection point on the x-axis
* @param offset New offset in the X axis.
*/
withOffsetX(offset: number): this {
this._positionStrategy.withDefaultOffsetX(offset);
return this;
}
/**
* Sets an offset for the overlay's connection point on the y-axis
* @param offset New offset in the Y axis.
*/
withOffsetY(offset: number): this {
this._positionStrategy.withDefaultOffsetY(offset);
return this;
}
/**
* Sets whether the overlay's position should be locked in after it is positioned
* initially. When an overlay is locked in, it won't attempt to reposition itself
* when the position is re-applied (e.g. when the user scrolls away).
* @param isLocked Whether the overlay should locked in.
*/
withLockedPosition(isLocked: boolean): this {
this._positionStrategy.withLockedPosition(isLocked);
return this;
}
/**
* Overwrites the current set of positions with an array of new ones.
* @param positions Position pairs to be set on the strategy.
*/
withPositions(positions: ConnectionPositionPair[]): this
|
/**
* Sets the origin element, relative to which to position the overlay.
* @param origin Reference to the new origin element.
*/
setOrigin(origin: ElementRef): this {
this._positionStrategy.setOrigin(origin);
return this;
}
}
|
{
this._preferredPositions = positions.slice();
this._positionStrategy.withPositions(this._preferredPositions);
return this;
}
|
identifier_body
|
connected-position-strategy.ts
|
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Direction} from '@angular/cdk/bidi';
import {Platform} from '@angular/cdk/platform';
import {CdkScrollable, ViewportRuler} from '@angular/cdk/scrolling';
import {ElementRef} from '@angular/core';
import {Observable} from 'rxjs';
import {OverlayContainer} from '../overlay-container';
import {OverlayReference} from '../overlay-reference';
import {
ConnectedOverlayPositionChange,
ConnectionPositionPair,
OriginConnectionPosition,
OverlayConnectionPosition,
} from './connected-position';
import {FlexibleConnectedPositionStrategy} from './flexible-connected-position-strategy';
import {PositionStrategy} from './position-strategy';
/**
* A strategy for positioning overlays. Using this strategy, an overlay is given an
* implicit position relative to some origin element. The relative position is defined in terms of
* a point on the origin element that is connected to a point on the overlay element. For example,
* a basic dropdown is connecting the bottom-left corner of the origin to the top-left corner
* of the overlay.
* @deprecated Use `FlexibleConnectedPositionStrategy` instead.
* @breaking-change 8.0.0
*/
export class ConnectedPositionStrategy implements PositionStrategy {
/**
* Reference to the underlying position strategy to which all the API calls are proxied.
* @docs-private
*/
_positionStrategy: FlexibleConnectedPositionStrategy;
/** The overlay to which this strategy is attached. */
private _overlayRef: OverlayReference;
private _direction: Direction | null;
/** Ordered list of preferred positions, from most to least desirable. */
_preferredPositions: ConnectionPositionPair[] = [];
/** Emits an event when the connection point changes. */
readonly onPositionChange: Observable<ConnectedOverlayPositionChange>;
constructor(
originPos: OriginConnectionPosition, overlayPos: OverlayConnectionPosition,
connectedTo: ElementRef<HTMLElement>, viewportRuler: ViewportRuler, document: Document,
platform: Platform, overlayContainer: OverlayContainer) {
// Since the `ConnectedPositionStrategy` is deprecated and we don't want to maintain
// the extra logic, we create an instance of the positioning strategy that has some
// defaults that make it behave as the old position strategy and to which we'll
// proxy all of the API calls.
this._positionStrategy = new FlexibleConnectedPositionStrategy(
connectedTo, viewportRuler, document, platform, overlayContainer)
.withFlexibleDimensions(false)
.withPush(false)
.withViewportMargin(0);
this.withFallbackPosition(originPos, overlayPos);
this.onPositionChange = this._positionStrategy.positionChanges;
}
/** Ordered list of preferred positions, from most to least desirable. */
get positions(): ConnectionPositionPair[] {
return this._preferredPositions;
}
/** Attach this position strategy to an overlay. */
attach(overlayRef: OverlayReference): void {
this._overlayRef = overlayRef;
this._positionStrategy.attach(overlayRef);
if (this._direction) {
overlayRef.setDirection(this._direction);
this._direction = null;
}
}
/** Disposes all resources used by the position strategy. */
dispose() {
this._positionStrategy.dispose();
}
/** @docs-private */
|
() {
this._positionStrategy.detach();
}
/**
* Updates the position of the overlay element, using whichever preferred position relative
* to the origin fits on-screen.
* @docs-private
*/
apply(): void {
this._positionStrategy.apply();
}
/**
* Re-positions the overlay element with the trigger in its last calculated position,
* even if a position higher in the "preferred positions" list would now fit. This
* allows one to re-align the panel without changing the orientation of the panel.
*/
recalculateLastPosition(): void {
this._positionStrategy.reapplyLastPosition();
}
/**
* Sets the list of Scrollable containers that host the origin element so that
* on reposition we can evaluate if it or the overlay has been clipped or outside view. Every
* Scrollable must be an ancestor element of the strategy's origin element.
*/
withScrollableContainers(scrollables: CdkScrollable[]) {
this._positionStrategy.withScrollableContainers(scrollables);
}
/**
* Adds a new preferred fallback position.
* @param originPos
* @param overlayPos
*/
withFallbackPosition(
originPos: OriginConnectionPosition,
overlayPos: OverlayConnectionPosition,
offsetX?: number,
offsetY?: number): this {
const position = new ConnectionPositionPair(originPos, overlayPos, offsetX, offsetY);
this._preferredPositions.push(position);
this._positionStrategy.withPositions(this._preferredPositions);
return this;
}
/**
* Sets the layout direction so the overlay's position can be adjusted to match.
* @param dir New layout direction.
*/
withDirection(dir: 'ltr' | 'rtl'): this {
// Since the direction might be declared before the strategy is attached,
// we save the value in a temporary property and we'll transfer it to the
// overlay ref on attachment.
if (this._overlayRef) {
this._overlayRef.setDirection(dir);
} else {
this._direction = dir;
}
return this;
}
/**
* Sets an offset for the overlay's connection point on the x-axis
* @param offset New offset in the X axis.
*/
withOffsetX(offset: number): this {
this._positionStrategy.withDefaultOffsetX(offset);
return this;
}
/**
* Sets an offset for the overlay's connection point on the y-axis
* @param offset New offset in the Y axis.
*/
withOffsetY(offset: number): this {
this._positionStrategy.withDefaultOffsetY(offset);
return this;
}
/**
* Sets whether the overlay's position should be locked in after it is positioned
* initially. When an overlay is locked in, it won't attempt to reposition itself
* when the position is re-applied (e.g. when the user scrolls away).
* @param isLocked Whether the overlay should locked in.
*/
withLockedPosition(isLocked: boolean): this {
this._positionStrategy.withLockedPosition(isLocked);
return this;
}
/**
* Overwrites the current set of positions with an array of new ones.
* @param positions Position pairs to be set on the strategy.
*/
withPositions(positions: ConnectionPositionPair[]): this {
this._preferredPositions = positions.slice();
this._positionStrategy.withPositions(this._preferredPositions);
return this;
}
/**
* Sets the origin element, relative to which to position the overlay.
* @param origin Reference to the new origin element.
*/
setOrigin(origin: ElementRef): this {
this._positionStrategy.setOrigin(origin);
return this;
}
}
|
detach
|
identifier_name
|
connected-position-strategy.ts
|
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Direction} from '@angular/cdk/bidi';
import {Platform} from '@angular/cdk/platform';
import {CdkScrollable, ViewportRuler} from '@angular/cdk/scrolling';
import {ElementRef} from '@angular/core';
import {Observable} from 'rxjs';
import {OverlayContainer} from '../overlay-container';
import {OverlayReference} from '../overlay-reference';
import {
ConnectedOverlayPositionChange,
ConnectionPositionPair,
OriginConnectionPosition,
OverlayConnectionPosition,
} from './connected-position';
import {FlexibleConnectedPositionStrategy} from './flexible-connected-position-strategy';
import {PositionStrategy} from './position-strategy';
/**
* A strategy for positioning overlays. Using this strategy, an overlay is given an
* implicit position relative to some origin element. The relative position is defined in terms of
* a point on the origin element that is connected to a point on the overlay element. For example,
* a basic dropdown is connecting the bottom-left corner of the origin to the top-left corner
* of the overlay.
* @deprecated Use `FlexibleConnectedPositionStrategy` instead.
* @breaking-change 8.0.0
*/
export class ConnectedPositionStrategy implements PositionStrategy {
/**
* Reference to the underlying position strategy to which all the API calls are proxied.
|
* @docs-private
*/
_positionStrategy: FlexibleConnectedPositionStrategy;
/** The overlay to which this strategy is attached. */
private _overlayRef: OverlayReference;
private _direction: Direction | null;
/** Ordered list of preferred positions, from most to least desirable. */
_preferredPositions: ConnectionPositionPair[] = [];
/** Emits an event when the connection point changes. */
readonly onPositionChange: Observable<ConnectedOverlayPositionChange>;
constructor(
originPos: OriginConnectionPosition, overlayPos: OverlayConnectionPosition,
connectedTo: ElementRef<HTMLElement>, viewportRuler: ViewportRuler, document: Document,
platform: Platform, overlayContainer: OverlayContainer) {
// Since the `ConnectedPositionStrategy` is deprecated and we don't want to maintain
// the extra logic, we create an instance of the positioning strategy that has some
// defaults that make it behave as the old position strategy and to which we'll
// proxy all of the API calls.
this._positionStrategy = new FlexibleConnectedPositionStrategy(
connectedTo, viewportRuler, document, platform, overlayContainer)
.withFlexibleDimensions(false)
.withPush(false)
.withViewportMargin(0);
this.withFallbackPosition(originPos, overlayPos);
this.onPositionChange = this._positionStrategy.positionChanges;
}
/** Ordered list of preferred positions, from most to least desirable. */
get positions(): ConnectionPositionPair[] {
return this._preferredPositions;
}
/** Attach this position strategy to an overlay. */
attach(overlayRef: OverlayReference): void {
this._overlayRef = overlayRef;
this._positionStrategy.attach(overlayRef);
if (this._direction) {
overlayRef.setDirection(this._direction);
this._direction = null;
}
}
/** Disposes all resources used by the position strategy. */
dispose() {
this._positionStrategy.dispose();
}
/** @docs-private */
detach() {
this._positionStrategy.detach();
}
/**
* Updates the position of the overlay element, using whichever preferred position relative
* to the origin fits on-screen.
* @docs-private
*/
apply(): void {
this._positionStrategy.apply();
}
/**
* Re-positions the overlay element with the trigger in its last calculated position,
* even if a position higher in the "preferred positions" list would now fit. This
* allows one to re-align the panel without changing the orientation of the panel.
*/
recalculateLastPosition(): void {
this._positionStrategy.reapplyLastPosition();
}
/**
* Sets the list of Scrollable containers that host the origin element so that
* on reposition we can evaluate if it or the overlay has been clipped or outside view. Every
* Scrollable must be an ancestor element of the strategy's origin element.
*/
withScrollableContainers(scrollables: CdkScrollable[]) {
this._positionStrategy.withScrollableContainers(scrollables);
}
/**
* Adds a new preferred fallback position.
* @param originPos
* @param overlayPos
*/
withFallbackPosition(
originPos: OriginConnectionPosition,
overlayPos: OverlayConnectionPosition,
offsetX?: number,
offsetY?: number): this {
const position = new ConnectionPositionPair(originPos, overlayPos, offsetX, offsetY);
this._preferredPositions.push(position);
this._positionStrategy.withPositions(this._preferredPositions);
return this;
}
/**
* Sets the layout direction so the overlay's position can be adjusted to match.
* @param dir New layout direction.
*/
withDirection(dir: 'ltr' | 'rtl'): this {
// Since the direction might be declared before the strategy is attached,
// we save the value in a temporary property and we'll transfer it to the
// overlay ref on attachment.
if (this._overlayRef) {
this._overlayRef.setDirection(dir);
} else {
this._direction = dir;
}
return this;
}
/**
* Sets an offset for the overlay's connection point on the x-axis
* @param offset New offset in the X axis.
*/
withOffsetX(offset: number): this {
this._positionStrategy.withDefaultOffsetX(offset);
return this;
}
/**
* Sets an offset for the overlay's connection point on the y-axis
* @param offset New offset in the Y axis.
*/
withOffsetY(offset: number): this {
this._positionStrategy.withDefaultOffsetY(offset);
return this;
}
/**
* Sets whether the overlay's position should be locked in after it is positioned
* initially. When an overlay is locked in, it won't attempt to reposition itself
* when the position is re-applied (e.g. when the user scrolls away).
* @param isLocked Whether the overlay should locked in.
*/
withLockedPosition(isLocked: boolean): this {
this._positionStrategy.withLockedPosition(isLocked);
return this;
}
/**
* Overwrites the current set of positions with an array of new ones.
* @param positions Position pairs to be set on the strategy.
*/
withPositions(positions: ConnectionPositionPair[]): this {
this._preferredPositions = positions.slice();
this._positionStrategy.withPositions(this._preferredPositions);
return this;
}
/**
* Sets the origin element, relative to which to position the overlay.
* @param origin Reference to the new origin element.
*/
setOrigin(origin: ElementRef): this {
this._positionStrategy.setOrigin(origin);
return this;
}
}
|
random_line_split
|
|
lastIndexOf.js
|
define(['./_baseFindIndex', './_baseIsNaN', './_strictLastIndexOf', './toInteger'], function(baseFindIndex, baseIsNaN, strictLastIndexOf, toInteger) {
/** Used as a safe reference for `undefined` in pre-ES5 environments. */
var undefined;
/* Built-in method references for those with the same name as other `lodash` methods. */
var nativeMax = Math.max,
nativeMin = Math.min;
/**
* This method is like `_.indexOf` except that it iterates over elements of
* `array` from right to left.
*
* @static
* @memberOf _
* @since 0.1.0
* @category Array
* @param {Array} array The array to inspect.
* @param {*} value The value to search for.
* @param {number} [fromIndex=array.length-1] The index to search from.
* @returns {number} Returns the index of the matched value, else `-1`.
* @example
*
* _.lastIndexOf([1, 2, 1, 2], 2);
* // => 3
*
* // Search from the `fromIndex`.
* _.lastIndexOf([1, 2, 1, 2], 2, 2);
* // => 1
*/
function lastIndexOf(array, value, fromIndex)
|
return lastIndexOf;
});
|
{
var length = array ? array.length : 0;
if (!length) {
return -1;
}
var index = length;
if (fromIndex !== undefined) {
index = toInteger(fromIndex);
index = index < 0 ? nativeMax(length + index, 0) : nativeMin(index, length - 1);
}
return value === value
? strictLastIndexOf(array, value, index)
: baseFindIndex(array, baseIsNaN, index, true);
}
|
identifier_body
|
lastIndexOf.js
|
define(['./_baseFindIndex', './_baseIsNaN', './_strictLastIndexOf', './toInteger'], function(baseFindIndex, baseIsNaN, strictLastIndexOf, toInteger) {
/** Used as a safe reference for `undefined` in pre-ES5 environments. */
var undefined;
/* Built-in method references for those with the same name as other `lodash` methods. */
var nativeMax = Math.max,
nativeMin = Math.min;
/**
* This method is like `_.indexOf` except that it iterates over elements of
* `array` from right to left.
*
* @static
|
* @category Array
* @param {Array} array The array to inspect.
* @param {*} value The value to search for.
* @param {number} [fromIndex=array.length-1] The index to search from.
* @returns {number} Returns the index of the matched value, else `-1`.
* @example
*
* _.lastIndexOf([1, 2, 1, 2], 2);
* // => 3
*
* // Search from the `fromIndex`.
* _.lastIndexOf([1, 2, 1, 2], 2, 2);
* // => 1
*/
function lastIndexOf(array, value, fromIndex) {
var length = array ? array.length : 0;
if (!length) {
return -1;
}
var index = length;
if (fromIndex !== undefined) {
index = toInteger(fromIndex);
index = index < 0 ? nativeMax(length + index, 0) : nativeMin(index, length - 1);
}
return value === value
? strictLastIndexOf(array, value, index)
: baseFindIndex(array, baseIsNaN, index, true);
}
return lastIndexOf;
});
|
* @memberOf _
* @since 0.1.0
|
random_line_split
|
lastIndexOf.js
|
define(['./_baseFindIndex', './_baseIsNaN', './_strictLastIndexOf', './toInteger'], function(baseFindIndex, baseIsNaN, strictLastIndexOf, toInteger) {
/** Used as a safe reference for `undefined` in pre-ES5 environments. */
var undefined;
/* Built-in method references for those with the same name as other `lodash` methods. */
var nativeMax = Math.max,
nativeMin = Math.min;
/**
* This method is like `_.indexOf` except that it iterates over elements of
* `array` from right to left.
*
* @static
* @memberOf _
* @since 0.1.0
* @category Array
* @param {Array} array The array to inspect.
* @param {*} value The value to search for.
* @param {number} [fromIndex=array.length-1] The index to search from.
* @returns {number} Returns the index of the matched value, else `-1`.
* @example
*
* _.lastIndexOf([1, 2, 1, 2], 2);
* // => 3
*
* // Search from the `fromIndex`.
* _.lastIndexOf([1, 2, 1, 2], 2, 2);
* // => 1
*/
function lastIndexOf(array, value, fromIndex) {
var length = array ? array.length : 0;
if (!length)
|
var index = length;
if (fromIndex !== undefined) {
index = toInteger(fromIndex);
index = index < 0 ? nativeMax(length + index, 0) : nativeMin(index, length - 1);
}
return value === value
? strictLastIndexOf(array, value, index)
: baseFindIndex(array, baseIsNaN, index, true);
}
return lastIndexOf;
});
|
{
return -1;
}
|
conditional_block
|
lastIndexOf.js
|
define(['./_baseFindIndex', './_baseIsNaN', './_strictLastIndexOf', './toInteger'], function(baseFindIndex, baseIsNaN, strictLastIndexOf, toInteger) {
/** Used as a safe reference for `undefined` in pre-ES5 environments. */
var undefined;
/* Built-in method references for those with the same name as other `lodash` methods. */
var nativeMax = Math.max,
nativeMin = Math.min;
/**
* This method is like `_.indexOf` except that it iterates over elements of
* `array` from right to left.
*
* @static
* @memberOf _
* @since 0.1.0
* @category Array
* @param {Array} array The array to inspect.
* @param {*} value The value to search for.
* @param {number} [fromIndex=array.length-1] The index to search from.
* @returns {number} Returns the index of the matched value, else `-1`.
* @example
*
* _.lastIndexOf([1, 2, 1, 2], 2);
* // => 3
*
* // Search from the `fromIndex`.
* _.lastIndexOf([1, 2, 1, 2], 2, 2);
* // => 1
*/
function
|
(array, value, fromIndex) {
var length = array ? array.length : 0;
if (!length) {
return -1;
}
var index = length;
if (fromIndex !== undefined) {
index = toInteger(fromIndex);
index = index < 0 ? nativeMax(length + index, 0) : nativeMin(index, length - 1);
}
return value === value
? strictLastIndexOf(array, value, index)
: baseFindIndex(array, baseIsNaN, index, true);
}
return lastIndexOf;
});
|
lastIndexOf
|
identifier_name
|
urls.py
|
from django.conf import settings
from django.conf.urls import url
from plans.views import CreateOrderView, OrderListView, InvoiceDetailView, AccountActivationView, \
OrderPaymentReturnView, CurrentPlanView, UpgradePlanView, OrderView, BillingInfoRedirectView, \
BillingInfoCreateView, BillingInfoUpdateView, BillingInfoDeleteView, CreateOrderPlanChangeView, ChangePlanView, \
PricingView, FakePaymentsView
urlpatterns = [
url(r'^pricing/$', PricingView.as_view(), name='pricing'),
url(r'^account/$', CurrentPlanView.as_view(), name='current_plan'),
url(r'^account/activation/$', AccountActivationView.as_view(), name='account_activation'),
url(r'^upgrade/$', UpgradePlanView.as_view(), name='upgrade_plan'),
url(r'^order/extend/new/(?P<pk>\d+)/$', CreateOrderView.as_view(), name='create_order_plan'),
url(r'^order/upgrade/new/(?P<pk>\d+)/$', CreateOrderPlanChangeView.as_view(), name='create_order_plan_change'),
url(r'^change/(?P<pk>\d+)/$', ChangePlanView.as_view(), name='change_plan'),
url(r'^order/$', OrderListView.as_view(), name='order_list'),
url(r'^order/(?P<pk>\d+)/$', OrderView.as_view(), name='order'),
url(r'^order/(?P<pk>\d+)/payment/success/$', OrderPaymentReturnView.as_view(status='success'),
name='order_payment_success'),
url(r'^order/(?P<pk>\d+)/payment/failure/$', OrderPaymentReturnView.as_view(status='failure'),
name='order_payment_failure'),
url(r'^billing/$', BillingInfoRedirectView.as_view(), name='billing_info'),
url(r'^billing/create/$', BillingInfoCreateView.as_view(), name='billing_info_create'),
url(r'^billing/update/$', BillingInfoUpdateView.as_view(), name='billing_info_update'),
url(r'^billing/delete/$', BillingInfoDeleteView.as_view(), name='billing_info_delete'),
url(r'^invoice/(?P<pk>\d+)/preview/html/$', InvoiceDetailView.as_view(), name='invoice_preview_html'),
]
if getattr(settings, 'DEBUG', False):
|
urlpatterns += [
url(r'^fakepayments/(?P<pk>\d+)/$', FakePaymentsView.as_view(), name='fake_payments'),
]
|
conditional_block
|
|
urls.py
|
from django.conf import settings
from django.conf.urls import url
from plans.views import CreateOrderView, OrderListView, InvoiceDetailView, AccountActivationView, \
OrderPaymentReturnView, CurrentPlanView, UpgradePlanView, OrderView, BillingInfoRedirectView, \
BillingInfoCreateView, BillingInfoUpdateView, BillingInfoDeleteView, CreateOrderPlanChangeView, ChangePlanView, \
PricingView, FakePaymentsView
urlpatterns = [
url(r'^pricing/$', PricingView.as_view(), name='pricing'),
url(r'^account/$', CurrentPlanView.as_view(), name='current_plan'),
url(r'^account/activation/$', AccountActivationView.as_view(), name='account_activation'),
url(r'^upgrade/$', UpgradePlanView.as_view(), name='upgrade_plan'),
url(r'^order/extend/new/(?P<pk>\d+)/$', CreateOrderView.as_view(), name='create_order_plan'),
url(r'^order/upgrade/new/(?P<pk>\d+)/$', CreateOrderPlanChangeView.as_view(), name='create_order_plan_change'),
url(r'^change/(?P<pk>\d+)/$', ChangePlanView.as_view(), name='change_plan'),
url(r'^order/$', OrderListView.as_view(), name='order_list'),
url(r'^order/(?P<pk>\d+)/$', OrderView.as_view(), name='order'),
url(r'^order/(?P<pk>\d+)/payment/success/$', OrderPaymentReturnView.as_view(status='success'),
name='order_payment_success'),
url(r'^order/(?P<pk>\d+)/payment/failure/$', OrderPaymentReturnView.as_view(status='failure'),
name='order_payment_failure'),
url(r'^billing/$', BillingInfoRedirectView.as_view(), name='billing_info'),
url(r'^billing/create/$', BillingInfoCreateView.as_view(), name='billing_info_create'),
url(r'^billing/update/$', BillingInfoUpdateView.as_view(), name='billing_info_update'),
url(r'^billing/delete/$', BillingInfoDeleteView.as_view(), name='billing_info_delete'),
url(r'^invoice/(?P<pk>\d+)/preview/html/$', InvoiceDetailView.as_view(), name='invoice_preview_html'),
|
if getattr(settings, 'DEBUG', False):
urlpatterns += [
url(r'^fakepayments/(?P<pk>\d+)/$', FakePaymentsView.as_view(), name='fake_payments'),
]
|
]
|
random_line_split
|
FormatChangeSelectionTest.ts
|
import { describe, it } from '@ephox/bedrock-client';
import { TinyAssertions, TinyHooks, TinySelections } from '@ephox/mcagar';
import Editor from 'tinymce/core/api/Editor';
import Theme from 'tinymce/themes/silver/Theme';
describe('browser.tinymce.core.fmt.FormatChangeSelectionTest', () => {
const hook = TinyHooks.bddSetupLight<Editor>({
base_url: '/project/tinymce/js/tinymce'
}, [ Theme ]);
|
editor.setContent('<p><em><strong>a </strong>b<strong> c</strong></em></p>');
TinySelections.setSelection(editor, [ 0, 0, 1 ], 0, [ 0, 0, 2 ], 0);
editor.execCommand('italic');
TinyAssertions.assertContent(editor, '<p><em><strong>a </strong></em>b<em><strong> c</strong></em></p>');
TinyAssertions.assertSelection(editor, [ 0, 1 ], 0, [ 0, 2 ], 0);
});
});
|
it('Check selection after removing part of an inline format', () => {
const editor = hook.editor();
|
random_line_split
|
LaserMatrix.js
|
import React from 'react';
import Analyzer from 'parser/core/Analyzer';
import { formatNumber, formatPercentage } from 'common/format';
import SPELLS from 'common/SPELLS/index';
import TraitStatisticBox, { STATISTIC_ORDER } from 'interface/others/TraitStatisticBox';
import SpellLink from 'common/SpellLink';
/**
* Your spells and abilities have a chance to release a barrage of lasers, dealing 4058 Arcane damage
* split among all enemies and restoring 5073 health split among injured allies.
*/
class LaserMatrix extends Analyzer{
|
this.active = this.selectedCombatant.hasTrait(SPELLS.LASER_MATRIX.id);
}
on_byPlayer_heal(event) {
const spellId = event.ability.guid;
if (spellId === SPELLS.LASER_MATRIX_HEAL.id) {
this.healing += event.amount + (event.absorbed || 0);
}
}
on_byPlayer_damage(event) {
const spellId = event.ability.guid;
if (spellId === SPELLS.LASER_MATRIX_DAMAGE.id) {
this.damage += event.amount + (event.absorbed || 0);
}
}
// TODO - Show actual gain from Reorigination Array (as an own module perhaps?)
statistic(){
const healingThroughputPercent = this.owner.getPercentageOfTotalHealingDone(this.healing);
const damageThroughputPercent = this.owner.getPercentageOfTotalDamageDone(this.damage);
return(
<TraitStatisticBox
position={STATISTIC_ORDER.OPTIONAL()}
trait={SPELLS.LASER_MATRIX.id}
value={(
<>
{formatPercentage(healingThroughputPercent)} % healing<br />
{formatPercentage(damageThroughputPercent)} % damage<br />
Gained <SpellLink id={SPELLS.REORIGINATION_ARRAY.id} />
</>
)}
tooltip={(
<>
Healing done: {formatNumber(this.healing)} <br />
Damage done: {formatNumber(this.damage)}
</>
)}
/>
);
}
}
export default LaserMatrix;
|
healing = 0;
damage = 0;
constructor(...args){
super(...args);
|
random_line_split
|
LaserMatrix.js
|
import React from 'react';
import Analyzer from 'parser/core/Analyzer';
import { formatNumber, formatPercentage } from 'common/format';
import SPELLS from 'common/SPELLS/index';
import TraitStatisticBox, { STATISTIC_ORDER } from 'interface/others/TraitStatisticBox';
import SpellLink from 'common/SpellLink';
/**
* Your spells and abilities have a chance to release a barrage of lasers, dealing 4058 Arcane damage
* split among all enemies and restoring 5073 health split among injured allies.
*/
class LaserMatrix extends Analyzer{
healing = 0;
damage = 0;
constructor(...args){
super(...args);
this.active = this.selectedCombatant.hasTrait(SPELLS.LASER_MATRIX.id);
}
on_byPlayer_heal(event) {
const spellId = event.ability.guid;
if (spellId === SPELLS.LASER_MATRIX_HEAL.id) {
this.healing += event.amount + (event.absorbed || 0);
}
}
on_byPlayer_damage(event) {
const spellId = event.ability.guid;
if (spellId === SPELLS.LASER_MATRIX_DAMAGE.id)
|
}
// TODO - Show actual gain from Reorigination Array (as an own module perhaps?)
statistic(){
const healingThroughputPercent = this.owner.getPercentageOfTotalHealingDone(this.healing);
const damageThroughputPercent = this.owner.getPercentageOfTotalDamageDone(this.damage);
return(
<TraitStatisticBox
position={STATISTIC_ORDER.OPTIONAL()}
trait={SPELLS.LASER_MATRIX.id}
value={(
<>
{formatPercentage(healingThroughputPercent)} % healing<br />
{formatPercentage(damageThroughputPercent)} % damage<br />
Gained <SpellLink id={SPELLS.REORIGINATION_ARRAY.id} />
</>
)}
tooltip={(
<>
Healing done: {formatNumber(this.healing)} <br />
Damage done: {formatNumber(this.damage)}
</>
)}
/>
);
}
}
export default LaserMatrix;
|
{
this.damage += event.amount + (event.absorbed || 0);
}
|
conditional_block
|
LaserMatrix.js
|
import React from 'react';
import Analyzer from 'parser/core/Analyzer';
import { formatNumber, formatPercentage } from 'common/format';
import SPELLS from 'common/SPELLS/index';
import TraitStatisticBox, { STATISTIC_ORDER } from 'interface/others/TraitStatisticBox';
import SpellLink from 'common/SpellLink';
/**
* Your spells and abilities have a chance to release a barrage of lasers, dealing 4058 Arcane damage
* split among all enemies and restoring 5073 health split among injured allies.
*/
class LaserMatrix extends Analyzer{
healing = 0;
damage = 0;
constructor(...args){
super(...args);
this.active = this.selectedCombatant.hasTrait(SPELLS.LASER_MATRIX.id);
}
on_byPlayer_heal(event) {
const spellId = event.ability.guid;
if (spellId === SPELLS.LASER_MATRIX_HEAL.id) {
this.healing += event.amount + (event.absorbed || 0);
}
}
on_byPlayer_damage(event) {
const spellId = event.ability.guid;
if (spellId === SPELLS.LASER_MATRIX_DAMAGE.id) {
this.damage += event.amount + (event.absorbed || 0);
}
}
// TODO - Show actual gain from Reorigination Array (as an own module perhaps?)
statistic()
|
}
export default LaserMatrix;
|
{
const healingThroughputPercent = this.owner.getPercentageOfTotalHealingDone(this.healing);
const damageThroughputPercent = this.owner.getPercentageOfTotalDamageDone(this.damage);
return(
<TraitStatisticBox
position={STATISTIC_ORDER.OPTIONAL()}
trait={SPELLS.LASER_MATRIX.id}
value={(
<>
{formatPercentage(healingThroughputPercent)} % healing<br />
{formatPercentage(damageThroughputPercent)} % damage<br />
Gained <SpellLink id={SPELLS.REORIGINATION_ARRAY.id} />
</>
)}
tooltip={(
<>
Healing done: {formatNumber(this.healing)} <br />
Damage done: {formatNumber(this.damage)}
</>
)}
/>
);
}
|
identifier_body
|
LaserMatrix.js
|
import React from 'react';
import Analyzer from 'parser/core/Analyzer';
import { formatNumber, formatPercentage } from 'common/format';
import SPELLS from 'common/SPELLS/index';
import TraitStatisticBox, { STATISTIC_ORDER } from 'interface/others/TraitStatisticBox';
import SpellLink from 'common/SpellLink';
/**
* Your spells and abilities have a chance to release a barrage of lasers, dealing 4058 Arcane damage
* split among all enemies and restoring 5073 health split among injured allies.
*/
class
|
extends Analyzer{
healing = 0;
damage = 0;
constructor(...args){
super(...args);
this.active = this.selectedCombatant.hasTrait(SPELLS.LASER_MATRIX.id);
}
on_byPlayer_heal(event) {
const spellId = event.ability.guid;
if (spellId === SPELLS.LASER_MATRIX_HEAL.id) {
this.healing += event.amount + (event.absorbed || 0);
}
}
on_byPlayer_damage(event) {
const spellId = event.ability.guid;
if (spellId === SPELLS.LASER_MATRIX_DAMAGE.id) {
this.damage += event.amount + (event.absorbed || 0);
}
}
// TODO - Show actual gain from Reorigination Array (as an own module perhaps?)
statistic(){
const healingThroughputPercent = this.owner.getPercentageOfTotalHealingDone(this.healing);
const damageThroughputPercent = this.owner.getPercentageOfTotalDamageDone(this.damage);
return(
<TraitStatisticBox
position={STATISTIC_ORDER.OPTIONAL()}
trait={SPELLS.LASER_MATRIX.id}
value={(
<>
{formatPercentage(healingThroughputPercent)} % healing<br />
{formatPercentage(damageThroughputPercent)} % damage<br />
Gained <SpellLink id={SPELLS.REORIGINATION_ARRAY.id} />
</>
)}
tooltip={(
<>
Healing done: {formatNumber(this.healing)} <br />
Damage done: {formatNumber(this.damage)}
</>
)}
/>
);
}
}
export default LaserMatrix;
|
LaserMatrix
|
identifier_name
|
app.e2e-spec.ts
|
import { browser, element, by, logging } from 'protractor';
describe('Inputs and Outputs', () => {
beforeEach(() => browser.get(''));
// helper function used to test what's logged to the console
async function logChecker(contents: string)
|
it('should have title Inputs and Outputs', async () => {
const title = element.all(by.css('h1')).get(0);
expect(await title.getText()).toEqual('Inputs and Outputs');
});
it('should add 123 to the parent list', async () => {
const addToParentButton = element.all(by.css('button')).get(0);
const addToListInput = element.all(by.css('input')).get(0);
const addedItem = element.all(by.css('li')).get(4);
await addToListInput.sendKeys('123');
await addToParentButton.click();
expect(await addedItem.getText()).toEqual('123');
});
it('should delete item', async () => {
const deleteButton = element.all(by.css('button')).get(1);
const contents = 'Child';
await deleteButton.click();
await logChecker(contents);
});
it('should log buy the item', async () => {
const buyButton = element.all(by.css('button')).get(2);
const contents = 'Child';
await buyButton.click();
await logChecker(contents);
});
it('should save item for later', async () => {
const saveButton = element.all(by.css('button')).get(3);
const contents = 'Child';
await saveButton.click();
await logChecker(contents);
});
it('should add item to wishlist', async () => {
const addToParentButton = element.all(by.css('button')).get(4);
const addedItem = element.all(by.css('li')).get(6);
await addToParentButton.click();
expect(await addedItem.getText()).toEqual('Television');
});
});
|
{
const logs = await browser
.manage()
.logs()
.get(logging.Type.BROWSER);
const messages = logs.filter(({ message }) => message.indexOf(contents) !== -1);
expect(messages.length).toBeGreaterThan(0);
}
|
identifier_body
|
app.e2e-spec.ts
|
import { browser, element, by, logging } from 'protractor';
describe('Inputs and Outputs', () => {
beforeEach(() => browser.get(''));
// helper function used to test what's logged to the console
async function
|
(contents: string) {
const logs = await browser
.manage()
.logs()
.get(logging.Type.BROWSER);
const messages = logs.filter(({ message }) => message.indexOf(contents) !== -1);
expect(messages.length).toBeGreaterThan(0);
}
it('should have title Inputs and Outputs', async () => {
const title = element.all(by.css('h1')).get(0);
expect(await title.getText()).toEqual('Inputs and Outputs');
});
it('should add 123 to the parent list', async () => {
const addToParentButton = element.all(by.css('button')).get(0);
const addToListInput = element.all(by.css('input')).get(0);
const addedItem = element.all(by.css('li')).get(4);
await addToListInput.sendKeys('123');
await addToParentButton.click();
expect(await addedItem.getText()).toEqual('123');
});
it('should delete item', async () => {
const deleteButton = element.all(by.css('button')).get(1);
const contents = 'Child';
await deleteButton.click();
await logChecker(contents);
});
it('should log buy the item', async () => {
const buyButton = element.all(by.css('button')).get(2);
const contents = 'Child';
await buyButton.click();
await logChecker(contents);
});
it('should save item for later', async () => {
const saveButton = element.all(by.css('button')).get(3);
const contents = 'Child';
await saveButton.click();
await logChecker(contents);
});
it('should add item to wishlist', async () => {
const addToParentButton = element.all(by.css('button')).get(4);
const addedItem = element.all(by.css('li')).get(6);
await addToParentButton.click();
expect(await addedItem.getText()).toEqual('Television');
});
});
|
logChecker
|
identifier_name
|
app.e2e-spec.ts
|
import { browser, element, by, logging } from 'protractor';
describe('Inputs and Outputs', () => {
beforeEach(() => browser.get(''));
// helper function used to test what's logged to the console
async function logChecker(contents: string) {
const logs = await browser
.manage()
.logs()
.get(logging.Type.BROWSER);
const messages = logs.filter(({ message }) => message.indexOf(contents) !== -1);
expect(messages.length).toBeGreaterThan(0);
}
it('should have title Inputs and Outputs', async () => {
const title = element.all(by.css('h1')).get(0);
expect(await title.getText()).toEqual('Inputs and Outputs');
});
it('should add 123 to the parent list', async () => {
const addToParentButton = element.all(by.css('button')).get(0);
const addToListInput = element.all(by.css('input')).get(0);
const addedItem = element.all(by.css('li')).get(4);
await addToListInput.sendKeys('123');
await addToParentButton.click();
expect(await addedItem.getText()).toEqual('123');
});
it('should delete item', async () => {
const deleteButton = element.all(by.css('button')).get(1);
const contents = 'Child';
await deleteButton.click();
await logChecker(contents);
});
it('should log buy the item', async () => {
const buyButton = element.all(by.css('button')).get(2);
const contents = 'Child';
await buyButton.click();
await logChecker(contents);
});
it('should save item for later', async () => {
const saveButton = element.all(by.css('button')).get(3);
const contents = 'Child';
await saveButton.click();
await logChecker(contents);
});
it('should add item to wishlist', async () => {
const addToParentButton = element.all(by.css('button')).get(4);
const addedItem = element.all(by.css('li')).get(6);
await addToParentButton.click();
expect(await addedItem.getText()).toEqual('Television');
|
});
|
});
|
random_line_split
|
logging_config.py
|
# Copyright 2001-2005 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
Should work under Python versions >= 1.5.2, except that source line
information is not available unless 'sys._getframe()' is.
Copyright (C) 2001-2004 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import sys, logging, logging.handlers, string, socket, struct, os, traceback, types
try:
import thread
import threading
except ImportError:
thread = None
from SocketServer import ThreadingTCPServer, StreamRequestHandler
DEFAULT_LOGGING_CONFIG_PORT = 9030
if sys.platform == "win32":
RESET_ERROR = 10054 #WSAECONNRESET
else:
RESET_ERROR = 104 #ECONNRESET
#
# The following code implements a socket listener for on-the-fly
# reconfiguration of logging.
#
# _listener holds the server object doing the listening
_listener = None
def fileConfig(fname, defaults=None):
"""
Read the logging configuration from a ConfigParser-format file.
This can be called several times from an application, allowing an end user
the ability to select from various pre-canned configurations (if the
developer provides a mechanism to present the choices and load the chosen
configuration).
In versions of ConfigParser which have the readfp method [typically
shipped in 2.x versions of Python], you can pass in a file-like object
rather than a filename, in which case the file-like object will be read
using readfp.
"""
import ConfigParser
cp = ConfigParser.ConfigParser(defaults)
if hasattr(cp, 'readfp') and hasattr(fname, 'readline'):
cp.readfp(fname)
else:
cp.read(fname)
formatters = _create_formatters(cp)
# critical section
logging._acquireLock()
try:
logging._handlers.clear()
if hasattr(logging, '_handlerList'):
del logging._handlerList[:]
# Handlers add themselves to logging._handlers
handlers = _install_handlers(cp, formatters)
_install_loggers(cp, handlers)
finally:
logging._releaseLock()
def _resolve(name):
"""Resolve a dotted name to a global object."""
name = string.split(name, '.')
used = name.pop(0)
found = __import__(used)
for n in name:
used = used + '.' + n
try:
found = getattr(found, n)
except AttributeError:
__import__(used)
found = getattr(found, n)
return found
def _create_formatters(cp):
"""Create and return formatters"""
flist = cp.get("formatters", "keys")
if not len(flist):
return {}
flist = string.split(flist, ",")
formatters = {}
for form in flist:
form = string.strip(form)
sectname = "formatter_%s" % form
opts = cp.options(sectname)
if "format" in opts:
fs = cp.get(sectname, "format", 1)
else:
fs = None
if "datefmt" in opts:
dfs = cp.get(sectname, "datefmt", 1)
else:
dfs = None
c = logging.Formatter
if "class" in opts:
class_name = cp.get(sectname, "class")
if class_name:
c = _resolve(class_name)
f = c(fs, dfs)
formatters[form] = f
return formatters
def _install_handlers(cp, formatters):
"""Install and return handlers"""
hlist = cp.get("handlers", "keys")
if not len(hlist):
return {}
hlist = string.split(hlist, ",")
handlers = {}
fixups = [] #for inter-handler references
for hand in hlist:
hand = string.strip(hand)
sectname = "handler_%s" % hand
klass = cp.get(sectname, "class")
opts = cp.options(sectname)
if "formatter" in opts:
fmt = cp.get(sectname, "formatter")
else:
fmt = ""
try:
klass = eval(klass, vars(logging))
except (AttributeError, NameError):
klass = _resolve(klass)
args = cp.get(sectname, "args")
args = eval(args, vars(logging))
h = apply(klass, args)
if "level" in opts:
level = cp.get(sectname, "level")
h.setLevel(logging._levelNames[level])
if len(fmt):
h.setFormatter(formatters[fmt])
#temporary hack for FileHandler and MemoryHandler.
if klass == logging.handlers.MemoryHandler:
if "target" in opts:
target = cp.get(sectname,"target")
else:
target = ""
if len(target): #the target handler may not be loaded yet, so keep for later...
fixups.append((h, target))
handlers[hand] = h
#now all handlers are loaded, fixup inter-handler references...
for h, t in fixups:
h.setTarget(handlers[t])
return handlers
def _install_loggers(cp, handlers):
"""Create and install loggers"""
# configure the root first
llist = cp.get("loggers", "keys")
llist = string.split(llist, ",")
llist = map(lambda x: string.strip(x), llist)
llist.remove("root")
sectname = "logger_root"
root = logging.root
log = root
opts = cp.options(sectname)
if "level" in opts:
level = cp.get(sectname, "level")
log.setLevel(logging._levelNames[level])
for h in root.handlers[:]:
root.removeHandler(h)
hlist = cp.get(sectname, "handlers")
if len(hlist):
hlist = string.split(hlist, ",")
for hand in hlist:
log.addHandler(handlers[string.strip(hand)])
#and now the others...
#we don't want to lose the existing loggers,
#since other threads may have pointers to them.
#existing is set to contain all existing loggers,
#and as we go through the new configuration we
#remove any which are configured. At the end,
#what's left in existing is the set of loggers
#which were in the previous configuration but
#which are not in the new configuration.
existing = root.manager.loggerDict.keys()
#now set up the new ones...
for log in llist:
sectname = "logger_%s" % log
qn = cp.get(sectname, "qualname")
opts = cp.options(sectname)
if "propagate" in opts:
propagate = cp.getint(sectname, "propagate")
else:
propagate = 1
logger = logging.getLogger(qn)
if qn in existing:
existing.remove(qn)
if "level" in opts:
level = cp.get(sectname, "level")
logger.setLevel(logging._levelNames[level])
for h in logger.handlers[:]:
logger.removeHandler(h)
logger.propagate = propagate
logger.disabled = 0
hlist = cp.get(sectname, "handlers")
if len(hlist):
hlist = string.split(hlist, ",")
for hand in hlist:
logger.addHandler(handlers[string.strip(hand)])
#Disable any old loggers. There's no point deleting
#them as other threads may continue to hold references
#and by disabling them, you stop them doing any logging.
for log in existing:
root.manager.loggerDict[log].disabled = 1
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
"""
Start up a socket server on the specified port, and listen for new
configurations.
These will be sent as a file suitable for processing by fileConfig().
Returns a Thread object on which you can call start() to start the server,
and which you can join() when appropriate. To stop the server, call
stopListening().
"""
if not thread:
raise NotImplementedError, "listen() needs threading to work"
class ConfigStreamHandler(StreamRequestHandler):
|
class ConfigSocketReceiver(ThreadingTCPServer):
"""
A simple TCP socket-based logging config receiver.
"""
allow_reuse_address = 1
def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
handler=None):
ThreadingTCPServer.__init__(self, (host, port), handler)
logging._acquireLock()
self.abort = 0
logging._releaseLock()
self.timeout = 1
def serve_until_stopped(self):
import select
abort = 0
while not abort:
rd, wr, ex = select.select([self.socket.fileno()],
[], [],
self.timeout)
if rd:
self.handle_request()
logging._acquireLock()
abort = self.abort
logging._releaseLock()
def serve(rcvr, hdlr, port):
server = rcvr(port=port, handler=hdlr)
global _listener
logging._acquireLock()
_listener = server
logging._releaseLock()
server.serve_until_stopped()
return threading.Thread(target=serve,
args=(ConfigSocketReceiver,
ConfigStreamHandler, port))
def stopListening():
"""
Stop the listening server which was created with a call to listen().
"""
global _listener
if _listener:
logging._acquireLock()
_listener.abort = 1
_listener = None
logging._releaseLock()
|
"""
Handler for a logging configuration request.
It expects a completely new logging configuration and uses fileConfig
to install it.
"""
def handle(self):
"""
Handle a request.
Each request is expected to be a 4-byte length, packed using
struct.pack(">L", n), followed by the config file.
Uses fileConfig() to do the grunt work.
"""
import tempfile
try:
conn = self.connection
chunk = conn.recv(4)
if len(chunk) == 4:
slen = struct.unpack(">L", chunk)[0]
chunk = self.connection.recv(slen)
while len(chunk) < slen:
chunk = chunk + conn.recv(slen - len(chunk))
#Apply new configuration. We'd like to be able to
#create a StringIO and pass that in, but unfortunately
#1.5.2 ConfigParser does not support reading file
#objects, only actual files. So we create a temporary
#file and remove it later.
file = tempfile.mktemp(".ini")
f = open(file, "w")
f.write(chunk)
f.close()
try:
fileConfig(file)
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
os.remove(file)
except socket.error, e:
if type(e.args) != types.TupleType:
raise
else:
errcode = e.args[0]
if errcode != RESET_ERROR:
raise
|
identifier_body
|
logging_config.py
|
# Copyright 2001-2005 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
Should work under Python versions >= 1.5.2, except that source line
information is not available unless 'sys._getframe()' is.
Copyright (C) 2001-2004 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import sys, logging, logging.handlers, string, socket, struct, os, traceback, types
try:
import thread
import threading
except ImportError:
thread = None
from SocketServer import ThreadingTCPServer, StreamRequestHandler
DEFAULT_LOGGING_CONFIG_PORT = 9030
if sys.platform == "win32":
RESET_ERROR = 10054 #WSAECONNRESET
else:
RESET_ERROR = 104 #ECONNRESET
#
# The following code implements a socket listener for on-the-fly
# reconfiguration of logging.
#
# _listener holds the server object doing the listening
_listener = None
def fileConfig(fname, defaults=None):
"""
Read the logging configuration from a ConfigParser-format file.
This can be called several times from an application, allowing an end user
the ability to select from various pre-canned configurations (if the
developer provides a mechanism to present the choices and load the chosen
configuration).
In versions of ConfigParser which have the readfp method [typically
shipped in 2.x versions of Python], you can pass in a file-like object
rather than a filename, in which case the file-like object will be read
using readfp.
"""
import ConfigParser
cp = ConfigParser.ConfigParser(defaults)
if hasattr(cp, 'readfp') and hasattr(fname, 'readline'):
cp.readfp(fname)
else:
cp.read(fname)
formatters = _create_formatters(cp)
# critical section
logging._acquireLock()
try:
logging._handlers.clear()
if hasattr(logging, '_handlerList'):
del logging._handlerList[:]
# Handlers add themselves to logging._handlers
handlers = _install_handlers(cp, formatters)
_install_loggers(cp, handlers)
finally:
logging._releaseLock()
def _resolve(name):
"""Resolve a dotted name to a global object."""
name = string.split(name, '.')
used = name.pop(0)
found = __import__(used)
for n in name:
used = used + '.' + n
try:
found = getattr(found, n)
except AttributeError:
__import__(used)
found = getattr(found, n)
return found
def _create_formatters(cp):
"""Create and return formatters"""
flist = cp.get("formatters", "keys")
if not len(flist):
return {}
flist = string.split(flist, ",")
formatters = {}
for form in flist:
form = string.strip(form)
sectname = "formatter_%s" % form
opts = cp.options(sectname)
if "format" in opts:
fs = cp.get(sectname, "format", 1)
else:
fs = None
if "datefmt" in opts:
dfs = cp.get(sectname, "datefmt", 1)
else:
dfs = None
c = logging.Formatter
if "class" in opts:
class_name = cp.get(sectname, "class")
if class_name:
c = _resolve(class_name)
f = c(fs, dfs)
formatters[form] = f
return formatters
def _install_handlers(cp, formatters):
"""Install and return handlers"""
hlist = cp.get("handlers", "keys")
if not len(hlist):
return {}
hlist = string.split(hlist, ",")
handlers = {}
fixups = [] #for inter-handler references
for hand in hlist:
hand = string.strip(hand)
sectname = "handler_%s" % hand
klass = cp.get(sectname, "class")
opts = cp.options(sectname)
if "formatter" in opts:
fmt = cp.get(sectname, "formatter")
else:
fmt = ""
try:
klass = eval(klass, vars(logging))
except (AttributeError, NameError):
klass = _resolve(klass)
args = cp.get(sectname, "args")
args = eval(args, vars(logging))
h = apply(klass, args)
if "level" in opts:
level = cp.get(sectname, "level")
h.setLevel(logging._levelNames[level])
if len(fmt):
h.setFormatter(formatters[fmt])
#temporary hack for FileHandler and MemoryHandler.
if klass == logging.handlers.MemoryHandler:
if "target" in opts:
target = cp.get(sectname,"target")
else:
target = ""
if len(target): #the target handler may not be loaded yet, so keep for later...
fixups.append((h, target))
handlers[hand] = h
#now all handlers are loaded, fixup inter-handler references...
for h, t in fixups:
h.setTarget(handlers[t])
return handlers
def _install_loggers(cp, handlers):
"""Create and install loggers"""
# configure the root first
llist = cp.get("loggers", "keys")
llist = string.split(llist, ",")
llist = map(lambda x: string.strip(x), llist)
llist.remove("root")
sectname = "logger_root"
root = logging.root
log = root
opts = cp.options(sectname)
if "level" in opts:
level = cp.get(sectname, "level")
log.setLevel(logging._levelNames[level])
for h in root.handlers[:]:
root.removeHandler(h)
hlist = cp.get(sectname, "handlers")
if len(hlist):
hlist = string.split(hlist, ",")
for hand in hlist:
log.addHandler(handlers[string.strip(hand)])
#and now the others...
#we don't want to lose the existing loggers,
#since other threads may have pointers to them.
#existing is set to contain all existing loggers,
#and as we go through the new configuration we
#remove any which are configured. At the end,
#what's left in existing is the set of loggers
#which were in the previous configuration but
#which are not in the new configuration.
existing = root.manager.loggerDict.keys()
#now set up the new ones...
for log in llist:
sectname = "logger_%s" % log
qn = cp.get(sectname, "qualname")
opts = cp.options(sectname)
if "propagate" in opts:
propagate = cp.getint(sectname, "propagate")
else:
propagate = 1
logger = logging.getLogger(qn)
if qn in existing:
existing.remove(qn)
if "level" in opts:
level = cp.get(sectname, "level")
logger.setLevel(logging._levelNames[level])
for h in logger.handlers[:]:
logger.removeHandler(h)
logger.propagate = propagate
logger.disabled = 0
hlist = cp.get(sectname, "handlers")
if len(hlist):
hlist = string.split(hlist, ",")
for hand in hlist:
logger.addHandler(handlers[string.strip(hand)])
#Disable any old loggers. There's no point deleting
#them as other threads may continue to hold references
#and by disabling them, you stop them doing any logging.
for log in existing:
root.manager.loggerDict[log].disabled = 1
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
"""
Start up a socket server on the specified port, and listen for new
configurations.
These will be sent as a file suitable for processing by fileConfig().
Returns a Thread object on which you can call start() to start the server,
and which you can join() when appropriate. To stop the server, call
stopListening().
"""
if not thread:
raise NotImplementedError, "listen() needs threading to work"
class ConfigStreamHandler(StreamRequestHandler):
"""
Handler for a logging configuration request.
It expects a completely new logging configuration and uses fileConfig
to install it.
"""
def handle(self):
"""
Handle a request.
Each request is expected to be a 4-byte length, packed using
struct.pack(">L", n), followed by the config file.
Uses fileConfig() to do the grunt work.
"""
import tempfile
try:
conn = self.connection
chunk = conn.recv(4)
if len(chunk) == 4:
slen = struct.unpack(">L", chunk)[0]
chunk = self.connection.recv(slen)
while len(chunk) < slen:
chunk = chunk + conn.recv(slen - len(chunk))
#Apply new configuration. We'd like to be able to
#create a StringIO and pass that in, but unfortunately
#1.5.2 ConfigParser does not support reading file
#objects, only actual files. So we create a temporary
#file and remove it later.
file = tempfile.mktemp(".ini")
f = open(file, "w")
f.write(chunk)
f.close()
try:
fileConfig(file)
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
os.remove(file)
except socket.error, e:
if type(e.args) != types.TupleType:
raise
else:
errcode = e.args[0]
if errcode != RESET_ERROR:
raise
class
|
(ThreadingTCPServer):
"""
A simple TCP socket-based logging config receiver.
"""
allow_reuse_address = 1
def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
handler=None):
ThreadingTCPServer.__init__(self, (host, port), handler)
logging._acquireLock()
self.abort = 0
logging._releaseLock()
self.timeout = 1
def serve_until_stopped(self):
import select
abort = 0
while not abort:
rd, wr, ex = select.select([self.socket.fileno()],
[], [],
self.timeout)
if rd:
self.handle_request()
logging._acquireLock()
abort = self.abort
logging._releaseLock()
def serve(rcvr, hdlr, port):
server = rcvr(port=port, handler=hdlr)
global _listener
logging._acquireLock()
_listener = server
logging._releaseLock()
server.serve_until_stopped()
return threading.Thread(target=serve,
args=(ConfigSocketReceiver,
ConfigStreamHandler, port))
def stopListening():
"""
Stop the listening server which was created with a call to listen().
"""
global _listener
if _listener:
logging._acquireLock()
_listener.abort = 1
_listener = None
logging._releaseLock()
|
ConfigSocketReceiver
|
identifier_name
|
logging_config.py
|
# Copyright 2001-2005 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
Should work under Python versions >= 1.5.2, except that source line
information is not available unless 'sys._getframe()' is.
Copyright (C) 2001-2004 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import sys, logging, logging.handlers, string, socket, struct, os, traceback, types
try:
import thread
import threading
except ImportError:
thread = None
from SocketServer import ThreadingTCPServer, StreamRequestHandler
DEFAULT_LOGGING_CONFIG_PORT = 9030
if sys.platform == "win32":
RESET_ERROR = 10054 #WSAECONNRESET
else:
RESET_ERROR = 104 #ECONNRESET
#
# The following code implements a socket listener for on-the-fly
# reconfiguration of logging.
#
# _listener holds the server object doing the listening
_listener = None
def fileConfig(fname, defaults=None):
"""
Read the logging configuration from a ConfigParser-format file.
This can be called several times from an application, allowing an end user
the ability to select from various pre-canned configurations (if the
developer provides a mechanism to present the choices and load the chosen
configuration).
In versions of ConfigParser which have the readfp method [typically
shipped in 2.x versions of Python], you can pass in a file-like object
rather than a filename, in which case the file-like object will be read
using readfp.
"""
import ConfigParser
cp = ConfigParser.ConfigParser(defaults)
if hasattr(cp, 'readfp') and hasattr(fname, 'readline'):
cp.readfp(fname)
else:
cp.read(fname)
formatters = _create_formatters(cp)
# critical section
logging._acquireLock()
try:
logging._handlers.clear()
if hasattr(logging, '_handlerList'):
del logging._handlerList[:]
# Handlers add themselves to logging._handlers
handlers = _install_handlers(cp, formatters)
_install_loggers(cp, handlers)
finally:
|
def _resolve(name):
"""Resolve a dotted name to a global object."""
name = string.split(name, '.')
used = name.pop(0)
found = __import__(used)
for n in name:
used = used + '.' + n
try:
found = getattr(found, n)
except AttributeError:
__import__(used)
found = getattr(found, n)
return found
def _create_formatters(cp):
"""Create and return formatters"""
flist = cp.get("formatters", "keys")
if not len(flist):
return {}
flist = string.split(flist, ",")
formatters = {}
for form in flist:
form = string.strip(form)
sectname = "formatter_%s" % form
opts = cp.options(sectname)
if "format" in opts:
fs = cp.get(sectname, "format", 1)
else:
fs = None
if "datefmt" in opts:
dfs = cp.get(sectname, "datefmt", 1)
else:
dfs = None
c = logging.Formatter
if "class" in opts:
class_name = cp.get(sectname, "class")
if class_name:
c = _resolve(class_name)
f = c(fs, dfs)
formatters[form] = f
return formatters
def _install_handlers(cp, formatters):
"""Install and return handlers"""
hlist = cp.get("handlers", "keys")
if not len(hlist):
return {}
hlist = string.split(hlist, ",")
handlers = {}
fixups = [] #for inter-handler references
for hand in hlist:
hand = string.strip(hand)
sectname = "handler_%s" % hand
klass = cp.get(sectname, "class")
opts = cp.options(sectname)
if "formatter" in opts:
fmt = cp.get(sectname, "formatter")
else:
fmt = ""
try:
klass = eval(klass, vars(logging))
except (AttributeError, NameError):
klass = _resolve(klass)
args = cp.get(sectname, "args")
args = eval(args, vars(logging))
h = apply(klass, args)
if "level" in opts:
level = cp.get(sectname, "level")
h.setLevel(logging._levelNames[level])
if len(fmt):
h.setFormatter(formatters[fmt])
#temporary hack for FileHandler and MemoryHandler.
if klass == logging.handlers.MemoryHandler:
if "target" in opts:
target = cp.get(sectname,"target")
else:
target = ""
if len(target): #the target handler may not be loaded yet, so keep for later...
fixups.append((h, target))
handlers[hand] = h
#now all handlers are loaded, fixup inter-handler references...
for h, t in fixups:
h.setTarget(handlers[t])
return handlers
def _install_loggers(cp, handlers):
"""Create and install loggers"""
# configure the root first
llist = cp.get("loggers", "keys")
llist = string.split(llist, ",")
llist = map(lambda x: string.strip(x), llist)
llist.remove("root")
sectname = "logger_root"
root = logging.root
log = root
opts = cp.options(sectname)
if "level" in opts:
level = cp.get(sectname, "level")
log.setLevel(logging._levelNames[level])
for h in root.handlers[:]:
root.removeHandler(h)
hlist = cp.get(sectname, "handlers")
if len(hlist):
hlist = string.split(hlist, ",")
for hand in hlist:
log.addHandler(handlers[string.strip(hand)])
#and now the others...
#we don't want to lose the existing loggers,
#since other threads may have pointers to them.
#existing is set to contain all existing loggers,
#and as we go through the new configuration we
#remove any which are configured. At the end,
#what's left in existing is the set of loggers
#which were in the previous configuration but
#which are not in the new configuration.
existing = root.manager.loggerDict.keys()
#now set up the new ones...
for log in llist:
sectname = "logger_%s" % log
qn = cp.get(sectname, "qualname")
opts = cp.options(sectname)
if "propagate" in opts:
propagate = cp.getint(sectname, "propagate")
else:
propagate = 1
logger = logging.getLogger(qn)
if qn in existing:
existing.remove(qn)
if "level" in opts:
level = cp.get(sectname, "level")
logger.setLevel(logging._levelNames[level])
for h in logger.handlers[:]:
logger.removeHandler(h)
logger.propagate = propagate
logger.disabled = 0
hlist = cp.get(sectname, "handlers")
if len(hlist):
hlist = string.split(hlist, ",")
for hand in hlist:
logger.addHandler(handlers[string.strip(hand)])
#Disable any old loggers. There's no point deleting
#them as other threads may continue to hold references
#and by disabling them, you stop them doing any logging.
for log in existing:
root.manager.loggerDict[log].disabled = 1
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
"""
Start up a socket server on the specified port, and listen for new
configurations.
These will be sent as a file suitable for processing by fileConfig().
Returns a Thread object on which you can call start() to start the server,
and which you can join() when appropriate. To stop the server, call
stopListening().
"""
if not thread:
raise NotImplementedError, "listen() needs threading to work"
class ConfigStreamHandler(StreamRequestHandler):
"""
Handler for a logging configuration request.
It expects a completely new logging configuration and uses fileConfig
to install it.
"""
def handle(self):
"""
Handle a request.
Each request is expected to be a 4-byte length, packed using
struct.pack(">L", n), followed by the config file.
Uses fileConfig() to do the grunt work.
"""
import tempfile
try:
conn = self.connection
chunk = conn.recv(4)
if len(chunk) == 4:
slen = struct.unpack(">L", chunk)[0]
chunk = self.connection.recv(slen)
while len(chunk) < slen:
chunk = chunk + conn.recv(slen - len(chunk))
#Apply new configuration. We'd like to be able to
#create a StringIO and pass that in, but unfortunately
#1.5.2 ConfigParser does not support reading file
#objects, only actual files. So we create a temporary
#file and remove it later.
file = tempfile.mktemp(".ini")
f = open(file, "w")
f.write(chunk)
f.close()
try:
fileConfig(file)
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
os.remove(file)
except socket.error, e:
if type(e.args) != types.TupleType:
raise
else:
errcode = e.args[0]
if errcode != RESET_ERROR:
raise
class ConfigSocketReceiver(ThreadingTCPServer):
"""
A simple TCP socket-based logging config receiver.
"""
allow_reuse_address = 1
def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
handler=None):
ThreadingTCPServer.__init__(self, (host, port), handler)
logging._acquireLock()
self.abort = 0
logging._releaseLock()
self.timeout = 1
def serve_until_stopped(self):
import select
abort = 0
while not abort:
rd, wr, ex = select.select([self.socket.fileno()],
[], [],
self.timeout)
if rd:
self.handle_request()
logging._acquireLock()
abort = self.abort
logging._releaseLock()
def serve(rcvr, hdlr, port):
server = rcvr(port=port, handler=hdlr)
global _listener
logging._acquireLock()
_listener = server
logging._releaseLock()
server.serve_until_stopped()
return threading.Thread(target=serve,
args=(ConfigSocketReceiver,
ConfigStreamHandler, port))
def stopListening():
"""
Stop the listening server which was created with a call to listen().
"""
global _listener
if _listener:
logging._acquireLock()
_listener.abort = 1
_listener = None
logging._releaseLock()
|
logging._releaseLock()
|
random_line_split
|
logging_config.py
|
# Copyright 2001-2005 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
Should work under Python versions >= 1.5.2, except that source line
information is not available unless 'sys._getframe()' is.
Copyright (C) 2001-2004 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import sys, logging, logging.handlers, string, socket, struct, os, traceback, types
try:
import thread
import threading
except ImportError:
thread = None
from SocketServer import ThreadingTCPServer, StreamRequestHandler
DEFAULT_LOGGING_CONFIG_PORT = 9030
if sys.platform == "win32":
RESET_ERROR = 10054 #WSAECONNRESET
else:
RESET_ERROR = 104 #ECONNRESET
#
# The following code implements a socket listener for on-the-fly
# reconfiguration of logging.
#
# _listener holds the server object doing the listening
_listener = None
def fileConfig(fname, defaults=None):
"""
Read the logging configuration from a ConfigParser-format file.
This can be called several times from an application, allowing an end user
the ability to select from various pre-canned configurations (if the
developer provides a mechanism to present the choices and load the chosen
configuration).
In versions of ConfigParser which have the readfp method [typically
shipped in 2.x versions of Python], you can pass in a file-like object
rather than a filename, in which case the file-like object will be read
using readfp.
"""
import ConfigParser
cp = ConfigParser.ConfigParser(defaults)
if hasattr(cp, 'readfp') and hasattr(fname, 'readline'):
cp.readfp(fname)
else:
cp.read(fname)
formatters = _create_formatters(cp)
# critical section
logging._acquireLock()
try:
logging._handlers.clear()
if hasattr(logging, '_handlerList'):
del logging._handlerList[:]
# Handlers add themselves to logging._handlers
handlers = _install_handlers(cp, formatters)
_install_loggers(cp, handlers)
finally:
logging._releaseLock()
def _resolve(name):
"""Resolve a dotted name to a global object."""
name = string.split(name, '.')
used = name.pop(0)
found = __import__(used)
for n in name:
used = used + '.' + n
try:
found = getattr(found, n)
except AttributeError:
__import__(used)
found = getattr(found, n)
return found
def _create_formatters(cp):
"""Create and return formatters"""
flist = cp.get("formatters", "keys")
if not len(flist):
return {}
flist = string.split(flist, ",")
formatters = {}
for form in flist:
form = string.strip(form)
sectname = "formatter_%s" % form
opts = cp.options(sectname)
if "format" in opts:
fs = cp.get(sectname, "format", 1)
else:
fs = None
if "datefmt" in opts:
dfs = cp.get(sectname, "datefmt", 1)
else:
dfs = None
c = logging.Formatter
if "class" in opts:
class_name = cp.get(sectname, "class")
if class_name:
c = _resolve(class_name)
f = c(fs, dfs)
formatters[form] = f
return formatters
def _install_handlers(cp, formatters):
"""Install and return handlers"""
hlist = cp.get("handlers", "keys")
if not len(hlist):
return {}
hlist = string.split(hlist, ",")
handlers = {}
fixups = [] #for inter-handler references
for hand in hlist:
hand = string.strip(hand)
sectname = "handler_%s" % hand
klass = cp.get(sectname, "class")
opts = cp.options(sectname)
if "formatter" in opts:
fmt = cp.get(sectname, "formatter")
else:
fmt = ""
try:
klass = eval(klass, vars(logging))
except (AttributeError, NameError):
klass = _resolve(klass)
args = cp.get(sectname, "args")
args = eval(args, vars(logging))
h = apply(klass, args)
if "level" in opts:
level = cp.get(sectname, "level")
h.setLevel(logging._levelNames[level])
if len(fmt):
h.setFormatter(formatters[fmt])
#temporary hack for FileHandler and MemoryHandler.
if klass == logging.handlers.MemoryHandler:
if "target" in opts:
target = cp.get(sectname,"target")
else:
target = ""
if len(target): #the target handler may not be loaded yet, so keep for later...
|
handlers[hand] = h
#now all handlers are loaded, fixup inter-handler references...
for h, t in fixups:
h.setTarget(handlers[t])
return handlers
def _install_loggers(cp, handlers):
"""Create and install loggers"""
# configure the root first
llist = cp.get("loggers", "keys")
llist = string.split(llist, ",")
llist = map(lambda x: string.strip(x), llist)
llist.remove("root")
sectname = "logger_root"
root = logging.root
log = root
opts = cp.options(sectname)
if "level" in opts:
level = cp.get(sectname, "level")
log.setLevel(logging._levelNames[level])
for h in root.handlers[:]:
root.removeHandler(h)
hlist = cp.get(sectname, "handlers")
if len(hlist):
hlist = string.split(hlist, ",")
for hand in hlist:
log.addHandler(handlers[string.strip(hand)])
#and now the others...
#we don't want to lose the existing loggers,
#since other threads may have pointers to them.
#existing is set to contain all existing loggers,
#and as we go through the new configuration we
#remove any which are configured. At the end,
#what's left in existing is the set of loggers
#which were in the previous configuration but
#which are not in the new configuration.
existing = root.manager.loggerDict.keys()
#now set up the new ones...
for log in llist:
sectname = "logger_%s" % log
qn = cp.get(sectname, "qualname")
opts = cp.options(sectname)
if "propagate" in opts:
propagate = cp.getint(sectname, "propagate")
else:
propagate = 1
logger = logging.getLogger(qn)
if qn in existing:
existing.remove(qn)
if "level" in opts:
level = cp.get(sectname, "level")
logger.setLevel(logging._levelNames[level])
for h in logger.handlers[:]:
logger.removeHandler(h)
logger.propagate = propagate
logger.disabled = 0
hlist = cp.get(sectname, "handlers")
if len(hlist):
hlist = string.split(hlist, ",")
for hand in hlist:
logger.addHandler(handlers[string.strip(hand)])
#Disable any old loggers. There's no point deleting
#them as other threads may continue to hold references
#and by disabling them, you stop them doing any logging.
for log in existing:
root.manager.loggerDict[log].disabled = 1
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
"""
Start up a socket server on the specified port, and listen for new
configurations.
These will be sent as a file suitable for processing by fileConfig().
Returns a Thread object on which you can call start() to start the server,
and which you can join() when appropriate. To stop the server, call
stopListening().
"""
if not thread:
raise NotImplementedError, "listen() needs threading to work"
class ConfigStreamHandler(StreamRequestHandler):
"""
Handler for a logging configuration request.
It expects a completely new logging configuration and uses fileConfig
to install it.
"""
def handle(self):
"""
Handle a request.
Each request is expected to be a 4-byte length, packed using
struct.pack(">L", n), followed by the config file.
Uses fileConfig() to do the grunt work.
"""
import tempfile
try:
conn = self.connection
chunk = conn.recv(4)
if len(chunk) == 4:
slen = struct.unpack(">L", chunk)[0]
chunk = self.connection.recv(slen)
while len(chunk) < slen:
chunk = chunk + conn.recv(slen - len(chunk))
#Apply new configuration. We'd like to be able to
#create a StringIO and pass that in, but unfortunately
#1.5.2 ConfigParser does not support reading file
#objects, only actual files. So we create a temporary
#file and remove it later.
file = tempfile.mktemp(".ini")
f = open(file, "w")
f.write(chunk)
f.close()
try:
fileConfig(file)
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
os.remove(file)
except socket.error, e:
if type(e.args) != types.TupleType:
raise
else:
errcode = e.args[0]
if errcode != RESET_ERROR:
raise
class ConfigSocketReceiver(ThreadingTCPServer):
"""
A simple TCP socket-based logging config receiver.
"""
allow_reuse_address = 1
def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
handler=None):
ThreadingTCPServer.__init__(self, (host, port), handler)
logging._acquireLock()
self.abort = 0
logging._releaseLock()
self.timeout = 1
def serve_until_stopped(self):
import select
abort = 0
while not abort:
rd, wr, ex = select.select([self.socket.fileno()],
[], [],
self.timeout)
if rd:
self.handle_request()
logging._acquireLock()
abort = self.abort
logging._releaseLock()
def serve(rcvr, hdlr, port):
server = rcvr(port=port, handler=hdlr)
global _listener
logging._acquireLock()
_listener = server
logging._releaseLock()
server.serve_until_stopped()
return threading.Thread(target=serve,
args=(ConfigSocketReceiver,
ConfigStreamHandler, port))
def stopListening():
"""
Stop the listening server which was created with a call to listen().
"""
global _listener
if _listener:
logging._acquireLock()
_listener.abort = 1
_listener = None
logging._releaseLock()
|
fixups.append((h, target))
|
conditional_block
|
review_group_user.py
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.utils import six
from djblets.util.decorators import augment_method_from
from djblets.webapi.decorators import (webapi_login_required,
webapi_response_errors,
webapi_request_fields)
from djblets.webapi.errors import (DOES_NOT_EXIST, NOT_LOGGED_IN,
PERMISSION_DENIED)
from reviewboard.reviews.models import Group
from reviewboard.webapi.base import WebAPIResource
from reviewboard.webapi.decorators import webapi_check_local_site
from reviewboard.webapi.errors import INVALID_USER
from reviewboard.webapi.resources import resources
from reviewboard.webapi.resources.user import UserResource
class ReviewGroupUserResource(UserResource):
"""Provides information on users that are members of a review group."""
allowed_methods = ('GET', 'POST', 'DELETE')
policy_id = 'review_group_user'
def get_queryset(self, request, group_name, local_site_name=None,
*args, **kwargs):
group = Group.objects.get(name=group_name,
local_site__name=local_site_name)
return group.users.all()
def has_access_permissions(self, request, user, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_accessible_by(request.user)
def has_list_access_permissions(self, request, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_accessible_by(request.user)
def has_modify_permissions(self, request, group, username, local_site):
return (
resources.review_group.has_modify_permissions(request, group) or
(request.user.username == username and
group.is_accessible_by(request.user))
)
def has_delete_permissions(self, request, user, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_mutable_by(request.user)
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, INVALID_USER,
NOT_LOGGED_IN, PERMISSION_DENIED)
@webapi_request_fields(required={
'username': {
'type': six.text_type,
'description': 'The user to add to the group.',
},
})
def create(self, request, username, *args, **kwargs):
"""Adds a user to a review group."""
group_resource = resources.review_group
try:
group = group_resource.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
local_site = self._get_local_site(kwargs.get('local_site_name', None))
if (not group_resource.has_access_permissions(request, group) or
not self.has_modify_permissions(request, group, username,
local_site)):
return self._no_access_error(request.user)
try:
if local_site:
user = local_site.users.get(username=username)
else:
user = User.objects.get(username=username)
except ObjectDoesNotExist:
return INVALID_USER
group.users.add(user)
return 201, {
self.item_result_key: user,
}
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, INVALID_USER,
NOT_LOGGED_IN, PERMISSION_DENIED)
def delete(self, request, *args, **kwargs):
|
@webapi_check_local_site
@augment_method_from(WebAPIResource)
def get_list(self, *args, **kwargs):
"""Retrieves the list of users belonging to a specific review group.
This includes only the users who have active accounts on the site.
Any account that has been disabled (for inactivity, spam reasons,
or anything else) will be excluded from the list.
The list of users can be filtered down using the ``q`` and
``fullname`` parameters.
Setting ``q`` to a value will by default limit the results to
usernames starting with that value. This is a case-insensitive
comparison.
If ``fullname`` is set to ``1``, the first and last names will also be
checked along with the username. ``fullname`` is ignored if ``q``
is not set.
For example, accessing ``/api/users/?q=bo&fullname=1`` will list
any users with a username, first name or last name starting with
``bo``.
"""
pass
review_group_user_resource = ReviewGroupUserResource()
|
"""Removes a user from a review group."""
group_resource = resources.review_group
try:
group = group_resource.get_object(request, *args, **kwargs)
user = self.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
local_site = self._get_local_site(kwargs.get('local_site_name', None))
if (not group_resource.has_access_permissions(request, group) or
not self.has_modify_permissions(request, group, user.username,
local_site)):
return self._no_access_error(request.user)
group.users.remove(user)
return 204, {}
|
identifier_body
|
review_group_user.py
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.utils import six
from djblets.util.decorators import augment_method_from
from djblets.webapi.decorators import (webapi_login_required,
webapi_response_errors,
webapi_request_fields)
from djblets.webapi.errors import (DOES_NOT_EXIST, NOT_LOGGED_IN,
PERMISSION_DENIED)
from reviewboard.reviews.models import Group
from reviewboard.webapi.base import WebAPIResource
from reviewboard.webapi.decorators import webapi_check_local_site
from reviewboard.webapi.errors import INVALID_USER
from reviewboard.webapi.resources import resources
from reviewboard.webapi.resources.user import UserResource
class ReviewGroupUserResource(UserResource):
"""Provides information on users that are members of a review group."""
allowed_methods = ('GET', 'POST', 'DELETE')
policy_id = 'review_group_user'
def get_queryset(self, request, group_name, local_site_name=None,
*args, **kwargs):
group = Group.objects.get(name=group_name,
local_site__name=local_site_name)
return group.users.all()
def has_access_permissions(self, request, user, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_accessible_by(request.user)
def has_list_access_permissions(self, request, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_accessible_by(request.user)
def has_modify_permissions(self, request, group, username, local_site):
return (
resources.review_group.has_modify_permissions(request, group) or
(request.user.username == username and
group.is_accessible_by(request.user))
)
def has_delete_permissions(self, request, user, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_mutable_by(request.user)
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, INVALID_USER,
NOT_LOGGED_IN, PERMISSION_DENIED)
@webapi_request_fields(required={
'username': {
'type': six.text_type,
'description': 'The user to add to the group.',
},
})
def create(self, request, username, *args, **kwargs):
"""Adds a user to a review group."""
group_resource = resources.review_group
try:
group = group_resource.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
local_site = self._get_local_site(kwargs.get('local_site_name', None))
if (not group_resource.has_access_permissions(request, group) or
not self.has_modify_permissions(request, group, username,
local_site)):
return self._no_access_error(request.user)
try:
if local_site:
user = local_site.users.get(username=username)
else:
user = User.objects.get(username=username)
except ObjectDoesNotExist:
return INVALID_USER
group.users.add(user)
return 201, {
self.item_result_key: user,
}
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, INVALID_USER,
NOT_LOGGED_IN, PERMISSION_DENIED)
def delete(self, request, *args, **kwargs):
|
try:
group = group_resource.get_object(request, *args, **kwargs)
user = self.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
local_site = self._get_local_site(kwargs.get('local_site_name', None))
if (not group_resource.has_access_permissions(request, group) or
not self.has_modify_permissions(request, group, user.username,
local_site)):
return self._no_access_error(request.user)
group.users.remove(user)
return 204, {}
@webapi_check_local_site
@augment_method_from(WebAPIResource)
def get_list(self, *args, **kwargs):
"""Retrieves the list of users belonging to a specific review group.
This includes only the users who have active accounts on the site.
Any account that has been disabled (for inactivity, spam reasons,
or anything else) will be excluded from the list.
The list of users can be filtered down using the ``q`` and
``fullname`` parameters.
Setting ``q`` to a value will by default limit the results to
usernames starting with that value. This is a case-insensitive
comparison.
If ``fullname`` is set to ``1``, the first and last names will also be
checked along with the username. ``fullname`` is ignored if ``q``
is not set.
For example, accessing ``/api/users/?q=bo&fullname=1`` will list
any users with a username, first name or last name starting with
``bo``.
"""
pass
review_group_user_resource = ReviewGroupUserResource()
|
"""Removes a user from a review group."""
group_resource = resources.review_group
|
random_line_split
|
review_group_user.py
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.utils import six
from djblets.util.decorators import augment_method_from
from djblets.webapi.decorators import (webapi_login_required,
webapi_response_errors,
webapi_request_fields)
from djblets.webapi.errors import (DOES_NOT_EXIST, NOT_LOGGED_IN,
PERMISSION_DENIED)
from reviewboard.reviews.models import Group
from reviewboard.webapi.base import WebAPIResource
from reviewboard.webapi.decorators import webapi_check_local_site
from reviewboard.webapi.errors import INVALID_USER
from reviewboard.webapi.resources import resources
from reviewboard.webapi.resources.user import UserResource
class ReviewGroupUserResource(UserResource):
"""Provides information on users that are members of a review group."""
allowed_methods = ('GET', 'POST', 'DELETE')
policy_id = 'review_group_user'
def get_queryset(self, request, group_name, local_site_name=None,
*args, **kwargs):
group = Group.objects.get(name=group_name,
local_site__name=local_site_name)
return group.users.all()
def has_access_permissions(self, request, user, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_accessible_by(request.user)
def has_list_access_permissions(self, request, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_accessible_by(request.user)
def has_modify_permissions(self, request, group, username, local_site):
return (
resources.review_group.has_modify_permissions(request, group) or
(request.user.username == username and
group.is_accessible_by(request.user))
)
def has_delete_permissions(self, request, user, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_mutable_by(request.user)
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, INVALID_USER,
NOT_LOGGED_IN, PERMISSION_DENIED)
@webapi_request_fields(required={
'username': {
'type': six.text_type,
'description': 'The user to add to the group.',
},
})
def create(self, request, username, *args, **kwargs):
"""Adds a user to a review group."""
group_resource = resources.review_group
try:
group = group_resource.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
local_site = self._get_local_site(kwargs.get('local_site_name', None))
if (not group_resource.has_access_permissions(request, group) or
not self.has_modify_permissions(request, group, username,
local_site)):
return self._no_access_error(request.user)
try:
if local_site:
|
else:
user = User.objects.get(username=username)
except ObjectDoesNotExist:
return INVALID_USER
group.users.add(user)
return 201, {
self.item_result_key: user,
}
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, INVALID_USER,
NOT_LOGGED_IN, PERMISSION_DENIED)
def delete(self, request, *args, **kwargs):
"""Removes a user from a review group."""
group_resource = resources.review_group
try:
group = group_resource.get_object(request, *args, **kwargs)
user = self.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
local_site = self._get_local_site(kwargs.get('local_site_name', None))
if (not group_resource.has_access_permissions(request, group) or
not self.has_modify_permissions(request, group, user.username,
local_site)):
return self._no_access_error(request.user)
group.users.remove(user)
return 204, {}
@webapi_check_local_site
@augment_method_from(WebAPIResource)
def get_list(self, *args, **kwargs):
"""Retrieves the list of users belonging to a specific review group.
This includes only the users who have active accounts on the site.
Any account that has been disabled (for inactivity, spam reasons,
or anything else) will be excluded from the list.
The list of users can be filtered down using the ``q`` and
``fullname`` parameters.
Setting ``q`` to a value will by default limit the results to
usernames starting with that value. This is a case-insensitive
comparison.
If ``fullname`` is set to ``1``, the first and last names will also be
checked along with the username. ``fullname`` is ignored if ``q``
is not set.
For example, accessing ``/api/users/?q=bo&fullname=1`` will list
any users with a username, first name or last name starting with
``bo``.
"""
pass
review_group_user_resource = ReviewGroupUserResource()
|
user = local_site.users.get(username=username)
|
conditional_block
|
review_group_user.py
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.utils import six
from djblets.util.decorators import augment_method_from
from djblets.webapi.decorators import (webapi_login_required,
webapi_response_errors,
webapi_request_fields)
from djblets.webapi.errors import (DOES_NOT_EXIST, NOT_LOGGED_IN,
PERMISSION_DENIED)
from reviewboard.reviews.models import Group
from reviewboard.webapi.base import WebAPIResource
from reviewboard.webapi.decorators import webapi_check_local_site
from reviewboard.webapi.errors import INVALID_USER
from reviewboard.webapi.resources import resources
from reviewboard.webapi.resources.user import UserResource
class ReviewGroupUserResource(UserResource):
"""Provides information on users that are members of a review group."""
allowed_methods = ('GET', 'POST', 'DELETE')
policy_id = 'review_group_user'
def get_queryset(self, request, group_name, local_site_name=None,
*args, **kwargs):
group = Group.objects.get(name=group_name,
local_site__name=local_site_name)
return group.users.all()
def has_access_permissions(self, request, user, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_accessible_by(request.user)
def has_list_access_permissions(self, request, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_accessible_by(request.user)
def has_modify_permissions(self, request, group, username, local_site):
return (
resources.review_group.has_modify_permissions(request, group) or
(request.user.username == username and
group.is_accessible_by(request.user))
)
def has_delete_permissions(self, request, user, *args, **kwargs):
group = resources.review_group.get_object(request, *args, **kwargs)
return group.is_mutable_by(request.user)
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, INVALID_USER,
NOT_LOGGED_IN, PERMISSION_DENIED)
@webapi_request_fields(required={
'username': {
'type': six.text_type,
'description': 'The user to add to the group.',
},
})
def
|
(self, request, username, *args, **kwargs):
"""Adds a user to a review group."""
group_resource = resources.review_group
try:
group = group_resource.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
local_site = self._get_local_site(kwargs.get('local_site_name', None))
if (not group_resource.has_access_permissions(request, group) or
not self.has_modify_permissions(request, group, username,
local_site)):
return self._no_access_error(request.user)
try:
if local_site:
user = local_site.users.get(username=username)
else:
user = User.objects.get(username=username)
except ObjectDoesNotExist:
return INVALID_USER
group.users.add(user)
return 201, {
self.item_result_key: user,
}
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, INVALID_USER,
NOT_LOGGED_IN, PERMISSION_DENIED)
def delete(self, request, *args, **kwargs):
"""Removes a user from a review group."""
group_resource = resources.review_group
try:
group = group_resource.get_object(request, *args, **kwargs)
user = self.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
local_site = self._get_local_site(kwargs.get('local_site_name', None))
if (not group_resource.has_access_permissions(request, group) or
not self.has_modify_permissions(request, group, user.username,
local_site)):
return self._no_access_error(request.user)
group.users.remove(user)
return 204, {}
@webapi_check_local_site
@augment_method_from(WebAPIResource)
def get_list(self, *args, **kwargs):
"""Retrieves the list of users belonging to a specific review group.
This includes only the users who have active accounts on the site.
Any account that has been disabled (for inactivity, spam reasons,
or anything else) will be excluded from the list.
The list of users can be filtered down using the ``q`` and
``fullname`` parameters.
Setting ``q`` to a value will by default limit the results to
usernames starting with that value. This is a case-insensitive
comparison.
If ``fullname`` is set to ``1``, the first and last names will also be
checked along with the username. ``fullname`` is ignored if ``q``
is not set.
For example, accessing ``/api/users/?q=bo&fullname=1`` will list
any users with a username, first name or last name starting with
``bo``.
"""
pass
review_group_user_resource = ReviewGroupUserResource()
|
create
|
identifier_name
|
deps.py
|
#!/usr/bin/env python
# Finds all tasks and task outputs on the dependency paths from the given downstream task T
# up to the given source/upstream task S (optional). If the upstream task is not given,
# all upstream tasks on all dependancy paths of T will be returned.
# Terms:
# if the execution of Task T depends on the output of task S on a dependancy graph,
# T is called a downstream/sink task, S is called an upstream/source task.
# This is useful and practical way to find all upstream tasks of task T.
# For example suppose you have a daily computation that starts with a task named Daily.
# And suppose you have another task named Aggregate. Daily triggers a few tasks
# which eventually trigger Aggregate. Now, suppose you find a bug in Aggregate.
# You fixed the bug and now you want to rerun it, including all it's upstream deps.
#
# To do that you run:
# bin/deps.py --module daily_module Aggregate --daily-param1 xxx --upstream-family Daily
#
# This will output all the tasks on the dependency path between Daily and Aggregate. In
# effect, this is how you find all upstream tasks for Aggregate. Now you can delete its
# output and run Aggregate again. Daily will eventually trigget Aggregate and all tasks on
# the way.
#
# The same code here might be used as a CLI tool as well as a python module.
# In python, invoke find_deps(task, upstream_name) to get a set of all task instances on the
# paths between task T and upstream task S. You can then use the task instances to delete their output or
# perform other computation based on that.
#
# Example:
#
# PYTHONPATH=$PYTHONPATH:/path/to/your/luigi/tasks bin/deps.py \
# --module my.tasks MyDownstreamTask
# --downstream_task_param1 123456
# [--upstream-family MyUpstreamTask]
#
from __future__ import print_function
import luigi.interface
from luigi.contrib.ssh import RemoteTarget
from luigi.contrib.postgres import PostgresTarget
from luigi.contrib.s3 import S3Target
from luigi.target import FileSystemTarget
from luigi.task import flatten
from luigi import parameter
import sys
from luigi.cmdline_parser import CmdlineParser
import collections
def get_task_requires(task):
return set(flatten(task.requires()))
def dfs_paths(start_task, goal_task_family, path=None):
if path is None:
path = [start_task]
if start_task.task_family == goal_task_family or goal_task_family is None:
for item in path:
yield item
for next in get_task_requires(start_task) - set(path):
for t in dfs_paths(next, goal_task_family, path + [next]):
yield t
class upstream(luigi.task.Config):
'''
Used to provide the parameter upstream-family
'''
family = parameter.Parameter(default=None)
def find_deps(task, upstream_task_family):
'''
Finds all dependencies that start with the given task and have a path
to upstream_task_family
Returns all deps on all paths between task and upstream
'''
return set([t for t in dfs_paths(task, upstream_task_family)])
def find_deps_cli():
'''
Finds all tasks on all paths from provided CLI task
'''
cmdline_args = sys.argv[1:]
with CmdlineParser.global_instance(cmdline_args) as cp:
return find_deps(cp.get_task_obj(), upstream().family)
def get_task_output_description(task_output):
'''
Returns a task's output as a string
'''
output_description = "n/a"
if isinstance(task_output, RemoteTarget):
output_description = "[SSH] {0}:{1}".format(task_output._fs.remote_context.host, task_output.path)
elif isinstance(task_output, S3Target):
output_description = "[S3] {0}".format(task_output.path)
elif isinstance(task_output, FileSystemTarget):
output_description = "[FileSystem] {0}".format(task_output.path)
elif isinstance(task_output, PostgresTarget):
output_description = "[DB] {0}:{1}".format(task_output.host, task_output.table)
else:
output_description = "to be determined"
return output_description
def
|
():
deps = find_deps_cli()
for task in deps:
task_output = task.output()
if isinstance(task_output, dict):
output_descriptions = [get_task_output_description(output) for label, output in task_output.items()]
elif isinstance(task_output, collections.Iterable):
output_descriptions = [get_task_output_description(output) for output in task_output]
else:
output_descriptions = [get_task_output_description(task_output)]
print(" TASK: {0}".format(task))
for desc in output_descriptions:
print(" : {0}".format(desc))
if __name__ == '__main__':
main()
|
main
|
identifier_name
|
deps.py
|
#!/usr/bin/env python
# Finds all tasks and task outputs on the dependency paths from the given downstream task T
# up to the given source/upstream task S (optional). If the upstream task is not given,
# all upstream tasks on all dependancy paths of T will be returned.
# Terms:
# if the execution of Task T depends on the output of task S on a dependancy graph,
# T is called a downstream/sink task, S is called an upstream/source task.
# This is useful and practical way to find all upstream tasks of task T.
# For example suppose you have a daily computation that starts with a task named Daily.
# And suppose you have another task named Aggregate. Daily triggers a few tasks
# which eventually trigger Aggregate. Now, suppose you find a bug in Aggregate.
# You fixed the bug and now you want to rerun it, including all it's upstream deps.
#
# To do that you run:
# bin/deps.py --module daily_module Aggregate --daily-param1 xxx --upstream-family Daily
#
# This will output all the tasks on the dependency path between Daily and Aggregate. In
# effect, this is how you find all upstream tasks for Aggregate. Now you can delete its
# output and run Aggregate again. Daily will eventually trigget Aggregate and all tasks on
# the way.
#
# The same code here might be used as a CLI tool as well as a python module.
# In python, invoke find_deps(task, upstream_name) to get a set of all task instances on the
# paths between task T and upstream task S. You can then use the task instances to delete their output or
# perform other computation based on that.
#
# Example:
#
# PYTHONPATH=$PYTHONPATH:/path/to/your/luigi/tasks bin/deps.py \
# --module my.tasks MyDownstreamTask
# --downstream_task_param1 123456
# [--upstream-family MyUpstreamTask]
#
from __future__ import print_function
import luigi.interface
from luigi.contrib.ssh import RemoteTarget
from luigi.contrib.postgres import PostgresTarget
from luigi.contrib.s3 import S3Target
from luigi.target import FileSystemTarget
from luigi.task import flatten
from luigi import parameter
import sys
from luigi.cmdline_parser import CmdlineParser
import collections
def get_task_requires(task):
return set(flatten(task.requires()))
def dfs_paths(start_task, goal_task_family, path=None):
if path is None:
path = [start_task]
if start_task.task_family == goal_task_family or goal_task_family is None:
for item in path:
yield item
for next in get_task_requires(start_task) - set(path):
for t in dfs_paths(next, goal_task_family, path + [next]):
yield t
class upstream(luigi.task.Config):
'''
Used to provide the parameter upstream-family
'''
family = parameter.Parameter(default=None)
def find_deps(task, upstream_task_family):
'''
Finds all dependencies that start with the given task and have a path
to upstream_task_family
Returns all deps on all paths between task and upstream
'''
return set([t for t in dfs_paths(task, upstream_task_family)])
def find_deps_cli():
'''
Finds all tasks on all paths from provided CLI task
'''
cmdline_args = sys.argv[1:]
with CmdlineParser.global_instance(cmdline_args) as cp:
return find_deps(cp.get_task_obj(), upstream().family)
def get_task_output_description(task_output):
'''
Returns a task's output as a string
'''
output_description = "n/a"
if isinstance(task_output, RemoteTarget):
output_description = "[SSH] {0}:{1}".format(task_output._fs.remote_context.host, task_output.path)
elif isinstance(task_output, S3Target):
output_description = "[S3] {0}".format(task_output.path)
elif isinstance(task_output, FileSystemTarget):
output_description = "[FileSystem] {0}".format(task_output.path)
elif isinstance(task_output, PostgresTarget):
output_description = "[DB] {0}:{1}".format(task_output.host, task_output.table)
else:
output_description = "to be determined"
return output_description
|
deps = find_deps_cli()
for task in deps:
task_output = task.output()
if isinstance(task_output, dict):
output_descriptions = [get_task_output_description(output) for label, output in task_output.items()]
elif isinstance(task_output, collections.Iterable):
output_descriptions = [get_task_output_description(output) for output in task_output]
else:
output_descriptions = [get_task_output_description(task_output)]
print(" TASK: {0}".format(task))
for desc in output_descriptions:
print(" : {0}".format(desc))
if __name__ == '__main__':
main()
|
def main():
|
random_line_split
|
deps.py
|
#!/usr/bin/env python
# Finds all tasks and task outputs on the dependency paths from the given downstream task T
# up to the given source/upstream task S (optional). If the upstream task is not given,
# all upstream tasks on all dependancy paths of T will be returned.
# Terms:
# if the execution of Task T depends on the output of task S on a dependancy graph,
# T is called a downstream/sink task, S is called an upstream/source task.
# This is useful and practical way to find all upstream tasks of task T.
# For example suppose you have a daily computation that starts with a task named Daily.
# And suppose you have another task named Aggregate. Daily triggers a few tasks
# which eventually trigger Aggregate. Now, suppose you find a bug in Aggregate.
# You fixed the bug and now you want to rerun it, including all it's upstream deps.
#
# To do that you run:
# bin/deps.py --module daily_module Aggregate --daily-param1 xxx --upstream-family Daily
#
# This will output all the tasks on the dependency path between Daily and Aggregate. In
# effect, this is how you find all upstream tasks for Aggregate. Now you can delete its
# output and run Aggregate again. Daily will eventually trigget Aggregate and all tasks on
# the way.
#
# The same code here might be used as a CLI tool as well as a python module.
# In python, invoke find_deps(task, upstream_name) to get a set of all task instances on the
# paths between task T and upstream task S. You can then use the task instances to delete their output or
# perform other computation based on that.
#
# Example:
#
# PYTHONPATH=$PYTHONPATH:/path/to/your/luigi/tasks bin/deps.py \
# --module my.tasks MyDownstreamTask
# --downstream_task_param1 123456
# [--upstream-family MyUpstreamTask]
#
from __future__ import print_function
import luigi.interface
from luigi.contrib.ssh import RemoteTarget
from luigi.contrib.postgres import PostgresTarget
from luigi.contrib.s3 import S3Target
from luigi.target import FileSystemTarget
from luigi.task import flatten
from luigi import parameter
import sys
from luigi.cmdline_parser import CmdlineParser
import collections
def get_task_requires(task):
return set(flatten(task.requires()))
def dfs_paths(start_task, goal_task_family, path=None):
|
class upstream(luigi.task.Config):
'''
Used to provide the parameter upstream-family
'''
family = parameter.Parameter(default=None)
def find_deps(task, upstream_task_family):
'''
Finds all dependencies that start with the given task and have a path
to upstream_task_family
Returns all deps on all paths between task and upstream
'''
return set([t for t in dfs_paths(task, upstream_task_family)])
def find_deps_cli():
'''
Finds all tasks on all paths from provided CLI task
'''
cmdline_args = sys.argv[1:]
with CmdlineParser.global_instance(cmdline_args) as cp:
return find_deps(cp.get_task_obj(), upstream().family)
def get_task_output_description(task_output):
'''
Returns a task's output as a string
'''
output_description = "n/a"
if isinstance(task_output, RemoteTarget):
output_description = "[SSH] {0}:{1}".format(task_output._fs.remote_context.host, task_output.path)
elif isinstance(task_output, S3Target):
output_description = "[S3] {0}".format(task_output.path)
elif isinstance(task_output, FileSystemTarget):
output_description = "[FileSystem] {0}".format(task_output.path)
elif isinstance(task_output, PostgresTarget):
output_description = "[DB] {0}:{1}".format(task_output.host, task_output.table)
else:
output_description = "to be determined"
return output_description
def main():
deps = find_deps_cli()
for task in deps:
task_output = task.output()
if isinstance(task_output, dict):
output_descriptions = [get_task_output_description(output) for label, output in task_output.items()]
elif isinstance(task_output, collections.Iterable):
output_descriptions = [get_task_output_description(output) for output in task_output]
else:
output_descriptions = [get_task_output_description(task_output)]
print(" TASK: {0}".format(task))
for desc in output_descriptions:
print(" : {0}".format(desc))
if __name__ == '__main__':
main()
|
if path is None:
path = [start_task]
if start_task.task_family == goal_task_family or goal_task_family is None:
for item in path:
yield item
for next in get_task_requires(start_task) - set(path):
for t in dfs_paths(next, goal_task_family, path + [next]):
yield t
|
identifier_body
|
deps.py
|
#!/usr/bin/env python
# Finds all tasks and task outputs on the dependency paths from the given downstream task T
# up to the given source/upstream task S (optional). If the upstream task is not given,
# all upstream tasks on all dependancy paths of T will be returned.
# Terms:
# if the execution of Task T depends on the output of task S on a dependancy graph,
# T is called a downstream/sink task, S is called an upstream/source task.
# This is useful and practical way to find all upstream tasks of task T.
# For example suppose you have a daily computation that starts with a task named Daily.
# And suppose you have another task named Aggregate. Daily triggers a few tasks
# which eventually trigger Aggregate. Now, suppose you find a bug in Aggregate.
# You fixed the bug and now you want to rerun it, including all it's upstream deps.
#
# To do that you run:
# bin/deps.py --module daily_module Aggregate --daily-param1 xxx --upstream-family Daily
#
# This will output all the tasks on the dependency path between Daily and Aggregate. In
# effect, this is how you find all upstream tasks for Aggregate. Now you can delete its
# output and run Aggregate again. Daily will eventually trigget Aggregate and all tasks on
# the way.
#
# The same code here might be used as a CLI tool as well as a python module.
# In python, invoke find_deps(task, upstream_name) to get a set of all task instances on the
# paths between task T and upstream task S. You can then use the task instances to delete their output or
# perform other computation based on that.
#
# Example:
#
# PYTHONPATH=$PYTHONPATH:/path/to/your/luigi/tasks bin/deps.py \
# --module my.tasks MyDownstreamTask
# --downstream_task_param1 123456
# [--upstream-family MyUpstreamTask]
#
from __future__ import print_function
import luigi.interface
from luigi.contrib.ssh import RemoteTarget
from luigi.contrib.postgres import PostgresTarget
from luigi.contrib.s3 import S3Target
from luigi.target import FileSystemTarget
from luigi.task import flatten
from luigi import parameter
import sys
from luigi.cmdline_parser import CmdlineParser
import collections
def get_task_requires(task):
return set(flatten(task.requires()))
def dfs_paths(start_task, goal_task_family, path=None):
if path is None:
path = [start_task]
if start_task.task_family == goal_task_family or goal_task_family is None:
for item in path:
yield item
for next in get_task_requires(start_task) - set(path):
for t in dfs_paths(next, goal_task_family, path + [next]):
yield t
class upstream(luigi.task.Config):
'''
Used to provide the parameter upstream-family
'''
family = parameter.Parameter(default=None)
def find_deps(task, upstream_task_family):
'''
Finds all dependencies that start with the given task and have a path
to upstream_task_family
Returns all deps on all paths between task and upstream
'''
return set([t for t in dfs_paths(task, upstream_task_family)])
def find_deps_cli():
'''
Finds all tasks on all paths from provided CLI task
'''
cmdline_args = sys.argv[1:]
with CmdlineParser.global_instance(cmdline_args) as cp:
return find_deps(cp.get_task_obj(), upstream().family)
def get_task_output_description(task_output):
'''
Returns a task's output as a string
'''
output_description = "n/a"
if isinstance(task_output, RemoteTarget):
output_description = "[SSH] {0}:{1}".format(task_output._fs.remote_context.host, task_output.path)
elif isinstance(task_output, S3Target):
output_description = "[S3] {0}".format(task_output.path)
elif isinstance(task_output, FileSystemTarget):
output_description = "[FileSystem] {0}".format(task_output.path)
elif isinstance(task_output, PostgresTarget):
output_description = "[DB] {0}:{1}".format(task_output.host, task_output.table)
else:
|
return output_description
def main():
deps = find_deps_cli()
for task in deps:
task_output = task.output()
if isinstance(task_output, dict):
output_descriptions = [get_task_output_description(output) for label, output in task_output.items()]
elif isinstance(task_output, collections.Iterable):
output_descriptions = [get_task_output_description(output) for output in task_output]
else:
output_descriptions = [get_task_output_description(task_output)]
print(" TASK: {0}".format(task))
for desc in output_descriptions:
print(" : {0}".format(desc))
if __name__ == '__main__':
main()
|
output_description = "to be determined"
|
conditional_block
|
kendo-test-json.ts
|
/// <reference path="../Scripts/typings/aurelia/aurelia.d.ts"/>
/// <reference path="../Scripts/typings/jquery/jquery.d.ts"/>
/// <reference path="../Scripts/typings/kendo/kendo.all.d.ts"/>
/// <reference path="services/products.ts"/>
//import $ = require("jquery");
//import k = require("kendo");
import products = require("./services/products");
export class KendoTest {
constructor()
|
attached() {
console.log("kendo-test attached :)");
var dataSource = new kendo.data.DataSource({
type: "json",
transport: {
read: "./dist/services/products.json"
},
pageSize: 21
});
$("#pager").kendoPager({
dataSource: dataSource
});
$("#listView").kendoListView({
dataSource: dataSource,
template: kendo.template($("#template").html())
});
}
}
// http://demos.telerik.com/kendo-ui/content/shared/js/products.js
|
{
console.log("kendo-test constructed :)");
}
|
identifier_body
|
kendo-test-json.ts
|
/// <reference path="../Scripts/typings/aurelia/aurelia.d.ts"/>
/// <reference path="../Scripts/typings/jquery/jquery.d.ts"/>
/// <reference path="../Scripts/typings/kendo/kendo.all.d.ts"/>
/// <reference path="services/products.ts"/>
//import $ = require("jquery");
//import k = require("kendo");
import products = require("./services/products");
export class KendoTest {
constructor() {
|
attached() {
console.log("kendo-test attached :)");
var dataSource = new kendo.data.DataSource({
type: "json",
transport: {
read: "./dist/services/products.json"
},
pageSize: 21
});
$("#pager").kendoPager({
dataSource: dataSource
});
$("#listView").kendoListView({
dataSource: dataSource,
template: kendo.template($("#template").html())
});
}
}
// http://demos.telerik.com/kendo-ui/content/shared/js/products.js
|
console.log("kendo-test constructed :)");
}
|
random_line_split
|
kendo-test-json.ts
|
/// <reference path="../Scripts/typings/aurelia/aurelia.d.ts"/>
/// <reference path="../Scripts/typings/jquery/jquery.d.ts"/>
/// <reference path="../Scripts/typings/kendo/kendo.all.d.ts"/>
/// <reference path="services/products.ts"/>
//import $ = require("jquery");
//import k = require("kendo");
import products = require("./services/products");
export class KendoTest {
|
() {
console.log("kendo-test constructed :)");
}
attached() {
console.log("kendo-test attached :)");
var dataSource = new kendo.data.DataSource({
type: "json",
transport: {
read: "./dist/services/products.json"
},
pageSize: 21
});
$("#pager").kendoPager({
dataSource: dataSource
});
$("#listView").kendoListView({
dataSource: dataSource,
template: kendo.template($("#template").html())
});
}
}
// http://demos.telerik.com/kendo-ui/content/shared/js/products.js
|
constructor
|
identifier_name
|
net_logging.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["deprecated"],
"supported_by": "network",
}
DOCUMENTATION = """module: net_logging
author: Ganesh Nalawade (@ganeshrn)
short_description: Manage logging on network devices
description:
- This module provides declarative management of logging on network devices.
deprecated:
removed_in: '2.13'
alternative: Use platform-specific "[netos]_logging" module
why: Updated modules released with more functionality
extends_documentation_fragment:
- ansible.netcommon.network_agnostic
options:
dest:
description:
- Destination of the logs.
choices:
- console
- host
name:
|
level:
description:
- Set logging severity levels.
aggregate:
description: List of logging definitions.
purge:
description:
- Purge logging not defined in the I(aggregate) parameter.
default: false
state:
description:
- State of the logging configuration.
default: present
choices:
- present
- absent
"""
EXAMPLES = """
- name: configure console logging
net_logging:
dest: console
facility: any
level: critical
- name: remove console logging configuration
net_logging:
dest: console
state: absent
- name: configure host logging
net_logging:
dest: host
name: 192.0.2.1
facility: kernel
level: critical
- name: Configure file logging using aggregate
net_logging:
dest: file
aggregate:
- name: test-1
facility: pfe
level: critical
- name: test-2
facility: kernel
level: emergency
- name: Delete file logging using aggregate
net_logging:
dest: file
aggregate:
- name: test-1
facility: pfe
level: critical
- name: test-2
facility: kernel
level: emergency
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always, except for the platforms that use Netconf transport to manage the device.
type: list
sample:
- logging console critical
"""
|
description:
- If value of C(dest) is I(host) it indicates file-name the host name to be notified.
facility:
description:
- Set logging facility.
|
random_line_split
|
localDataFactory.ts
|
///<reference path="../coreReferences.ts"/>
/**
* Created by Johanna on 2015-08-28.
*/
module Moosetrail.Core.DataAccess {
export class LocalDataFactory {
private prefix = "data";
protected q: ng.IQService;
private localStorage: ng.local.storage.ILocalStorageService;
static $inject = ["$q", "localStorageService"];
public constructor($q: ng.IQService, local: angular.local.storage.ILocalStorageService) {
this.q = $q;
this.localStorage = local;
}
protected getDataWithKey(key: string): ng.IPromise<DataResult> {
var def = this.q.defer();
var foundData = this.localStorage.get(this.prefix + key);
if (foundData instanceof NoDataObj) {
def.resolve(new DataResult(null, 0, DataSource.LocalStorage));
}
else if (foundData != null) {
def.resolve(new DataResult(foundData, 0, DataSource.LocalStorage));
} else {
def.reject(new DataResult(null, -1, DataSource.LocalStorage));
}
return def.promise;
}
protected
|
(key: string, data: any): void {
if (data != null)
this.localStorage.set(this.prefix + key, data);
else
this.localStorage.set(this.prefix + key, new NoDataObj());
}
protected getPlainData(identifier: string): any {
return this.localStorage.get(this.prefix + identifier);
}
}
class NoDataObj {
}
}
|
setDataWithKey
|
identifier_name
|
localDataFactory.ts
|
///<reference path="../coreReferences.ts"/>
/**
* Created by Johanna on 2015-08-28.
*/
module Moosetrail.Core.DataAccess {
export class LocalDataFactory {
private prefix = "data";
protected q: ng.IQService;
private localStorage: ng.local.storage.ILocalStorageService;
static $inject = ["$q", "localStorageService"];
public constructor($q: ng.IQService, local: angular.local.storage.ILocalStorageService) {
this.q = $q;
this.localStorage = local;
}
protected getDataWithKey(key: string): ng.IPromise<DataResult> {
var def = this.q.defer();
var foundData = this.localStorage.get(this.prefix + key);
if (foundData instanceof NoDataObj)
|
else if (foundData != null) {
def.resolve(new DataResult(foundData, 0, DataSource.LocalStorage));
} else {
def.reject(new DataResult(null, -1, DataSource.LocalStorage));
}
return def.promise;
}
protected setDataWithKey(key: string, data: any): void {
if (data != null)
this.localStorage.set(this.prefix + key, data);
else
this.localStorage.set(this.prefix + key, new NoDataObj());
}
protected getPlainData(identifier: string): any {
return this.localStorage.get(this.prefix + identifier);
}
}
class NoDataObj {
}
}
|
{
def.resolve(new DataResult(null, 0, DataSource.LocalStorage));
}
|
conditional_block
|
localDataFactory.ts
|
///<reference path="../coreReferences.ts"/>
/**
* Created by Johanna on 2015-08-28.
*/
module Moosetrail.Core.DataAccess {
export class LocalDataFactory {
private prefix = "data";
protected q: ng.IQService;
private localStorage: ng.local.storage.ILocalStorageService;
static $inject = ["$q", "localStorageService"];
public constructor($q: ng.IQService, local: angular.local.storage.ILocalStorageService) {
this.q = $q;
this.localStorage = local;
}
protected getDataWithKey(key: string): ng.IPromise<DataResult> {
var def = this.q.defer();
var foundData = this.localStorage.get(this.prefix + key);
if (foundData instanceof NoDataObj) {
def.resolve(new DataResult(null, 0, DataSource.LocalStorage));
}
else if (foundData != null) {
def.resolve(new DataResult(foundData, 0, DataSource.LocalStorage));
} else {
def.reject(new DataResult(null, -1, DataSource.LocalStorage));
}
return def.promise;
}
protected setDataWithKey(key: string, data: any): void {
if (data != null)
this.localStorage.set(this.prefix + key, data);
else
this.localStorage.set(this.prefix + key, new NoDataObj());
}
protected getPlainData(identifier: string): any {
return this.localStorage.get(this.prefix + identifier);
}
}
class NoDataObj {
|
}
|
}
|
random_line_split
|
localDataFactory.ts
|
///<reference path="../coreReferences.ts"/>
/**
* Created by Johanna on 2015-08-28.
*/
module Moosetrail.Core.DataAccess {
export class LocalDataFactory {
private prefix = "data";
protected q: ng.IQService;
private localStorage: ng.local.storage.ILocalStorageService;
static $inject = ["$q", "localStorageService"];
public constructor($q: ng.IQService, local: angular.local.storage.ILocalStorageService) {
this.q = $q;
this.localStorage = local;
}
protected getDataWithKey(key: string): ng.IPromise<DataResult> {
var def = this.q.defer();
var foundData = this.localStorage.get(this.prefix + key);
if (foundData instanceof NoDataObj) {
def.resolve(new DataResult(null, 0, DataSource.LocalStorage));
}
else if (foundData != null) {
def.resolve(new DataResult(foundData, 0, DataSource.LocalStorage));
} else {
def.reject(new DataResult(null, -1, DataSource.LocalStorage));
}
return def.promise;
}
protected setDataWithKey(key: string, data: any): void
|
protected getPlainData(identifier: string): any {
return this.localStorage.get(this.prefix + identifier);
}
}
class NoDataObj {
}
}
|
{
if (data != null)
this.localStorage.set(this.prefix + key, data);
else
this.localStorage.set(this.prefix + key, new NoDataObj());
}
|
identifier_body
|
karaoke.py
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
import smallsmilhandler
import sys
import os
class KaraokeLocal(smallsmilhandler.SmallSMILHandler):
def __init__(self, fich):
parser = make_parser()
sHandler = smallsmilhandler.SmallSMILHandler()
parser.setContentHandler(sHandler)
parser.parse(fich)
self.list_tags = sHandler.get_tags()
def __str__(self):
|
def do_local(self):
list_recurso = []
for diccionarios in self.list_tags:
for clave in diccionarios.keys():
if clave == "src":
recurso = diccionarios[clave]
os.system("wget -q " + recurso)
list_recurso = recurso.split("/")
recurso = list_recurso[-1]
diccionarios[clave] = recurso
if __name__ == "__main__":
try:
fich = open(sys.argv[1])
except IndexError:
print "Usage: python karaoke.py file.smil."
KL = KaraokeLocal(fich)
print KL
KL.do_local()
print KL
|
todo = ""
for diccionarios in self.list_tags:
frase = ""
for clave in diccionarios.keys():
if clave != "name" and diccionarios[clave] != "":
frase = frase + clave + "=" + diccionarios[clave] + "\t"
todo = todo + diccionarios['name'] + "\t" + frase + "\n"
return todo
|
identifier_body
|
karaoke.py
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
import smallsmilhandler
import sys
import os
class KaraokeLocal(smallsmilhandler.SmallSMILHandler):
def __init__(self, fich):
parser = make_parser()
sHandler = smallsmilhandler.SmallSMILHandler()
parser.setContentHandler(sHandler)
parser.parse(fich)
|
self.list_tags = sHandler.get_tags()
def __str__(self):
todo = ""
for diccionarios in self.list_tags:
frase = ""
for clave in diccionarios.keys():
if clave != "name" and diccionarios[clave] != "":
frase = frase + clave + "=" + diccionarios[clave] + "\t"
todo = todo + diccionarios['name'] + "\t" + frase + "\n"
return todo
def do_local(self):
list_recurso = []
for diccionarios in self.list_tags:
for clave in diccionarios.keys():
if clave == "src":
recurso = diccionarios[clave]
os.system("wget -q " + recurso)
list_recurso = recurso.split("/")
recurso = list_recurso[-1]
diccionarios[clave] = recurso
if __name__ == "__main__":
try:
fich = open(sys.argv[1])
except IndexError:
print "Usage: python karaoke.py file.smil."
KL = KaraokeLocal(fich)
print KL
KL.do_local()
print KL
|
random_line_split
|
|
karaoke.py
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
import smallsmilhandler
import sys
import os
class KaraokeLocal(smallsmilhandler.SmallSMILHandler):
def __init__(self, fich):
parser = make_parser()
sHandler = smallsmilhandler.SmallSMILHandler()
parser.setContentHandler(sHandler)
parser.parse(fich)
self.list_tags = sHandler.get_tags()
def __str__(self):
todo = ""
for diccionarios in self.list_tags:
frase = ""
for clave in diccionarios.keys():
if clave != "name" and diccionarios[clave] != "":
frase = frase + clave + "=" + diccionarios[clave] + "\t"
todo = todo + diccionarios['name'] + "\t" + frase + "\n"
return todo
def do_local(self):
list_recurso = []
for diccionarios in self.list_tags:
for clave in diccionarios.keys():
if clave == "src":
|
if __name__ == "__main__":
try:
fich = open(sys.argv[1])
except IndexError:
print "Usage: python karaoke.py file.smil."
KL = KaraokeLocal(fich)
print KL
KL.do_local()
print KL
|
recurso = diccionarios[clave]
os.system("wget -q " + recurso)
list_recurso = recurso.split("/")
recurso = list_recurso[-1]
diccionarios[clave] = recurso
|
conditional_block
|
karaoke.py
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
import smallsmilhandler
import sys
import os
class KaraokeLocal(smallsmilhandler.SmallSMILHandler):
def __init__(self, fich):
parser = make_parser()
sHandler = smallsmilhandler.SmallSMILHandler()
parser.setContentHandler(sHandler)
parser.parse(fich)
self.list_tags = sHandler.get_tags()
def
|
(self):
todo = ""
for diccionarios in self.list_tags:
frase = ""
for clave in diccionarios.keys():
if clave != "name" and diccionarios[clave] != "":
frase = frase + clave + "=" + diccionarios[clave] + "\t"
todo = todo + diccionarios['name'] + "\t" + frase + "\n"
return todo
def do_local(self):
list_recurso = []
for diccionarios in self.list_tags:
for clave in diccionarios.keys():
if clave == "src":
recurso = diccionarios[clave]
os.system("wget -q " + recurso)
list_recurso = recurso.split("/")
recurso = list_recurso[-1]
diccionarios[clave] = recurso
if __name__ == "__main__":
try:
fich = open(sys.argv[1])
except IndexError:
print "Usage: python karaoke.py file.smil."
KL = KaraokeLocal(fich)
print KL
KL.do_local()
print KL
|
__str__
|
identifier_name
|
resourceGained.ts
|
import { AnyEvent, EventType } from 'parser/core/Events';
import metric from 'parser/core/metric';
interface ResourcesGained {
[targetId: number]: {
[resourceTypeId: number]: {
[spellId: number]: number;
};
};
}
/**
* Returns an object with the total resource gained per resource.
*/
const resourceGained = (events: AnyEvent[]) =>
events.reduce<ResourcesGained>((obj, event) => {
if (event.type === EventType.ResourceChange) {
obj[event.targetID] = obj[event.targetID] || {};
obj[event.targetID][event.resourceChangeType] =
obj[event.targetID][event.resourceChangeType] || {};
obj[event.targetID][event.resourceChangeType][event.ability.guid] =
(obj[event.targetID][event.resourceChangeType][event.ability.guid] ?? 0) +
(event.resourceChange - event.waste);
|
export const sumResourceGained = (
resourcesGained: ResourcesGained,
resourceId: number,
playerId: number,
) =>
Object.values(resourcesGained[playerId]?.[resourceId]).reduce((sum, item) => sum + item, 0) || 0;
export const sumResourceGainedByPlayerBySpell = (
resourcesGained: ResourcesGained,
resourceId: number,
playerId: number,
spellId: number,
) => resourcesGained[playerId]?.[resourceId]?.[spellId] || 0;
export const sumResourceGainedByPlayerPerSpell = (
resourcesGained: ResourcesGained,
resourceId: number,
playerId: number,
) => resourcesGained[playerId]?.[resourceId];
export const sumResourceGainedBySpell = (
resourcesGained: ResourcesGained,
resourceId: number,
spellId: number,
) =>
Object.values(resourcesGained)
.map((obj) => obj[resourceId]?.[spellId] || 0)
.reduce((a, b) => a + b);
|
}
return obj;
}, {});
export default metric(resourceGained);
|
random_line_split
|
resourceGained.ts
|
import { AnyEvent, EventType } from 'parser/core/Events';
import metric from 'parser/core/metric';
interface ResourcesGained {
[targetId: number]: {
[resourceTypeId: number]: {
[spellId: number]: number;
};
};
}
/**
* Returns an object with the total resource gained per resource.
*/
const resourceGained = (events: AnyEvent[]) =>
events.reduce<ResourcesGained>((obj, event) => {
if (event.type === EventType.ResourceChange)
|
return obj;
}, {});
export default metric(resourceGained);
export const sumResourceGained = (
resourcesGained: ResourcesGained,
resourceId: number,
playerId: number,
) =>
Object.values(resourcesGained[playerId]?.[resourceId]).reduce((sum, item) => sum + item, 0) || 0;
export const sumResourceGainedByPlayerBySpell = (
resourcesGained: ResourcesGained,
resourceId: number,
playerId: number,
spellId: number,
) => resourcesGained[playerId]?.[resourceId]?.[spellId] || 0;
export const sumResourceGainedByPlayerPerSpell = (
resourcesGained: ResourcesGained,
resourceId: number,
playerId: number,
) => resourcesGained[playerId]?.[resourceId];
export const sumResourceGainedBySpell = (
resourcesGained: ResourcesGained,
resourceId: number,
spellId: number,
) =>
Object.values(resourcesGained)
.map((obj) => obj[resourceId]?.[spellId] || 0)
.reduce((a, b) => a + b);
|
{
obj[event.targetID] = obj[event.targetID] || {};
obj[event.targetID][event.resourceChangeType] =
obj[event.targetID][event.resourceChangeType] || {};
obj[event.targetID][event.resourceChangeType][event.ability.guid] =
(obj[event.targetID][event.resourceChangeType][event.ability.guid] ?? 0) +
(event.resourceChange - event.waste);
}
|
conditional_block
|
test_auth_saml2.py
|
from __future__ import absolute_import
import six
import pytest
import base64
from sentry.utils.compat import mock
from exam import fixture
from six.moves.urllib.parse import urlencode, urlparse, parse_qs
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from sentry.auth.authenticators import TotpInterface
from sentry.auth.providers.saml2.provider import SAML2Provider, Attributes, HAS_SAML2
from sentry.models import (
AuditLogEntry,
AuditLogEntryEvent,
AuthProvider,
Organization,
)
from sentry.testutils import AuthProviderTestCase
from sentry.testutils.helpers import Feature
from sentry.utils.compat import map
dummy_provider_config = {
"idp": {
"entity_id": "https://example.com/saml/metadata/1234",
"x509cert": "foo_x509_cert",
"sso_url": "http://example.com/sso_url",
"slo_url": "http://example.com/slo_url",
},
"attribute_mapping": {
Attributes.IDENTIFIER: "user_id",
Attributes.USER_EMAIL: "email",
Attributes.FIRST_NAME: "first_name",
Attributes.LAST_NAME: "last_name",
},
}
class DummySAML2Provider(SAML2Provider):
def get_saml_setup_pipeline(self):
return []
def build_config(self, state):
|
@pytest.mark.skipif(not HAS_SAML2, reason="SAML2 library is not installed")
class AuthSAML2Test(AuthProviderTestCase):
provider = DummySAML2Provider
provider_name = "saml2_dummy"
def setUp(self):
self.user = self.create_user("[email protected]")
self.org = self.create_organization(owner=self.user, name="saml2-org")
# enable require 2FA and enroll user
TotpInterface().enroll(self.user)
self.org.update(flags=models.F("flags").bitor(Organization.flags.require_2fa))
assert self.org.flags.require_2fa.is_set
self.auth_provider = AuthProvider.objects.create(
provider=self.provider_name, config=dummy_provider_config, organization=self.org
)
# The system.url-prefix, which is used to generate absolute URLs, must
# have a TLD for the SAML2 library to consider the URL generated for
# the ACS endpoint valid.
self.url_prefix = settings.SENTRY_OPTIONS.get("system.url-prefix")
settings.SENTRY_OPTIONS.update({"system.url-prefix": "http://testserver.com"})
super(AuthSAML2Test, self).setUp()
def tearDown(self):
# restore url-prefix config
settings.SENTRY_OPTIONS.update({"system.url-prefix": self.url_prefix})
super(AuthSAML2Test, self).tearDown()
@fixture
def login_path(self):
return reverse("sentry-auth-organization", args=["saml2-org"])
@fixture
def acs_path(self):
return reverse("sentry-auth-organization-saml-acs", args=["saml2-org"])
@fixture
def setup_path(self):
return reverse("sentry-organization-auth-provider-settings", args=["saml2-org"])
def test_redirects_to_idp(self):
resp = self.client.post(self.login_path, {"init": True})
assert resp.status_code == 302
redirect = urlparse(resp.get("Location", ""))
query = parse_qs(redirect.query)
assert redirect.path == "/sso_url"
assert "SAMLRequest" in query
def accept_auth(self, **kargs):
saml_response = self.load_fixture("saml2_auth_response.xml")
saml_response = base64.b64encode(saml_response).decode("utf-8")
# Disable validation of the SAML2 mock response
is_valid = "onelogin.saml2.response.OneLogin_Saml2_Response.is_valid"
with mock.patch(is_valid, return_value=True):
return self.client.post(self.acs_path, {"SAMLResponse": saml_response}, **kargs)
def test_auth_sp_initiated(self):
# Start auth process from SP side
self.client.post(self.login_path, {"init": True})
auth = self.accept_auth()
assert auth.status_code == 200
assert auth.context["existing_user"] == self.user
def test_auth_idp_initiated(self):
auth = self.accept_auth()
assert auth.status_code == 200
assert auth.context["existing_user"] == self.user
@mock.patch("sentry.auth.helper.logger")
def test_auth_setup(self, auth_log):
self.auth_provider.delete()
self.login_as(self.user)
data = {"init": True, "provider": self.provider_name}
with Feature(["organizations:sso-basic", "organizations:sso-saml2"]):
setup = self.client.post(self.setup_path, data)
assert setup.status_code == 302
redirect = urlparse(setup.get("Location", ""))
assert redirect.path == "/sso_url"
auth = self.accept_auth(follow=True)
messages = map(lambda m: six.text_type(m), auth.context["messages"])
assert len(messages) == 2
assert messages[0] == "You have successfully linked your account to your SSO provider."
assert messages[1].startswith("SSO has been configured for your organization")
# require 2FA disabled when saml is enabled
org = Organization.objects.get(id=self.org.id)
assert not org.flags.require_2fa.is_set
event = AuditLogEntry.objects.get(
target_object=org.id, event=AuditLogEntryEvent.ORG_EDIT, actor=self.user
)
assert "require_2fa to False when enabling SSO" in event.get_note()
auth_log.info.assert_called_once_with(
"Require 2fa disabled during sso setup", extra={"organization_id": self.org.id}
)
def test_auth_idp_initiated_no_provider(self):
self.auth_provider.delete()
auth = self.accept_auth(follow=True)
assert auth.status_code == 200
messages = map(lambda m: six.text_type(m), auth.context["messages"])
assert len(messages) == 1
assert messages[0] == "The organization does not exist or does not have SAML SSO enabled."
def test_saml_metadata(self):
path = reverse("sentry-auth-organization-saml-metadata", args=["saml2-org"])
resp = self.client.get(path)
assert resp.status_code == 200
assert resp.get("content-type") == "text/xml"
def test_logout_request(self):
saml_request = self.load_fixture("saml2_slo_request.xml")
saml_request = base64.b64encode(saml_request)
self.login_as(self.user)
path = reverse("sentry-auth-organization-saml-sls", args=["saml2-org"])
path = path + "?" + urlencode({"SAMLRequest": saml_request})
resp = self.client.get(path)
assert resp.status_code == 302
redirect = urlparse(resp.get("Location", ""))
query = parse_qs(redirect.query)
assert redirect.path == "/slo_url"
assert "SAMLResponse" in query
updated = type(self.user).objects.get(pk=self.user.id)
assert updated.session_nonce != self.user.session_nonce
|
return dummy_provider_config
|
identifier_body
|
test_auth_saml2.py
|
from __future__ import absolute_import
import six
import pytest
|
import base64
from sentry.utils.compat import mock
from exam import fixture
from six.moves.urllib.parse import urlencode, urlparse, parse_qs
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from sentry.auth.authenticators import TotpInterface
from sentry.auth.providers.saml2.provider import SAML2Provider, Attributes, HAS_SAML2
from sentry.models import (
AuditLogEntry,
AuditLogEntryEvent,
AuthProvider,
Organization,
)
from sentry.testutils import AuthProviderTestCase
from sentry.testutils.helpers import Feature
from sentry.utils.compat import map
dummy_provider_config = {
"idp": {
"entity_id": "https://example.com/saml/metadata/1234",
"x509cert": "foo_x509_cert",
"sso_url": "http://example.com/sso_url",
"slo_url": "http://example.com/slo_url",
},
"attribute_mapping": {
Attributes.IDENTIFIER: "user_id",
Attributes.USER_EMAIL: "email",
Attributes.FIRST_NAME: "first_name",
Attributes.LAST_NAME: "last_name",
},
}
class DummySAML2Provider(SAML2Provider):
def get_saml_setup_pipeline(self):
return []
def build_config(self, state):
return dummy_provider_config
@pytest.mark.skipif(not HAS_SAML2, reason="SAML2 library is not installed")
class AuthSAML2Test(AuthProviderTestCase):
provider = DummySAML2Provider
provider_name = "saml2_dummy"
def setUp(self):
self.user = self.create_user("[email protected]")
self.org = self.create_organization(owner=self.user, name="saml2-org")
# enable require 2FA and enroll user
TotpInterface().enroll(self.user)
self.org.update(flags=models.F("flags").bitor(Organization.flags.require_2fa))
assert self.org.flags.require_2fa.is_set
self.auth_provider = AuthProvider.objects.create(
provider=self.provider_name, config=dummy_provider_config, organization=self.org
)
# The system.url-prefix, which is used to generate absolute URLs, must
# have a TLD for the SAML2 library to consider the URL generated for
# the ACS endpoint valid.
self.url_prefix = settings.SENTRY_OPTIONS.get("system.url-prefix")
settings.SENTRY_OPTIONS.update({"system.url-prefix": "http://testserver.com"})
super(AuthSAML2Test, self).setUp()
def tearDown(self):
# restore url-prefix config
settings.SENTRY_OPTIONS.update({"system.url-prefix": self.url_prefix})
super(AuthSAML2Test, self).tearDown()
@fixture
def login_path(self):
return reverse("sentry-auth-organization", args=["saml2-org"])
@fixture
def acs_path(self):
return reverse("sentry-auth-organization-saml-acs", args=["saml2-org"])
@fixture
def setup_path(self):
return reverse("sentry-organization-auth-provider-settings", args=["saml2-org"])
def test_redirects_to_idp(self):
resp = self.client.post(self.login_path, {"init": True})
assert resp.status_code == 302
redirect = urlparse(resp.get("Location", ""))
query = parse_qs(redirect.query)
assert redirect.path == "/sso_url"
assert "SAMLRequest" in query
def accept_auth(self, **kargs):
saml_response = self.load_fixture("saml2_auth_response.xml")
saml_response = base64.b64encode(saml_response).decode("utf-8")
# Disable validation of the SAML2 mock response
is_valid = "onelogin.saml2.response.OneLogin_Saml2_Response.is_valid"
with mock.patch(is_valid, return_value=True):
return self.client.post(self.acs_path, {"SAMLResponse": saml_response}, **kargs)
def test_auth_sp_initiated(self):
# Start auth process from SP side
self.client.post(self.login_path, {"init": True})
auth = self.accept_auth()
assert auth.status_code == 200
assert auth.context["existing_user"] == self.user
def test_auth_idp_initiated(self):
auth = self.accept_auth()
assert auth.status_code == 200
assert auth.context["existing_user"] == self.user
@mock.patch("sentry.auth.helper.logger")
def test_auth_setup(self, auth_log):
self.auth_provider.delete()
self.login_as(self.user)
data = {"init": True, "provider": self.provider_name}
with Feature(["organizations:sso-basic", "organizations:sso-saml2"]):
setup = self.client.post(self.setup_path, data)
assert setup.status_code == 302
redirect = urlparse(setup.get("Location", ""))
assert redirect.path == "/sso_url"
auth = self.accept_auth(follow=True)
messages = map(lambda m: six.text_type(m), auth.context["messages"])
assert len(messages) == 2
assert messages[0] == "You have successfully linked your account to your SSO provider."
assert messages[1].startswith("SSO has been configured for your organization")
# require 2FA disabled when saml is enabled
org = Organization.objects.get(id=self.org.id)
assert not org.flags.require_2fa.is_set
event = AuditLogEntry.objects.get(
target_object=org.id, event=AuditLogEntryEvent.ORG_EDIT, actor=self.user
)
assert "require_2fa to False when enabling SSO" in event.get_note()
auth_log.info.assert_called_once_with(
"Require 2fa disabled during sso setup", extra={"organization_id": self.org.id}
)
def test_auth_idp_initiated_no_provider(self):
self.auth_provider.delete()
auth = self.accept_auth(follow=True)
assert auth.status_code == 200
messages = map(lambda m: six.text_type(m), auth.context["messages"])
assert len(messages) == 1
assert messages[0] == "The organization does not exist or does not have SAML SSO enabled."
def test_saml_metadata(self):
path = reverse("sentry-auth-organization-saml-metadata", args=["saml2-org"])
resp = self.client.get(path)
assert resp.status_code == 200
assert resp.get("content-type") == "text/xml"
def test_logout_request(self):
saml_request = self.load_fixture("saml2_slo_request.xml")
saml_request = base64.b64encode(saml_request)
self.login_as(self.user)
path = reverse("sentry-auth-organization-saml-sls", args=["saml2-org"])
path = path + "?" + urlencode({"SAMLRequest": saml_request})
resp = self.client.get(path)
assert resp.status_code == 302
redirect = urlparse(resp.get("Location", ""))
query = parse_qs(redirect.query)
assert redirect.path == "/slo_url"
assert "SAMLResponse" in query
updated = type(self.user).objects.get(pk=self.user.id)
assert updated.session_nonce != self.user.session_nonce
|
random_line_split
|
|
test_auth_saml2.py
|
from __future__ import absolute_import
import six
import pytest
import base64
from sentry.utils.compat import mock
from exam import fixture
from six.moves.urllib.parse import urlencode, urlparse, parse_qs
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from sentry.auth.authenticators import TotpInterface
from sentry.auth.providers.saml2.provider import SAML2Provider, Attributes, HAS_SAML2
from sentry.models import (
AuditLogEntry,
AuditLogEntryEvent,
AuthProvider,
Organization,
)
from sentry.testutils import AuthProviderTestCase
from sentry.testutils.helpers import Feature
from sentry.utils.compat import map
dummy_provider_config = {
"idp": {
"entity_id": "https://example.com/saml/metadata/1234",
"x509cert": "foo_x509_cert",
"sso_url": "http://example.com/sso_url",
"slo_url": "http://example.com/slo_url",
},
"attribute_mapping": {
Attributes.IDENTIFIER: "user_id",
Attributes.USER_EMAIL: "email",
Attributes.FIRST_NAME: "first_name",
Attributes.LAST_NAME: "last_name",
},
}
class
|
(SAML2Provider):
def get_saml_setup_pipeline(self):
return []
def build_config(self, state):
return dummy_provider_config
@pytest.mark.skipif(not HAS_SAML2, reason="SAML2 library is not installed")
class AuthSAML2Test(AuthProviderTestCase):
provider = DummySAML2Provider
provider_name = "saml2_dummy"
def setUp(self):
self.user = self.create_user("[email protected]")
self.org = self.create_organization(owner=self.user, name="saml2-org")
# enable require 2FA and enroll user
TotpInterface().enroll(self.user)
self.org.update(flags=models.F("flags").bitor(Organization.flags.require_2fa))
assert self.org.flags.require_2fa.is_set
self.auth_provider = AuthProvider.objects.create(
provider=self.provider_name, config=dummy_provider_config, organization=self.org
)
# The system.url-prefix, which is used to generate absolute URLs, must
# have a TLD for the SAML2 library to consider the URL generated for
# the ACS endpoint valid.
self.url_prefix = settings.SENTRY_OPTIONS.get("system.url-prefix")
settings.SENTRY_OPTIONS.update({"system.url-prefix": "http://testserver.com"})
super(AuthSAML2Test, self).setUp()
def tearDown(self):
# restore url-prefix config
settings.SENTRY_OPTIONS.update({"system.url-prefix": self.url_prefix})
super(AuthSAML2Test, self).tearDown()
@fixture
def login_path(self):
return reverse("sentry-auth-organization", args=["saml2-org"])
@fixture
def acs_path(self):
return reverse("sentry-auth-organization-saml-acs", args=["saml2-org"])
@fixture
def setup_path(self):
return reverse("sentry-organization-auth-provider-settings", args=["saml2-org"])
def test_redirects_to_idp(self):
resp = self.client.post(self.login_path, {"init": True})
assert resp.status_code == 302
redirect = urlparse(resp.get("Location", ""))
query = parse_qs(redirect.query)
assert redirect.path == "/sso_url"
assert "SAMLRequest" in query
def accept_auth(self, **kargs):
saml_response = self.load_fixture("saml2_auth_response.xml")
saml_response = base64.b64encode(saml_response).decode("utf-8")
# Disable validation of the SAML2 mock response
is_valid = "onelogin.saml2.response.OneLogin_Saml2_Response.is_valid"
with mock.patch(is_valid, return_value=True):
return self.client.post(self.acs_path, {"SAMLResponse": saml_response}, **kargs)
def test_auth_sp_initiated(self):
# Start auth process from SP side
self.client.post(self.login_path, {"init": True})
auth = self.accept_auth()
assert auth.status_code == 200
assert auth.context["existing_user"] == self.user
def test_auth_idp_initiated(self):
auth = self.accept_auth()
assert auth.status_code == 200
assert auth.context["existing_user"] == self.user
@mock.patch("sentry.auth.helper.logger")
def test_auth_setup(self, auth_log):
self.auth_provider.delete()
self.login_as(self.user)
data = {"init": True, "provider": self.provider_name}
with Feature(["organizations:sso-basic", "organizations:sso-saml2"]):
setup = self.client.post(self.setup_path, data)
assert setup.status_code == 302
redirect = urlparse(setup.get("Location", ""))
assert redirect.path == "/sso_url"
auth = self.accept_auth(follow=True)
messages = map(lambda m: six.text_type(m), auth.context["messages"])
assert len(messages) == 2
assert messages[0] == "You have successfully linked your account to your SSO provider."
assert messages[1].startswith("SSO has been configured for your organization")
# require 2FA disabled when saml is enabled
org = Organization.objects.get(id=self.org.id)
assert not org.flags.require_2fa.is_set
event = AuditLogEntry.objects.get(
target_object=org.id, event=AuditLogEntryEvent.ORG_EDIT, actor=self.user
)
assert "require_2fa to False when enabling SSO" in event.get_note()
auth_log.info.assert_called_once_with(
"Require 2fa disabled during sso setup", extra={"organization_id": self.org.id}
)
def test_auth_idp_initiated_no_provider(self):
self.auth_provider.delete()
auth = self.accept_auth(follow=True)
assert auth.status_code == 200
messages = map(lambda m: six.text_type(m), auth.context["messages"])
assert len(messages) == 1
assert messages[0] == "The organization does not exist or does not have SAML SSO enabled."
def test_saml_metadata(self):
path = reverse("sentry-auth-organization-saml-metadata", args=["saml2-org"])
resp = self.client.get(path)
assert resp.status_code == 200
assert resp.get("content-type") == "text/xml"
def test_logout_request(self):
saml_request = self.load_fixture("saml2_slo_request.xml")
saml_request = base64.b64encode(saml_request)
self.login_as(self.user)
path = reverse("sentry-auth-organization-saml-sls", args=["saml2-org"])
path = path + "?" + urlencode({"SAMLRequest": saml_request})
resp = self.client.get(path)
assert resp.status_code == 302
redirect = urlparse(resp.get("Location", ""))
query = parse_qs(redirect.query)
assert redirect.path == "/slo_url"
assert "SAMLResponse" in query
updated = type(self.user).objects.get(pk=self.user.id)
assert updated.session_nonce != self.user.session_nonce
|
DummySAML2Provider
|
identifier_name
|
mrps.py
|
#!/usr/bin/env python
import sys
import configparser
import os
import shutil
from PyQt5 import QtWidgets
from PyQt5 import QtWebKitWidgets
from PyQt5 import QtCore
# Read config file
home_dir = os.path.expanduser("~")
conf_path = os.path.join(home_dir, ".config/mrps/mrps.conf")
config = configparser.ConfigParser(delimiters=('='))
config.read(conf_path)
def clean_up():
os.remove(html_file_full)
shutil.rmtree(os.path.join(o_file_dir, "reveal.js"))
app = QtWidgets.QApplication(sys.argv)
app.aboutToQuit.connect(clean_up)
if len(sys.argv) == 2:
o_file_full = os.path.abspath(sys.argv[1])
else:
o_file_full = QtWidgets.QFileDialog.getOpenFileName()[0]
if o_file_full:
o_file_dir = os.path.dirname(o_file_full)
o_file_name = os.path.basename(os.path.normpath(o_file_full))
o_file_name_bare = os.path.splitext(o_file_name)[0]
html_file_full = os.path.join(o_file_dir, o_file_name_bare + ".html")
shutil.copytree(os.path.normpath(config['DEFAULT']['revealjs_path']), os.path.join(o_file_dir, "reveal.js"))
md_file = open(o_file_full, 'r')
md_content = md_file.read()
md_file.close()
f = open(html_file_full, 'w')
f.write(config['DEFAULT']['html_top'] + '\n\n' +
md_content + '\n\n' +
config['DEFAULT']['html_bottom'])
f.close()
web = QtWebKitWidgets.QWebView()
web.load(QtCore.QUrl('file://' + html_file_full))
web.show()
sys.exit(app.exec_())
else:
|
exit()
|
random_line_split
|
|
mrps.py
|
#!/usr/bin/env python
import sys
import configparser
import os
import shutil
from PyQt5 import QtWidgets
from PyQt5 import QtWebKitWidgets
from PyQt5 import QtCore
# Read config file
home_dir = os.path.expanduser("~")
conf_path = os.path.join(home_dir, ".config/mrps/mrps.conf")
config = configparser.ConfigParser(delimiters=('='))
config.read(conf_path)
def
|
():
os.remove(html_file_full)
shutil.rmtree(os.path.join(o_file_dir, "reveal.js"))
app = QtWidgets.QApplication(sys.argv)
app.aboutToQuit.connect(clean_up)
if len(sys.argv) == 2:
o_file_full = os.path.abspath(sys.argv[1])
else:
o_file_full = QtWidgets.QFileDialog.getOpenFileName()[0]
if o_file_full:
o_file_dir = os.path.dirname(o_file_full)
o_file_name = os.path.basename(os.path.normpath(o_file_full))
o_file_name_bare = os.path.splitext(o_file_name)[0]
html_file_full = os.path.join(o_file_dir, o_file_name_bare + ".html")
shutil.copytree(os.path.normpath(config['DEFAULT']['revealjs_path']), os.path.join(o_file_dir, "reveal.js"))
md_file = open(o_file_full, 'r')
md_content = md_file.read()
md_file.close()
f = open(html_file_full, 'w')
f.write(config['DEFAULT']['html_top'] + '\n\n' +
md_content + '\n\n' +
config['DEFAULT']['html_bottom'])
f.close()
web = QtWebKitWidgets.QWebView()
web.load(QtCore.QUrl('file://' + html_file_full))
web.show()
sys.exit(app.exec_())
else:
exit()
|
clean_up
|
identifier_name
|
mrps.py
|
#!/usr/bin/env python
import sys
import configparser
import os
import shutil
from PyQt5 import QtWidgets
from PyQt5 import QtWebKitWidgets
from PyQt5 import QtCore
# Read config file
home_dir = os.path.expanduser("~")
conf_path = os.path.join(home_dir, ".config/mrps/mrps.conf")
config = configparser.ConfigParser(delimiters=('='))
config.read(conf_path)
def clean_up():
|
app = QtWidgets.QApplication(sys.argv)
app.aboutToQuit.connect(clean_up)
if len(sys.argv) == 2:
o_file_full = os.path.abspath(sys.argv[1])
else:
o_file_full = QtWidgets.QFileDialog.getOpenFileName()[0]
if o_file_full:
o_file_dir = os.path.dirname(o_file_full)
o_file_name = os.path.basename(os.path.normpath(o_file_full))
o_file_name_bare = os.path.splitext(o_file_name)[0]
html_file_full = os.path.join(o_file_dir, o_file_name_bare + ".html")
shutil.copytree(os.path.normpath(config['DEFAULT']['revealjs_path']), os.path.join(o_file_dir, "reveal.js"))
md_file = open(o_file_full, 'r')
md_content = md_file.read()
md_file.close()
f = open(html_file_full, 'w')
f.write(config['DEFAULT']['html_top'] + '\n\n' +
md_content + '\n\n' +
config['DEFAULT']['html_bottom'])
f.close()
web = QtWebKitWidgets.QWebView()
web.load(QtCore.QUrl('file://' + html_file_full))
web.show()
sys.exit(app.exec_())
else:
exit()
|
os.remove(html_file_full)
shutil.rmtree(os.path.join(o_file_dir, "reveal.js"))
|
identifier_body
|
mrps.py
|
#!/usr/bin/env python
import sys
import configparser
import os
import shutil
from PyQt5 import QtWidgets
from PyQt5 import QtWebKitWidgets
from PyQt5 import QtCore
# Read config file
home_dir = os.path.expanduser("~")
conf_path = os.path.join(home_dir, ".config/mrps/mrps.conf")
config = configparser.ConfigParser(delimiters=('='))
config.read(conf_path)
def clean_up():
os.remove(html_file_full)
shutil.rmtree(os.path.join(o_file_dir, "reveal.js"))
app = QtWidgets.QApplication(sys.argv)
app.aboutToQuit.connect(clean_up)
if len(sys.argv) == 2:
o_file_full = os.path.abspath(sys.argv[1])
else:
|
if o_file_full:
o_file_dir = os.path.dirname(o_file_full)
o_file_name = os.path.basename(os.path.normpath(o_file_full))
o_file_name_bare = os.path.splitext(o_file_name)[0]
html_file_full = os.path.join(o_file_dir, o_file_name_bare + ".html")
shutil.copytree(os.path.normpath(config['DEFAULT']['revealjs_path']), os.path.join(o_file_dir, "reveal.js"))
md_file = open(o_file_full, 'r')
md_content = md_file.read()
md_file.close()
f = open(html_file_full, 'w')
f.write(config['DEFAULT']['html_top'] + '\n\n' +
md_content + '\n\n' +
config['DEFAULT']['html_bottom'])
f.close()
web = QtWebKitWidgets.QWebView()
web.load(QtCore.QUrl('file://' + html_file_full))
web.show()
sys.exit(app.exec_())
else:
exit()
|
o_file_full = QtWidgets.QFileDialog.getOpenFileName()[0]
|
conditional_block
|
di_bindings.ts
|
// TODO (jteplitz602): This whole file is nearly identical to core/application.ts.
// There should be a way to refactor application so that this file is unnecessary. See #3277
import {Injector, bind, Binding} from "angular2/src/core/di";
import {DEFAULT_PIPES} from 'angular2/src/core/pipes';
import {AnimationBuilder} from 'angular2/src/animate/animation_builder';
import {BrowserDetails} from 'angular2/src/animate/browser_details';
import {Reflector, reflector} from 'angular2/src/core/reflection/reflection';
import {Parser, Lexer} from 'angular2/src/core/change_detection/change_detection';
import {
EventManager,
DomEventsPlugin,
EVENT_MANAGER_PLUGINS
} from 'angular2/src/core/render/dom/events/event_manager';
import {ProtoViewFactory} from 'angular2/src/core/linker/proto_view_factory';
import {BrowserDomAdapter} from 'angular2/src/core/dom/browser_adapter';
import {KeyEventsPlugin} from 'angular2/src/core/render/dom/events/key_events';
import {HammerGesturesPlugin} from 'angular2/src/core/render/dom/events/hammer_gestures';
import {AppViewPool, APP_VIEW_POOL_CAPACITY} from 'angular2/src/core/linker/view_pool';
import {Renderer} from 'angular2/src/core/render/api';
import {AppRootUrl} from 'angular2/src/core/compiler/app_root_url';
import {DomRenderer, DomRenderer_, DOCUMENT} from 'angular2/src/core/render/render';
import {APP_ID_RANDOM_BINDING} from 'angular2/src/core/application_tokens';
import {ElementSchemaRegistry} from 'angular2/src/core/compiler/schema/element_schema_registry';
import {
DomElementSchemaRegistry
} from 'angular2/src/core/compiler/schema/dom_element_schema_registry';
import {
SharedStylesHost,
DomSharedStylesHost
} from 'angular2/src/core/render/dom/shared_styles_host';
import {DOM} from 'angular2/src/core/dom/dom_adapter';
import {NgZone} from 'angular2/src/core/zone/ng_zone';
import {AppViewManager, AppViewManager_} from 'angular2/src/core/linker/view_manager';
import {AppViewManagerUtils} from 'angular2/src/core/linker/view_manager_utils';
import {AppViewListener} from 'angular2/src/core/linker/view_listener';
import {ViewResolver} from 'angular2/src/core/linker/view_resolver';
import {DirectiveResolver} from 'angular2/src/core/linker/directive_resolver';
import {ExceptionHandler} from 'angular2/src/core/facade/exceptions';
import {
DynamicComponentLoader,
DynamicComponentLoader_
} from 'angular2/src/core/linker/dynamic_component_loader';
import {UrlResolver} from 'angular2/src/core/compiler/url_resolver';
import {Testability} from 'angular2/src/core/testability/testability';
import {XHR} from 'angular2/src/core/compiler/xhr';
import {XHRImpl} from 'angular2/src/core/compiler/xhr_impl';
import {Serializer} from 'angular2/src/web_workers/shared/serializer';
import {ON_WEB_WORKER} from 'angular2/src/web_workers/shared/api';
import {RenderProtoViewRefStore} from 'angular2/src/web_workers/shared/render_proto_view_ref_store';
import {
RenderViewWithFragmentsStore
} from 'angular2/src/web_workers/shared/render_view_with_fragments_store';
import {AnchorBasedAppRootUrl} from 'angular2/src/core/compiler/anchor_based_app_root_url';
import {WebWorkerApplication} from 'angular2/src/web_workers/ui/impl';
import {MessageBus} from 'angular2/src/web_workers/shared/message_bus';
import {MessageBasedRenderer} from 'angular2/src/web_workers/ui/renderer';
|
import {WebWorkerSetup} from 'angular2/src/web_workers/ui/setup';
import {
ServiceMessageBrokerFactory,
ServiceMessageBrokerFactory_
} from 'angular2/src/web_workers/shared/service_message_broker';
import {
ClientMessageBrokerFactory,
ClientMessageBrokerFactory_
} from 'angular2/src/web_workers/shared/client_message_broker';
var _rootInjector: Injector;
// Contains everything that is safe to share between applications.
var _rootBindings = [bind(Reflector).toValue(reflector)];
// TODO: This code is nearly identical to core/application. There should be a way to only write it
// once
function _injectorBindings(): any[] {
return [
bind(DOCUMENT)
.toValue(DOM.defaultDoc()),
EventManager,
new Binding(EVENT_MANAGER_PLUGINS, {toClass: DomEventsPlugin, multi: true}),
new Binding(EVENT_MANAGER_PLUGINS, {toClass: KeyEventsPlugin, multi: true}),
new Binding(EVENT_MANAGER_PLUGINS, {toClass: HammerGesturesPlugin, multi: true}),
bind(DomRenderer).toClass(DomRenderer_),
bind(Renderer).toAlias(DomRenderer),
APP_ID_RANDOM_BINDING,
DomSharedStylesHost,
bind(SharedStylesHost).toAlias(DomSharedStylesHost),
Serializer,
bind(ON_WEB_WORKER).toValue(false),
bind(ElementSchemaRegistry).toValue(new DomElementSchemaRegistry()),
RenderViewWithFragmentsStore,
RenderProtoViewRefStore,
AppViewPool,
bind(APP_VIEW_POOL_CAPACITY).toValue(10000),
bind(AppViewManager).toClass(AppViewManager_),
AppViewManagerUtils,
AppViewListener,
ProtoViewFactory,
ViewResolver,
DEFAULT_PIPES,
DirectiveResolver,
Parser,
Lexer,
bind(ExceptionHandler).toFactory(() => new ExceptionHandler(DOM), []),
bind(XHR).toValue(new XHRImpl()),
UrlResolver,
bind(DynamicComponentLoader).toClass(DynamicComponentLoader_),
Testability,
AnchorBasedAppRootUrl,
bind(AppRootUrl).toAlias(AnchorBasedAppRootUrl),
WebWorkerApplication,
WebWorkerSetup,
MessageBasedXHRImpl,
MessageBasedRenderer,
bind(ServiceMessageBrokerFactory).toClass(ServiceMessageBrokerFactory_),
bind(ClientMessageBrokerFactory).toClass(ClientMessageBrokerFactory_),
BrowserDetails,
AnimationBuilder,
];
}
export function createInjector(zone: NgZone, bus: MessageBus): Injector {
BrowserDomAdapter.makeCurrent();
_rootBindings.push(bind(NgZone).toValue(zone));
_rootBindings.push(bind(MessageBus).toValue(bus));
var injector: Injector = Injector.resolveAndCreate(_rootBindings);
return injector.resolveAndCreateChild(_injectorBindings());
}
|
import {MessageBasedXHRImpl} from 'angular2/src/web_workers/ui/xhr_impl';
|
random_line_split
|
di_bindings.ts
|
// TODO (jteplitz602): This whole file is nearly identical to core/application.ts.
// There should be a way to refactor application so that this file is unnecessary. See #3277
import {Injector, bind, Binding} from "angular2/src/core/di";
import {DEFAULT_PIPES} from 'angular2/src/core/pipes';
import {AnimationBuilder} from 'angular2/src/animate/animation_builder';
import {BrowserDetails} from 'angular2/src/animate/browser_details';
import {Reflector, reflector} from 'angular2/src/core/reflection/reflection';
import {Parser, Lexer} from 'angular2/src/core/change_detection/change_detection';
import {
EventManager,
DomEventsPlugin,
EVENT_MANAGER_PLUGINS
} from 'angular2/src/core/render/dom/events/event_manager';
import {ProtoViewFactory} from 'angular2/src/core/linker/proto_view_factory';
import {BrowserDomAdapter} from 'angular2/src/core/dom/browser_adapter';
import {KeyEventsPlugin} from 'angular2/src/core/render/dom/events/key_events';
import {HammerGesturesPlugin} from 'angular2/src/core/render/dom/events/hammer_gestures';
import {AppViewPool, APP_VIEW_POOL_CAPACITY} from 'angular2/src/core/linker/view_pool';
import {Renderer} from 'angular2/src/core/render/api';
import {AppRootUrl} from 'angular2/src/core/compiler/app_root_url';
import {DomRenderer, DomRenderer_, DOCUMENT} from 'angular2/src/core/render/render';
import {APP_ID_RANDOM_BINDING} from 'angular2/src/core/application_tokens';
import {ElementSchemaRegistry} from 'angular2/src/core/compiler/schema/element_schema_registry';
import {
DomElementSchemaRegistry
} from 'angular2/src/core/compiler/schema/dom_element_schema_registry';
import {
SharedStylesHost,
DomSharedStylesHost
} from 'angular2/src/core/render/dom/shared_styles_host';
import {DOM} from 'angular2/src/core/dom/dom_adapter';
import {NgZone} from 'angular2/src/core/zone/ng_zone';
import {AppViewManager, AppViewManager_} from 'angular2/src/core/linker/view_manager';
import {AppViewManagerUtils} from 'angular2/src/core/linker/view_manager_utils';
import {AppViewListener} from 'angular2/src/core/linker/view_listener';
import {ViewResolver} from 'angular2/src/core/linker/view_resolver';
import {DirectiveResolver} from 'angular2/src/core/linker/directive_resolver';
import {ExceptionHandler} from 'angular2/src/core/facade/exceptions';
import {
DynamicComponentLoader,
DynamicComponentLoader_
} from 'angular2/src/core/linker/dynamic_component_loader';
import {UrlResolver} from 'angular2/src/core/compiler/url_resolver';
import {Testability} from 'angular2/src/core/testability/testability';
import {XHR} from 'angular2/src/core/compiler/xhr';
import {XHRImpl} from 'angular2/src/core/compiler/xhr_impl';
import {Serializer} from 'angular2/src/web_workers/shared/serializer';
import {ON_WEB_WORKER} from 'angular2/src/web_workers/shared/api';
import {RenderProtoViewRefStore} from 'angular2/src/web_workers/shared/render_proto_view_ref_store';
import {
RenderViewWithFragmentsStore
} from 'angular2/src/web_workers/shared/render_view_with_fragments_store';
import {AnchorBasedAppRootUrl} from 'angular2/src/core/compiler/anchor_based_app_root_url';
import {WebWorkerApplication} from 'angular2/src/web_workers/ui/impl';
import {MessageBus} from 'angular2/src/web_workers/shared/message_bus';
import {MessageBasedRenderer} from 'angular2/src/web_workers/ui/renderer';
import {MessageBasedXHRImpl} from 'angular2/src/web_workers/ui/xhr_impl';
import {WebWorkerSetup} from 'angular2/src/web_workers/ui/setup';
import {
ServiceMessageBrokerFactory,
ServiceMessageBrokerFactory_
} from 'angular2/src/web_workers/shared/service_message_broker';
import {
ClientMessageBrokerFactory,
ClientMessageBrokerFactory_
} from 'angular2/src/web_workers/shared/client_message_broker';
var _rootInjector: Injector;
// Contains everything that is safe to share between applications.
var _rootBindings = [bind(Reflector).toValue(reflector)];
// TODO: This code is nearly identical to core/application. There should be a way to only write it
// once
function _injectorBindings(): any[] {
return [
bind(DOCUMENT)
.toValue(DOM.defaultDoc()),
EventManager,
new Binding(EVENT_MANAGER_PLUGINS, {toClass: DomEventsPlugin, multi: true}),
new Binding(EVENT_MANAGER_PLUGINS, {toClass: KeyEventsPlugin, multi: true}),
new Binding(EVENT_MANAGER_PLUGINS, {toClass: HammerGesturesPlugin, multi: true}),
bind(DomRenderer).toClass(DomRenderer_),
bind(Renderer).toAlias(DomRenderer),
APP_ID_RANDOM_BINDING,
DomSharedStylesHost,
bind(SharedStylesHost).toAlias(DomSharedStylesHost),
Serializer,
bind(ON_WEB_WORKER).toValue(false),
bind(ElementSchemaRegistry).toValue(new DomElementSchemaRegistry()),
RenderViewWithFragmentsStore,
RenderProtoViewRefStore,
AppViewPool,
bind(APP_VIEW_POOL_CAPACITY).toValue(10000),
bind(AppViewManager).toClass(AppViewManager_),
AppViewManagerUtils,
AppViewListener,
ProtoViewFactory,
ViewResolver,
DEFAULT_PIPES,
DirectiveResolver,
Parser,
Lexer,
bind(ExceptionHandler).toFactory(() => new ExceptionHandler(DOM), []),
bind(XHR).toValue(new XHRImpl()),
UrlResolver,
bind(DynamicComponentLoader).toClass(DynamicComponentLoader_),
Testability,
AnchorBasedAppRootUrl,
bind(AppRootUrl).toAlias(AnchorBasedAppRootUrl),
WebWorkerApplication,
WebWorkerSetup,
MessageBasedXHRImpl,
MessageBasedRenderer,
bind(ServiceMessageBrokerFactory).toClass(ServiceMessageBrokerFactory_),
bind(ClientMessageBrokerFactory).toClass(ClientMessageBrokerFactory_),
BrowserDetails,
AnimationBuilder,
];
}
export function
|
(zone: NgZone, bus: MessageBus): Injector {
BrowserDomAdapter.makeCurrent();
_rootBindings.push(bind(NgZone).toValue(zone));
_rootBindings.push(bind(MessageBus).toValue(bus));
var injector: Injector = Injector.resolveAndCreate(_rootBindings);
return injector.resolveAndCreateChild(_injectorBindings());
}
|
createInjector
|
identifier_name
|
di_bindings.ts
|
// TODO (jteplitz602): This whole file is nearly identical to core/application.ts.
// There should be a way to refactor application so that this file is unnecessary. See #3277
import {Injector, bind, Binding} from "angular2/src/core/di";
import {DEFAULT_PIPES} from 'angular2/src/core/pipes';
import {AnimationBuilder} from 'angular2/src/animate/animation_builder';
import {BrowserDetails} from 'angular2/src/animate/browser_details';
import {Reflector, reflector} from 'angular2/src/core/reflection/reflection';
import {Parser, Lexer} from 'angular2/src/core/change_detection/change_detection';
import {
EventManager,
DomEventsPlugin,
EVENT_MANAGER_PLUGINS
} from 'angular2/src/core/render/dom/events/event_manager';
import {ProtoViewFactory} from 'angular2/src/core/linker/proto_view_factory';
import {BrowserDomAdapter} from 'angular2/src/core/dom/browser_adapter';
import {KeyEventsPlugin} from 'angular2/src/core/render/dom/events/key_events';
import {HammerGesturesPlugin} from 'angular2/src/core/render/dom/events/hammer_gestures';
import {AppViewPool, APP_VIEW_POOL_CAPACITY} from 'angular2/src/core/linker/view_pool';
import {Renderer} from 'angular2/src/core/render/api';
import {AppRootUrl} from 'angular2/src/core/compiler/app_root_url';
import {DomRenderer, DomRenderer_, DOCUMENT} from 'angular2/src/core/render/render';
import {APP_ID_RANDOM_BINDING} from 'angular2/src/core/application_tokens';
import {ElementSchemaRegistry} from 'angular2/src/core/compiler/schema/element_schema_registry';
import {
DomElementSchemaRegistry
} from 'angular2/src/core/compiler/schema/dom_element_schema_registry';
import {
SharedStylesHost,
DomSharedStylesHost
} from 'angular2/src/core/render/dom/shared_styles_host';
import {DOM} from 'angular2/src/core/dom/dom_adapter';
import {NgZone} from 'angular2/src/core/zone/ng_zone';
import {AppViewManager, AppViewManager_} from 'angular2/src/core/linker/view_manager';
import {AppViewManagerUtils} from 'angular2/src/core/linker/view_manager_utils';
import {AppViewListener} from 'angular2/src/core/linker/view_listener';
import {ViewResolver} from 'angular2/src/core/linker/view_resolver';
import {DirectiveResolver} from 'angular2/src/core/linker/directive_resolver';
import {ExceptionHandler} from 'angular2/src/core/facade/exceptions';
import {
DynamicComponentLoader,
DynamicComponentLoader_
} from 'angular2/src/core/linker/dynamic_component_loader';
import {UrlResolver} from 'angular2/src/core/compiler/url_resolver';
import {Testability} from 'angular2/src/core/testability/testability';
import {XHR} from 'angular2/src/core/compiler/xhr';
import {XHRImpl} from 'angular2/src/core/compiler/xhr_impl';
import {Serializer} from 'angular2/src/web_workers/shared/serializer';
import {ON_WEB_WORKER} from 'angular2/src/web_workers/shared/api';
import {RenderProtoViewRefStore} from 'angular2/src/web_workers/shared/render_proto_view_ref_store';
import {
RenderViewWithFragmentsStore
} from 'angular2/src/web_workers/shared/render_view_with_fragments_store';
import {AnchorBasedAppRootUrl} from 'angular2/src/core/compiler/anchor_based_app_root_url';
import {WebWorkerApplication} from 'angular2/src/web_workers/ui/impl';
import {MessageBus} from 'angular2/src/web_workers/shared/message_bus';
import {MessageBasedRenderer} from 'angular2/src/web_workers/ui/renderer';
import {MessageBasedXHRImpl} from 'angular2/src/web_workers/ui/xhr_impl';
import {WebWorkerSetup} from 'angular2/src/web_workers/ui/setup';
import {
ServiceMessageBrokerFactory,
ServiceMessageBrokerFactory_
} from 'angular2/src/web_workers/shared/service_message_broker';
import {
ClientMessageBrokerFactory,
ClientMessageBrokerFactory_
} from 'angular2/src/web_workers/shared/client_message_broker';
var _rootInjector: Injector;
// Contains everything that is safe to share between applications.
var _rootBindings = [bind(Reflector).toValue(reflector)];
// TODO: This code is nearly identical to core/application. There should be a way to only write it
// once
function _injectorBindings(): any[]
|
export function createInjector(zone: NgZone, bus: MessageBus): Injector {
BrowserDomAdapter.makeCurrent();
_rootBindings.push(bind(NgZone).toValue(zone));
_rootBindings.push(bind(MessageBus).toValue(bus));
var injector: Injector = Injector.resolveAndCreate(_rootBindings);
return injector.resolveAndCreateChild(_injectorBindings());
}
|
{
return [
bind(DOCUMENT)
.toValue(DOM.defaultDoc()),
EventManager,
new Binding(EVENT_MANAGER_PLUGINS, {toClass: DomEventsPlugin, multi: true}),
new Binding(EVENT_MANAGER_PLUGINS, {toClass: KeyEventsPlugin, multi: true}),
new Binding(EVENT_MANAGER_PLUGINS, {toClass: HammerGesturesPlugin, multi: true}),
bind(DomRenderer).toClass(DomRenderer_),
bind(Renderer).toAlias(DomRenderer),
APP_ID_RANDOM_BINDING,
DomSharedStylesHost,
bind(SharedStylesHost).toAlias(DomSharedStylesHost),
Serializer,
bind(ON_WEB_WORKER).toValue(false),
bind(ElementSchemaRegistry).toValue(new DomElementSchemaRegistry()),
RenderViewWithFragmentsStore,
RenderProtoViewRefStore,
AppViewPool,
bind(APP_VIEW_POOL_CAPACITY).toValue(10000),
bind(AppViewManager).toClass(AppViewManager_),
AppViewManagerUtils,
AppViewListener,
ProtoViewFactory,
ViewResolver,
DEFAULT_PIPES,
DirectiveResolver,
Parser,
Lexer,
bind(ExceptionHandler).toFactory(() => new ExceptionHandler(DOM), []),
bind(XHR).toValue(new XHRImpl()),
UrlResolver,
bind(DynamicComponentLoader).toClass(DynamicComponentLoader_),
Testability,
AnchorBasedAppRootUrl,
bind(AppRootUrl).toAlias(AnchorBasedAppRootUrl),
WebWorkerApplication,
WebWorkerSetup,
MessageBasedXHRImpl,
MessageBasedRenderer,
bind(ServiceMessageBrokerFactory).toClass(ServiceMessageBrokerFactory_),
bind(ClientMessageBrokerFactory).toClass(ClientMessageBrokerFactory_),
BrowserDetails,
AnimationBuilder,
];
}
|
identifier_body
|
LandingPage.tsx
|
import * as React from "react";
import * as weavejs from "weavejs";
import {Weave} from "weavejs";
import WeaveComponentRenderer = weavejs.ui.WeaveComponentRenderer;
import MiscUtils = weavejs.util.MiscUtils;
import WeaveApp from "weaveapp/WeaveApp";
import GetStartedComponent from "weaveapp/dialog/GetStartedComponent";
const WEAVE_EXTERNAL_TOOLS = "WeaveExternalTools";
export declare type LandingPageView = "splash"|"default"|"file"|"tour list" |"tour";
export interface LandingPageProps
{
initialView:LandingPageView;
weave:Weave;
weaveAppRef:(weaveApp:WeaveApp)=>void;
}
export interface LandingPageState
{
view:LandingPageView;
}
export default class LandingPage extends React.Component<LandingPageProps, LandingPageState>
{
urlParams:any;
|
(props:LandingPageProps)
{
super(props);
this.urlParams = MiscUtils.getUrlParams();
var weaveExternalTools: any;
/* Wrap this in a try/catch so we don't crash if there's a security exception from accessing a window in another domain. */
try
{
weaveExternalTools = window && window.opener && (window.opener as any)[WEAVE_EXTERNAL_TOOLS];
}
catch (e)
{
weaveExternalTools = null;
}
var view = props.initialView;
var exportedFromFlash = weaveExternalTools && weaveExternalTools[window.name];
if (this.urlParams.skipIntro || this.urlParams.file || exportedFromFlash)
view = "default" as LandingPageView;
this.state = {view: view as LandingPageView};
}
loadGetStartedComponentWithTourList=()=>{
this.props.weave.history.clearHistory(); // important to clear the hsitory created by prev tour
this.props.weave.root.removeAllObjects(); // important to clear the all the session state object created by prev tour
this.setState({
view:"tour list"
});
};
render():JSX.Element
{
if (this.state.view == "splash" || this.state.view == "tour list")
{
return (
<GetStartedComponent style={ {width: "100%", height: "100%"} }
showInteractiveTourList={this.state.view == "tour list"}
onViewSelect={(view:LandingPageView) => {this.setState({view})}} />
);
}
return (
<WeaveApp
ref={this.props.weaveAppRef}
weave={this.props.weave}
style={{width: "100%", height: "100%"}}
showFileDialog={this.state.view == "file"}
enableTour={this.state.view == "tour"}
readUrlParams={true}
onClose={this.loadGetStartedComponentWithTourList}
/>
)
}
}
|
constructor
|
identifier_name
|
LandingPage.tsx
|
import * as React from "react";
import * as weavejs from "weavejs";
import {Weave} from "weavejs";
import WeaveComponentRenderer = weavejs.ui.WeaveComponentRenderer;
import MiscUtils = weavejs.util.MiscUtils;
import WeaveApp from "weaveapp/WeaveApp";
import GetStartedComponent from "weaveapp/dialog/GetStartedComponent";
const WEAVE_EXTERNAL_TOOLS = "WeaveExternalTools";
export declare type LandingPageView = "splash"|"default"|"file"|"tour list" |"tour";
export interface LandingPageProps
{
initialView:LandingPageView;
weave:Weave;
weaveAppRef:(weaveApp:WeaveApp)=>void;
}
export interface LandingPageState
{
view:LandingPageView;
}
export default class LandingPage extends React.Component<LandingPageProps, LandingPageState>
{
urlParams:any;
constructor(props:LandingPageProps)
{
super(props);
this.urlParams = MiscUtils.getUrlParams();
var weaveExternalTools: any;
/* Wrap this in a try/catch so we don't crash if there's a security exception from accessing a window in another domain. */
try
{
weaveExternalTools = window && window.opener && (window.opener as any)[WEAVE_EXTERNAL_TOOLS];
}
catch (e)
{
weaveExternalTools = null;
}
var view = props.initialView;
var exportedFromFlash = weaveExternalTools && weaveExternalTools[window.name];
if (this.urlParams.skipIntro || this.urlParams.file || exportedFromFlash)
view = "default" as LandingPageView;
this.state = {view: view as LandingPageView};
}
loadGetStartedComponentWithTourList=()=>{
this.props.weave.history.clearHistory(); // important to clear the hsitory created by prev tour
this.props.weave.root.removeAllObjects(); // important to clear the all the session state object created by prev tour
this.setState({
view:"tour list"
});
};
render():JSX.Element
{
if (this.state.view == "splash" || this.state.view == "tour list")
{
return (
<GetStartedComponent style={ {width: "100%", height: "100%"} }
showInteractiveTourList={this.state.view == "tour list"}
onViewSelect={(view:LandingPageView) => {this.setState({view})}} />
|
return (
<WeaveApp
ref={this.props.weaveAppRef}
weave={this.props.weave}
style={{width: "100%", height: "100%"}}
showFileDialog={this.state.view == "file"}
enableTour={this.state.view == "tour"}
readUrlParams={true}
onClose={this.loadGetStartedComponentWithTourList}
/>
)
}
}
|
);
}
|
random_line_split
|
LandingPage.tsx
|
import * as React from "react";
import * as weavejs from "weavejs";
import {Weave} from "weavejs";
import WeaveComponentRenderer = weavejs.ui.WeaveComponentRenderer;
import MiscUtils = weavejs.util.MiscUtils;
import WeaveApp from "weaveapp/WeaveApp";
import GetStartedComponent from "weaveapp/dialog/GetStartedComponent";
const WEAVE_EXTERNAL_TOOLS = "WeaveExternalTools";
export declare type LandingPageView = "splash"|"default"|"file"|"tour list" |"tour";
export interface LandingPageProps
{
initialView:LandingPageView;
weave:Weave;
weaveAppRef:(weaveApp:WeaveApp)=>void;
}
export interface LandingPageState
{
view:LandingPageView;
}
export default class LandingPage extends React.Component<LandingPageProps, LandingPageState>
{
urlParams:any;
constructor(props:LandingPageProps)
|
loadGetStartedComponentWithTourList=()=>{
this.props.weave.history.clearHistory(); // important to clear the hsitory created by prev tour
this.props.weave.root.removeAllObjects(); // important to clear the all the session state object created by prev tour
this.setState({
view:"tour list"
});
};
render():JSX.Element
{
if (this.state.view == "splash" || this.state.view == "tour list")
{
return (
<GetStartedComponent style={ {width: "100%", height: "100%"} }
showInteractiveTourList={this.state.view == "tour list"}
onViewSelect={(view:LandingPageView) => {this.setState({view})}} />
);
}
return (
<WeaveApp
ref={this.props.weaveAppRef}
weave={this.props.weave}
style={{width: "100%", height: "100%"}}
showFileDialog={this.state.view == "file"}
enableTour={this.state.view == "tour"}
readUrlParams={true}
onClose={this.loadGetStartedComponentWithTourList}
/>
)
}
}
|
{
super(props);
this.urlParams = MiscUtils.getUrlParams();
var weaveExternalTools: any;
/* Wrap this in a try/catch so we don't crash if there's a security exception from accessing a window in another domain. */
try
{
weaveExternalTools = window && window.opener && (window.opener as any)[WEAVE_EXTERNAL_TOOLS];
}
catch (e)
{
weaveExternalTools = null;
}
var view = props.initialView;
var exportedFromFlash = weaveExternalTools && weaveExternalTools[window.name];
if (this.urlParams.skipIntro || this.urlParams.file || exportedFromFlash)
view = "default" as LandingPageView;
this.state = {view: view as LandingPageView};
}
|
identifier_body
|
Highlights.tsx
|
import * as React from 'react';
|
<span className="feature-icon">
<i className="fa fa-cloud icon"></i>
</span>
<div className="tech-container">
<h3>Web</h3>
<p>
Experience in connecting users with content they need. Seth has created web APIs, for
consumption by other systems and mobile devices. An API centric model is essential for a growing
flexible business. Those APIs power user interfaces Seth has created. These complex web apps,
using the latest front end technologies, make for fast and responsive user experiences.
</p>
</div>
</div>
<div className="col-sm-4 tech-highlight">
<span className="feature-icon">
<i className="fa fa-database icon"></i>
</span>
<div className="tech-container">
<h3>Big Data</h3>
<p>
With a background in traditional relational databases and experience in the more
loose No-SQL data stores, Seth has worked with a broad range of storage systems for
a wide range data set requirements. Architecting large scale systems to process thousands
of requests per minute is a passion of Seth's.
</p>
</div>
</div>
<div className="col-sm-4 tech-highlight">
<span className="feature-icon">
<i className="fa fa-lock icon"></i>
</span>
<div className="tech-container">
<h3>Security</h3>
<p>
Everyone has the right to share content securely and anonymously. Systems Seth helps create
are secure using the latest proven web and cryptographic technologies to keep company
and user data safe. Sharing data, to only the desired recipient is key.
</p>
</div>
</div>
</div>
</section>
);
|
export const Highlights = () => (
<section id="highlights" className="container-fluid">
<div className="row">
<div className="col-sm-4 tech-highlight">
|
random_line_split
|
test.ts
|
/*
* @license Apache-2.0
*
* Copyright (c) 2019 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
|
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import incrsumabs2 = require( './index' );
// TESTS //
// The function returns an accumulator function...
{
incrsumabs2(); // $ExpectType accumulator
}
// The compiler throws an error if the function is provided arguments...
{
incrsumabs2( '5' ); // $ExpectError
incrsumabs2( 5 ); // $ExpectError
incrsumabs2( true ); // $ExpectError
incrsumabs2( false ); // $ExpectError
incrsumabs2( null ); // $ExpectError
incrsumabs2( undefined ); // $ExpectError
incrsumabs2( [] ); // $ExpectError
incrsumabs2( {} ); // $ExpectError
incrsumabs2( ( x: number ): number => x ); // $ExpectError
}
// The function returns an accumulator function which returns an accumulated result...
{
const acc = incrsumabs2();
acc(); // $ExpectType number | null
acc( 3.14 ); // $ExpectType number | null
}
// The compiler throws an error if the returned accumulator function is provided invalid arguments...
{
const acc = incrsumabs2();
acc( '5' ); // $ExpectError
acc( true ); // $ExpectError
acc( false ); // $ExpectError
acc( null ); // $ExpectError
acc( [] ); // $ExpectError
acc( {} ); // $ExpectError
acc( ( x: number ): number => x ); // $ExpectError
}
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
random_line_split
|
javax.swing.plaf.basic.BasicEditorPaneUI.d.ts
|
declare namespace javax {
namespace swing {
namespace plaf {
namespace basic {
class
|
extends javax.swing.plaf.basic.BasicTextUI {
public static createUI(arg0: javax.swing.JComponent): javax.swing.plaf.ComponentUI
public constructor()
protected getPropertyPrefix(): java.lang.String
public installUI(arg0: javax.swing.JComponent): void
public uninstallUI(arg0: javax.swing.JComponent): void
public getEditorKit(arg0: javax.swing.text.JTextComponent): javax.swing.text.EditorKit
getActionMap(): javax.swing.ActionMap
protected propertyChange(arg0: java.beans.PropertyChangeEvent): void
removeActions(arg0: javax.swing.ActionMap, arg1: javax.swing.Action[]): void
addActions(arg0: javax.swing.ActionMap, arg1: javax.swing.Action[]): void
updateDisplayProperties(arg0: java.awt.Font, arg1: java.awt.Color): void
cleanDisplayProperties(): void
}
}
}
}
}
|
BasicEditorPaneUI
|
identifier_name
|
javax.swing.plaf.basic.BasicEditorPaneUI.d.ts
|
declare namespace javax {
namespace swing {
namespace plaf {
namespace basic {
class BasicEditorPaneUI extends javax.swing.plaf.basic.BasicTextUI {
public static createUI(arg0: javax.swing.JComponent): javax.swing.plaf.ComponentUI
public constructor()
protected getPropertyPrefix(): java.lang.String
public installUI(arg0: javax.swing.JComponent): void
public uninstallUI(arg0: javax.swing.JComponent): void
public getEditorKit(arg0: javax.swing.text.JTextComponent): javax.swing.text.EditorKit
getActionMap(): javax.swing.ActionMap
protected propertyChange(arg0: java.beans.PropertyChangeEvent): void
removeActions(arg0: javax.swing.ActionMap, arg1: javax.swing.Action[]): void
addActions(arg0: javax.swing.ActionMap, arg1: javax.swing.Action[]): void
updateDisplayProperties(arg0: java.awt.Font, arg1: java.awt.Color): void
cleanDisplayProperties(): void
}
|
}
}
}
}
|
random_line_split
|
|
exhaustMap.ts
|
import { Operator } from '../Operator';
import { Observable } from '../Observable';
import { Subscriber } from '../Subscriber';
import { Subscription } from '../Subscription';
import { OuterSubscriber } from '../OuterSubscriber';
import { InnerSubscriber } from '../InnerSubscriber';
import { subscribeToResult } from '../util/subscribeToResult';
import { ObservableInput, OperatorFunction } from '../types';
import { map } from './map';
import { from } from '../observable/from';
/* tslint:disable:max-line-length */
export function exhaustMap<T, R>(project: (value: T, index: number) => ObservableInput<R>): OperatorFunction<T, R>;
/** @deprecated resultSelector is no longer supported. Use inner map instead. */
export function exhaustMap<T, R>(project: (value: T, index: number) => ObservableInput<R>, resultSelector: undefined): OperatorFunction<T, R>;
/** @deprecated resultSelector is no longer supported. Use inner map instead. */
export function exhaustMap<T, I, R>(project: (value: T, index: number) => ObservableInput<I>, resultSelector: (outerValue: T, innerValue: I, outerIndex: number, innerIndex: number) => R): OperatorFunction<T, R>;
/* tslint:enable:max-line-length */
/**
* Projects each source value to an Observable which is merged in the output
* Observable only if the previous projected Observable has completed.
*
* <span class="informal">Maps each value to an Observable, then flattens all of
* these inner Observables using {@link exhaust}.</span>
*
* <img src="./img/exhaustMap.png" width="100%">
*
* Returns an Observable that emits items based on applying a function that you
* supply to each item emitted by the source Observable, where that function
* returns an (so-called "inner") Observable. When it projects a source value to
* an Observable, the output Observable begins emitting the items emitted by
* that projected Observable. However, `exhaustMap` ignores every new projected
* Observable if the previous projected Observable has not yet completed. Once
* that one completes, it will accept and flatten the next projected Observable
* and repeat this process.
*
* ## Example
* Run a finite timer for each click, only if there is no currently active timer
* ```javascript
* const clicks = fromEvent(document, 'click');
* const result = clicks.pipe(
* exhaustMap((ev) => interval(1000).pipe(take(5))),
* );
* result.subscribe(x => console.log(x));
* ```
*
* @see {@link concatMap}
* @see {@link exhaust}
* @see {@link mergeMap}
* @see {@link switchMap}
*
* @param {function(value: T, ?index: number): ObservableInput} project A function
* that, when applied to an item emitted by the source Observable, returns an
* Observable.
* @return {Observable} An Observable containing projected Observables
* of each item of the source, ignoring projected Observables that start before
* their preceding Observable has completed.
* @method exhaustMap
* @owner Observable
*/
export function exhaustMap<T, I, R>(
project: (value: T, index: number) => ObservableInput<I>,
resultSelector?: (outerValue: T, innerValue: I, outerIndex: number, innerIndex: number) => R,
): OperatorFunction<T, I|R> {
if (resultSelector) {
// DEPRECATED PATH
return (source: Observable<T>) => source.pipe(
exhaustMap((a, i) => from(project(a, i)).pipe(
map((b, ii) => resultSelector(a, b, i, ii)),
)),
);
}
return (source: Observable<T>) =>
source.lift(new ExhauseMapOperator(project));
}
class ExhauseMapOperator<T, R> implements Operator<T, R> {
|
(private project: (value: T, index: number) => ObservableInput<R>) {
}
call(subscriber: Subscriber<R>, source: any): any {
return source.subscribe(new ExhaustMapSubscriber(subscriber, this.project));
}
}
/**
* We need this JSDoc comment for affecting ESDoc.
* @ignore
* @extends {Ignored}
*/
class ExhaustMapSubscriber<T, R> extends OuterSubscriber<T, R> {
private hasSubscription = false;
private hasCompleted = false;
private index = 0;
constructor(destination: Subscriber<R>,
private project: (value: T, index: number) => ObservableInput<R>) {
super(destination);
}
protected _next(value: T): void {
if (!this.hasSubscription) {
this.tryNext(value);
}
}
private tryNext(value: T): void {
const index = this.index++;
const destination = this.destination;
try {
const result = this.project(value, index);
this.hasSubscription = true;
this.add(subscribeToResult(this, result, value, index));
} catch (err) {
destination.error(err);
}
}
protected _complete(): void {
this.hasCompleted = true;
if (!this.hasSubscription) {
this.destination.complete();
}
}
notifyNext(outerValue: T, innerValue: R,
outerIndex: number, innerIndex: number,
innerSub: InnerSubscriber<T, R>): void {
this.destination.next(innerValue);
}
notifyError(err: any): void {
this.destination.error(err);
}
notifyComplete(innerSub: Subscription): void {
this.remove(innerSub);
this.hasSubscription = false;
if (this.hasCompleted) {
this.destination.complete();
}
}
}
|
constructor
|
identifier_name
|
exhaustMap.ts
|
import { Operator } from '../Operator';
import { Observable } from '../Observable';
import { Subscriber } from '../Subscriber';
import { Subscription } from '../Subscription';
import { OuterSubscriber } from '../OuterSubscriber';
import { InnerSubscriber } from '../InnerSubscriber';
import { subscribeToResult } from '../util/subscribeToResult';
import { ObservableInput, OperatorFunction } from '../types';
import { map } from './map';
import { from } from '../observable/from';
/* tslint:disable:max-line-length */
export function exhaustMap<T, R>(project: (value: T, index: number) => ObservableInput<R>): OperatorFunction<T, R>;
/** @deprecated resultSelector is no longer supported. Use inner map instead. */
export function exhaustMap<T, R>(project: (value: T, index: number) => ObservableInput<R>, resultSelector: undefined): OperatorFunction<T, R>;
/** @deprecated resultSelector is no longer supported. Use inner map instead. */
export function exhaustMap<T, I, R>(project: (value: T, index: number) => ObservableInput<I>, resultSelector: (outerValue: T, innerValue: I, outerIndex: number, innerIndex: number) => R): OperatorFunction<T, R>;
/* tslint:enable:max-line-length */
/**
* Projects each source value to an Observable which is merged in the output
* Observable only if the previous projected Observable has completed.
*
* <span class="informal">Maps each value to an Observable, then flattens all of
* these inner Observables using {@link exhaust}.</span>
*
* <img src="./img/exhaustMap.png" width="100%">
*
* Returns an Observable that emits items based on applying a function that you
* supply to each item emitted by the source Observable, where that function
* returns an (so-called "inner") Observable. When it projects a source value to
* an Observable, the output Observable begins emitting the items emitted by
* that projected Observable. However, `exhaustMap` ignores every new projected
* Observable if the previous projected Observable has not yet completed. Once
* that one completes, it will accept and flatten the next projected Observable
* and repeat this process.
*
* ## Example
* Run a finite timer for each click, only if there is no currently active timer
* ```javascript
* const clicks = fromEvent(document, 'click');
* const result = clicks.pipe(
* exhaustMap((ev) => interval(1000).pipe(take(5))),
* );
* result.subscribe(x => console.log(x));
* ```
*
* @see {@link concatMap}
* @see {@link exhaust}
* @see {@link mergeMap}
* @see {@link switchMap}
*
* @param {function(value: T, ?index: number): ObservableInput} project A function
* that, when applied to an item emitted by the source Observable, returns an
* Observable.
* @return {Observable} An Observable containing projected Observables
* of each item of the source, ignoring projected Observables that start before
* their preceding Observable has completed.
* @method exhaustMap
* @owner Observable
*/
export function exhaustMap<T, I, R>(
project: (value: T, index: number) => ObservableInput<I>,
resultSelector?: (outerValue: T, innerValue: I, outerIndex: number, innerIndex: number) => R,
): OperatorFunction<T, I|R> {
if (resultSelector) {
// DEPRECATED PATH
return (source: Observable<T>) => source.pipe(
exhaustMap((a, i) => from(project(a, i)).pipe(
map((b, ii) => resultSelector(a, b, i, ii)),
)),
);
}
return (source: Observable<T>) =>
source.lift(new ExhauseMapOperator(project));
}
class ExhauseMapOperator<T, R> implements Operator<T, R> {
constructor(private project: (value: T, index: number) => ObservableInput<R>) {
}
call(subscriber: Subscriber<R>, source: any): any {
return source.subscribe(new ExhaustMapSubscriber(subscriber, this.project));
}
}
/**
* We need this JSDoc comment for affecting ESDoc.
* @ignore
* @extends {Ignored}
*/
class ExhaustMapSubscriber<T, R> extends OuterSubscriber<T, R> {
private hasSubscription = false;
private hasCompleted = false;
private index = 0;
constructor(destination: Subscriber<R>,
private project: (value: T, index: number) => ObservableInput<R>) {
super(destination);
}
protected _next(value: T): void {
if (!this.hasSubscription) {
this.tryNext(value);
}
}
private tryNext(value: T): void {
const index = this.index++;
const destination = this.destination;
try {
const result = this.project(value, index);
this.hasSubscription = true;
this.add(subscribeToResult(this, result, value, index));
} catch (err) {
destination.error(err);
}
}
protected _complete(): void {
this.hasCompleted = true;
if (!this.hasSubscription) {
this.destination.complete();
}
}
notifyNext(outerValue: T, innerValue: R,
outerIndex: number, innerIndex: number,
innerSub: InnerSubscriber<T, R>): void {
this.destination.next(innerValue);
}
notifyError(err: any): void {
this.destination.error(err);
}
notifyComplete(innerSub: Subscription): void {
this.remove(innerSub);
|
}
}
|
this.hasSubscription = false;
if (this.hasCompleted) {
this.destination.complete();
}
|
random_line_split
|
exhaustMap.ts
|
import { Operator } from '../Operator';
import { Observable } from '../Observable';
import { Subscriber } from '../Subscriber';
import { Subscription } from '../Subscription';
import { OuterSubscriber } from '../OuterSubscriber';
import { InnerSubscriber } from '../InnerSubscriber';
import { subscribeToResult } from '../util/subscribeToResult';
import { ObservableInput, OperatorFunction } from '../types';
import { map } from './map';
import { from } from '../observable/from';
/* tslint:disable:max-line-length */
export function exhaustMap<T, R>(project: (value: T, index: number) => ObservableInput<R>): OperatorFunction<T, R>;
/** @deprecated resultSelector is no longer supported. Use inner map instead. */
export function exhaustMap<T, R>(project: (value: T, index: number) => ObservableInput<R>, resultSelector: undefined): OperatorFunction<T, R>;
/** @deprecated resultSelector is no longer supported. Use inner map instead. */
export function exhaustMap<T, I, R>(project: (value: T, index: number) => ObservableInput<I>, resultSelector: (outerValue: T, innerValue: I, outerIndex: number, innerIndex: number) => R): OperatorFunction<T, R>;
/* tslint:enable:max-line-length */
/**
* Projects each source value to an Observable which is merged in the output
* Observable only if the previous projected Observable has completed.
*
* <span class="informal">Maps each value to an Observable, then flattens all of
* these inner Observables using {@link exhaust}.</span>
*
* <img src="./img/exhaustMap.png" width="100%">
*
* Returns an Observable that emits items based on applying a function that you
* supply to each item emitted by the source Observable, where that function
* returns an (so-called "inner") Observable. When it projects a source value to
* an Observable, the output Observable begins emitting the items emitted by
* that projected Observable. However, `exhaustMap` ignores every new projected
* Observable if the previous projected Observable has not yet completed. Once
* that one completes, it will accept and flatten the next projected Observable
* and repeat this process.
*
* ## Example
* Run a finite timer for each click, only if there is no currently active timer
* ```javascript
* const clicks = fromEvent(document, 'click');
* const result = clicks.pipe(
* exhaustMap((ev) => interval(1000).pipe(take(5))),
* );
* result.subscribe(x => console.log(x));
* ```
*
* @see {@link concatMap}
* @see {@link exhaust}
* @see {@link mergeMap}
* @see {@link switchMap}
*
* @param {function(value: T, ?index: number): ObservableInput} project A function
* that, when applied to an item emitted by the source Observable, returns an
* Observable.
* @return {Observable} An Observable containing projected Observables
* of each item of the source, ignoring projected Observables that start before
* their preceding Observable has completed.
* @method exhaustMap
* @owner Observable
*/
export function exhaustMap<T, I, R>(
project: (value: T, index: number) => ObservableInput<I>,
resultSelector?: (outerValue: T, innerValue: I, outerIndex: number, innerIndex: number) => R,
): OperatorFunction<T, I|R> {
if (resultSelector) {
// DEPRECATED PATH
return (source: Observable<T>) => source.pipe(
exhaustMap((a, i) => from(project(a, i)).pipe(
map((b, ii) => resultSelector(a, b, i, ii)),
)),
);
}
return (source: Observable<T>) =>
source.lift(new ExhauseMapOperator(project));
}
class ExhauseMapOperator<T, R> implements Operator<T, R> {
constructor(private project: (value: T, index: number) => ObservableInput<R>) {
}
call(subscriber: Subscriber<R>, source: any): any {
return source.subscribe(new ExhaustMapSubscriber(subscriber, this.project));
}
}
/**
* We need this JSDoc comment for affecting ESDoc.
* @ignore
* @extends {Ignored}
*/
class ExhaustMapSubscriber<T, R> extends OuterSubscriber<T, R> {
private hasSubscription = false;
private hasCompleted = false;
private index = 0;
constructor(destination: Subscriber<R>,
private project: (value: T, index: number) => ObservableInput<R>) {
super(destination);
}
protected _next(value: T): void {
if (!this.hasSubscription) {
this.tryNext(value);
}
}
private tryNext(value: T): void {
const index = this.index++;
const destination = this.destination;
try {
const result = this.project(value, index);
this.hasSubscription = true;
this.add(subscribeToResult(this, result, value, index));
} catch (err) {
destination.error(err);
}
}
protected _complete(): void {
this.hasCompleted = true;
if (!this.hasSubscription) {
this.destination.complete();
}
}
notifyNext(outerValue: T, innerValue: R,
outerIndex: number, innerIndex: number,
innerSub: InnerSubscriber<T, R>): void {
this.destination.next(innerValue);
}
notifyError(err: any): void {
this.destination.error(err);
}
notifyComplete(innerSub: Subscription): void {
this.remove(innerSub);
this.hasSubscription = false;
if (this.hasCompleted)
|
}
}
|
{
this.destination.complete();
}
|
conditional_block
|
06. System components.js
|
/**
* Created by Vicky on 6/13/2017.
*/
function
|
(strArr) {
let components = new Map();
for (let line of strArr) {
let [system, component, subcomponent] = line.split(' | ');
if (!components.has(system)) {
components.set(system, new Map());
}
if (!components.get(system).has(component)) {
components.get(system).set(component, []);
}
components.get(system).get(component).push(subcomponent);
}
components = [...components].sort(compareSystems);
for (let [system, innerMap] of components) {
console.log(system);
innerMap = [...innerMap].sort(subCompSort);
for (let [component, subCompArr] of innerMap) {
console.log('|||' + component);
for (let subComp of subCompArr) {
console.log('||||||' + subComp);
}
}
}
function subCompSort(a, b) {
return a[1].length < b[1].length;
}
function compareSystems(a, b) {
if ([...a[1]].length > [...b[1]].length) {
return -1;
} else if ([...a[1]].length < [...b[1]].length) {
return 1;
} else {
if (a < b) {
return -1;
} else if (a > b) {
return 1;
} else {
return 0;
}
}
}
}
systemComponents([
'SULS | Main Site | Home Page',
'SULS | Main Site | Login Page',
'SULS | Main Site | Register Page',
'SULS | Judge Site | Login Page',
'SULS | Judge Site | Submittion Page',
'Lambda | CoreA | A23',
'SULS | Digital Site | Login Page',
'Lambda | CoreB | B24',
'Lambda | CoreA | A24',
'Lambda | CoreA | A25',
'Lambda | CoreC | C4',
'Indice | Session | Default Storage',
'Indice | Session | Default Security',
])
|
systemComponents
|
identifier_name
|
06. System components.js
|
/**
* Created by Vicky on 6/13/2017.
*/
function systemComponents(strArr) {
let components = new Map();
for (let line of strArr) {
let [system, component, subcomponent] = line.split(' | ');
if (!components.has(system)) {
components.set(system, new Map());
}
if (!components.get(system).has(component)) {
components.get(system).set(component, []);
}
components.get(system).get(component).push(subcomponent);
}
components = [...components].sort(compareSystems);
for (let [system, innerMap] of components) {
console.log(system);
innerMap = [...innerMap].sort(subCompSort);
for (let [component, subCompArr] of innerMap) {
console.log('|||' + component);
for (let subComp of subCompArr) {
console.log('||||||' + subComp);
}
}
}
function subCompSort(a, b)
|
function compareSystems(a, b) {
if ([...a[1]].length > [...b[1]].length) {
return -1;
} else if ([...a[1]].length < [...b[1]].length) {
return 1;
} else {
if (a < b) {
return -1;
} else if (a > b) {
return 1;
} else {
return 0;
}
}
}
}
systemComponents([
'SULS | Main Site | Home Page',
'SULS | Main Site | Login Page',
'SULS | Main Site | Register Page',
'SULS | Judge Site | Login Page',
'SULS | Judge Site | Submittion Page',
'Lambda | CoreA | A23',
'SULS | Digital Site | Login Page',
'Lambda | CoreB | B24',
'Lambda | CoreA | A24',
'Lambda | CoreA | A25',
'Lambda | CoreC | C4',
'Indice | Session | Default Storage',
'Indice | Session | Default Security',
])
|
{
return a[1].length < b[1].length;
}
|
identifier_body
|
06. System components.js
|
/**
* Created by Vicky on 6/13/2017.
*/
function systemComponents(strArr) {
let components = new Map();
for (let line of strArr) {
let [system, component, subcomponent] = line.split(' | ');
if (!components.has(system)) {
|
components.set(system, new Map());
}
if (!components.get(system).has(component)) {
components.get(system).set(component, []);
}
components.get(system).get(component).push(subcomponent);
}
components = [...components].sort(compareSystems);
for (let [system, innerMap] of components) {
console.log(system);
innerMap = [...innerMap].sort(subCompSort);
for (let [component, subCompArr] of innerMap) {
console.log('|||' + component);
for (let subComp of subCompArr) {
console.log('||||||' + subComp);
}
}
}
function subCompSort(a, b) {
return a[1].length < b[1].length;
}
function compareSystems(a, b) {
if ([...a[1]].length > [...b[1]].length) {
return -1;
} else if ([...a[1]].length < [...b[1]].length) {
return 1;
} else {
if (a < b) {
return -1;
} else if (a > b) {
return 1;
} else {
return 0;
}
}
}
}
systemComponents([
'SULS | Main Site | Home Page',
'SULS | Main Site | Login Page',
'SULS | Main Site | Register Page',
'SULS | Judge Site | Login Page',
'SULS | Judge Site | Submittion Page',
'Lambda | CoreA | A23',
'SULS | Digital Site | Login Page',
'Lambda | CoreB | B24',
'Lambda | CoreA | A24',
'Lambda | CoreA | A25',
'Lambda | CoreC | C4',
'Indice | Session | Default Storage',
'Indice | Session | Default Security',
])
|
random_line_split
|
|
06. System components.js
|
/**
* Created by Vicky on 6/13/2017.
*/
function systemComponents(strArr) {
let components = new Map();
for (let line of strArr) {
let [system, component, subcomponent] = line.split(' | ');
if (!components.has(system)) {
components.set(system, new Map());
}
if (!components.get(system).has(component)) {
components.get(system).set(component, []);
}
components.get(system).get(component).push(subcomponent);
}
components = [...components].sort(compareSystems);
for (let [system, innerMap] of components) {
console.log(system);
innerMap = [...innerMap].sort(subCompSort);
for (let [component, subCompArr] of innerMap) {
console.log('|||' + component);
for (let subComp of subCompArr) {
console.log('||||||' + subComp);
}
}
}
function subCompSort(a, b) {
return a[1].length < b[1].length;
}
function compareSystems(a, b) {
if ([...a[1]].length > [...b[1]].length)
|
else if ([...a[1]].length < [...b[1]].length) {
return 1;
} else {
if (a < b) {
return -1;
} else if (a > b) {
return 1;
} else {
return 0;
}
}
}
}
systemComponents([
'SULS | Main Site | Home Page',
'SULS | Main Site | Login Page',
'SULS | Main Site | Register Page',
'SULS | Judge Site | Login Page',
'SULS | Judge Site | Submittion Page',
'Lambda | CoreA | A23',
'SULS | Digital Site | Login Page',
'Lambda | CoreB | B24',
'Lambda | CoreA | A24',
'Lambda | CoreA | A25',
'Lambda | CoreC | C4',
'Indice | Session | Default Storage',
'Indice | Session | Default Security',
])
|
{
return -1;
}
|
conditional_block
|
ifttt.js
|
var request = require('request');
var IFTTT_CONNECTION_TIMEOUT_MS = 20000;
module.exports = function (RED) {
// This is a config node holding the keys for connecting to PubNub
function IftttKeyNode(n) {
RED.nodes.createNode(this, n);
}
RED.nodes.registerType('ifttt-key', IftttKeyNode, {credentials: {key: {type: 'text'}}});
// This is the output node.
function IftttOutNode(config)
|
RED.nodes.registerType('ifttt out', IftttOutNode);
};
|
{
RED.nodes.createNode(this, config);
var node = this;
node.config = config;
node.key = RED.nodes.getNode(config.key);
this.on('input', function (msg) {
node.status({fill: 'blue', shape: 'dot', text: 'Sending...'});
var iftttPayload = {};
if (msg.payload) {
iftttPayload.value1 = msg.payload.value1;
iftttPayload.value2 = msg.payload.value2;
iftttPayload.value3 = msg.payload.value3;
}
var eventName = msg.payload.eventName ? msg.payload.eventName : node.config.eventName;
request({
uri: 'https://maker.ifttt.com/trigger/' + eventName + '/with/key/' + node.key.credentials.key,
method: 'POST',
timeout: IFTTT_CONNECTION_TIMEOUT_MS,
json: iftttPayload
}, function (error, response, body) {
if (!error && response.statusCode === 200) {
node.status({fill: 'green', shape: 'dot', text: 'Sent!'});
} else {
var errorMessage;
try {
errorMessage = (JSON.parse(body).hasOwnProperty('errors')) ? JSON.parse(body).errors[0].message : JSON.parse(body);
} catch (e) {
node.error("IFTTT Read error");
errorMessage = e;
}
node.status({fill: 'red', shape: 'dot', text: 'Error!'});
node.error(errorMessage);
}
setTimeout(function () {
node.status({});
}, 1000);
});
});
}
|
identifier_body
|
ifttt.js
|
var request = require('request');
var IFTTT_CONNECTION_TIMEOUT_MS = 20000;
module.exports = function (RED) {
// This is a config node holding the keys for connecting to PubNub
function IftttKeyNode(n) {
RED.nodes.createNode(this, n);
}
RED.nodes.registerType('ifttt-key', IftttKeyNode, {credentials: {key: {type: 'text'}}});
// This is the output node.
function IftttOutNode(config) {
RED.nodes.createNode(this, config);
var node = this;
node.config = config;
node.key = RED.nodes.getNode(config.key);
this.on('input', function (msg) {
node.status({fill: 'blue', shape: 'dot', text: 'Sending...'});
var iftttPayload = {};
if (msg.payload) {
iftttPayload.value1 = msg.payload.value1;
iftttPayload.value2 = msg.payload.value2;
|
}
var eventName = msg.payload.eventName ? msg.payload.eventName : node.config.eventName;
request({
uri: 'https://maker.ifttt.com/trigger/' + eventName + '/with/key/' + node.key.credentials.key,
method: 'POST',
timeout: IFTTT_CONNECTION_TIMEOUT_MS,
json: iftttPayload
}, function (error, response, body) {
if (!error && response.statusCode === 200) {
node.status({fill: 'green', shape: 'dot', text: 'Sent!'});
} else {
var errorMessage;
try {
errorMessage = (JSON.parse(body).hasOwnProperty('errors')) ? JSON.parse(body).errors[0].message : JSON.parse(body);
} catch (e) {
node.error("IFTTT Read error");
errorMessage = e;
}
node.status({fill: 'red', shape: 'dot', text: 'Error!'});
node.error(errorMessage);
}
setTimeout(function () {
node.status({});
}, 1000);
});
});
}
RED.nodes.registerType('ifttt out', IftttOutNode);
};
|
iftttPayload.value3 = msg.payload.value3;
|
random_line_split
|
ifttt.js
|
var request = require('request');
var IFTTT_CONNECTION_TIMEOUT_MS = 20000;
module.exports = function (RED) {
// This is a config node holding the keys for connecting to PubNub
function IftttKeyNode(n) {
RED.nodes.createNode(this, n);
}
RED.nodes.registerType('ifttt-key', IftttKeyNode, {credentials: {key: {type: 'text'}}});
// This is the output node.
function
|
(config) {
RED.nodes.createNode(this, config);
var node = this;
node.config = config;
node.key = RED.nodes.getNode(config.key);
this.on('input', function (msg) {
node.status({fill: 'blue', shape: 'dot', text: 'Sending...'});
var iftttPayload = {};
if (msg.payload) {
iftttPayload.value1 = msg.payload.value1;
iftttPayload.value2 = msg.payload.value2;
iftttPayload.value3 = msg.payload.value3;
}
var eventName = msg.payload.eventName ? msg.payload.eventName : node.config.eventName;
request({
uri: 'https://maker.ifttt.com/trigger/' + eventName + '/with/key/' + node.key.credentials.key,
method: 'POST',
timeout: IFTTT_CONNECTION_TIMEOUT_MS,
json: iftttPayload
}, function (error, response, body) {
if (!error && response.statusCode === 200) {
node.status({fill: 'green', shape: 'dot', text: 'Sent!'});
} else {
var errorMessage;
try {
errorMessage = (JSON.parse(body).hasOwnProperty('errors')) ? JSON.parse(body).errors[0].message : JSON.parse(body);
} catch (e) {
node.error("IFTTT Read error");
errorMessage = e;
}
node.status({fill: 'red', shape: 'dot', text: 'Error!'});
node.error(errorMessage);
}
setTimeout(function () {
node.status({});
}, 1000);
});
});
}
RED.nodes.registerType('ifttt out', IftttOutNode);
};
|
IftttOutNode
|
identifier_name
|
ifttt.js
|
var request = require('request');
var IFTTT_CONNECTION_TIMEOUT_MS = 20000;
module.exports = function (RED) {
// This is a config node holding the keys for connecting to PubNub
function IftttKeyNode(n) {
RED.nodes.createNode(this, n);
}
RED.nodes.registerType('ifttt-key', IftttKeyNode, {credentials: {key: {type: 'text'}}});
// This is the output node.
function IftttOutNode(config) {
RED.nodes.createNode(this, config);
var node = this;
node.config = config;
node.key = RED.nodes.getNode(config.key);
this.on('input', function (msg) {
node.status({fill: 'blue', shape: 'dot', text: 'Sending...'});
var iftttPayload = {};
if (msg.payload) {
iftttPayload.value1 = msg.payload.value1;
iftttPayload.value2 = msg.payload.value2;
iftttPayload.value3 = msg.payload.value3;
}
var eventName = msg.payload.eventName ? msg.payload.eventName : node.config.eventName;
request({
uri: 'https://maker.ifttt.com/trigger/' + eventName + '/with/key/' + node.key.credentials.key,
method: 'POST',
timeout: IFTTT_CONNECTION_TIMEOUT_MS,
json: iftttPayload
}, function (error, response, body) {
if (!error && response.statusCode === 200) {
node.status({fill: 'green', shape: 'dot', text: 'Sent!'});
} else
|
setTimeout(function () {
node.status({});
}, 1000);
});
});
}
RED.nodes.registerType('ifttt out', IftttOutNode);
};
|
{
var errorMessage;
try {
errorMessage = (JSON.parse(body).hasOwnProperty('errors')) ? JSON.parse(body).errors[0].message : JSON.parse(body);
} catch (e) {
node.error("IFTTT Read error");
errorMessage = e;
}
node.status({fill: 'red', shape: 'dot', text: 'Error!'});
node.error(errorMessage);
}
|
conditional_block
|
app.js
|
import {inject} from 'aurelia-framework';
import {EditSessionFactory} from '../editing/edit-session-factory';
import {CurrentFileChangedEvent} from '../editing/current-file-changed-event';
import {QueryString} from '../editing/query-string';
import {defaultGist} from '../github/default-gist';
import {Importer} from '../import/importer';
import {Focus} from './focus';
import alertify from 'alertify';
@inject(EditSessionFactory, Importer, QueryString, Focus)
export class App {
editSession = null;
constructor(editSessionFactory, importer, queryString, focus) {
this.editSessionFactory = editSessionFactory;
this.importer = importer;
this.queryString = queryString;
this.focus = focus;
addEventListener('beforeunload', ::this.beforeUnload);
}
beforeUnload(event) {
if (this.editSession && this.editSession.dirty) {
event.returnValue = 'You have unsaved work in this Gist.';
}
}
currentFileChanged(event) {
if (event.file.name === '') {
this.focus.set('filename');
} else {
this.focus.set('editor');
}
}
setEditSession(editSession) {
if (this.fileChangedSub) {
this.fileChangedSub.dispose();
}
this.editSession = editSession;
this.fileChangedSub = editSession.subscribe(CurrentFileChangedEvent, ::this.currentFileChanged);
this.editSession.resetWorker().then(::this.editSession.run);
}
activate() {
return this.queryString.read()
.then(gist => this.setEditSession(this.editSessionFactory.create(gist)));
}
newGist()
|
import(urlOrId) {
this.importer.import(urlOrId)
.then(gist => {
this.queryString.write(gist, true);
return this.editSessionFactory.create(gist);
})
.then(::this.setEditSession)
.then(() => alertify.success('Import successful.'), reason => alertify.error(reason));
}
}
|
{
this.queryString.clear();
this.setEditSession(this.editSessionFactory.create(defaultGist));
}
|
identifier_body
|
app.js
|
import {inject} from 'aurelia-framework';
import {EditSessionFactory} from '../editing/edit-session-factory';
import {CurrentFileChangedEvent} from '../editing/current-file-changed-event';
import {QueryString} from '../editing/query-string';
import {defaultGist} from '../github/default-gist';
import {Importer} from '../import/importer';
import {Focus} from './focus';
import alertify from 'alertify';
@inject(EditSessionFactory, Importer, QueryString, Focus)
export class App {
editSession = null;
constructor(editSessionFactory, importer, queryString, focus) {
this.editSessionFactory = editSessionFactory;
this.importer = importer;
this.queryString = queryString;
this.focus = focus;
addEventListener('beforeunload', ::this.beforeUnload);
}
beforeUnload(event) {
if (this.editSession && this.editSession.dirty) {
event.returnValue = 'You have unsaved work in this Gist.';
}
}
currentFileChanged(event) {
if (event.file.name === '')
|
else {
this.focus.set('editor');
}
}
setEditSession(editSession) {
if (this.fileChangedSub) {
this.fileChangedSub.dispose();
}
this.editSession = editSession;
this.fileChangedSub = editSession.subscribe(CurrentFileChangedEvent, ::this.currentFileChanged);
this.editSession.resetWorker().then(::this.editSession.run);
}
activate() {
return this.queryString.read()
.then(gist => this.setEditSession(this.editSessionFactory.create(gist)));
}
newGist() {
this.queryString.clear();
this.setEditSession(this.editSessionFactory.create(defaultGist));
}
import(urlOrId) {
this.importer.import(urlOrId)
.then(gist => {
this.queryString.write(gist, true);
return this.editSessionFactory.create(gist);
})
.then(::this.setEditSession)
.then(() => alertify.success('Import successful.'), reason => alertify.error(reason));
}
}
|
{
this.focus.set('filename');
}
|
conditional_block
|
app.js
|
import {inject} from 'aurelia-framework';
import {EditSessionFactory} from '../editing/edit-session-factory';
import {CurrentFileChangedEvent} from '../editing/current-file-changed-event';
import {QueryString} from '../editing/query-string';
import {defaultGist} from '../github/default-gist';
import {Importer} from '../import/importer';
import {Focus} from './focus';
import alertify from 'alertify';
@inject(EditSessionFactory, Importer, QueryString, Focus)
export class App {
editSession = null;
constructor(editSessionFactory, importer, queryString, focus) {
this.editSessionFactory = editSessionFactory;
this.importer = importer;
this.queryString = queryString;
this.focus = focus;
addEventListener('beforeunload', ::this.beforeUnload);
}
beforeUnload(event) {
if (this.editSession && this.editSession.dirty) {
event.returnValue = 'You have unsaved work in this Gist.';
}
}
currentFileChanged(event) {
if (event.file.name === '') {
this.focus.set('filename');
} else {
this.focus.set('editor');
}
}
setEditSession(editSession) {
if (this.fileChangedSub) {
this.fileChangedSub.dispose();
}
this.editSession = editSession;
this.fileChangedSub = editSession.subscribe(CurrentFileChangedEvent, ::this.currentFileChanged);
this.editSession.resetWorker().then(::this.editSession.run);
}
activate() {
return this.queryString.read()
.then(gist => this.setEditSession(this.editSessionFactory.create(gist)));
}
|
() {
this.queryString.clear();
this.setEditSession(this.editSessionFactory.create(defaultGist));
}
import(urlOrId) {
this.importer.import(urlOrId)
.then(gist => {
this.queryString.write(gist, true);
return this.editSessionFactory.create(gist);
})
.then(::this.setEditSession)
.then(() => alertify.success('Import successful.'), reason => alertify.error(reason));
}
}
|
newGist
|
identifier_name
|
app.js
|
import {inject} from 'aurelia-framework';
import {EditSessionFactory} from '../editing/edit-session-factory';
import {CurrentFileChangedEvent} from '../editing/current-file-changed-event';
import {QueryString} from '../editing/query-string';
import {defaultGist} from '../github/default-gist';
import {Importer} from '../import/importer';
import {Focus} from './focus';
import alertify from 'alertify';
@inject(EditSessionFactory, Importer, QueryString, Focus)
export class App {
editSession = null;
constructor(editSessionFactory, importer, queryString, focus) {
this.editSessionFactory = editSessionFactory;
this.importer = importer;
this.queryString = queryString;
this.focus = focus;
addEventListener('beforeunload', ::this.beforeUnload);
}
beforeUnload(event) {
if (this.editSession && this.editSession.dirty) {
event.returnValue = 'You have unsaved work in this Gist.';
}
}
currentFileChanged(event) {
if (event.file.name === '') {
this.focus.set('filename');
} else {
this.focus.set('editor');
}
}
setEditSession(editSession) {
if (this.fileChangedSub) {
this.fileChangedSub.dispose();
|
this.editSession.resetWorker().then(::this.editSession.run);
}
activate() {
return this.queryString.read()
.then(gist => this.setEditSession(this.editSessionFactory.create(gist)));
}
newGist() {
this.queryString.clear();
this.setEditSession(this.editSessionFactory.create(defaultGist));
}
import(urlOrId) {
this.importer.import(urlOrId)
.then(gist => {
this.queryString.write(gist, true);
return this.editSessionFactory.create(gist);
})
.then(::this.setEditSession)
.then(() => alertify.success('Import successful.'), reason => alertify.error(reason));
}
}
|
}
this.editSession = editSession;
this.fileChangedSub = editSession.subscribe(CurrentFileChangedEvent, ::this.currentFileChanged);
|
random_line_split
|
GoogleSource.js
|
/**
* Copyright (c) 2008-2011 The Open Planning Project
*
* Published under the BSD license.
* See https://github.com/opengeo/gxp/raw/master/license.txt for the full text
* of the license.
*/
/**
* @requires plugins/LayerSource.js
*/
/** api: (define)
* module = gxp.plugins
* class = GoogleSource
*/
/** api: (extends)
* plugins/LayerSource.js
*/
Ext.namespace("gxp.plugins");
/** api: constructor
* .. class:: GoolgeSource(config)
*
* Plugin for using Google layers with :class:`gxp.Viewer` instances. The
* plugin uses the GMaps v3 API and also takes care of loading the
* required Google resources.
*
* Available layer names for this source are "ROADMAP", "SATELLITE",
* "HYBRID" and "TERRAIN"
*/
/** api: example
* The configuration in the ``sources`` property of the :class:`gxp.Viewer` is
* straightforward:
*
* .. code-block:: javascript
*
* "google": {
* ptype: "gxp_google"
* }
*
* A typical configuration for a layer from this source (in the ``layers``
* array of the viewer's ``map`` config option would look like this:
*
* .. code-block:: javascript
*
* {
* source: "google",
* name: "TERRAIN"
* }
*
*/
gxp.plugins.GoogleSource = Ext.extend(gxp.plugins.LayerSource, {
/** api: ptype = gxp_googlesource */
ptype: "gxp_googlesource",
/** config: config[timeout]
* ``Number``
* The time (in milliseconds) to wait before giving up on the Google Maps
* script loading. This layer source will not be availble if the script
* does not load within the given timeout. Default is 7000 (seven seconds).
*/
timeout: 7000,
/** api: property[store]
* ``GeoExt.data.LayerStore`` containing records with "ROADMAP",
* "SATELLITE", "HYBRID" and "TERRAIN" name fields.
*/
/** api: config[title]
* ``String``
* A descriptive title for this layer source (i18n).
*/
title: "Google Layers",
/** api: config[roadmapAbstract]
* ``String``
* Description of the ROADMAP layer (i18n).
*/
roadmapAbstract: "Show street map",
/** api: config[satelliteAbstract]
* ``String``
* Description of the SATELLITE layer (i18n).
*/
satelliteAbstract: "Show satellite imagery",
/** api: config[hybridAbstract]
* ``String``
* Description of the HYBRID layer (i18n).
*/
hybridAbstract: "Show imagery with street names",
/** api: config[terrainAbstract]
* ``String``
* Description of the TERRAIN layer (i18n).
*/
terrainAbstract: "Show street map with terrain",
constructor: function(config) {
this.config = config;
gxp.plugins.GoogleSource.superclass.constructor.apply(this, arguments);
},
/** api: method[createStore]
*
* Creates a store of layer records. Fires "ready" when store is loaded.
*/
createStore: function() {
gxp.plugins.GoogleSource.loader.onLoad({
timeout: this.timeout,
callback: this.syncCreateStore,
errback: function() {
delete this.store;
this.fireEvent(
"failure",
this,
"The Google Maps script failed to load within the provided timeout (" + (this.timeout / 1000) + " s)."
);
},
scope: this
});
},
/** private: method[syncCreateStore]
*
* Creates a store of layers. This requires that the API script has already
* loaded. Fires the "ready" event when the store is loaded.
*/
syncCreateStore: function() {
// TODO: The abstracts ("alt" properties) should be derived from the
// MapType objects themselves. It doesn't look like there is currently
// a way to get the default map types before creating a map object.
// http://code.google.com/p/gmaps-api-issues/issues/detail?id=2562
// TODO: We may also be able to determine the MAX_ZOOM_LEVEL for each
// layer type. If not, consider setting them on the OpenLayers level.
var mapTypes = {
"ROADMAP": {"abstract": this.roadmapAbstract, MAX_ZOOM_LEVEL: 20},
"SATELLITE": {"abstract": this.satelliteAbstract},
"HYBRID": {"abstract": this.hybridAbstract},
"TERRAIN": {"abstract": this.terrainAbstract, MAX_ZOOM_LEVEL: 15}
};
var layers = [];
var name, mapType;
for (name in mapTypes) {
mapType = google.maps.MapTypeId[name];
layers.push(new OpenLayers.Layer.Google(
// TODO: get MapType object name
// http://code.google.com/p/gmaps-api-issues/issues/detail?id=2562
"Google " + mapType.replace(/\w/, function(c) {return c.toUpperCase();}), {
type: mapType,
typeName: name,
MAX_ZOOM_LEVEL: mapTypes[name].MAX_ZOOM_LEVEL,
maxExtent: new OpenLayers.Bounds(-20037508.34,-20037508.34,20037508.34,20037508.34),
restrictedExtent: new OpenLayers.Bounds(-20037508.34,-20037508.34,20037508.34,20037508.34),
projection: this.projection
}
));
}
this.store = new GeoExt.data.LayerStore({
layers: layers,
fields: [
{name: "source", type: "string"},
{name: "name", type: "string", mapping: "typeName"},
{name: "abstract", type: "string"},
{name: "group", type: "string", defaultValue: "background"},
{name: "fixed", type: "boolean", defaultValue: true},
{name: "selected", type: "boolean"}
]
});
this.store.each(function(l) {
l.set("abstract", mapTypes[l.get("name")]["abstract"]);
});
this.fireEvent("ready", this);
},
/** api: method[createLayerRecord]
* :arg config: ``Object`` The application config for this layer.
* :returns: ``GeoExt.data.LayerRecord``
*
* Create a layer record given the config.
*/
createLayerRecord: function(config) {
var record;
var cmp = function(l) {
return l.get("name") === config.name;
};
// only return layer if app does not have it already
if (this.target.mapPanel.layers.findBy(cmp) == -1)
|
return record;
}
});
/**
* Create a loader singleton that all plugin instances can use.
*/
gxp.plugins.GoogleSource.loader = new (Ext.extend(Ext.util.Observable, {
/** private: property[ready]
* ``Boolean``
* This plugin type is ready to use.
*/
ready: !!(window.google && google.maps),
/** private: property[loading]
* ``Boolean``
* The resources for this plugin type are loading.
*/
loading: false,
constructor: function() {
this.addEvents(
/** private: event[ready]
* Fires when this plugin type is ready.
*/
"ready",
/** private: event[failure]
* Fires when script loading fails.
*/
"failure"
);
return Ext.util.Observable.prototype.constructor.apply(this, arguments);
},
/** private: method[onScriptLoad]
* Called when all resources required by this plugin type have loaded.
*/
onScriptLoad: function() {
// the google loader calls this in the window scope
var monitor = gxp.plugins.GoogleSource.loader;
if (!monitor.ready) {
monitor.ready = true;
monitor.loading = false;
monitor.fireEvent("ready");
}
},
/** api: method[gxp.plugins.GoogleSource.loader.onLoad]
* :arg options: ``Object``
*
* Options:
*
* * callback - ``Function`` Called when script loads.
* * errback - ``Function`` Called if loading fails.
* * timeout - ``Number`` Time to wait before deciding that loading failed
* (in milliseconds).
* * scope - ``Object`` The ``this`` object for callbacks.
*/
onLoad: function(options) {
if (this.ready) {
// call this in the next turn for consistent return before callback
window.setTimeout(function() {
options.callback.call(options.scope);
}, 0);
} else if (!this.loading) {
this.loadScript(options);
} else {
this.on({
ready: options.callback,
failure: options.errback || Ext.emptyFn,
scope: options.scope
});
}
},
/** private: method[onScriptLoad]
* Called when all resources required by this plugin type have loaded.
*/
loadScript: function(options) {
var params = {
autoload: Ext.encode({
modules: [{
name: "maps",
version: 3.3,
nocss: "true",
callback: "gxp.plugins.GoogleSource.loader.onScriptLoad",
other_params: "sensor=false"
}]
})
};
var script = document.createElement("script");
script.src = "http://www.google.com/jsapi?" + Ext.urlEncode(params);
// cancel loading if monitor is not ready within timeout
var errback = options.errback || Ext.emptyFn;
var timeout = options.timeout || gxp.plugins.GoogleSource.prototype.timeout;
window.setTimeout((function() {
if (!gxp.plugins.GoogleSource.loader.ready) {
this.loading = false;
this.ready = false;
document.getElementsByTagName("head")[0].removeChild(script);
errback.call(options.scope);
this.fireEvent("failure");
this.purgeListeners();
}
}).createDelegate(this), timeout);
// register callback for ready
this.on({
ready: options.callback,
scope: options.scope
});
this.loading = true;
document.getElementsByTagName("head")[0].appendChild(script);
}
}))();
Ext.preg(gxp.plugins.GoogleSource.prototype.ptype, gxp.plugins.GoogleSource);
|
{
// records can be in only one store
record = this.store.getAt(this.store.findBy(cmp)).clone();
var layer = record.getLayer();
// set layer title from config
if (config.title) {
/**
* Because the layer title data is duplicated, we have
* to set it in both places. After records have been
* added to the store, the store handles this
* synchronization.
*/
layer.setName(config.title);
record.set("title", config.title);
}
// set visibility from config
if ("visibility" in config) {
layer.visibility = config.visibility;
}
record.set("selected", config.selected || false);
record.set("source", config.source);
record.set("name", config.name);
if ("group" in config) {
record.set("group", config.group);
}
record.commit();
}
|
conditional_block
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.