file_name (large_string, lengths 4-140) | prefix (large_string, lengths 0-39k) | suffix (large_string, lengths 0-36.1k) | middle (large_string, lengths 0-29.4k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
mrp_workcenter_load.py | # -*- coding: utf-8 -*- | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class mrp_workcenter_load(osv.osv_memory):
_name = 'mrp.workcenter.load'
_description = 'Work Center Load'
_columns = {
'time_unit': fields.selection([('day', 'Day by day'),('week', 'Per week'),('month', 'Per month')],'Type of period', required=True),
'measure_unit': fields.selection([('hours', 'Amount in hours'),('cycles', 'Amount in cycles')],'Amount measuring unit', required=True),
}
def print_report(self, cr, uid, ids, context=None):
""" To print the report of Work Center Load
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param context: A standard dictionary
@return : Report
"""
if context is None:
context = {}
datas = {'ids' : context.get('active_ids',[])}
res = self.read(cr, uid, ids, ['time_unit','measure_unit'])
res = res and res[0] or {}
datas['form'] = res
return {
'type' : 'ir.actions.report.xml',
'report_name':'mrp.workcenter.load',
'datas' : datas,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# | random_line_split |
mrp_workcenter_load.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class mrp_workcenter_load(osv.osv_memory):
_name = 'mrp.workcenter.load'
_description = 'Work Center Load'
_columns = {
'time_unit': fields.selection([('day', 'Day by day'),('week', 'Per week'),('month', 'Per month')],'Type of period', required=True),
'measure_unit': fields.selection([('hours', 'Amount in hours'),('cycles', 'Amount in cycles')],'Amount measuring unit', required=True),
}
def print_report(self, cr, uid, ids, context=None):
|
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| """ To print the report of Work Center Load
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param context: A standard dictionary
@return : Report
"""
if context is None:
context = {}
datas = {'ids' : context.get('active_ids',[])}
res = self.read(cr, uid, ids, ['time_unit','measure_unit'])
res = res and res[0] or {}
datas['form'] = res
return {
'type' : 'ir.actions.report.xml',
'report_name':'mrp.workcenter.load',
'datas' : datas,
} | identifier_body |
mrp_workcenter_load.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class mrp_workcenter_load(osv.osv_memory):
_name = 'mrp.workcenter.load'
_description = 'Work Center Load'
_columns = {
'time_unit': fields.selection([('day', 'Day by day'),('week', 'Per week'),('month', 'Per month')],'Type of period', required=True),
'measure_unit': fields.selection([('hours', 'Amount in hours'),('cycles', 'Amount in cycles')],'Amount measuring unit', required=True),
}
def | (self, cr, uid, ids, context=None):
""" To print the report of Work Center Load
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param context: A standard dictionary
@return : Report
"""
if context is None:
context = {}
datas = {'ids' : context.get('active_ids',[])}
res = self.read(cr, uid, ids, ['time_unit','measure_unit'])
res = res and res[0] or {}
datas['form'] = res
return {
'type' : 'ir.actions.report.xml',
'report_name':'mrp.workcenter.load',
'datas' : datas,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| print_report | identifier_name |
mrp_workcenter_load.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class mrp_workcenter_load(osv.osv_memory):
_name = 'mrp.workcenter.load'
_description = 'Work Center Load'
_columns = {
'time_unit': fields.selection([('day', 'Day by day'),('week', 'Per week'),('month', 'Per month')],'Type of period', required=True),
'measure_unit': fields.selection([('hours', 'Amount in hours'),('cycles', 'Amount in cycles')],'Amount measuring unit', required=True),
}
def print_report(self, cr, uid, ids, context=None):
""" To print the report of Work Center Load
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param context: A standard dictionary
@return : Report
"""
if context is None:
|
datas = {'ids' : context.get('active_ids',[])}
res = self.read(cr, uid, ids, ['time_unit','measure_unit'])
res = res and res[0] or {}
datas['form'] = res
return {
'type' : 'ir.actions.report.xml',
'report_name':'mrp.workcenter.load',
'datas' : datas,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| context = {} | conditional_block |
debugEditorContribution.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import nls = require('vs/nls');
import { TPromise } from 'vs/base/common/winjs.base';
import { RunOnceScheduler } from 'vs/base/common/async';
import lifecycle = require('vs/base/common/lifecycle');
import env = require('vs/base/common/platform');
import uri from 'vs/base/common/uri';
import { IAction, Action } from 'vs/base/common/actions';
import { KeyCode } from 'vs/base/common/keyCodes';
import keyboard = require('vs/base/browser/keyboardEvent');
import editorbrowser = require('vs/editor/browser/editorBrowser');
import editorcommon = require('vs/editor/common/editorCommon');
import { DebugHoverWidget } from 'vs/workbench/parts/debug/browser/debugHover';
import debugactions = require('vs/workbench/parts/debug/electron-browser/debugActions');
import debug = require('vs/workbench/parts/debug/common/debug');
import { IWorkspaceContextService } from 'vs/workbench/services/workspace/common/contextService';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { IContextMenuService } from 'vs/platform/contextview/browser/contextView';
const HOVER_DELAY = 300;
export class DebugEditorContribution implements debug.IDebugEditorContribution {
private toDispose: lifecycle.IDisposable[];
private breakpointHintDecoration: string[];
private hoverWidget: DebugHoverWidget;
private showHoverScheduler: RunOnceScheduler;
private hideHoverScheduler: RunOnceScheduler;
private hoverRange: editorcommon.IEditorRange;
private hoveringOver: string;
static getDebugEditorContribution(editor: editorcommon.ICommonCodeEditor): DebugEditorContribution {
return <DebugEditorContribution>editor.getContribution(debug.EDITOR_CONTRIBUTION_ID);
}
constructor(
private editor: editorbrowser.ICodeEditor,
@debug.IDebugService private debugService: debug.IDebugService,
@IWorkspaceContextService private contextService: IWorkspaceContextService,
@IContextMenuService private contextMenuService: IContextMenuService,
@IInstantiationService private instantiationService:IInstantiationService
) {
this.breakpointHintDecoration = [];
this.hoverWidget = new DebugHoverWidget(this.editor, this.debugService, this.instantiationService);
this.toDispose = [this.hoverWidget];
this.showHoverScheduler = new RunOnceScheduler(() => this.showHover(this.hoverRange, this.hoveringOver, false), HOVER_DELAY);
this.hideHoverScheduler = new RunOnceScheduler(() => this.hoverWidget.hide(), HOVER_DELAY);
this.registerListeners();
}
private getContextMenuActions(breakpoint: debug.IBreakpoint, uri: uri, lineNumber: number): TPromise<IAction[]> {
const actions = [];
if (breakpoint) {
actions.push(this.instantiationService.createInstance(debugactions.RemoveBreakpointAction, debugactions.RemoveBreakpointAction.ID, debugactions.RemoveBreakpointAction.LABEL));
actions.push(this.instantiationService.createInstance(debugactions.EditConditionalBreakpointAction, debugactions.EditConditionalBreakpointAction.ID, debugactions.EditConditionalBreakpointAction.LABEL, this.editor, lineNumber));
actions.push(this.instantiationService.createInstance(debugactions.ToggleEnablementAction, debugactions.ToggleEnablementAction.ID, debugactions.ToggleEnablementAction.LABEL));
} else {
actions.push(new Action(
'addBreakpoint',
nls.localize('addBreakpoint', "Add Breakpoint"),
null,
true,
() => this.debugService.addBreakpoints([{ uri, lineNumber }])
));
actions.push(this.instantiationService.createInstance(debugactions.AddConditionalBreakpointAction, debugactions.AddConditionalBreakpointAction.ID, debugactions.AddConditionalBreakpointAction.LABEL, this.editor, lineNumber));
}
return TPromise.as(actions);
}
private registerListeners(): void {
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseDown, (e: editorbrowser.IEditorMouseEvent) => {
if (e.target.type !== editorcommon.MouseTargetType.GUTTER_GLYPH_MARGIN || /* after last line */ e.target.detail) {
return;
}
if (!this.debugService.getConfigurationManager().canSetBreakpointsIn(this.editor.getModel())) {
return;
}
const lineNumber = e.target.position.lineNumber;
const uri = this.editor.getModel().getAssociatedResource();
if (e.event.rightButton || (env.isMacintosh && e.event.leftButton && e.event.ctrlKey)) {
const anchor = { x: e.event.posx + 1, y: e.event.posy };
const breakpoint = this.debugService.getModel().getBreakpoints().filter(bp => bp.lineNumber === lineNumber && bp.source.uri.toString() === uri.toString()).pop();
this.contextMenuService.showContextMenu({
getAnchor: () => anchor,
getActions: () => this.getContextMenuActions(breakpoint, uri, lineNumber),
getActionsContext: () => breakpoint
});
} else {
const breakpoint = this.debugService.getModel().getBreakpoints()
.filter(bp => bp.source.uri.toString() === uri.toString() && bp.lineNumber === lineNumber).pop();
if (breakpoint) {
this.debugService.removeBreakpoints(breakpoint.getId());
} else {
this.debugService.addBreakpoints([{ uri, lineNumber }]);
}
}
}));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseMove, (e: editorbrowser.IEditorMouseEvent) => {
var showBreakpointHintAtLineNumber = -1;
if (e.target.type === editorcommon.MouseTargetType.GUTTER_GLYPH_MARGIN && this.debugService.getConfigurationManager().canSetBreakpointsIn(this.editor.getModel())) {
if (!e.target.detail) {
// is not after last line
showBreakpointHintAtLineNumber = e.target.position.lineNumber;
}
}
this.ensureBreakpointHintDecoration(showBreakpointHintAtLineNumber);
}));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseLeave, (e: editorbrowser.IEditorMouseEvent) => {
this.ensureBreakpointHintDecoration(-1);
}));
this.toDispose.push(this.debugService.onDidChangeState(state => this.onDebugStateUpdate(state)));
// hover listeners & hover widget
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseDown, (e: editorbrowser.IEditorMouseEvent) => this.onEditorMouseDown(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseMove, (e: editorbrowser.IEditorMouseEvent) => this.onEditorMouseMove(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseLeave, (e: editorbrowser.IEditorMouseEvent) => this.hoverWidget.hide()));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.KeyDown, (e: keyboard.IKeyboardEvent) => this.onKeyDown(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.ModelChanged, () => this.hideHoverWidget()));
this.toDispose.push(this.editor.addListener2('scroll', () => this.hideHoverWidget));
}
public getId(): string {
return debug.EDITOR_CONTRIBUTION_ID;
}
public showHover(range: editorcommon.IEditorRange, hoveringOver: string, focus: boolean): TPromise<void> {
return this.hoverWidget.showAt(range, hoveringOver, focus);
}
private ensureBreakpointHintDecoration(showBreakpointHintAtLineNumber: number): void {
var newDecoration: editorcommon.IModelDeltaDecoration[] = [];
if (showBreakpointHintAtLineNumber !== -1) {
newDecoration.push({
options: DebugEditorContribution.BREAKPOINT_HELPER_DECORATION,
range: {
startLineNumber: showBreakpointHintAtLineNumber,
startColumn: 1,
endLineNumber: showBreakpointHintAtLineNumber,
endColumn: 1
}
});
}
this.breakpointHintDecoration = this.editor.deltaDecorations(this.breakpointHintDecoration, newDecoration);
}
private onDebugStateUpdate(state: debug.State): void {
if (state !== debug.State.Stopped) {
this.hideHoverWidget();
}
this.contextService.updateOptions('editor', {
hover: state !== debug.State.Stopped
});
}
private hideHoverWidget(): void {
if (!this.hideHoverScheduler.isScheduled() && this.hoverWidget.isVisible) {
this.hideHoverScheduler.schedule();
}
this.showHoverScheduler.cancel();
this.hoveringOver = null;
}
// hover business
private onEditorMouseDown(mouseEvent: editorbrowser.IEditorMouseEvent): void {
if (mouseEvent.target.type === editorcommon.MouseTargetType.CONTENT_WIDGET && mouseEvent.target.detail === DebugHoverWidget.ID) {
return;
}
this.hideHoverWidget();
}
private onEditorMouseMove(mouseEvent: editorbrowser.IEditorMouseEvent): void {
if (this.debugService.state !== debug.State.Stopped) {
return;
}
const targetType = mouseEvent.target.type;
const stopKey = env.isMacintosh ? 'metaKey' : 'ctrlKey';
if (targetType === editorcommon.MouseTargetType.CONTENT_WIDGET && mouseEvent.target.detail === DebugHoverWidget.ID && !(<any>mouseEvent.event)[stopKey]) {
// mouse moved on top of debug hover widget
return;
}
if (targetType === editorcommon.MouseTargetType.CONTENT_TEXT) {
const wordAtPosition = this.editor.getModel().getWordAtPosition(mouseEvent.target.range.getStartPosition());
if (wordAtPosition && this.hoveringOver !== wordAtPosition.word) {
this.hoverRange = mouseEvent.target.range;
this.hoveringOver = wordAtPosition.word;
this.showHoverScheduler.schedule();
}
} else {
this.hideHoverWidget();
}
}
private onKeyDown(e: keyboard.IKeyboardEvent): void {
const stopKey = env.isMacintosh ? KeyCode.Meta : KeyCode.Ctrl;
if (e.keyCode !== stopKey) {
// do not hide hover when Ctrl/Meta is pressed
this.hideHoverWidget();
}
}
// end hover business
private static BREAKPOINT_HELPER_DECORATION: editorcommon.IModelDecorationOptions = {
glyphMarginClassName: 'debug-breakpoint-hint-glyph',
stickiness: editorcommon.TrackedRangeStickiness.NeverGrowsWhenTypingAtEdges
};
public | (): void {
this.toDispose = lifecycle.dispose(this.toDispose);
}
}
| dispose | identifier_name |
debugEditorContribution.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import nls = require('vs/nls');
import { TPromise } from 'vs/base/common/winjs.base';
import { RunOnceScheduler } from 'vs/base/common/async';
import lifecycle = require('vs/base/common/lifecycle');
import env = require('vs/base/common/platform');
import uri from 'vs/base/common/uri';
import { IAction, Action } from 'vs/base/common/actions';
import { KeyCode } from 'vs/base/common/keyCodes';
import keyboard = require('vs/base/browser/keyboardEvent');
import editorbrowser = require('vs/editor/browser/editorBrowser');
import editorcommon = require('vs/editor/common/editorCommon');
import { DebugHoverWidget } from 'vs/workbench/parts/debug/browser/debugHover';
import debugactions = require('vs/workbench/parts/debug/electron-browser/debugActions');
import debug = require('vs/workbench/parts/debug/common/debug');
import { IWorkspaceContextService } from 'vs/workbench/services/workspace/common/contextService';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { IContextMenuService } from 'vs/platform/contextview/browser/contextView';
const HOVER_DELAY = 300;
export class DebugEditorContribution implements debug.IDebugEditorContribution {
private toDispose: lifecycle.IDisposable[];
private breakpointHintDecoration: string[];
private hoverWidget: DebugHoverWidget;
private showHoverScheduler: RunOnceScheduler;
private hideHoverScheduler: RunOnceScheduler;
private hoverRange: editorcommon.IEditorRange;
private hoveringOver: string;
static getDebugEditorContribution(editor: editorcommon.ICommonCodeEditor): DebugEditorContribution {
return <DebugEditorContribution>editor.getContribution(debug.EDITOR_CONTRIBUTION_ID);
}
constructor(
private editor: editorbrowser.ICodeEditor,
@debug.IDebugService private debugService: debug.IDebugService,
@IWorkspaceContextService private contextService: IWorkspaceContextService,
@IContextMenuService private contextMenuService: IContextMenuService,
@IInstantiationService private instantiationService:IInstantiationService
) {
this.breakpointHintDecoration = [];
this.hoverWidget = new DebugHoverWidget(this.editor, this.debugService, this.instantiationService);
this.toDispose = [this.hoverWidget];
this.showHoverScheduler = new RunOnceScheduler(() => this.showHover(this.hoverRange, this.hoveringOver, false), HOVER_DELAY);
this.hideHoverScheduler = new RunOnceScheduler(() => this.hoverWidget.hide(), HOVER_DELAY);
this.registerListeners();
}
private getContextMenuActions(breakpoint: debug.IBreakpoint, uri: uri, lineNumber: number): TPromise<IAction[]> {
const actions = [];
if (breakpoint) {
actions.push(this.instantiationService.createInstance(debugactions.RemoveBreakpointAction, debugactions.RemoveBreakpointAction.ID, debugactions.RemoveBreakpointAction.LABEL));
actions.push(this.instantiationService.createInstance(debugactions.EditConditionalBreakpointAction, debugactions.EditConditionalBreakpointAction.ID, debugactions.EditConditionalBreakpointAction.LABEL, this.editor, lineNumber));
actions.push(this.instantiationService.createInstance(debugactions.ToggleEnablementAction, debugactions.ToggleEnablementAction.ID, debugactions.ToggleEnablementAction.LABEL));
} else {
actions.push(new Action(
'addBreakpoint',
nls.localize('addBreakpoint', "Add Breakpoint"),
null,
true,
() => this.debugService.addBreakpoints([{ uri, lineNumber }])
));
actions.push(this.instantiationService.createInstance(debugactions.AddConditionalBreakpointAction, debugactions.AddConditionalBreakpointAction.ID, debugactions.AddConditionalBreakpointAction.LABEL, this.editor, lineNumber));
}
return TPromise.as(actions);
}
private registerListeners(): void {
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseDown, (e: editorbrowser.IEditorMouseEvent) => { | return;
}
if (!this.debugService.getConfigurationManager().canSetBreakpointsIn(this.editor.getModel())) {
return;
}
const lineNumber = e.target.position.lineNumber;
const uri = this.editor.getModel().getAssociatedResource();
if (e.event.rightButton || (env.isMacintosh && e.event.leftButton && e.event.ctrlKey)) {
const anchor = { x: e.event.posx + 1, y: e.event.posy };
const breakpoint = this.debugService.getModel().getBreakpoints().filter(bp => bp.lineNumber === lineNumber && bp.source.uri.toString() === uri.toString()).pop();
this.contextMenuService.showContextMenu({
getAnchor: () => anchor,
getActions: () => this.getContextMenuActions(breakpoint, uri, lineNumber),
getActionsContext: () => breakpoint
});
} else {
const breakpoint = this.debugService.getModel().getBreakpoints()
.filter(bp => bp.source.uri.toString() === uri.toString() && bp.lineNumber === lineNumber).pop();
if (breakpoint) {
this.debugService.removeBreakpoints(breakpoint.getId());
} else {
this.debugService.addBreakpoints([{ uri, lineNumber }]);
}
}
}));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseMove, (e: editorbrowser.IEditorMouseEvent) => {
var showBreakpointHintAtLineNumber = -1;
if (e.target.type === editorcommon.MouseTargetType.GUTTER_GLYPH_MARGIN && this.debugService.getConfigurationManager().canSetBreakpointsIn(this.editor.getModel())) {
if (!e.target.detail) {
// is not after last line
showBreakpointHintAtLineNumber = e.target.position.lineNumber;
}
}
this.ensureBreakpointHintDecoration(showBreakpointHintAtLineNumber);
}));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseLeave, (e: editorbrowser.IEditorMouseEvent) => {
this.ensureBreakpointHintDecoration(-1);
}));
this.toDispose.push(this.debugService.onDidChangeState(state => this.onDebugStateUpdate(state)));
// hover listeners & hover widget
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseDown, (e: editorbrowser.IEditorMouseEvent) => this.onEditorMouseDown(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseMove, (e: editorbrowser.IEditorMouseEvent) => this.onEditorMouseMove(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseLeave, (e: editorbrowser.IEditorMouseEvent) => this.hoverWidget.hide()));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.KeyDown, (e: keyboard.IKeyboardEvent) => this.onKeyDown(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.ModelChanged, () => this.hideHoverWidget()));
this.toDispose.push(this.editor.addListener2('scroll', () => this.hideHoverWidget));
}
public getId(): string {
return debug.EDITOR_CONTRIBUTION_ID;
}
public showHover(range: editorcommon.IEditorRange, hoveringOver: string, focus: boolean): TPromise<void> {
return this.hoverWidget.showAt(range, hoveringOver, focus);
}
private ensureBreakpointHintDecoration(showBreakpointHintAtLineNumber: number): void {
var newDecoration: editorcommon.IModelDeltaDecoration[] = [];
if (showBreakpointHintAtLineNumber !== -1) {
newDecoration.push({
options: DebugEditorContribution.BREAKPOINT_HELPER_DECORATION,
range: {
startLineNumber: showBreakpointHintAtLineNumber,
startColumn: 1,
endLineNumber: showBreakpointHintAtLineNumber,
endColumn: 1
}
});
}
this.breakpointHintDecoration = this.editor.deltaDecorations(this.breakpointHintDecoration, newDecoration);
}
private onDebugStateUpdate(state: debug.State): void {
if (state !== debug.State.Stopped) {
this.hideHoverWidget();
}
this.contextService.updateOptions('editor', {
hover: state !== debug.State.Stopped
});
}
private hideHoverWidget(): void {
if (!this.hideHoverScheduler.isScheduled() && this.hoverWidget.isVisible) {
this.hideHoverScheduler.schedule();
}
this.showHoverScheduler.cancel();
this.hoveringOver = null;
}
// hover business
private onEditorMouseDown(mouseEvent: editorbrowser.IEditorMouseEvent): void {
if (mouseEvent.target.type === editorcommon.MouseTargetType.CONTENT_WIDGET && mouseEvent.target.detail === DebugHoverWidget.ID) {
return;
}
this.hideHoverWidget();
}
private onEditorMouseMove(mouseEvent: editorbrowser.IEditorMouseEvent): void {
if (this.debugService.state !== debug.State.Stopped) {
return;
}
const targetType = mouseEvent.target.type;
const stopKey = env.isMacintosh ? 'metaKey' : 'ctrlKey';
if (targetType === editorcommon.MouseTargetType.CONTENT_WIDGET && mouseEvent.target.detail === DebugHoverWidget.ID && !(<any>mouseEvent.event)[stopKey]) {
// mouse moved on top of debug hover widget
return;
}
if (targetType === editorcommon.MouseTargetType.CONTENT_TEXT) {
const wordAtPosition = this.editor.getModel().getWordAtPosition(mouseEvent.target.range.getStartPosition());
if (wordAtPosition && this.hoveringOver !== wordAtPosition.word) {
this.hoverRange = mouseEvent.target.range;
this.hoveringOver = wordAtPosition.word;
this.showHoverScheduler.schedule();
}
} else {
this.hideHoverWidget();
}
}
private onKeyDown(e: keyboard.IKeyboardEvent): void {
const stopKey = env.isMacintosh ? KeyCode.Meta : KeyCode.Ctrl;
if (e.keyCode !== stopKey) {
// do not hide hover when Ctrl/Meta is pressed
this.hideHoverWidget();
}
}
// end hover business
private static BREAKPOINT_HELPER_DECORATION: editorcommon.IModelDecorationOptions = {
glyphMarginClassName: 'debug-breakpoint-hint-glyph',
stickiness: editorcommon.TrackedRangeStickiness.NeverGrowsWhenTypingAtEdges
};
public dispose(): void {
this.toDispose = lifecycle.dispose(this.toDispose);
}
} | if (e.target.type !== editorcommon.MouseTargetType.GUTTER_GLYPH_MARGIN || /* after last line */ e.target.detail) { | random_line_split |
debugEditorContribution.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import nls = require('vs/nls');
import { TPromise } from 'vs/base/common/winjs.base';
import { RunOnceScheduler } from 'vs/base/common/async';
import lifecycle = require('vs/base/common/lifecycle');
import env = require('vs/base/common/platform');
import uri from 'vs/base/common/uri';
import { IAction, Action } from 'vs/base/common/actions';
import { KeyCode } from 'vs/base/common/keyCodes';
import keyboard = require('vs/base/browser/keyboardEvent');
import editorbrowser = require('vs/editor/browser/editorBrowser');
import editorcommon = require('vs/editor/common/editorCommon');
import { DebugHoverWidget } from 'vs/workbench/parts/debug/browser/debugHover';
import debugactions = require('vs/workbench/parts/debug/electron-browser/debugActions');
import debug = require('vs/workbench/parts/debug/common/debug');
import { IWorkspaceContextService } from 'vs/workbench/services/workspace/common/contextService';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { IContextMenuService } from 'vs/platform/contextview/browser/contextView';
const HOVER_DELAY = 300;
export class DebugEditorContribution implements debug.IDebugEditorContribution {
private toDispose: lifecycle.IDisposable[];
private breakpointHintDecoration: string[];
private hoverWidget: DebugHoverWidget;
private showHoverScheduler: RunOnceScheduler;
private hideHoverScheduler: RunOnceScheduler;
private hoverRange: editorcommon.IEditorRange;
private hoveringOver: string;
static getDebugEditorContribution(editor: editorcommon.ICommonCodeEditor): DebugEditorContribution {
return <DebugEditorContribution>editor.getContribution(debug.EDITOR_CONTRIBUTION_ID);
}
constructor(
private editor: editorbrowser.ICodeEditor,
@debug.IDebugService private debugService: debug.IDebugService,
@IWorkspaceContextService private contextService: IWorkspaceContextService,
@IContextMenuService private contextMenuService: IContextMenuService,
@IInstantiationService private instantiationService:IInstantiationService
) {
this.breakpointHintDecoration = [];
this.hoverWidget = new DebugHoverWidget(this.editor, this.debugService, this.instantiationService);
this.toDispose = [this.hoverWidget];
this.showHoverScheduler = new RunOnceScheduler(() => this.showHover(this.hoverRange, this.hoveringOver, false), HOVER_DELAY);
this.hideHoverScheduler = new RunOnceScheduler(() => this.hoverWidget.hide(), HOVER_DELAY);
this.registerListeners();
}
private getContextMenuActions(breakpoint: debug.IBreakpoint, uri: uri, lineNumber: number): TPromise<IAction[]> {
const actions = [];
if (breakpoint) {
actions.push(this.instantiationService.createInstance(debugactions.RemoveBreakpointAction, debugactions.RemoveBreakpointAction.ID, debugactions.RemoveBreakpointAction.LABEL));
actions.push(this.instantiationService.createInstance(debugactions.EditConditionalBreakpointAction, debugactions.EditConditionalBreakpointAction.ID, debugactions.EditConditionalBreakpointAction.LABEL, this.editor, lineNumber));
actions.push(this.instantiationService.createInstance(debugactions.ToggleEnablementAction, debugactions.ToggleEnablementAction.ID, debugactions.ToggleEnablementAction.LABEL));
} else {
actions.push(new Action(
'addBreakpoint',
nls.localize('addBreakpoint', "Add Breakpoint"),
null,
true,
() => this.debugService.addBreakpoints([{ uri, lineNumber }])
));
actions.push(this.instantiationService.createInstance(debugactions.AddConditionalBreakpointAction, debugactions.AddConditionalBreakpointAction.ID, debugactions.AddConditionalBreakpointAction.LABEL, this.editor, lineNumber));
}
return TPromise.as(actions);
}
private registerListeners(): void {
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseDown, (e: editorbrowser.IEditorMouseEvent) => {
if (e.target.type !== editorcommon.MouseTargetType.GUTTER_GLYPH_MARGIN || /* after last line */ e.target.detail) {
return;
}
if (!this.debugService.getConfigurationManager().canSetBreakpointsIn(this.editor.getModel())) {
return;
}
const lineNumber = e.target.position.lineNumber;
const uri = this.editor.getModel().getAssociatedResource();
if (e.event.rightButton || (env.isMacintosh && e.event.leftButton && e.event.ctrlKey)) {
const anchor = { x: e.event.posx + 1, y: e.event.posy };
const breakpoint = this.debugService.getModel().getBreakpoints().filter(bp => bp.lineNumber === lineNumber && bp.source.uri.toString() === uri.toString()).pop();
this.contextMenuService.showContextMenu({
getAnchor: () => anchor,
getActions: () => this.getContextMenuActions(breakpoint, uri, lineNumber),
getActionsContext: () => breakpoint
});
} else {
const breakpoint = this.debugService.getModel().getBreakpoints()
.filter(bp => bp.source.uri.toString() === uri.toString() && bp.lineNumber === lineNumber).pop();
if (breakpoint) {
this.debugService.removeBreakpoints(breakpoint.getId());
} else {
this.debugService.addBreakpoints([{ uri, lineNumber }]);
}
}
}));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseMove, (e: editorbrowser.IEditorMouseEvent) => {
var showBreakpointHintAtLineNumber = -1;
if (e.target.type === editorcommon.MouseTargetType.GUTTER_GLYPH_MARGIN && this.debugService.getConfigurationManager().canSetBreakpointsIn(this.editor.getModel())) {
if (!e.target.detail) {
// is not after last line
showBreakpointHintAtLineNumber = e.target.position.lineNumber;
}
}
this.ensureBreakpointHintDecoration(showBreakpointHintAtLineNumber);
}));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseLeave, (e: editorbrowser.IEditorMouseEvent) => {
this.ensureBreakpointHintDecoration(-1);
}));
this.toDispose.push(this.debugService.onDidChangeState(state => this.onDebugStateUpdate(state)));
// hover listeners & hover widget
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseDown, (e: editorbrowser.IEditorMouseEvent) => this.onEditorMouseDown(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseMove, (e: editorbrowser.IEditorMouseEvent) => this.onEditorMouseMove(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseLeave, (e: editorbrowser.IEditorMouseEvent) => this.hoverWidget.hide()));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.KeyDown, (e: keyboard.IKeyboardEvent) => this.onKeyDown(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.ModelChanged, () => this.hideHoverWidget()));
this.toDispose.push(this.editor.addListener2('scroll', () => this.hideHoverWidget));
}
public getId(): string {
return debug.EDITOR_CONTRIBUTION_ID;
}
public showHover(range: editorcommon.IEditorRange, hoveringOver: string, focus: boolean): TPromise<void> {
return this.hoverWidget.showAt(range, hoveringOver, focus);
}
private ensureBreakpointHintDecoration(showBreakpointHintAtLineNumber: number): void {
var newDecoration: editorcommon.IModelDeltaDecoration[] = [];
if (showBreakpointHintAtLineNumber !== -1) {
newDecoration.push({
options: DebugEditorContribution.BREAKPOINT_HELPER_DECORATION,
range: {
startLineNumber: showBreakpointHintAtLineNumber,
startColumn: 1,
endLineNumber: showBreakpointHintAtLineNumber,
endColumn: 1
}
});
}
this.breakpointHintDecoration = this.editor.deltaDecorations(this.breakpointHintDecoration, newDecoration);
}
private onDebugStateUpdate(state: debug.State): void {
if (state !== debug.State.Stopped) {
this.hideHoverWidget();
}
this.contextService.updateOptions('editor', {
hover: state !== debug.State.Stopped
});
}
private hideHoverWidget(): void {
if (!this.hideHoverScheduler.isScheduled() && this.hoverWidget.isVisible) {
this.hideHoverScheduler.schedule();
}
this.showHoverScheduler.cancel();
this.hoveringOver = null;
}
// hover business
private onEditorMouseDown(mouseEvent: editorbrowser.IEditorMouseEvent): void {
if (mouseEvent.target.type === editorcommon.MouseTargetType.CONTENT_WIDGET && mouseEvent.target.detail === DebugHoverWidget.ID) |
this.hideHoverWidget();
}
private onEditorMouseMove(mouseEvent: editorbrowser.IEditorMouseEvent): void {
if (this.debugService.state !== debug.State.Stopped) {
return;
}
const targetType = mouseEvent.target.type;
const stopKey = env.isMacintosh ? 'metaKey' : 'ctrlKey';
if (targetType === editorcommon.MouseTargetType.CONTENT_WIDGET && mouseEvent.target.detail === DebugHoverWidget.ID && !(<any>mouseEvent.event)[stopKey]) {
// mouse moved on top of debug hover widget
return;
}
if (targetType === editorcommon.MouseTargetType.CONTENT_TEXT) {
const wordAtPosition = this.editor.getModel().getWordAtPosition(mouseEvent.target.range.getStartPosition());
if (wordAtPosition && this.hoveringOver !== wordAtPosition.word) {
this.hoverRange = mouseEvent.target.range;
this.hoveringOver = wordAtPosition.word;
this.showHoverScheduler.schedule();
}
} else {
this.hideHoverWidget();
}
}
private onKeyDown(e: keyboard.IKeyboardEvent): void {
const stopKey = env.isMacintosh ? KeyCode.Meta : KeyCode.Ctrl;
if (e.keyCode !== stopKey) {
// do not hide hover when Ctrl/Meta is pressed
this.hideHoverWidget();
}
}
// end hover business
private static BREAKPOINT_HELPER_DECORATION: editorcommon.IModelDecorationOptions = {
glyphMarginClassName: 'debug-breakpoint-hint-glyph',
stickiness: editorcommon.TrackedRangeStickiness.NeverGrowsWhenTypingAtEdges
};
public dispose(): void {
this.toDispose = lifecycle.dispose(this.toDispose);
}
}
| {
return;
} | conditional_block |
debugEditorContribution.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import nls = require('vs/nls');
import { TPromise } from 'vs/base/common/winjs.base';
import { RunOnceScheduler } from 'vs/base/common/async';
import lifecycle = require('vs/base/common/lifecycle');
import env = require('vs/base/common/platform');
import uri from 'vs/base/common/uri';
import { IAction, Action } from 'vs/base/common/actions';
import { KeyCode } from 'vs/base/common/keyCodes';
import keyboard = require('vs/base/browser/keyboardEvent');
import editorbrowser = require('vs/editor/browser/editorBrowser');
import editorcommon = require('vs/editor/common/editorCommon');
import { DebugHoverWidget } from 'vs/workbench/parts/debug/browser/debugHover';
import debugactions = require('vs/workbench/parts/debug/electron-browser/debugActions');
import debug = require('vs/workbench/parts/debug/common/debug');
import { IWorkspaceContextService } from 'vs/workbench/services/workspace/common/contextService';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { IContextMenuService } from 'vs/platform/contextview/browser/contextView';
const HOVER_DELAY = 300;
export class DebugEditorContribution implements debug.IDebugEditorContribution {
private toDispose: lifecycle.IDisposable[];
private breakpointHintDecoration: string[];
private hoverWidget: DebugHoverWidget;
private showHoverScheduler: RunOnceScheduler;
private hideHoverScheduler: RunOnceScheduler;
private hoverRange: editorcommon.IEditorRange;
private hoveringOver: string;
static getDebugEditorContribution(editor: editorcommon.ICommonCodeEditor): DebugEditorContribution {
return <DebugEditorContribution>editor.getContribution(debug.EDITOR_CONTRIBUTION_ID);
}
constructor(
private editor: editorbrowser.ICodeEditor,
@debug.IDebugService private debugService: debug.IDebugService,
@IWorkspaceContextService private contextService: IWorkspaceContextService,
@IContextMenuService private contextMenuService: IContextMenuService,
@IInstantiationService private instantiationService:IInstantiationService
) {
this.breakpointHintDecoration = [];
this.hoverWidget = new DebugHoverWidget(this.editor, this.debugService, this.instantiationService);
this.toDispose = [this.hoverWidget];
this.showHoverScheduler = new RunOnceScheduler(() => this.showHover(this.hoverRange, this.hoveringOver, false), HOVER_DELAY);
this.hideHoverScheduler = new RunOnceScheduler(() => this.hoverWidget.hide(), HOVER_DELAY);
this.registerListeners();
}
private getContextMenuActions(breakpoint: debug.IBreakpoint, uri: uri, lineNumber: number): TPromise<IAction[]> {
const actions = [];
if (breakpoint) {
actions.push(this.instantiationService.createInstance(debugactions.RemoveBreakpointAction, debugactions.RemoveBreakpointAction.ID, debugactions.RemoveBreakpointAction.LABEL));
actions.push(this.instantiationService.createInstance(debugactions.EditConditionalBreakpointAction, debugactions.EditConditionalBreakpointAction.ID, debugactions.EditConditionalBreakpointAction.LABEL, this.editor, lineNumber));
actions.push(this.instantiationService.createInstance(debugactions.ToggleEnablementAction, debugactions.ToggleEnablementAction.ID, debugactions.ToggleEnablementAction.LABEL));
} else {
actions.push(new Action(
'addBreakpoint',
nls.localize('addBreakpoint', "Add Breakpoint"),
null,
true,
() => this.debugService.addBreakpoints([{ uri, lineNumber }])
));
actions.push(this.instantiationService.createInstance(debugactions.AddConditionalBreakpointAction, debugactions.AddConditionalBreakpointAction.ID, debugactions.AddConditionalBreakpointAction.LABEL, this.editor, lineNumber));
}
return TPromise.as(actions);
}
private registerListeners(): void {
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseDown, (e: editorbrowser.IEditorMouseEvent) => {
if (e.target.type !== editorcommon.MouseTargetType.GUTTER_GLYPH_MARGIN || /* after last line */ e.target.detail) {
return;
}
if (!this.debugService.getConfigurationManager().canSetBreakpointsIn(this.editor.getModel())) {
return;
}
const lineNumber = e.target.position.lineNumber;
const uri = this.editor.getModel().getAssociatedResource();
if (e.event.rightButton || (env.isMacintosh && e.event.leftButton && e.event.ctrlKey)) {
const anchor = { x: e.event.posx + 1, y: e.event.posy };
const breakpoint = this.debugService.getModel().getBreakpoints().filter(bp => bp.lineNumber === lineNumber && bp.source.uri.toString() === uri.toString()).pop();
this.contextMenuService.showContextMenu({
getAnchor: () => anchor,
getActions: () => this.getContextMenuActions(breakpoint, uri, lineNumber),
getActionsContext: () => breakpoint
});
} else {
const breakpoint = this.debugService.getModel().getBreakpoints()
.filter(bp => bp.source.uri.toString() === uri.toString() && bp.lineNumber === lineNumber).pop();
if (breakpoint) {
this.debugService.removeBreakpoints(breakpoint.getId());
} else {
this.debugService.addBreakpoints([{ uri, lineNumber }]);
}
}
}));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseMove, (e: editorbrowser.IEditorMouseEvent) => {
var showBreakpointHintAtLineNumber = -1;
if (e.target.type === editorcommon.MouseTargetType.GUTTER_GLYPH_MARGIN && this.debugService.getConfigurationManager().canSetBreakpointsIn(this.editor.getModel())) {
if (!e.target.detail) {
// is not after last line
showBreakpointHintAtLineNumber = e.target.position.lineNumber;
}
}
this.ensureBreakpointHintDecoration(showBreakpointHintAtLineNumber);
}));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseLeave, (e: editorbrowser.IEditorMouseEvent) => {
this.ensureBreakpointHintDecoration(-1);
}));
this.toDispose.push(this.debugService.onDidChangeState(state => this.onDebugStateUpdate(state)));
// hover listeners & hover widget
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseDown, (e: editorbrowser.IEditorMouseEvent) => this.onEditorMouseDown(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseMove, (e: editorbrowser.IEditorMouseEvent) => this.onEditorMouseMove(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.MouseLeave, (e: editorbrowser.IEditorMouseEvent) => this.hoverWidget.hide()));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.KeyDown, (e: keyboard.IKeyboardEvent) => this.onKeyDown(e)));
this.toDispose.push(this.editor.addListener2(editorcommon.EventType.ModelChanged, () => this.hideHoverWidget()));
this.toDispose.push(this.editor.addListener2('scroll', () => this.hideHoverWidget));
}
public getId(): string {
return debug.EDITOR_CONTRIBUTION_ID;
}
public showHover(range: editorcommon.IEditorRange, hoveringOver: string, focus: boolean): TPromise<void> {
return this.hoverWidget.showAt(range, hoveringOver, focus);
}
private ensureBreakpointHintDecoration(showBreakpointHintAtLineNumber: number): void {
var newDecoration: editorcommon.IModelDeltaDecoration[] = [];
if (showBreakpointHintAtLineNumber !== -1) {
newDecoration.push({
options: DebugEditorContribution.BREAKPOINT_HELPER_DECORATION,
range: {
startLineNumber: showBreakpointHintAtLineNumber,
startColumn: 1,
endLineNumber: showBreakpointHintAtLineNumber,
endColumn: 1
}
});
}
this.breakpointHintDecoration = this.editor.deltaDecorations(this.breakpointHintDecoration, newDecoration);
}
private onDebugStateUpdate(state: debug.State): void {
if (state !== debug.State.Stopped) {
this.hideHoverWidget();
}
this.contextService.updateOptions('editor', {
hover: state !== debug.State.Stopped
});
}
private hideHoverWidget(): void |
// hover business
private onEditorMouseDown(mouseEvent: editorbrowser.IEditorMouseEvent): void {
if (mouseEvent.target.type === editorcommon.MouseTargetType.CONTENT_WIDGET && mouseEvent.target.detail === DebugHoverWidget.ID) {
return;
}
this.hideHoverWidget();
}
private onEditorMouseMove(mouseEvent: editorbrowser.IEditorMouseEvent): void {
if (this.debugService.state !== debug.State.Stopped) {
return;
}
const targetType = mouseEvent.target.type;
const stopKey = env.isMacintosh ? 'metaKey' : 'ctrlKey';
if (targetType === editorcommon.MouseTargetType.CONTENT_WIDGET && mouseEvent.target.detail === DebugHoverWidget.ID && !(<any>mouseEvent.event)[stopKey]) {
// mouse moved on top of debug hover widget
return;
}
if (targetType === editorcommon.MouseTargetType.CONTENT_TEXT) {
const wordAtPosition = this.editor.getModel().getWordAtPosition(mouseEvent.target.range.getStartPosition());
if (wordAtPosition && this.hoveringOver !== wordAtPosition.word) {
this.hoverRange = mouseEvent.target.range;
this.hoveringOver = wordAtPosition.word;
this.showHoverScheduler.schedule();
}
} else {
this.hideHoverWidget();
}
}
private onKeyDown(e: keyboard.IKeyboardEvent): void {
const stopKey = env.isMacintosh ? KeyCode.Meta : KeyCode.Ctrl;
if (e.keyCode !== stopKey) {
// do not hide hover when Ctrl/Meta is pressed
this.hideHoverWidget();
}
}
// end hover business
private static BREAKPOINT_HELPER_DECORATION: editorcommon.IModelDecorationOptions = {
glyphMarginClassName: 'debug-breakpoint-hint-glyph',
stickiness: editorcommon.TrackedRangeStickiness.NeverGrowsWhenTypingAtEdges
};
public dispose(): void {
this.toDispose = lifecycle.dispose(this.toDispose);
}
}
| {
if (!this.hideHoverScheduler.isScheduled() && this.hoverWidget.isVisible) {
this.hideHoverScheduler.schedule();
}
this.showHoverScheduler.cancel();
this.hoveringOver = null;
} | identifier_body |
mouse.ts | /*
* This file is part of CoCalc: Copyright © 2020 Sagemath, Inc.
* License: AGPLv3 s.t. "Commons Clause" – see LICENSE.md for details
*/
/*
* CoCalc's Xpra HTML Client
*
* ---
*
* Xpra
* Copyright (c) 2013-2017 Antoine Martin <[email protected]>
* Copyright (c) 2016 David Brushinski <[email protected]>
* Copyright (c) 2014 Joshua Higgins <[email protected]>
* Copyright (c) 2015-2016 Spikes, Inc.
* Copyright (c) 2018-2019 SageMath, Inc.
* Licensed under MPL 2.0, see:
* http://www.mozilla.org/MPL/2.0/
*/
/**
* CoCalc Xpra Client
*/
import { Surface } from "./surface";
import { Keyboard } from "./keyboard";
function get_wheel_event_name(): string {
const element = document.createElement("div");
for (const name of ["wheel", "mousewheel", "DOMMouseScroll"]) {
const n = `on${name}`;
element.setAttribute(n, "return;");
if (typeof element[n] === "function") {
return name;
}
}
console.warn("Unable to determine wheel event name");
return "broken-mousewheel";
}
const WHEEL_EVENT_NAME = get_wheel_event_name();
// normalize_wheel: https://github.com/facebook/fixed-data-table/blob/master/src/vendor_upstream/dom/normalizeWheel.js
// BSD license
import { PIXEL_STEP, LINE_HEIGHT, PAGE_HEIGHT } from "./constants";
function normalize_wheel(
ev: any
): {
spinX: number;
spinY: number;
pixelX: number;
pixelY: number;
deltaMode: number;
} {
let spinX = 0,
spinY = 0,
pixelX = 0,
pixelY = 0;
ev = (ev as any).originalEvent;
// Legacy
if ("detail" in ev) {
spinY = ev.detail;
}
if ("wheelDelta" in ev) {
spinY = -ev.wheelDelta / 120;
}
if ("wheelDeltaY" in ev) {
spinY = -ev.wheelDeltaY / 120;
}
if ("wheelDeltaX" in ev) {
spinX = -ev.wheelDeltaX / 120;
}
// side scrolling on FF with DOMMouseScroll
if ("axis" in ev && ev.axis === ev.HORIZONTAL_AXIS) {
spinX = spinY;
spinY = 0;
}
pixelX = spinX * PIXEL_STEP;
pixelY = spinY * PIXEL_STEP;
if ("deltaY" in ev) {
pixelY = ev.deltaY;
}
if ("deltaX" in ev) {
pixelX = ev.deltaX;
}
if ((pixelX || pixelY) && ev.deltaMode) {
if (ev.deltaMode == 1) {
// delta in LINE units
pixelX *= LINE_HEIGHT;
pixelY *= LINE_HEIGHT;
} else {
// delta in PAGE units
pixelX *= PAGE_HEIGHT;
pixelY *= PAGE_HEIGHT;
}
}
// Fall-back if spin cannot be determined
if (pixelX && !spinX) {
spinX = pixelX < 1 ? -1 : 1;
}
if (pixelY && !spinY) {
spinY = pixelY < 1 ? -1 : 1;
}
return {
spinX,
spinY,
pixelX,
pixelY,
deltaMode: ev.deltaMode || 0,
};
}
function getMouseButton(ev: MouseEvent): number {
let button: number = ev.which
? Math.max(0, ev.which)
: ev.button
? Math.max(0, ev.button) + 1
: 0;
if (button === 4) {
button = 8;
} else if (button === 5) {
button = 9;
}
return button;
}
function getMouse(
ev: MouseEvent,
surface: Surface
): { x: number; y: number; button: number; buttons: number[] } | undefined {
const { top, left, bottom, right } = surface.canvas.getBoundingClientRect();
if (
ev.clientX < left ||
ev.clientX >= right ||
ev.clientY < top ||
ev.clientY >= bottom
) {
// mouse not actually on the surface.
return;
}
if (right === left || top === bottom) {
// degenerate size
return;
}
const x = Math.round(
surface.canvas.width * ((ev.clientX - left) / (right - left)) + surface.x
);
const y = Math.round(
surface.canvas.height * ((ev.clientY - top) / (bottom - top)) + surface.y
);
const buttons = [];
const button = getMouseButton(ev);
return { x, y, button, buttons };
}
/**
* The mouse input handler class
*/
export class Mouse {
private send: Function;
private keyboard: Keyboard;
private findSurface: Function;
private wheel_delta_x: number = 0;
private wheel_delta_y: number = 0;
constructor(send: Function, keyboard: Keyboard, findSurface: Function) {
this.send = send;
this.keyboard = keyboard;
this.findSurface = findSurface;
}
process(ev: MouseEvent): Surface | undefined {
if (ev.clientX == null || ev.clientY == null) {
| const elt_at = document.elementFromPoint(ev.clientX, ev.clientY);
if (!elt_at) {
// nothing under mouse, so no point. (possible? I don't know.)
return;
}
// TODO: right now we abuse things a bit to store the wid on the canvas itself.
const wid: number | undefined = (elt_at as any).wid;
if (wid === undefined) {
return;
}
const surface: Surface | undefined = this.findSurface(wid);
if (surface === undefined) {
// TODO: this shouldn't happen, or if it does, probably
// we should do something special to fix it?
console.warn(
`process mouse -- weird, we clicked on surface ${wid} but can't find it`
);
return;
}
const modifiers: string[] = this.keyboard.modifiers(ev);
const mouse = getMouse(ev, surface);
if (mouse == null) {
return;
}
const { x, y, button, buttons } = mouse;
switch (ev.type) {
case "mousemove": {
this.send("pointer-position", wid, [x, y], modifiers, buttons);
break;
}
case "mousedown":
case "mouseup": {
const pressed = ev.type === "mousedown";
surface.do_close_on_click();
this.send(
"button-action",
wid,
button,
pressed,
[x, y],
modifiers,
buttons
);
break;
}
case WHEEL_EVENT_NAME: {
this.do_window_mouse_scroll({ ev, wid, x, y, buttons, modifiers });
return;
}
}
return surface;
}
do_window_mouse_scroll({
ev,
wid,
x,
y,
buttons,
modifiers,
}: {
ev: MouseEvent;
wid: number;
x: number;
y: number;
buttons: number[];
modifiers: string[];
}): void {
// I think server support for wheel.precise is not available in
// CoCalc -- I think it depends on the uinput Python module,
// and won't work without kernel support that is not allowed by
// Docker for security reasons. So we instead "send
// synthetic click+release as many times as needed".
const wheel = normalize_wheel(ev);
const INCREMENT = 120;
//clamp to prevent event floods:
const px = Math.min(INCREMENT * 10, wheel.pixelX);
const py = Math.min(INCREMENT * 10, wheel.pixelY);
const apx = Math.abs(px);
const apy = Math.abs(py);
// Generate a single event if we can, or add to accumulators:
if (apx >= 40 && apx <= 160) {
this.wheel_delta_x = px > 0 ? INCREMENT : -INCREMENT;
} else {
this.wheel_delta_x += px;
}
if (apy >= 40 && apy <= 160) {
this.wheel_delta_y = py > 0 ? INCREMENT : -INCREMENT;
} else {
this.wheel_delta_y += py;
}
// Send synthetic click+release as many times as needed:
let wx = Math.abs(this.wheel_delta_x);
let wy = Math.abs(this.wheel_delta_y);
const btn_x = this.wheel_delta_x >= 0 ? 6 : 7;
const btn_y = this.wheel_delta_y >= 0 ? 5 : 4;
while (wx >= INCREMENT) {
wx -= INCREMENT;
this.send("button-action", wid, btn_x, true, [x, y], modifiers, buttons);
this.send("button-action", wid, btn_x, false, [x, y], modifiers, buttons);
}
while (wy >= INCREMENT) {
wy -= INCREMENT;
this.send("button-action", wid, btn_y, true, [x, y], modifiers, buttons);
this.send("button-action", wid, btn_y, false, [x, y], modifiers, buttons);
}
// Store left overs:
this.wheel_delta_x = this.wheel_delta_x >= 0 ? wx : -wx;
this.wheel_delta_y = this.wheel_delta_y >= 0 ? wy : -wy;
}
}
| // happens with touch events for now...
return;
}
| conditional_block |
mouse.ts | /*
* This file is part of CoCalc: Copyright © 2020 Sagemath, Inc.
* License: AGPLv3 s.t. "Commons Clause" – see LICENSE.md for details
*/
/*
* CoCalc's Xpra HTML Client
*
* ---
*
* Xpra
* Copyright (c) 2013-2017 Antoine Martin <[email protected]>
* Copyright (c) 2016 David Brushinski <[email protected]>
* Copyright (c) 2014 Joshua Higgins <[email protected]>
* Copyright (c) 2015-2016 Spikes, Inc.
* Copyright (c) 2018-2019 SageMath, Inc.
* Licensed under MPL 2.0, see:
* http://www.mozilla.org/MPL/2.0/
*/
/**
* CoCalc Xpra Client
*/
import { Surface } from "./surface";
import { Keyboard } from "./keyboard";
function get_wheel_event_name(): string {
const element = document.createElement("div");
for (const name of ["wheel", "mousewheel", "DOMMouseScroll"]) {
const n = `on${name}`;
element.setAttribute(n, "return;");
if (typeof element[n] === "function") {
return name;
}
}
console.warn("Unable to determine wheel event name");
return "broken-mousewheel";
}
const WHEEL_EVENT_NAME = get_wheel_event_name();
// normalize_wheel: https://github.com/facebook/fixed-data-table/blob/master/src/vendor_upstream/dom/normalizeWheel.js
// BSD license
import { PIXEL_STEP, LINE_HEIGHT, PAGE_HEIGHT } from "./constants";
function normalize_wheel(
ev: any
): {
spinX: number;
spinY: number;
pixelX: number;
pixelY: number;
deltaMode: number;
} {
let spinX = 0,
spinY = 0,
pixelX = 0,
pixelY = 0;
ev = (ev as any).originalEvent;
// Legacy
if ("detail" in ev) {
spinY = ev.detail;
}
if ("wheelDelta" in ev) {
spinY = -ev.wheelDelta / 120;
}
if ("wheelDeltaY" in ev) {
spinY = -ev.wheelDeltaY / 120;
}
if ("wheelDeltaX" in ev) {
spinX = -ev.wheelDeltaX / 120;
}
// side scrolling on FF with DOMMouseScroll
if ("axis" in ev && ev.axis === ev.HORIZONTAL_AXIS) {
spinX = spinY;
spinY = 0;
}
pixelX = spinX * PIXEL_STEP;
pixelY = spinY * PIXEL_STEP;
if ("deltaY" in ev) {
pixelY = ev.deltaY;
}
if ("deltaX" in ev) {
pixelX = ev.deltaX;
}
if ((pixelX || pixelY) && ev.deltaMode) {
if (ev.deltaMode == 1) {
// delta in LINE units
pixelX *= LINE_HEIGHT;
pixelY *= LINE_HEIGHT;
} else {
// delta in PAGE units
pixelX *= PAGE_HEIGHT;
pixelY *= PAGE_HEIGHT;
}
}
// Fall-back if spin cannot be determined
if (pixelX && !spinX) {
spinX = pixelX < 1 ? -1 : 1;
}
if (pixelY && !spinY) {
spinY = pixelY < 1 ? -1 : 1;
}
return {
spinX,
spinY,
pixelX,
pixelY,
deltaMode: ev.deltaMode || 0,
};
}
function getMouseButton(ev: MouseEvent): number {
let button: number = ev.which
? Math.max(0, ev.which)
: ev.button
? Math.max(0, ev.button) + 1
: 0;
if (button === 4) {
button = 8;
} else if (button === 5) {
button = 9;
}
return button;
}
function getMouse(
ev: MouseEvent,
surface: Surface
): { x: number; y: number; button: number; buttons: number[] } | undefined {
const { top, left, bottom, right } = surface.canvas.getBoundingClientRect();
if (
ev.clientX < left ||
ev.clientX >= right ||
ev.clientY < top ||
ev.clientY >= bottom
) {
// mouse not actually on the surface.
return;
}
if (right === left || top === bottom) {
// degenerate size
return;
}
const x = Math.round(
surface.canvas.width * ((ev.clientX - left) / (right - left)) + surface.x
);
const y = Math.round(
surface.canvas.height * ((ev.clientY - top) / (bottom - top)) + surface.y
);
const buttons = [];
const button = getMouseButton(ev);
return { x, y, button, buttons };
}
/**
* The mouse input handler class
*/
export class Mouse {
private send: Function;
private keyboard: Keyboard;
private findSurface: Function;
private wheel_delta_x: number = 0;
private wheel_delta_y: number = 0;
constructor(send: Function, keyboard: Keyboard, findSurface: Function) {
this.send = send;
this.keyboard = keyboard;
this.findSurface = findSurface;
}
process(ev: MouseEvent): Surface | undefined {
if (ev.clientX == null || ev.clientY == null) {
// happens with touch events for now...
return;
}
const elt_at = document.elementFromPoint(ev.clientX, ev.clientY);
if (!elt_at) {
// nothing under mouse, so no point. (possible? I don't know.)
return;
}
// TODO: right now we abuse things a bit to store the wid on the canvas itself.
const wid: number | undefined = (elt_at as any).wid;
if (wid === undefined) {
return;
}
const surface: Surface | undefined = this.findSurface(wid);
if (surface === undefined) {
// TODO: this shouldn't happen, or if it does, probably
// we should do something special to fix it?
console.warn(
`process mouse -- weird, we clicked on surface ${wid} but can't find it`
);
return;
}
const modifiers: string[] = this.keyboard.modifiers(ev);
const mouse = getMouse(ev, surface);
if (mouse == null) {
return;
}
const { x, y, button, buttons } = mouse;
switch (ev.type) {
case "mousemove": {
this.send("pointer-position", wid, [x, y], modifiers, buttons);
break;
}
case "mousedown":
case "mouseup": {
const pressed = ev.type === "mousedown";
surface.do_close_on_click();
this.send(
"button-action",
wid,
button,
pressed,
[x, y],
modifiers,
buttons
);
break;
}
case WHEEL_EVENT_NAME: {
this.do_window_mouse_scroll({ ev, wid, x, y, buttons, modifiers });
return;
}
}
return surface;
}
do_ | ev,
wid,
x,
y,
buttons,
modifiers,
}: {
ev: MouseEvent;
wid: number;
x: number;
y: number;
buttons: number[];
modifiers: string[];
}): void {
// I think server support for wheel.precise is not available in
// CoCalc -- I think it depends on the uinput Python module,
// and won't work without kernel support that is not allowed by
// Docker for security reasons. So we instead "send
// synthetic click+release as many times as needed".
const wheel = normalize_wheel(ev);
const INCREMENT = 120;
//clamp to prevent event floods:
const px = Math.min(INCREMENT * 10, wheel.pixelX);
const py = Math.min(INCREMENT * 10, wheel.pixelY);
const apx = Math.abs(px);
const apy = Math.abs(py);
// Generate a single event if we can, or add to accumulators:
if (apx >= 40 && apx <= 160) {
this.wheel_delta_x = px > 0 ? INCREMENT : -INCREMENT;
} else {
this.wheel_delta_x += px;
}
if (apy >= 40 && apy <= 160) {
this.wheel_delta_y = py > 0 ? INCREMENT : -INCREMENT;
} else {
this.wheel_delta_y += py;
}
// Send synthetic click+release as many times as needed:
let wx = Math.abs(this.wheel_delta_x);
let wy = Math.abs(this.wheel_delta_y);
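// X11-style wheel buttons: 4/5 scroll vertically, 6/7 scroll horizontally.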
const btn_x = this.wheel_delta_x >= 0 ? 6 : 7;
const btn_y = this.wheel_delta_y >= 0 ? 5 : 4;
while (wx >= INCREMENT) {
wx -= INCREMENT;
this.send("button-action", wid, btn_x, true, [x, y], modifiers, buttons);
this.send("button-action", wid, btn_x, false, [x, y], modifiers, buttons);
}
while (wy >= INCREMENT) {
wy -= INCREMENT;
this.send("button-action", wid, btn_y, true, [x, y], modifiers, buttons);
this.send("button-action", wid, btn_y, false, [x, y], modifiers, buttons);
}
// Store leftovers:
this.wheel_delta_x = this.wheel_delta_x >= 0 ? wx : -wx;
this.wheel_delta_y = this.wheel_delta_y >= 0 ? wy : -wy;
}
}
| window_mouse_scroll({
| identifier_name |
mouse.ts | /*
* This file is part of CoCalc: Copyright © 2020 Sagemath, Inc.
* License: AGPLv3 s.t. "Commons Clause" – see LICENSE.md for details
*/
/*
* CoCalc's Xpra HTML Client
*
* ---
*
* Xpra
* Copyright (c) 2013-2017 Antoine Martin <[email protected]>
* Copyright (c) 2016 David Brushinski <[email protected]>
* Copyright (c) 2014 Joshua Higgins <[email protected]>
* Copyright (c) 2015-2016 Spikes, Inc.
* Copyright (c) 2018-2019 SageMath, Inc.
* Licensed under MPL 2.0, see:
* http://www.mozilla.org/MPL/2.0/
*/
/**
* CoCalc Xpra Client
*/
import { Surface } from "./surface";
import { Keyboard } from "./keyboard";
function get_wheel_event_name(): string {
const element = document.createElement("div");
for (const name of ["wheel", "mousewheel", "DOMMouseScroll"]) {
const n = `on${name}`;
element.setAttribute(n, "return;");
if (typeof element[n] === "function") {
return name;
}
}
console.warn("Unable to determine wheel event name");
return "broken-mousewheel";
}
const WHEEL_EVENT_NAME = get_wheel_event_name();
// normalize_wheel: https://github.com/facebook/fixed-data-table/blob/master/src/vendor_upstream/dom/normalizeWheel.js
// BSD license
import { PIXEL_STEP, LINE_HEIGHT, PAGE_HEIGHT } from "./constants";
function normalize_wheel(
ev: any
): {
spinX: number;
spinY: number;
pixelX: number;
pixelY: number;
deltaMode: number;
} {
let spinX = 0,
spinY = 0,
pixelX = 0,
pixelY = 0;
ev = (ev as any).originalEvent;
// Legacy
if ("detail" in ev) {
spinY = ev.detail;
}
if ("wheelDelta" in ev) {
spinY = -ev.wheelDelta / 120;
}
if ("wheelDeltaY" in ev) {
spinY = -ev.wheelDeltaY / 120;
}
if ("wheelDeltaX" in ev) {
spinX = -ev.wheelDeltaX / 120;
}
// side scrolling on FF with DOMMouseScroll
if ("axis" in ev && ev.axis === ev.HORIZONTAL_AXIS) {
spinX = spinY;
spinY = 0;
}
pixelX = spinX * PIXEL_STEP;
pixelY = spinY * PIXEL_STEP;
if ("deltaY" in ev) {
pixelY = ev.deltaY;
}
if ("deltaX" in ev) {
pixelX = ev.deltaX;
}
if ((pixelX || pixelY) && ev.deltaMode) {
if (ev.deltaMode == 1) {
// delta in LINE units
pixelX *= LINE_HEIGHT;
pixelY *= LINE_HEIGHT;
} else {
// delta in PAGE units
pixelX *= PAGE_HEIGHT;
pixelY *= PAGE_HEIGHT;
}
}
// Fall-back if spin cannot be determined
if (pixelX && !spinX) {
spinX = pixelX < 1 ? -1 : 1;
}
if (pixelY && !spinY) {
spinY = pixelY < 1 ? -1 : 1;
}
return {
spinX,
spinY,
pixelX,
pixelY,
deltaMode: ev.deltaMode || 0,
};
}
function getMouseButton(ev: MouseEvent): number {
let button: number = ev.which
? Math.max(0, ev.which)
: ev.button
? Math.max(0, ev.button) + 1
: 0;
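// DOM reports back/forward as buttons 4/5, which would collide with the X11 wheel buttons (4-7), so remap them to 8/9.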
if (button === 4) {
button = 8;
} else if (button === 5) {
button = 9;
}
return button;
}
function getMouse(
ev: MouseEvent,
surface: Surface
): { x: number; y: number; button: number; buttons: number[] } | undefined {
const { top, left, bottom, right } = surface.canvas.getBoundingClientRect();
if (
ev.clientX < left ||
ev.clientX >= right ||
ev.clientY < top ||
ev.clientY >= bottom
) {
// mouse not actually on the surface.
return;
}
if (right === left || top === bottom) {
// degenerate size
return;
}
const x = Math.round(
surface.canvas.width * ((ev.clientX - left) / (right - left)) + surface.x
);
const y = Math.round(
surface.canvas.height * ((ev.clientY - top) / (bottom - top)) + surface.y
);
const buttons = [];
const button = getMouseButton(ev);
return { x, y, button, buttons };
}
/**
* The mouse input handler class
*/
export class Mouse {
private send: Function;
private keyboard: Keyboard;
private findSurface: Function;
private wheel_delta_x: number = 0;
private wheel_delta_y: number = 0;
constructor(send: Function, keyboard: Keyboard, findSurface: Function) {
this.send = send;
this.keyboard = keyboard;
this.findSurface = findSurface;
}
process(ev: MouseEvent): Surface | undefined {
if (ev.clientX == null || ev.clientY == null) {
// happens with touch events for now...
return;
}
const elt_at = document.elementFromPoint(ev.clientX, ev.clientY);
if (!elt_at) {
// nothing under mouse, so no point. (possible? I don't know.)
return;
}
// TODO: right now we abuse things a bit to store the wid on the canvas itself.
const wid: number | undefined = (elt_at as any).wid;
if (wid === undefined) {
return;
}
const surface: Surface | undefined = this.findSurface(wid);
if (surface === undefined) {
// TODO: this shouldn't happen, or if it does, probably
// we should do something special to fix it?
console.warn(
`process mouse -- weird, we clicked on surface ${wid} but can't find it`
);
return;
}
const modifiers: string[] = this.keyboard.modifiers(ev);
const mouse = getMouse(ev, surface);
if (mouse == null) {
return;
}
const { x, y, button, buttons } = mouse;
switch (ev.type) {
case "mousemove": {
this.send("pointer-position", wid, [x, y], modifiers, buttons);
break;
}
case "mousedown":
case "mouseup": {
const pressed = ev.type === "mousedown";
surface.do_close_on_click();
this.send(
"button-action",
wid,
button,
pressed,
[x, y],
modifiers,
buttons
);
break;
}
case WHEEL_EVENT_NAME: {
this.do_window_mouse_scroll({ ev, wid, x, y, buttons, modifiers });
return;
}
}
return surface;
}
do_window_mouse_scroll({
ev,
wid,
x,
y,
buttons,
modifiers,
}: {
ev: MouseEvent;
wid: number;
x: number;
y: number;
buttons: number[];
modifiers: string[];
}): void {
// I think server support for wheel.precise is not available in
// CoCalc -- I think it depends on the uinput Python module,
// and won't work without kernel support that is not allowed by
// Docker for security reasons. So we instead "send
// synthetic click+release as many times as needed".
const wheel = normalize_wheel(ev);
const INCREMENT = 120;
//clamp to prevent event floods:
const px = Math.min(INCREMENT * 10, wheel.pixelX);
const py = Math.min(INCREMENT * 10, wheel.pixelY);
const apx = Math.abs(px);
const apy = Math.abs(py);
// Generate a single event if we can, or add to accumulators: | }
if (apy >= 40 && apy <= 160) {
this.wheel_delta_y = py > 0 ? INCREMENT : -INCREMENT;
} else {
this.wheel_delta_y += py;
}
// Send synthetic click+release as many times as needed:
let wx = Math.abs(this.wheel_delta_x);
let wy = Math.abs(this.wheel_delta_y);
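// X11-style wheel buttons: 4/5 scroll vertically, 6/7 scroll horizontally.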
const btn_x = this.wheel_delta_x >= 0 ? 6 : 7;
const btn_y = this.wheel_delta_y >= 0 ? 5 : 4;
while (wx >= INCREMENT) {
wx -= INCREMENT;
this.send("button-action", wid, btn_x, true, [x, y], modifiers, buttons);
this.send("button-action", wid, btn_x, false, [x, y], modifiers, buttons);
}
while (wy >= INCREMENT) {
wy -= INCREMENT;
this.send("button-action", wid, btn_y, true, [x, y], modifiers, buttons);
this.send("button-action", wid, btn_y, false, [x, y], modifiers, buttons);
}
// Store leftovers:
this.wheel_delta_x = this.wheel_delta_x >= 0 ? wx : -wx;
this.wheel_delta_y = this.wheel_delta_y >= 0 ? wy : -wy;
}
} | if (apx >= 40 && apx <= 160) {
this.wheel_delta_x = px > 0 ? INCREMENT : -INCREMENT;
} else {
this.wheel_delta_x += px; | random_line_split |
context.ts | /*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
import * as React from 'react';
import { query } from '@csegames/camelot-unchained/lib/graphql/query';
import { useConfig } from '@csegames/camelot-unchained/lib/graphql/react';
import { Ability, CUQuery, ItemDefRef } from 'gql/interfaces';
export const HUDGraphQLQueryConfig = () => ({
url: game.webAPIHost + '/graphql',
requestOptions: {
headers: {
Authorization: `Bearer ${game.accessToken}`,
shardID: `${game.shardID}`,
characterID: game.selfPlayerState.characterID,
},
},
});
export const HUDGraphQLSubscriptionConfig = () => ({
url: game.webAPIHost.replace('http', 'ws') + '/graphql',
initPayload: {
shardID: game.shardID,
Authorization: `Bearer ${game.accessToken}`,
characterID: game.selfPlayerState.characterID,
},
});
// Initialize config used across all gql requests
useConfig(HUDGraphQLQueryConfig, HUDGraphQLSubscriptionConfig);
export interface HUDGraphQLQueryResult<T> {
ok: boolean;
statusText: string;
statusCode: number;
data: T;
refetch: () => void;
}
export interface HUDContextState {
skills: HUDGraphQLQueryResult<Ability[]>;
itemDefRefs: HUDGraphQLQueryResult<ItemDefRef[]>;
}
const defaultQueryResultInfo = {
ok: false,
statusText: '',
refetch: () => {},
statusCode: 0,
};
export const defaultContextState: HUDContextState = {
skills: {
...defaultQueryResultInfo,
data: [],
},
itemDefRefs: {
...defaultQueryResultInfo,
data: [],
},
};
export const HUDContext = React.createContext(defaultContextState);
export const abilitiesQuery = `
{
myCharacter {
abilities {
id
name
icon
description
tracks
}
}
}
`;
export async function | (): Promise<HUDGraphQLQueryResult<Ability[]>> {
const res = await query<Pick<CUQuery, 'myCharacter'>>({
query: abilitiesQuery,
operationName: null,
namedQuery: null,
variables: {},
}, HUDGraphQLQueryConfig());
const abilities = res.data && res.data.myCharacter ? res.data.myCharacter.abilities : [];
return {
...res,
data: abilities,
refetch: fetchAbilities,
};
}
export const itemDefRefsQuery = `
{
game {
items {
id
description
name
iconUrl
itemType
defaultResourceID
numericItemDefID
isStackableItem
deploySettings {
resourceID
isDoor
snapToGround
rotateYaw
rotatePitch
rotateRoll
}
gearSlotSets {
gearSlots {
id
}
}
isVox
}
}
}
`;
export async function fetchItemDefRefs() {
const res = await query<Pick<CUQuery, 'game'>>({
query: itemDefRefsQuery,
operationName: null,
namedQuery: null,
variables: {},
}, HUDGraphQLQueryConfig());
return {
...res,
data: res.data && res.data.game ? res.data.game.items : [],
refetch: fetchItemDefRefs,
};
}
| fetchAbilities | identifier_name |
context.ts | /*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
import * as React from 'react';
import { query } from '@csegames/camelot-unchained/lib/graphql/query';
import { useConfig } from '@csegames/camelot-unchained/lib/graphql/react';
import { Ability, CUQuery, ItemDefRef } from 'gql/interfaces';
export const HUDGraphQLQueryConfig = () => ({
url: game.webAPIHost + '/graphql',
requestOptions: {
headers: {
Authorization: `Bearer ${game.accessToken}`,
shardID: `${game.shardID}`,
characterID: game.selfPlayerState.characterID,
},
},
});
export const HUDGraphQLSubscriptionConfig = () => ({
url: game.webAPIHost.replace('http', 'ws') + '/graphql',
initPayload: {
shardID: game.shardID,
Authorization: `Bearer ${game.accessToken}`,
characterID: game.selfPlayerState.characterID,
},
});
// Initialize config used across all gql requests
useConfig(HUDGraphQLQueryConfig, HUDGraphQLSubscriptionConfig);
export interface HUDGraphQLQueryResult<T> {
ok: boolean;
statusText: string;
statusCode: number;
data: T;
refetch: () => void;
}
export interface HUDContextState {
skills: HUDGraphQLQueryResult<Ability[]>;
itemDefRefs: HUDGraphQLQueryResult<ItemDefRef[]>;
}
const defaultQueryResultInfo = {
ok: false,
statusText: '',
refetch: () => {},
statusCode: 0,
};
export const defaultContextState: HUDContextState = {
skills: {
...defaultQueryResultInfo,
data: [],
},
itemDefRefs: {
...defaultQueryResultInfo,
data: [],
},
};
export const HUDContext = React.createContext(defaultContextState);
export const abilitiesQuery = `
{
myCharacter {
abilities {
id
name
icon
description
tracks
}
}
}
`;
export async function fetchAbilities(): Promise<HUDGraphQLQueryResult<Ability[]>> {
const res = await query<Pick<CUQuery, 'myCharacter'>>({
query: abilitiesQuery,
operationName: null,
namedQuery: null,
variables: {},
}, HUDGraphQLQueryConfig());
const abilities = res.data && res.data.myCharacter ? res.data.myCharacter.abilities : [];
return {
...res,
data: abilities,
refetch: fetchAbilities,
};
}
export const itemDefRefsQuery = `
{
game {
items {
id
description
name
iconUrl
itemType
defaultResourceID
numericItemDefID
isStackableItem
deploySettings {
resourceID
isDoor
snapToGround
rotateYaw
rotatePitch
rotateRoll
}
gearSlotSets {
gearSlots {
id
}
}
isVox
}
}
}
`;
export async function fetchItemDefRefs() | {
const res = await query<Pick<CUQuery, 'game'>>({
query: itemDefRefsQuery,
operationName: null,
namedQuery: null,
variables: {},
}, HUDGraphQLQueryConfig());
return {
...res,
data: res.data && res.data.game ? res.data.game.items : [],
refetch: fetchItemDefRefs,
};
} | identifier_body |
|
context.ts | /*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
import * as React from 'react';
import { query } from '@csegames/camelot-unchained/lib/graphql/query';
import { useConfig } from '@csegames/camelot-unchained/lib/graphql/react';
import { Ability, CUQuery, ItemDefRef } from 'gql/interfaces';
export const HUDGraphQLQueryConfig = () => ({
url: game.webAPIHost + '/graphql',
requestOptions: {
headers: {
Authorization: `Bearer ${game.accessToken}`,
shardID: `${game.shardID}`,
characterID: game.selfPlayerState.characterID,
},
},
});
export const HUDGraphQLSubscriptionConfig = () => ({
url: game.webAPIHost.replace('http', 'ws') + '/graphql',
initPayload: {
shardID: game.shardID,
Authorization: `Bearer ${game.accessToken}`,
characterID: game.selfPlayerState.characterID,
},
});
// Initialize config used across all gql requests
useConfig(HUDGraphQLQueryConfig, HUDGraphQLSubscriptionConfig);
export interface HUDGraphQLQueryResult<T> {
ok: boolean;
statusText: string;
statusCode: number;
data: T;
refetch: () => void;
}
export interface HUDContextState {
skills: HUDGraphQLQueryResult<Ability[]>;
itemDefRefs: HUDGraphQLQueryResult<ItemDefRef[]>;
}
const defaultQueryResultInfo = {
ok: false,
statusText: '',
refetch: () => {},
statusCode: 0,
};
export const defaultContextState: HUDContextState = {
skills: {
...defaultQueryResultInfo,
data: [],
},
itemDefRefs: {
...defaultQueryResultInfo,
data: [],
},
};
export const HUDContext = React.createContext(defaultContextState);
export const abilitiesQuery = `
{
myCharacter {
abilities {
id
name
icon
description
tracks
}
}
}
`;
export async function fetchAbilities(): Promise<HUDGraphQLQueryResult<Ability[]>> {
const res = await query<Pick<CUQuery, 'myCharacter'>>({
query: abilitiesQuery,
operationName: null,
namedQuery: null,
variables: {},
}, HUDGraphQLQueryConfig());
const abilities = res.data && res.data.myCharacter ? res.data.myCharacter.abilities : [];
return {
...res,
data: abilities,
refetch: fetchAbilities,
};
}
export const itemDefRefsQuery = `
{
game { | description
name
iconUrl
itemType
defaultResourceID
numericItemDefID
isStackableItem
deploySettings {
resourceID
isDoor
snapToGround
rotateYaw
rotatePitch
rotateRoll
}
gearSlotSets {
gearSlots {
id
}
}
isVox
}
}
}
`;
export async function fetchItemDefRefs() {
const res = await query<Pick<CUQuery, 'game'>>({
query: itemDefRefsQuery,
operationName: null,
namedQuery: null,
variables: {},
}, HUDGraphQLQueryConfig());
return {
...res,
data: res.data && res.data.game ? res.data.game.items : [],
refetch: fetchItemDefRefs,
};
} | items {
id | random_line_split |
srp-gre-admin.js | //Utilizing GMap API v3 (no API key needed)
function srp_geocode(){
if(jQuery.trim(jQuery('#listings_address').val()) != '' && jQuery.trim(jQuery('#listings_city').val()) != '' && jQuery.trim(jQuery('#listings_state').val()) != '' && jQuery.trim(jQuery('#listings_postcode').val()) != '') |
}
function srp_geocode_test(lat, lng){
var test = '<a href="http://maps.google.com/maps?hl=en&q=' + lat + ' ' + lng + '" target="_blank">Check if location is correct</a>';
jQuery('#test_geo_link').html(test);
jQuery("#listings_latitude").triggerHandler("focus");
}
//Adding Get Coordinates button at the bottom of the listing editor page created by the GRE plugin.
jQuery(document).ready( function() {
// BEGIN for GRE Plugin
var geo_button = '<p><input id="srp_get_coord" type="button" name="get_coord" value="Get Lat/Long" /><span id="test_geo_link"></span></p>';
if(typeof(jQuery('#listings3-div div')) !== 'undefined'){
jQuery('#listings3-div div').append(geo_button);
}
if(jQuery('#listings_latitude').val() != '' && jQuery('#listings_longitude').val() != ''){
var lat = jQuery('#listings_latitude').val();
var lng = jQuery('#listings_longitude').val();
srp_geocode_test(lat, lng);
}
jQuery('#srp_get_coord').bind('click', function() {
srp_geocode();
});
// END for GRE Plugin
//Overriding Thickbox's tb_remove function because it breaks tabs
window.tb_remove = function() {
_fixed_tb_remove();
};
function _fixed_tb_remove() {
jQuery("#TB_imageOff").unbind("click");
jQuery("#TB_closeWindowButton").unbind("click");
jQuery("#TB_window").fadeOut("fast",function(){jQuery('#TB_window,#TB_overlay,#TB_HideSelect').unload("#TB_ajaxContent").unbind().remove();});
jQuery("#TB_load").remove();
if (typeof document.body.style.maxHeight == "undefined") {//if IE 6
jQuery("body","html").css({height: "auto", width: "auto"});
jQuery("html").css("overflow","");
}
jQuery(document).unbind('.thickbox');
return false;
}
}); | {
var address = jQuery('#listings_address').val() + ', ' + jQuery('#listings_city').val() + ', ' + jQuery('#listings_state').val() + ' ' + jQuery('#listings_postcode').val();
var geocoder;
geocoder = new google.maps.Geocoder();
if (geocoder) {
geocoder.geocode( { 'address': address}, function(results, status) {
if (status == google.maps.GeocoderStatus.OK) {
var latlng = results[0].geometry.location;
jQuery('#listings_latitude').val(latlng.lat());
jQuery('#listings_longitude').val(latlng.lng());
srp_geocode_test(latlng.lat(), latlng.lng());
}else{
alert("Geocode was not successful for the following reason: " + status);
}
});
}
return false;
} | conditional_block |
srp-gre-admin.js | //Utilizing GMap API v3 (no API key needed)
function srp_geocode(){
if(jQuery.trim(jQuery('#listings_address').val()) != '' && jQuery.trim(jQuery('#listings_city').val()) != '' && jQuery.trim(jQuery('#listings_state').val()) != '' && jQuery.trim(jQuery('#listings_postcode').val()) != ''){
var address = jQuery('#listings_address').val() + ', ' + jQuery('#listings_city').val() + ', ' + jQuery('#listings_state').val() + ' ' + jQuery('#listings_postcode').val();
var geocoder;
geocoder = new google.maps.Geocoder();
if (geocoder) {
geocoder.geocode( { 'address': address}, function(results, status) {
if (status == google.maps.GeocoderStatus.OK) {
var latlng = results[0].geometry.location;
jQuery('#listings_latitude').val(latlng.lat());
jQuery('#listings_longitude').val(latlng.lng());
srp_geocode_test(latlng.lat(), latlng.lng());
}else{
alert("Geocode was not successful for the following reason: " + status);
}
});
}
return false;
}
}
function srp_geocode_test(lat, lng){
var test = '<a href="http://maps.google.com/maps?hl=en&q=' + lat + ' ' + lng + '" target="_blank">Check if location is correct</a>';
jQuery('#test_geo_link').html(test);
jQuery("#listings_latitude").triggerHandler("focus");
}
//Adding Get Coordinates button at the bottom of the listing editor page created by the GRE plugin.
jQuery(document).ready( function() {
// BEGIN for GRE Plugin
var geo_button = '<p><input id="srp_get_coord" type="button" name="get_coord" value="Get Lat/Long" /><span id="test_geo_link"></span></p>';
if(typeof(jQuery('#listings3-div div')) !== 'undefined'){
jQuery('#listings3-div div').append(geo_button);
}
if(jQuery('#listings_latitude').val() != '' && jQuery('#listings_longitude').val() != ''){
var lat = jQuery('#listings_latitude').val();
var lng = jQuery('#listings_longitude').val();
srp_geocode_test(lat, lng);
}
jQuery('#srp_get_coord').bind('click', function() {
srp_geocode();
});
// END for GRE Plugin
//Overriding Thickbox's tb_remove function because it breaks tabs
window.tb_remove = function() {
_fixed_tb_remove();
};
function | () {
jQuery("#TB_imageOff").unbind("click");
jQuery("#TB_closeWindowButton").unbind("click");
jQuery("#TB_window").fadeOut("fast",function(){jQuery('#TB_window,#TB_overlay,#TB_HideSelect').unload("#TB_ajaxContent").unbind().remove();});
jQuery("#TB_load").remove();
if (typeof document.body.style.maxHeight == "undefined") {//if IE 6
jQuery("body","html").css({height: "auto", width: "auto"});
jQuery("html").css("overflow","");
}
jQuery(document).unbind('.thickbox');
return false;
}
}); | _fixed_tb_remove | identifier_name |
srp-gre-admin.js | //Utilizing GMap API v3 (no API key needed)
function srp_geocode(){
if(jQuery.trim(jQuery('#listings_address').val()) != '' && jQuery.trim(jQuery('#listings_city').val()) != '' && jQuery.trim(jQuery('#listings_state').val()) != '' && jQuery.trim(jQuery('#listings_postcode').val()) != ''){
var address = jQuery('#listings_address').val() + ', ' + jQuery('#listings_city').val() + ', ' + jQuery('#listings_state').val() + ' ' + jQuery('#listings_postcode').val();
var geocoder;
geocoder = new google.maps.Geocoder();
if (geocoder) {
geocoder.geocode( { 'address': address}, function(results, status) {
if (status == google.maps.GeocoderStatus.OK) {
var latlng = results[0].geometry.location;
jQuery('#listings_latitude').val(latlng.lat());
jQuery('#listings_longitude').val(latlng.lng());
srp_geocode_test(latlng.lat(), latlng.lng());
}else{
alert("Geocode was not successful for the following reason: " + status);
}
});
}
return false;
}
}
function srp_geocode_test(lat, lng){
var test = '<a href="http://maps.google.com/maps?hl=en&q=' + lat + ' ' + lng + '" target="_blank">Check if location is correct</a>';
jQuery('#test_geo_link').html(test);
jQuery("#listings_latitude").triggerHandler("focus");
}
//Adding Get Coordinates button at the bottom of the listing editor page created by the GRE plugin.
jQuery(document).ready( function() {
// BEGIN for GRE Plugin
var geo_button = '<p><input id="srp_get_coord" type="button" name="get_coord" value="Get Lat/Long" /><span id="test_geo_link"></span></p>';
if(typeof(jQuery('#listings3-div div')) !== 'undefined'){
jQuery('#listings3-div div').append(geo_button);
}
if(jQuery('#listings_latitude').val() != '' && jQuery('#listings_longitude').val() != ''){
var lat = jQuery('#listings_latitude').val();
var lng = jQuery('#listings_longitude').val();
srp_geocode_test(lat, lng);
}
jQuery('#srp_get_coord').bind('click', function() {
srp_geocode();
});
// END for GRE Plugin
//Overriding Thickbox's tb_remove function because it breaks tabs
window.tb_remove = function() {
_fixed_tb_remove();
};
function _fixed_tb_remove() {
jQuery("#TB_imageOff").unbind("click");
jQuery("#TB_closeWindowButton").unbind("click");
jQuery("#TB_window").fadeOut("fast",function(){jQuery('#TB_window,#TB_overlay,#TB_HideSelect').unload("#TB_ajaxContent").unbind().remove();});
jQuery("#TB_load").remove();
| }
jQuery(document).unbind('.thickbox');
return false;
}
}); | if (typeof document.body.style.maxHeight == "undefined") {//if IE 6
jQuery("body","html").css({height: "auto", width: "auto"});
jQuery("html").css("overflow","");
| random_line_split |
srp-gre-admin.js | //Utilizing GMap API v3 (no API key needed)
function srp_geocode() |
function srp_geocode_test(lat, lng){
var test = '<a href="http://maps.google.com/maps?hl=en&q=' + lat + ' ' + lng + '" target="_blank">Check if location is correct</a>';
jQuery('#test_geo_link').html(test);
jQuery("#listings_latitude").triggerHandler("focus");
}
//Adding Get Coordinates button at the bottom of the listing editor page created by the GRE plugin.
jQuery(document).ready( function() {
// BEGIN for GRE Plugin
var geo_button = '<p><input id="srp_get_coord" type="button" name="get_coord" value="Get Lat/Long" /><span id="test_geo_link"></span></p>';
if(typeof(jQuery('#listings3-div div')) !== 'undefined'){
jQuery('#listings3-div div').append(geo_button);
}
if(jQuery('#listings_latitude').val() != '' && jQuery('#listings_longitude').val() != ''){
var lat = jQuery('#listings_latitude').val();
var lng = jQuery('#listings_longitude').val();
srp_geocode_test(lat, lng);
}
jQuery('#srp_get_coord').bind('click', function() {
srp_geocode();
});
// END for GRE Plugin
//Overriding Thickbox's tb_remove function because it breaks tabs
window.tb_remove = function() {
_fixed_tb_remove();
};
function _fixed_tb_remove() {
jQuery("#TB_imageOff").unbind("click");
jQuery("#TB_closeWindowButton").unbind("click");
jQuery("#TB_window").fadeOut("fast",function(){jQuery('#TB_window,#TB_overlay,#TB_HideSelect').unload("#TB_ajaxContent").unbind().remove();});
jQuery("#TB_load").remove();
if (typeof document.body.style.maxHeight == "undefined") {//if IE 6
jQuery("body","html").css({height: "auto", width: "auto"});
jQuery("html").css("overflow","");
}
jQuery(document).unbind('.thickbox');
return false;
}
}); | {
if(jQuery.trim(jQuery('#listings_address').val()) != '' && jQuery.trim(jQuery('#listings_city').val()) != '' && jQuery.trim(jQuery('#listings_state').val()) != '' && jQuery.trim(jQuery('#listings_postcode').val()) != ''){
var address = jQuery('#listings_address').val() + ', ' + jQuery('#listings_city').val() + ', ' + jQuery('#listings_state').val() + ' ' + jQuery('#listings_postcode').val();
var geocoder;
geocoder = new google.maps.Geocoder();
if (geocoder) {
geocoder.geocode( { 'address': address}, function(results, status) {
if (status == google.maps.GeocoderStatus.OK) {
var latlng = results[0].geometry.location;
jQuery('#listings_latitude').val(latlng.lat());
jQuery('#listings_longitude').val(latlng.lng());
srp_geocode_test(latlng.lat(), latlng.lng());
}else{
alert("Geocode was not successful for the following reason: " + status);
}
});
}
return false;
}
} | identifier_body |
card_validation_test.py | import unittest
from card_validation import (
numberToMatrix,
getOddDigits,
getEvenDigits,
sumOfDoubleOddPlace,
sumOfEvenPlace,
getDigit,
isValid
)
class CardValidationTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(CardValidationTest, self).__init__(*args, **kwargs)
self.card_number = "4388576018410707"
self.matrix = numberToMatrix(self.card_number)
self.odds = getOddDigits(self.matrix)
self.evens = getEvenDigits(self.matrix)
def test_numberToMatrix(self):
self.assertEqual(self.matrix.__class__, list)
def test_getOddDigits(self):
self.assertEqual(self.odds.__class__, list)
def test_getEvenDigits(self):
self.assertEqual(self.evens.__class__, list)
def test_sumOfDoubleOddPlace(self):
self.assertEqual(sumOfDoubleOddPlace(self.odds), 29)
def | (self):
self.assertEqual(getDigit(9), 9)
def test_isValid(self):
self.assertEqual(isValid(self.card_number), True)
if __name__ == "__main__":
unittest.main()
| test_getDigit | identifier_name |
card_validation_test.py | import unittest
from card_validation import (
numberToMatrix,
getOddDigits,
getEvenDigits,
sumOfDoubleOddPlace,
sumOfEvenPlace,
getDigit,
isValid
)
class CardValidationTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(CardValidationTest, self).__init__(*args, **kwargs)
self.card_number = "4388576018410707"
self.matrix = numberToMatrix(self.card_number)
self.odds = getOddDigits(self.matrix)
self.evens = getEvenDigits(self.matrix)
def test_numberToMatrix(self):
self.assertEqual(self.matrix.__class__, list)
def test_getOddDigits(self):
self.assertEqual(self.odds.__class__, list)
def test_getEvenDigits(self):
self.assertEqual(self.evens.__class__, list)
def test_sumOfDoubleOddPlace(self):
self.assertEqual(sumOfDoubleOddPlace(self.odds), 29)
def test_getDigit(self):
self.assertEqual(getDigit(9), 9)
def test_isValid(self):
self.assertEqual(isValid(self.card_number), True)
if __name__ == "__main__":
| unittest.main() | conditional_block |
|
card_validation_test.py | import unittest
from card_validation import (
numberToMatrix,
getOddDigits,
getEvenDigits,
sumOfDoubleOddPlace,
sumOfEvenPlace,
getDigit,
isValid
)
class CardValidationTest(unittest.TestCase):
def __init__(self, *args, **kwargs): | super(CardValidationTest, self).__init__(*args, **kwargs)
self.card_number = "4388576018410707"
self.matrix = numberToMatrix(self.card_number)
self.odds = getOddDigits(self.matrix)
self.evens = getEvenDigits(self.matrix)
def test_numberToMatrix(self):
self.assertEqual(self.matrix.__class__, list)
def test_getOddDigits(self):
self.assertEqual(self.odds.__class__, list)
def test_getEvenDigits(self):
self.assertEqual(self.evens.__class__, list)
def test_sumOfDoubleOddPlace(self):
self.assertEqual(sumOfDoubleOddPlace(self.odds), 29)
def test_getDigit(self):
self.assertEqual(getDigit(9), 9)
def test_isValid(self):
self.assertEqual(isValid(self.card_number), True)
if __name__ == "__main__":
unittest.main() | random_line_split |
|
card_validation_test.py | import unittest
from card_validation import (
numberToMatrix,
getOddDigits,
getEvenDigits,
sumOfDoubleOddPlace,
sumOfEvenPlace,
getDigit,
isValid
)
class CardValidationTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(CardValidationTest, self).__init__(*args, **kwargs)
self.card_number = "4388576018410707"
self.matrix = numberToMatrix(self.card_number)
self.odds = getOddDigits(self.matrix)
self.evens = getEvenDigits(self.matrix)
def test_numberToMatrix(self):
|
def test_getOddDigits(self):
self.assertEqual(self.odds.__class__, list)
def test_getEvenDigits(self):
self.assertEqual(self.evens.__class__, list)
def test_sumOfDoubleOddPlace(self):
self.assertEqual(sumOfDoubleOddPlace(self.odds), 29)
def test_getDigit(self):
self.assertEqual(getDigit(9), 9)
def test_isValid(self):
self.assertEqual(isValid(self.card_number), True)
if __name__ == "__main__":
unittest.main()
| self.assertEqual(self.matrix.__class__, list) | identifier_body |
bala.py | # -*- encoding: utf-8 -*-
# Pilas engine - A video game framework.
#
# Copyright 2010 - Hugo Ruscitti
# License: LGPLv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# Website - http://www.pilas-engine.com.ar
from pilasengine.actores.actor import Actor
| def __init__(self, pilas, x=0, y=0, rotacion=0, velocidad_maxima=9,
angulo_de_movimiento=90):
"""
Construye la Bala.
:param x: Posición x del proyectil.
:param y: Posición y del proyectil.
:param velocidad_maxima: Velocidad máxima que alcanzará el proyectil.
:param angulo_de_movimiento: Angulo en que se moverá el Actor..
"""
super(Bala, self).__init__(pilas=pilas, x=x, y=y)
self.imagen = pilas.imagenes.cargar('disparos/bola_amarilla.png')
self.rotacion = rotacion
self.radio_de_colision = 5
self.hacer(pilas.comportamientos.Proyectil,
velocidad_maxima=velocidad_maxima,
aceleracion=1,
angulo_de_movimiento=angulo_de_movimiento,
gravedad=0)
self.aprender(self.pilas.habilidades.EliminarseSiSaleDePantalla)
self.cuando_se_elimina = None
def eliminar(self):
if self.cuando_se_elimina:
self.cuando_se_elimina(self)
super(Bala, self).eliminar() | class Bala(Actor):
""" Representa una bala que va en línea recta. """
| random_line_split |
bala.py | # -*- encoding: utf-8 -*-
# Pilas engine - A video game framework.
#
# Copyright 2010 - Hugo Ruscitti
# License: LGPLv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# Website - http://www.pilas-engine.com.ar
from pilasengine.actores.actor import Actor
class Bala(Actor):
""" Representa una bala que va en línea recta. """
def __init__(self, pilas, x=0, y=0, rotacion=0, velocidad_maxima=9,
angulo_de_movimiento=90):
"""
Construye la Bala.
:param x: Posición x del proyectil.
:param y: Posición y del proyectil.
:param velocidad_maxima: Velocidad máxima que alcanzará el proyectil.
:param angulo_de_movimiento: Angulo en que se moverá el Actor..
"""
super(Bala, self).__init__(pilas=pilas, x=x, y=y)
self.imagen = pilas.imagenes.cargar('disparos/bola_amarilla.png')
self.rotacion = rotacion
self.radio_de_colision = 5
self.hacer(pilas.comportamientos.Proyectil,
velocidad_maxima=velocidad_maxima,
aceleracion=1,
angulo_de_movimiento=angulo_de_movimiento,
gravedad=0)
self.aprender(self.pilas.habilidades.EliminarseSiSaleDePantalla)
self.cuando_se_elimina = None
def eliminar(self):
if self.cuando_se_elimina:
self.c | super(Bala, self).eliminar()
| uando_se_elimina(self)
| conditional_block |
bala.py | # -*- encoding: utf-8 -*-
# Pilas engine - A video game framework.
#
# Copyright 2010 - Hugo Ruscitti
# License: LGPLv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# Website - http://www.pilas-engine.com.ar
from pilasengine.actores.actor import Actor
class | (Actor):
""" Representa una bala que va en línea recta. """
def __init__(self, pilas, x=0, y=0, rotacion=0, velocidad_maxima=9,
angulo_de_movimiento=90):
"""
Construye la Bala.
:param x: Posición x del proyectil.
:param y: Posición y del proyectil.
:param velocidad_maxima: Velocidad máxima que alcanzará el proyectil.
:param angulo_de_movimiento: Angulo en que se moverá el Actor..
"""
super(Bala, self).__init__(pilas=pilas, x=x, y=y)
self.imagen = pilas.imagenes.cargar('disparos/bola_amarilla.png')
self.rotacion = rotacion
self.radio_de_colision = 5
self.hacer(pilas.comportamientos.Proyectil,
velocidad_maxima=velocidad_maxima,
aceleracion=1,
angulo_de_movimiento=angulo_de_movimiento,
gravedad=0)
self.aprender(self.pilas.habilidades.EliminarseSiSaleDePantalla)
self.cuando_se_elimina = None
def eliminar(self):
if self.cuando_se_elimina:
self.cuando_se_elimina(self)
super(Bala, self).eliminar()
| Bala | identifier_name |
bala.py | # -*- encoding: utf-8 -*-
# Pilas engine - A video game framework.
#
# Copyright 2010 - Hugo Ruscitti
# License: LGPLv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# Website - http://www.pilas-engine.com.ar
from pilasengine.actores.actor import Actor
class Bala(Actor):
| """ Representa una bala que va en línea recta. """
def __init__(self, pilas, x=0, y=0, rotacion=0, velocidad_maxima=9,
angulo_de_movimiento=90):
"""
Construye la Bala.
:param x: Posición x del proyectil.
:param y: Posición y del proyectil.
:param velocidad_maxima: Velocidad máxima que alcanzará el proyectil.
:param angulo_de_movimiento: Angulo en que se moverá el Actor..
"""
super(Bala, self).__init__(pilas=pilas, x=x, y=y)
self.imagen = pilas.imagenes.cargar('disparos/bola_amarilla.png')
self.rotacion = rotacion
self.radio_de_colision = 5
self.hacer(pilas.comportamientos.Proyectil,
velocidad_maxima=velocidad_maxima,
aceleracion=1,
angulo_de_movimiento=angulo_de_movimiento,
gravedad=0)
self.aprender(self.pilas.habilidades.EliminarseSiSaleDePantalla)
self.cuando_se_elimina = None
def eliminar(self):
if self.cuando_se_elimina:
self.cuando_se_elimina(self)
super(Bala, self).eliminar()
| identifier_body |
|
main.rs | // Copyright 2015 The Athena Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate zeus;
extern crate rustc_serialize;
extern crate docopt;
extern crate toml;
mod commands;
use std::error::Error;
use docopt::Docopt;
static USAGE: &'static str = "
Athena's project build system.
Usage:
zeus <command> [<args>...]
zeus
Some common zeus commands are:
version Display version info and exit
list Display a list of commands
new Create a new athena project
setup Sets up all athena tools for this project
See 'zeus help <command>' for more information on a specific command.
";
#[derive(RustcDecodable, Debug)]
struct Flags {
arg_command: String,
arg_args: Vec<String>
}
fn main() {
// Parse in the command line flags
let flags: Flags = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
// Run the actual command
let result = match &flags.arg_command[..] {
"list" => commands::list::execute(),
"new" => commands::new::execute(),
"setup" => commands::setup::execute(),
"" => display_usage(),
_ => display_not_found()
};
// Set the exit code depending on the result
match result {
Ok(_) => std::process::exit(0),
Err(err) => |
}
}
// ### Misc Command Handlers ###
fn display_usage() -> Result<(), Box<Error>> {
println!("{}", USAGE);
return Ok(());
}
fn display_not_found() -> Result<(), Box<Error>> {
unimplemented!();
}
| {
println!("{}", err);
std::process::exit(1)
} | conditional_block |
main.rs | // Copyright 2015 The Athena Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate zeus;
extern crate rustc_serialize;
extern crate docopt;
extern crate toml;
mod commands;
use std::error::Error;
use docopt::Docopt;
static USAGE: &'static str = "
Athena's project build system.
Usage:
zeus <command> [<args>...]
zeus
Some common zeus commands are:
version Display version info and exit
list Display a list of commands
new Create a new athena project
setup Sets up all athena tools for this project
See 'zeus help <command>' for more information on a specific command.
";
#[derive(RustcDecodable, Debug)]
struct Flags {
arg_command: String,
arg_args: Vec<String>
}
fn main() {
// Parse in the command line flags
let flags: Flags = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
| // Run the actual command
let result = match &flags.arg_command[..] {
"list" => commands::list::execute(),
"new" => commands::new::execute(),
"setup" => commands::setup::execute(),
"" => display_usage(),
_ => display_not_found()
};
// Set the exit code depending on the result
match result {
Ok(_) => std::process::exit(0),
Err(err) => {
println!("{}", err);
std::process::exit(1)
}
}
}
// ### Misc Command Handlers ###
fn display_usage() -> Result<(), Box<Error>> {
println!("{}", USAGE);
return Ok(());
}
fn display_not_found() -> Result<(), Box<Error>> {
unimplemented!();
} | random_line_split |
|
main.rs | // Copyright 2015 The Athena Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate zeus;
extern crate rustc_serialize;
extern crate docopt;
extern crate toml;
mod commands;
use std::error::Error;
use docopt::Docopt;
static USAGE: &'static str = "
Athena's project build system.
Usage:
zeus <command> [<args>...]
zeus
Some common zeus commands are:
version Display version info and exit
list Display a list of commands
new Create a new athena project
setup Sets up all athena tools for this project
See 'zeus help <command>' for more information on a specific command.
";
#[derive(RustcDecodable, Debug)]
struct | {
arg_command: String,
arg_args: Vec<String>
}
fn main() {
// Parse in the command line flags
let flags: Flags = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
// Run the actual command
let result = match &flags.arg_command[..] {
"list" => commands::list::execute(),
"new" => commands::new::execute(),
"setup" => commands::setup::execute(),
"" => display_usage(),
_ => display_not_found()
};
// Set the exit code depending on the result
match result {
Ok(_) => std::process::exit(0),
Err(err) => {
println!("{}", err);
std::process::exit(1)
}
}
}
// ### Misc Command Handlers ###
fn display_usage() -> Result<(), Box<Error>> {
println!("{}", USAGE);
return Ok(());
}
fn display_not_found() -> Result<(), Box<Error>> {
unimplemented!();
}
| Flags | identifier_name |
main.rs | // Copyright 2015 The Athena Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate zeus;
extern crate rustc_serialize;
extern crate docopt;
extern crate toml;
mod commands;
use std::error::Error;
use docopt::Docopt;
static USAGE: &'static str = "
Athena's project build system.
Usage:
zeus <command> [<args>...]
zeus
Some common zeus commands are:
version Display version info and exit
list Display a list of commands
new Create a new athena project
setup Sets up all athena tools for this project
See 'zeus help <command>' for more information on a specific command.
";
#[derive(RustcDecodable, Debug)]
struct Flags {
arg_command: String,
arg_args: Vec<String>
}
fn main() |
// ### Misc Command Handlers ###
fn display_usage() -> Result<(), Box<Error>> {
println!("{}", USAGE);
return Ok(());
}
fn display_not_found() -> Result<(), Box<Error>> {
unimplemented!();
}
| {
// Parse in the command line flags
let flags: Flags = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
// Run the actual command
let result = match &flags.arg_command[..] {
"list" => commands::list::execute(),
"new" => commands::new::execute(),
"setup" => commands::setup::execute(),
"" => display_usage(),
_ => display_not_found()
};
// Set the exit code depending on the result
match result {
Ok(_) => std::process::exit(0),
Err(err) => {
println!("{}", err);
std::process::exit(1)
}
}
} | identifier_body |
show.js | import PlanRequiredRoute from "../plan-required";
import Notify from 'ember-notify';
export default PlanRequiredRoute.extend({
model: function(params) {
var _this = this;
return this.store.find('entry', params.entry_id).then(function(entry) {
// Force a reload if the meta data is out of date
var meta = _this.store.metadataFor("entry");
if (meta.current_entry !== entry.get('entryDate')) {
return entry.reload();
} else {
return entry;
}
}, function(data) {
if (data.status === 404) {
// Set the meta data | entryDate: params.entry_id
});
return entry;
} else {
Notify.error(data.responseText, {closeAfter: 5000});
}
});
},
setupController: function(controller, model) {
this._super(controller, model);
var meta = this.store.metadataFor("entry");
controller.setProperties({
nextEntry: meta.next_entry,
randomEntry: meta.random_entry,
prevEntry: meta.prev_entry,
entryDatePretty: moment(model.get('entryDate')).format("MMMM Do, YYYY")
});
}
}); | var meta = data.responseJSON.meta;
_this.store.metaForType("entry", meta);
// Build the dummy record, for use in the new form
var entry = _this.store.createRecord('entry', { | random_line_split |
show.js | import PlanRequiredRoute from "../plan-required";
import Notify from 'ember-notify';
export default PlanRequiredRoute.extend({
model: function(params) {
var _this = this;
return this.store.find('entry', params.entry_id).then(function(entry) {
// Force a reload if the meta data is out of date
var meta = _this.store.metadataFor("entry");
if (meta.current_entry !== entry.get('entryDate')) {
return entry.reload();
} else {
return entry;
}
}, function(data) {
if (data.status === 404) {
// Set the meta data
var meta = data.responseJSON.meta;
_this.store.metaForType("entry", meta);
// Build the dummy record, for use in the new form
var entry = _this.store.createRecord('entry', {
entryDate: params.entry_id
});
return entry;
} else |
});
},
setupController: function(controller, model) {
this._super(controller, model);
var meta = this.store.metadataFor("entry");
controller.setProperties({
nextEntry: meta.next_entry,
randomEntry: meta.random_entry,
prevEntry: meta.prev_entry,
entryDatePretty: moment(model.get('entryDate')).format("MMMM Do, YYYY")
});
}
});
| {
Notify.error(data.responseText, {closeAfter: 5000});
} | conditional_block |
coolmoviezone.py | # -*- coding: utf-8 -*-
# ..#######.########.#######.##....#..######..######.########....###...########.#######.########..######.
# .##.....#.##.....#.##......###...#.##....#.##....#.##.....#...##.##..##.....#.##......##.....#.##....##
# .##.....#.##.....#.##......####..#.##......##......##.....#..##...##.##.....#.##......##.....#.##......
# .##.....#.########.######..##.##.#..######.##......########.##.....#.########.######..########..######.
# .##.....#.##.......##......##..###.......#.##......##...##..########.##.......##......##...##........##
# .##.....#.##.......##......##...##.##....#.##....#.##....##.##.....#.##.......##......##....##.##....##
# ..#######.##.......#######.##....#..######..######.##.....#.##.....#.##.......#######.##.....#..######.
'''
OpenScrapers Project
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from openscrapers.modules import cleantitle, source_utils, cfscrape
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['coolmoviezone.online']
self.base_link = 'https://coolmoviezone.online'
self.scraper = cfscrape.create_scraper()
def movie(self, imdb, title, localtitle, aliases, year):
try:
title = cleantitle.geturl(title)
url = self.base_link + '/%s-%s' % (title, year)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
|
def resolve(self, url):
return url
| try:
sources = []
r = self.scraper.get(url).content
match = re.compile('<td align="center"><strong><a href="(.+?)"').findall(r)
for url in match:
host = url.split('//')[1].replace('www.', '')
host = host.split('/')[0].split('.')[0].title()
quality = source_utils.check_sd_url(url)
sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'direct': False,
'debridonly': False})
except Exception:
return
return sources | identifier_body |
coolmoviezone.py | # -*- coding: utf-8 -*-
# ..#######.########.#######.##....#..######..######.########....###...########.#######.########..######.
# .##.....#.##.....#.##......###...#.##....#.##....#.##.....#...##.##..##.....#.##......##.....#.##....##
# .##.....#.##.....#.##......####..#.##......##......##.....#..##...##.##.....#.##......##.....#.##......
# .##.....#.########.######..##.##.#..######.##......########.##.....#.########.######..########..######.
# .##.....#.##.......##......##..###.......#.##......##...##..########.##.......##......##...##........##
# .##.....#.##.......##......##...##.##....#.##....#.##....##.##.....#.##.......##......##....##.##....##
# ..#######.##.......#######.##....#..######..######.##.....#.##.....#.##.......#######.##.....#..######.
'''
OpenScrapers Project
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from openscrapers.modules import cleantitle, source_utils, cfscrape
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['coolmoviezone.online']
self.base_link = 'https://coolmoviezone.online'
self.scraper = cfscrape.create_scraper()
def | (self, imdb, title, localtitle, aliases, year):
try:
title = cleantitle.geturl(title)
url = self.base_link + '/%s-%s' % (title, year)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
r = self.scraper.get(url).content
match = re.compile('<td align="center"><strong><a href="(.+?)"').findall(r)
for url in match:
host = url.split('//')[1].replace('www.', '')
host = host.split('/')[0].split('.')[0].title()
quality = source_utils.check_sd_url(url)
sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'direct': False,
'debridonly': False})
except Exception:
return
return sources
def resolve(self, url):
return url
| movie | identifier_name |
coolmoviezone.py | # -*- coding: utf-8 -*-
# ..#######.########.#######.##....#..######..######.########....###...########.#######.########..######.
# .##.....#.##.....#.##......###...#.##....#.##....#.##.....#...##.##..##.....#.##......##.....#.##....##
# .##.....#.##.....#.##......####..#.##......##......##.....#..##...##.##.....#.##......##.....#.##......
# .##.....#.########.######..##.##.#..######.##......########.##.....#.########.######..########..######.
# .##.....#.##.......##......##..###.......#.##......##...##..########.##.......##......##...##........##
# .##.....#.##.......##......##...##.##....#.##....#.##....##.##.....#.##.......##......##....##.##....##
# ..#######.##.......#######.##....#..######..######.##.....#.##.....#.##.......#######.##.....#..######.
'''
OpenScrapers Project
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from openscrapers.modules import cleantitle, source_utils, cfscrape
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['coolmoviezone.online']
self.base_link = 'https://coolmoviezone.online'
self.scraper = cfscrape.create_scraper()
def movie(self, imdb, title, localtitle, aliases, year):
try:
title = cleantitle.geturl(title)
url = self.base_link + '/%s-%s' % (title, year)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
r = self.scraper.get(url).content
match = re.compile('<td align="center"><strong><a href="(.+?)"').findall(r)
for url in match:
|
except Exception:
return
return sources
def resolve(self, url):
return url
| host = url.split('//')[1].replace('www.', '')
host = host.split('/')[0].split('.')[0].title()
quality = source_utils.check_sd_url(url)
sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'direct': False,
'debridonly': False}) | conditional_block |
coolmoviezone.py | # -*- coding: utf-8 -*-
# ..#######.########.#######.##....#..######..######.########....###...########.#######.########..######.
# .##.....#.##.....#.##......###...#.##....#.##....#.##.....#...##.##..##.....#.##......##.....#.##....##
# .##.....#.##.....#.##......####..#.##......##......##.....#..##...##.##.....#.##......##.....#.##......
# .##.....#.########.######..##.##.#..######.##......########.##.....#.########.######..########..######.
# .##.....#.##.......##......##..###.......#.##......##...##..########.##.......##......##...##........##
# .##.....#.##.......##......##...##.##....#.##....#.##....##.##.....#.##.......##......##....##.##....##
# ..#######.##.......#######.##....#..######..######.##.....#.##.....#.##.......#######.##.....#..######.
'''
OpenScrapers Project
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from openscrapers.modules import cleantitle, source_utils, cfscrape
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['coolmoviezone.online']
self.base_link = 'https://coolmoviezone.online'
self.scraper = cfscrape.create_scraper()
def movie(self, imdb, title, localtitle, aliases, year):
try:
title = cleantitle.geturl(title)
url = self.base_link + '/%s-%s' % (title, year)
return url
except Exception:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
r = self.scraper.get(url).content
match = re.compile('<td align="center"><strong><a href="(.+?)"').findall(r)
for url in match: | 'debridonly': False})
except Exception:
return
return sources
def resolve(self, url):
return url | host = url.split('//')[1].replace('www.', '')
host = host.split('/')[0].split('.')[0].title()
quality = source_utils.check_sd_url(url)
sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'direct': False, | random_line_split |
first_attempt.py | # need to pass it a file, where data starts, path to write
# things to import
import sys
import pandas
import scipy
import numpy
from scipy import stats
from scipy.stats import t
# arguments being passed
path_of_file=sys.argv[1]
last_metadata_column=int(sys.argv[2])
path_to_write=sys.argv[3]
# spearman p calc based on two tailed t-test
def spearmanp(r,n):
tstat=r*numpy.sqrt((n-2)/(1-r**2))
return t.cdf(-abs(tstat),n-2)*2
| df_corr_matrix=df_data_only.corr(method="spearman")
#make column based on rows (called indexes in python)
df_corr_matrix["otus"]=df_corr_matrix.index
#melt dataframe but maintain indices now called otus
df_melt=pandas.melt(df_corr_matrix,id_vars="otus")
# remove NAs or NaNs which are result of non-existent otus (all 0 values)
df_melt=df_melt[numpy.isfinite(df_melt.value)]
df_melt['p.value']=spearmanp(df_melt.value,df_data_only.shape[0])
#write the file
df_melt.to_csv(path_to_write,index=False) | # read in the data
df=pandas.read_table(path_of_file,index_col=False)
# remove metadata columns
df_data_only=df.drop(df.columns[[range(0,last_metadata_column)]],axis=1)
#make correlation matrix | random_line_split |
first_attempt.py | # need to pass it a file, where data starts, path to write
# things to import
import sys
import pandas
import scipy
import numpy
from scipy import stats
from scipy.stats import t
# arguments being passed
path_of_file=sys.argv[1]
last_metadata_column=int(sys.argv[2])
path_to_write=sys.argv[3]
# spearman p calc based on two tailed t-test
def spearmanp(r,n):
|
# read in the data
df=pandas.read_table(path_of_file,index_col=False)
# remove metadata columns
df_data_only=df.drop(df.columns[[range(0,last_metadata_column)]],axis=1)
#make correlation matrix
df_corr_matrix=df_data_only.corr(method="spearman")
#make column based on rows (called indexes in python)
df_corr_matrix["otus"]=df_corr_matrix.index
#melt dataframe but maintain indices now called otus
df_melt=pandas.melt(df_corr_matrix,id_vars="otus")
# remove NAs or NaNs which are result of non-existent otus (all 0 values)
df_melt=df_melt[numpy.isfinite(df_melt.value)]
df_melt['p.value']=spearmanp(df_melt.value,df_data_only.shape[0])
#write the file
df_melt.to_csv(path_to_write,index=False)
| tstat=r*numpy.sqrt((n-2)/(1-r**2))
return t.cdf(-abs(tstat),n-2)*2 | identifier_body |
first_attempt.py | # need to pass it a file, where data starts, path to write
# things to import
import sys
import pandas
import scipy
import numpy
from scipy import stats
from scipy.stats import t
# arguments being passed
path_of_file=sys.argv[1]
last_metadata_column=int(sys.argv[2])
path_to_write=sys.argv[3]
# spearman p calc based on two tailed t-test
def | (r,n):
tstat=r*numpy.sqrt((n-2)/(1-r**2))
return t.cdf(-abs(tstat),n-2)*2
# read in the data
df=pandas.read_table(path_of_file,index_col=False)
# remove metadata columns
df_data_only=df.drop(df.columns[[range(0,last_metadata_column)]],axis=1)
#make correlation matrix
df_corr_matrix=df_data_only.corr(method="spearman")
#make column based on rows (called indexes in python)
df_corr_matrix["otus"]=df_corr_matrix.index
#melt dataframe but maintain indices now called otus
df_melt=pandas.melt(df_corr_matrix,id_vars="otus")
# remove NAs or NaNs which are result of non-existent otus (all 0 values)
df_melt=df_melt[numpy.isfinite(df_melt.value)]
df_melt['p.value']=spearmanp(df_melt.value,df_data_only.shape[0])
#write the file
df_melt.to_csv(path_to_write,index=False)
| spearmanp | identifier_name |
middleware.py | from collections import OrderedDict
from django.middleware.locale import LocaleMiddleware
from django.utils import translation
from django.conf import settings
from instance.models import WriteItInstanceConfig
def get_language_from_request(request, check_path=False):
if check_path:
lang_code = translation.get_language_from_path(request.path_info)
if lang_code is not None:
return lang_code
try:
lang_code = WriteItInstanceConfig.objects.get(writeitinstance__slug=request.subdomain).default_language
except WriteItInstanceConfig.DoesNotExist:
lang_code = None
if lang_code is not None and translation.check_for_language(lang_code):
|
# Call with check_path False as we've already done that above!
return translation.get_language_from_request(request, check_path=False)
class InstanceLocaleMiddleware(LocaleMiddleware):
def process_request(self, request):
"""Same as parent, except calling our own get_language_from_request"""
check_path = self.is_language_prefix_patterns_used()
language = get_language_from_request(request, check_path=check_path)
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
| return lang_code | conditional_block |
middleware.py | from collections import OrderedDict
from django.middleware.locale import LocaleMiddleware | from instance.models import WriteItInstanceConfig
def get_language_from_request(request, check_path=False):
if check_path:
lang_code = translation.get_language_from_path(request.path_info)
if lang_code is not None:
return lang_code
try:
lang_code = WriteItInstanceConfig.objects.get(writeitinstance__slug=request.subdomain).default_language
except WriteItInstanceConfig.DoesNotExist:
lang_code = None
if lang_code is not None and translation.check_for_language(lang_code):
return lang_code
# Call with check_path False as we've already done that above!
return translation.get_language_from_request(request, check_path=False)
class InstanceLocaleMiddleware(LocaleMiddleware):
def process_request(self, request):
"""Same as parent, except calling our own get_language_from_request"""
check_path = self.is_language_prefix_patterns_used()
language = get_language_from_request(request, check_path=check_path)
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language() | from django.utils import translation
from django.conf import settings
| random_line_split |
middleware.py | from collections import OrderedDict
from django.middleware.locale import LocaleMiddleware
from django.utils import translation
from django.conf import settings
from instance.models import WriteItInstanceConfig
def | (request, check_path=False):
if check_path:
lang_code = translation.get_language_from_path(request.path_info)
if lang_code is not None:
return lang_code
try:
lang_code = WriteItInstanceConfig.objects.get(writeitinstance__slug=request.subdomain).default_language
except WriteItInstanceConfig.DoesNotExist:
lang_code = None
if lang_code is not None and translation.check_for_language(lang_code):
return lang_code
# Call with check_path False as we've already done that above!
return translation.get_language_from_request(request, check_path=False)
class InstanceLocaleMiddleware(LocaleMiddleware):
def process_request(self, request):
"""Same as parent, except calling our own get_language_from_request"""
check_path = self.is_language_prefix_patterns_used()
language = get_language_from_request(request, check_path=check_path)
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
| get_language_from_request | identifier_name |
middleware.py | from collections import OrderedDict
from django.middleware.locale import LocaleMiddleware
from django.utils import translation
from django.conf import settings
from instance.models import WriteItInstanceConfig
def get_language_from_request(request, check_path=False):
if check_path:
lang_code = translation.get_language_from_path(request.path_info)
if lang_code is not None:
return lang_code
try:
lang_code = WriteItInstanceConfig.objects.get(writeitinstance__slug=request.subdomain).default_language
except WriteItInstanceConfig.DoesNotExist:
lang_code = None
if lang_code is not None and translation.check_for_language(lang_code):
return lang_code
# Call with check_path False as we've already done that above!
return translation.get_language_from_request(request, check_path=False)
class InstanceLocaleMiddleware(LocaleMiddleware):
def process_request(self, request):
| """Same as parent, except calling our own get_language_from_request"""
check_path = self.is_language_prefix_patterns_used()
language = get_language_from_request(request, check_path=check_path)
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language() | identifier_body |
|
iosxr.py | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure
class TerminalModule(TerminalBase):
terminal_stdout_re = [
re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
re.compile(br"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$"),
re.compile(br']]>]]>[\r\n]?')
]
terminal_stderr_re = [
re.compile(br"% ?Error"),
re.compile(br"% ?Bad secret"),
re.compile(br"invalid input", re.I),
re.compile(br"(?:incomplete|ambiguous) command", re.I),
re.compile(br"connection timed out", re.I),
re.compile(br"[^\r\n]+ not found", re.I), |
def on_open_shell(self):
try:
for cmd in (b'terminal length 0', b'terminal width 512', b'terminal exec prompt no-timestamp'):
self._exec_cli_command(cmd)
except AnsibleConnectionFailure:
raise AnsibleConnectionFailure('unable to set terminal parameters') | re.compile(br"'[^']' +returned error code: ?\d+"),
re.compile(br"Failed to commit", re.I)
] | random_line_split |
iosxr.py | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure
class | (TerminalBase):
terminal_stdout_re = [
re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
re.compile(br"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$"),
re.compile(br']]>]]>[\r\n]?')
]
terminal_stderr_re = [
re.compile(br"% ?Error"),
re.compile(br"% ?Bad secret"),
re.compile(br"invalid input", re.I),
re.compile(br"(?:incomplete|ambiguous) command", re.I),
re.compile(br"connection timed out", re.I),
re.compile(br"[^\r\n]+ not found", re.I),
re.compile(br"'[^']' +returned error code: ?\d+"),
re.compile(br"Failed to commit", re.I)
]
def on_open_shell(self):
try:
for cmd in (b'terminal length 0', b'terminal width 512', b'terminal exec prompt no-timestamp'):
self._exec_cli_command(cmd)
except AnsibleConnectionFailure:
raise AnsibleConnectionFailure('unable to set terminal parameters')
| TerminalModule | identifier_name |
iosxr.py | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure
class TerminalModule(TerminalBase):
| terminal_stdout_re = [
re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
re.compile(br"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$"),
re.compile(br']]>]]>[\r\n]?')
]
terminal_stderr_re = [
re.compile(br"% ?Error"),
re.compile(br"% ?Bad secret"),
re.compile(br"invalid input", re.I),
re.compile(br"(?:incomplete|ambiguous) command", re.I),
re.compile(br"connection timed out", re.I),
re.compile(br"[^\r\n]+ not found", re.I),
re.compile(br"'[^']' +returned error code: ?\d+"),
re.compile(br"Failed to commit", re.I)
]
def on_open_shell(self):
try:
for cmd in (b'terminal length 0', b'terminal width 512', b'terminal exec prompt no-timestamp'):
self._exec_cli_command(cmd)
except AnsibleConnectionFailure:
raise AnsibleConnectionFailure('unable to set terminal parameters') | identifier_body |
|
iosxr.py | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure
class TerminalModule(TerminalBase):
terminal_stdout_re = [
re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
re.compile(br"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$"),
re.compile(br']]>]]>[\r\n]?')
]
terminal_stderr_re = [
re.compile(br"% ?Error"),
re.compile(br"% ?Bad secret"),
re.compile(br"invalid input", re.I),
re.compile(br"(?:incomplete|ambiguous) command", re.I),
re.compile(br"connection timed out", re.I),
re.compile(br"[^\r\n]+ not found", re.I),
re.compile(br"'[^']' +returned error code: ?\d+"),
re.compile(br"Failed to commit", re.I)
]
def on_open_shell(self):
try:
for cmd in (b'terminal length 0', b'terminal width 512', b'terminal exec prompt no-timestamp'):
|
except AnsibleConnectionFailure:
raise AnsibleConnectionFailure('unable to set terminal parameters')
| self._exec_cli_command(cmd) | conditional_block |
StickyHeaderStep.ts | import { UiFinder, Waiter } from '@ephox/agar';
import { after, before, context, it } from '@ephox/bedrock-client';
import { SugarBody } from '@ephox/sugar';
import { TinyHooks } from '@ephox/wrap-mcagar';
import { assert } from 'chai';
import Editor from 'tinymce/core/api/Editor';
import FullscreenPlugin from 'tinymce/plugins/fullscreen/Plugin';
import { ToolbarLocation, ToolbarMode } from 'tinymce/themes/silver/api/Options';
import * as MenuUtils from './MenuUtils';
import * as PageScroll from './PageScroll';
import * as StickyUtils from './StickyHeaderUtils';
const testStickyHeader = (toolbarMode: ToolbarMode, toolbarLocation: ToolbarLocation) => {
const isToolbarTop = toolbarLocation === ToolbarLocation.top;
context('Test editor with toolbar_mode: ' + toolbarMode, () => {
const hook = TinyHooks.bddSetup<Editor>({
plugins: 'fullscreen',
base_url: '/project/tinymce/js/tinymce',
toolbar: 'align | fontsize | fontfamily | blocks | styles | insertfile | forecolor | backcolor ',
resize: 'both',
min_height: 300,
min_width: 350,
height: 400,
width: 500,
max_height: 500,
max_width: 550,
toolbar_mode: toolbarMode,
toolbar_location: toolbarLocation,
toolbar_sticky: true,
}, [ FullscreenPlugin ], true);
PageScroll.bddSetup(hook.editor, 5000);
before(async () => {
// Need to wait for a fraction for some reason on safari,
// otherwise the initial scrolling doesn't work
await Waiter.pWait(100);
});
it('Checking startup structure', async () => {
await StickyUtils.pAssertEditorContainer(isToolbarTop, StickyUtils.expectedInFullView);
StickyUtils.assertEditorClasses(false);
});
it('Checking scroll event listeners are bound, scroll by 1px then assert', async () => {
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, 1, StickyUtils.expectedScrollEventBound);
});
it('Scroll to half the editor should have sticky css markings', async () => {
const editor = hook.editor();
const contentAreaContainerHeight = editor.getContentAreaContainer().clientHeight;
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, 200, StickyUtils.expectedHalfView);
await StickyUtils.pAssertHeaderDocked(isToolbarTop);
StickyUtils.assertEditorClasses(true);
assert.equal(
editor.getContentAreaContainer().clientHeight,
contentAreaContainerHeight,
'ContentAreaContainer height should be the same before as after docking'
);
});
it('Scroll down so the editor is hidden from view, it should have hidden css markings', async () => {
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, 500, StickyUtils.expectedEditorHidden);
await StickyUtils.pAssertHeaderDocked(isToolbarTop);
});
it('Scroll editor into view should not have sticky', async () => {
// Move the editor out of view first
StickyUtils.scrollRelativeEditor(500, isToolbarTop);
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, -100, StickyUtils.expectedInFullView);
StickyUtils.assertEditorClasses(false);
});
context('with open toolbar drawer', () => {
before(async () => {
// Ensure the editor is in view
StickyUtils.scrollRelativeEditor(-100, isToolbarTop);
// Open the more drawer
if (toolbarMode !== ToolbarMode.default) |
});
after(async () => {
if (toolbarMode !== ToolbarMode.default) {
await MenuUtils.pCloseMore(toolbarMode);
}
});
it('Open align menu and check sticky states', async () => {
await StickyUtils.pOpenMenuAndTestScrolling(() => MenuUtils.pOpenAlignMenu('open align'), 1, isToolbarTop);
});
it('Open nested Formats menu Align and check sticky states', async () => {
await StickyUtils.pOpenMenuAndTestScrolling(() => MenuUtils.pOpenNestedMenus([
{
// first open this menu
label: 'Open nested formats dropdown',
selector: 'button[aria-label=Formats]'
},
{
// opening the first menu should reveal the next menu which contains Align, open Align
label: 'Open Align menu item',
selector: 'div[title=Align][role=menuitem]' // note we are using title instead of aria-label for some items here.
}
]), 2, isToolbarTop);
});
it('Open menubar Formats menu => Formats => Inline => check sticky states', async () => {
await StickyUtils.pOpenMenuAndTestScrolling(() => MenuUtils.pOpenNestedMenus([
{
label: 'Open menu bar Formats menu',
selector: 'button:contains(Format)[role=menuitem]'
},
{
label: 'then Formats submenu',
selector: 'div[title=Formats][role=menuitem]'
},
{
label: 'then Inline submenu',
selector: 'div[title=Inline][role=menuitem]'
}
]), 3, isToolbarTop);
});
it('Open text color palette => check sticky states', async () => {
await StickyUtils.pOpenMenuAndTestScrolling(() => MenuUtils.pOpenNestedMenus([
{
label: 'Open splitmenu item, color palette',
selector: 'div[title="Text color"][aria-expanded=false]'
}
]), 1, isToolbarTop);
});
});
it('Toggle fullscreen mode and ensure header moves from docked -> undocked -> docked', async () => {
const editor = hook.editor();
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, 200, StickyUtils.expectedHalfView);
editor.execCommand('mceFullscreen');
await UiFinder.pWaitForVisible('Wait for fullscreen to be activated', SugarBody.body(), '.tox-fullscreen');
await StickyUtils.pAssertEditorContainer(isToolbarTop, StickyUtils.expectedInFullView);
editor.execCommand('mceFullscreen');
await Waiter.pTryUntil('Wait for fullscreen to be deactivated', () => UiFinder.notExists(SugarBody.body(), '.tox-fullscreen'));
// TODO: Figure out why Chrome 78 needs this wait on MacOS. I suspect it might be because fullscreen sets overflow hidden
// and we're setting the scroll position before the window has updated
await Waiter.pWait(100);
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, 200, StickyUtils.expectedHalfView);
});
it('TINY-7337: Checking toolbar_sticky_offset updated sticky header position', async () => {
const editor = hook.editor();
editor.options.set('toolbar_sticky_offset', 54);
await StickyUtils.pAssertHeaderPosition(toolbarLocation, 54);
editor.options.unset('toolbar_sticky_offset');
});
});
};
export {
testStickyHeader
};
| {
await MenuUtils.pOpenMore(toolbarMode);
MenuUtils.assertMoreDrawerInViewport(toolbarMode);
} | conditional_block |
StickyHeaderStep.ts | import { UiFinder, Waiter } from '@ephox/agar';
import { after, before, context, it } from '@ephox/bedrock-client';
import { SugarBody } from '@ephox/sugar';
import { TinyHooks } from '@ephox/wrap-mcagar';
import { assert } from 'chai';
import Editor from 'tinymce/core/api/Editor';
import FullscreenPlugin from 'tinymce/plugins/fullscreen/Plugin';
import { ToolbarLocation, ToolbarMode } from 'tinymce/themes/silver/api/Options';
import * as MenuUtils from './MenuUtils';
import * as PageScroll from './PageScroll';
import * as StickyUtils from './StickyHeaderUtils';
const testStickyHeader = (toolbarMode: ToolbarMode, toolbarLocation: ToolbarLocation) => {
const isToolbarTop = toolbarLocation === ToolbarLocation.top;
context('Test editor with toolbar_mode: ' + toolbarMode, () => {
const hook = TinyHooks.bddSetup<Editor>({
plugins: 'fullscreen',
base_url: '/project/tinymce/js/tinymce',
toolbar: 'align | fontsize | fontfamily | blocks | styles | insertfile | forecolor | backcolor ',
resize: 'both',
min_height: 300,
min_width: 350,
height: 400,
width: 500,
max_height: 500,
max_width: 550,
toolbar_mode: toolbarMode,
toolbar_location: toolbarLocation,
toolbar_sticky: true,
}, [ FullscreenPlugin ], true);
PageScroll.bddSetup(hook.editor, 5000);
before(async () => {
// Need to wait for a fraction for some reason on safari,
// otherwise the initial scrolling doesn't work
await Waiter.pWait(100);
});
it('Checking startup structure', async () => {
await StickyUtils.pAssertEditorContainer(isToolbarTop, StickyUtils.expectedInFullView);
StickyUtils.assertEditorClasses(false);
});
it('Checking scroll event listeners are bound, scroll by 1px then assert', async () => {
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, 1, StickyUtils.expectedScrollEventBound);
});
it('Scroll to half the editor should have sticky css markings', async () => {
const editor = hook.editor();
const contentAreaContainerHeight = editor.getContentAreaContainer().clientHeight;
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, 200, StickyUtils.expectedHalfView); | StickyUtils.assertEditorClasses(true);
assert.equal(
editor.getContentAreaContainer().clientHeight,
contentAreaContainerHeight,
'ContentAreaContainer height should be the same before as after docking'
);
});
it('Scroll down so the editor is hidden from view, it should have hidden css markings', async () => {
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, 500, StickyUtils.expectedEditorHidden);
await StickyUtils.pAssertHeaderDocked(isToolbarTop);
});
it('Scroll editor into view should not have sticky', async () => {
// Move the editor out of view first
StickyUtils.scrollRelativeEditor(500, isToolbarTop);
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, -100, StickyUtils.expectedInFullView);
StickyUtils.assertEditorClasses(false);
});
context('with open toolbar drawer', () => {
before(async () => {
// Ensure the editor is in view
StickyUtils.scrollRelativeEditor(-100, isToolbarTop);
// Open the more drawer
if (toolbarMode !== ToolbarMode.default) {
await MenuUtils.pOpenMore(toolbarMode);
MenuUtils.assertMoreDrawerInViewport(toolbarMode);
}
});
after(async () => {
if (toolbarMode !== ToolbarMode.default) {
await MenuUtils.pCloseMore(toolbarMode);
}
});
it('Open align menu and check sticky states', async () => {
await StickyUtils.pOpenMenuAndTestScrolling(() => MenuUtils.pOpenAlignMenu('open align'), 1, isToolbarTop);
});
it('Open nested Formats menu Align and check sticky states', async () => {
await StickyUtils.pOpenMenuAndTestScrolling(() => MenuUtils.pOpenNestedMenus([
{
// first open this menu
label: 'Open nested formats dropdown',
selector: 'button[aria-label=Formats]'
},
{
// opening the first menu should reveal the next menu which contains Align, open Align
label: 'Open Align menu item',
selector: 'div[title=Align][role=menuitem]' // note we are using title instead of aria-label for some items here.
}
]), 2, isToolbarTop);
});
it('Open menubar Formats menu => Formats => Inline => check sticky states', async () => {
await StickyUtils.pOpenMenuAndTestScrolling(() => MenuUtils.pOpenNestedMenus([
{
label: 'Open menu bar Formats menu',
selector: 'button:contains(Format)[role=menuitem]'
},
{
label: 'then Formats submenu',
selector: 'div[title=Formats][role=menuitem]'
},
{
label: 'then Inline submenu',
selector: 'div[title=Inline][role=menuitem]'
}
]), 3, isToolbarTop);
});
it('Open text color palette => check sticky states', async () => {
await StickyUtils.pOpenMenuAndTestScrolling(() => MenuUtils.pOpenNestedMenus([
{
label: 'Open splitmenu item, color palette',
selector: 'div[title="Text color"][aria-expanded=false]'
}
]), 1, isToolbarTop);
});
});
it('Toggle fullscreen mode and ensure header moves from docked -> undocked -> docked', async () => {
const editor = hook.editor();
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, 200, StickyUtils.expectedHalfView);
editor.execCommand('mceFullscreen');
await UiFinder.pWaitForVisible('Wait for fullscreen to be activated', SugarBody.body(), '.tox-fullscreen');
await StickyUtils.pAssertEditorContainer(isToolbarTop, StickyUtils.expectedInFullView);
editor.execCommand('mceFullscreen');
await Waiter.pTryUntil('Wait for fullscreen to be deactivated', () => UiFinder.notExists(SugarBody.body(), '.tox-fullscreen'));
// TODO: Figure out why Chrome 78 needs this wait on MacOS. I suspect it might be because fullscreen sets overflow hidden
// and we're setting the scroll position before the window has updated
await Waiter.pWait(100);
await StickyUtils.pScrollAndAssertStructure(isToolbarTop, 200, StickyUtils.expectedHalfView);
});
it('TINY-7337: Checking toolbar_sticky_offset updated sticky header position', async () => {
const editor = hook.editor();
editor.options.set('toolbar_sticky_offset', 54);
await StickyUtils.pAssertHeaderPosition(toolbarLocation, 54);
editor.options.unset('toolbar_sticky_offset');
});
});
};
export {
testStickyHeader
}; | await StickyUtils.pAssertHeaderDocked(isToolbarTop); | random_line_split |
file_util_test.py | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import itertools
import os
import sys
import tempfile
from absl.testing import absltest
import numpy as np
from six.moves import cPickle
from simulation_research.traffic import file_util
class UtilTest(absltest.TestCase):
def setUp(self):
|
def test_append_line_to_file(self):
r"""Tests the output file.
The output file contains the following.
hello world
(hello) "world"
(hello) !!!!!!!!!!! @~#$%^&*()_+"world"
aaaaaaaa
bbbbbbbbbb
backslash\ backslash
backslash\ backslash
backslash\\ backslash
backslash\\\ backslash
backslash\\ backslash
"""
input_lines = ['hello world',
'(hello) "world"',
'(hello) !!!!!!!!!!! @~#$%^&*()_+"world"',
'aaaaaaaa\nbbbbbbbbbb',
r'backslash\ backslash',
'backslash\\ backslash',
r'backslash\\ backslash',
r'backslash\\\ backslash',
'backslash\\\\ backslash']
file_path = os.path.join(self._output_dir, 'test_append_line_to_file.txt')
for line in input_lines:
file_util.append_line_to_file(file_path, line)
self.assertTrue(file_util.f_exists(file_path))
# Note that the linebreak in the input_lines[3].
target_lines = ['hello world',
'(hello) "world"',
'(hello) !!!!!!!!!!! @~#$%^&*()_+"world"',
'aaaaaaaa',
'bbbbbbbbbb',
r'backslash\ backslash',
'backslash\\ backslash',
r'backslash\\ backslash',
r'backslash\\\ backslash',
'backslash\\\\ backslash']
with file_util.f_open(file_path, 'r') as actual_file:
line_counter = 0
read_lines = actual_file.readlines()
for line in read_lines:
# Linebreak is appended to the target string.
self.assertEqual(line, target_lines[line_counter] + '\n')
line_counter += 1
target_line_number = len(target_lines)
self.assertEqual(target_line_number, line_counter)
def test_save_load_variable(self):
file_path = os.path.join(self._output_dir, 'test_output_data.pkl')
# Case 1: Nested dictionary.
data = {'zz': 1, 'b': 234, 123: 'asdfa', 'dict': {'a': 123, 't': 123}}
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertEqual(data, actual_variable)
self.assertIsInstance(actual_variable, dict)
# Case 2: 2-level nested dictionary.
data = collections.defaultdict(
lambda: collections.defaultdict(list))
data['first']['A'] = [1, 2, 3]
data['first']['B'] = [1, 2, 3]
data['second']['B'] = [1, 2, 3]
data['second']['C'] = [1, 2, 3]
data['third']['C'] = [1, 2, 3]
data['third']['D'] = [1, 2, 3]
data['path'] = 'asdfas/asdf/asdfasdf/'
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertEqual(data, actual_variable)
self.assertIsInstance(actual_variable, dict)
# Case 3: Large array. If the size is too large, the test will timeout.
data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0] * 10000
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertListEqual(data, actual_variable)
self.assertIsInstance(actual_variable, list)
# Case 4: numpy array.
data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0] * 10
data = np.array(data)
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
np.testing.assert_array_equal(data, actual_variable)
self.assertIsInstance(actual_variable, np.ndarray)
# Case 5: A list of tuples.
x = [1, 2, 3]
y = ['a', 'b', 'c']
data = zip(x, y)
# Saving zip variable does not affect the iterative variable.
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
# python2 treats `actual_variable` as a list, however, python3 treats it as
# an iterative object.
self.assertListEqual(list(actual_variable), list(data))
# Case 6: In python2, the itertools.tee cannot be saved by cPickle. However,
# in python3, it can be saved.
x = [1, 2, 3]
y = ['a', 'b', 'c']
data = zip(x, y)
data_tee, _ = itertools.tee(data)
python_version = sys.version_info[0]
try:
file_util.save_variable(file_path, data_tee)
pickle_save_correctly = True
except cPickle.PicklingError:
pickle_save_correctly = False
self.assertTrue((pickle_save_correctly and python_version == 3) or
(not pickle_save_correctly and python_version == 2))
if __name__ == '__main__':
absltest.main()
| super(UtilTest, self).setUp()
self._output_dir = tempfile.mkdtemp(dir=absltest.get_default_test_tmpdir()) | identifier_body |
file_util_test.py | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function | import itertools
import os
import sys
import tempfile
from absl.testing import absltest
import numpy as np
from six.moves import cPickle
from simulation_research.traffic import file_util
class UtilTest(absltest.TestCase):
def setUp(self):
super(UtilTest, self).setUp()
self._output_dir = tempfile.mkdtemp(dir=absltest.get_default_test_tmpdir())
def test_append_line_to_file(self):
r"""Tests the output file.
The output file contains the following.
hello world
(hello) "world"
(hello) !!!!!!!!!!! @~#$%^&*()_+"world"
aaaaaaaa
bbbbbbbbbb
backslash\ backslash
backslash\ backslash
backslash\\ backslash
backslash\\\ backslash
backslash\\ backslash
"""
input_lines = ['hello world',
'(hello) "world"',
'(hello) !!!!!!!!!!! @~#$%^&*()_+"world"',
'aaaaaaaa\nbbbbbbbbbb',
r'backslash\ backslash',
'backslash\\ backslash',
r'backslash\\ backslash',
r'backslash\\\ backslash',
'backslash\\\\ backslash']
file_path = os.path.join(self._output_dir, 'test_append_line_to_file.txt')
for line in input_lines:
file_util.append_line_to_file(file_path, line)
self.assertTrue(file_util.f_exists(file_path))
# Note that the linebreak in the input_lines[3].
target_lines = ['hello world',
'(hello) "world"',
'(hello) !!!!!!!!!!! @~#$%^&*()_+"world"',
'aaaaaaaa',
'bbbbbbbbbb',
r'backslash\ backslash',
'backslash\\ backslash',
r'backslash\\ backslash',
r'backslash\\\ backslash',
'backslash\\\\ backslash']
with file_util.f_open(file_path, 'r') as actual_file:
line_counter = 0
read_lines = actual_file.readlines()
for line in read_lines:
# Linebreak is appended to the target string.
self.assertEqual(line, target_lines[line_counter] + '\n')
line_counter += 1
target_line_number = len(target_lines)
self.assertEqual(target_line_number, line_counter)
def test_save_load_variable(self):
file_path = os.path.join(self._output_dir, 'test_output_data.pkl')
# Case 1: Nested dictionary.
data = {'zz': 1, 'b': 234, 123: 'asdfa', 'dict': {'a': 123, 't': 123}}
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertEqual(data, actual_variable)
self.assertIsInstance(actual_variable, dict)
# Case 2: 2-level nested dictionary.
data = collections.defaultdict(
lambda: collections.defaultdict(list))
data['first']['A'] = [1, 2, 3]
data['first']['B'] = [1, 2, 3]
data['second']['B'] = [1, 2, 3]
data['second']['C'] = [1, 2, 3]
data['third']['C'] = [1, 2, 3]
data['third']['D'] = [1, 2, 3]
data['path'] = 'asdfas/asdf/asdfasdf/'
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertEqual(data, actual_variable)
self.assertIsInstance(actual_variable, dict)
# Case 3: Large array. If the size is too large, the test will timeout.
data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0] * 10000
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertListEqual(data, actual_variable)
self.assertIsInstance(actual_variable, list)
# Case 4: numpy array.
data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0] * 10
data = np.array(data)
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
np.testing.assert_array_equal(data, actual_variable)
self.assertIsInstance(actual_variable, np.ndarray)
# Case 5: A list of tuples.
x = [1, 2, 3]
y = ['a', 'b', 'c']
data = zip(x, y)
# Saving zip variable does not affect the iterative variable.
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
# python2 treats `actual_variable` as a list, however, python3 treats it as
# an iterative object.
self.assertListEqual(list(actual_variable), list(data))
# Case 6: In python2, the itertools.tee cannot be saved by cPickle. However,
# in python3, it can be saved.
x = [1, 2, 3]
y = ['a', 'b', 'c']
data = zip(x, y)
data_tee, _ = itertools.tee(data)
python_version = sys.version_info[0]
try:
file_util.save_variable(file_path, data_tee)
pickle_save_correctly = True
except cPickle.PicklingError:
pickle_save_correctly = False
self.assertTrue((pickle_save_correctly and python_version == 3) or
(not pickle_save_correctly and python_version == 2))
if __name__ == '__main__':
absltest.main() |
import collections | random_line_split |
file_util_test.py | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import itertools
import os
import sys
import tempfile
from absl.testing import absltest
import numpy as np
from six.moves import cPickle
from simulation_research.traffic import file_util
class UtilTest(absltest.TestCase):
def setUp(self):
super(UtilTest, self).setUp()
self._output_dir = tempfile.mkdtemp(dir=absltest.get_default_test_tmpdir())
def test_append_line_to_file(self):
r"""Tests the output file.
The output file contains the following.
hello world
(hello) "world"
(hello) !!!!!!!!!!! @~#$%^&*()_+"world"
aaaaaaaa
bbbbbbbbbb
backslash\ backslash
backslash\ backslash
backslash\\ backslash
backslash\\\ backslash
backslash\\ backslash
"""
input_lines = ['hello world',
'(hello) "world"',
'(hello) !!!!!!!!!!! @~#$%^&*()_+"world"',
'aaaaaaaa\nbbbbbbbbbb',
r'backslash\ backslash',
'backslash\\ backslash',
r'backslash\\ backslash',
r'backslash\\\ backslash',
'backslash\\\\ backslash']
file_path = os.path.join(self._output_dir, 'test_append_line_to_file.txt')
for line in input_lines:
file_util.append_line_to_file(file_path, line)
self.assertTrue(file_util.f_exists(file_path))
# Note that the linebreak in the input_lines[3].
target_lines = ['hello world',
'(hello) "world"',
'(hello) !!!!!!!!!!! @~#$%^&*()_+"world"',
'aaaaaaaa',
'bbbbbbbbbb',
r'backslash\ backslash',
'backslash\\ backslash',
r'backslash\\ backslash',
r'backslash\\\ backslash',
'backslash\\\\ backslash']
with file_util.f_open(file_path, 'r') as actual_file:
line_counter = 0
read_lines = actual_file.readlines()
for line in read_lines:
# Linebreak is appended to the target string.
|
target_line_number = len(target_lines)
self.assertEqual(target_line_number, line_counter)
def test_save_load_variable(self):
file_path = os.path.join(self._output_dir, 'test_output_data.pkl')
# Case 1: Nested dictionary.
data = {'zz': 1, 'b': 234, 123: 'asdfa', 'dict': {'a': 123, 't': 123}}
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertEqual(data, actual_variable)
self.assertIsInstance(actual_variable, dict)
# Case 2: 2-level nested dictionary.
data = collections.defaultdict(
lambda: collections.defaultdict(list))
data['first']['A'] = [1, 2, 3]
data['first']['B'] = [1, 2, 3]
data['second']['B'] = [1, 2, 3]
data['second']['C'] = [1, 2, 3]
data['third']['C'] = [1, 2, 3]
data['third']['D'] = [1, 2, 3]
data['path'] = 'asdfas/asdf/asdfasdf/'
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertEqual(data, actual_variable)
self.assertIsInstance(actual_variable, dict)
# Case 3: Large array. If the size is too large, the test will timeout.
data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0] * 10000
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertListEqual(data, actual_variable)
self.assertIsInstance(actual_variable, list)
# Case 4: numpy array.
data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0] * 10
data = np.array(data)
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
np.testing.assert_array_equal(data, actual_variable)
self.assertIsInstance(actual_variable, np.ndarray)
# Case 5: A list of tuples.
x = [1, 2, 3]
y = ['a', 'b', 'c']
data = zip(x, y)
# Saving zip variable does not affect the iterative variable.
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
# python2 treats `actual_variable` as a list, however, python3 treats it as
# an iterative object.
self.assertListEqual(list(actual_variable), list(data))
# Case 6: In python2, the itertools.tee cannot be saved by cPickle. However,
# in python3, it can be saved.
x = [1, 2, 3]
y = ['a', 'b', 'c']
data = zip(x, y)
data_tee, _ = itertools.tee(data)
python_version = sys.version_info[0]
try:
file_util.save_variable(file_path, data_tee)
pickle_save_correctly = True
except cPickle.PicklingError:
pickle_save_correctly = False
self.assertTrue((pickle_save_correctly and python_version == 3) or
(not pickle_save_correctly and python_version == 2))
if __name__ == '__main__':
absltest.main()
| self.assertEqual(line, target_lines[line_counter] + '\n')
line_counter += 1 | conditional_block |
file_util_test.py | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import itertools
import os
import sys
import tempfile
from absl.testing import absltest
import numpy as np
from six.moves import cPickle
from simulation_research.traffic import file_util
class UtilTest(absltest.TestCase):
def setUp(self):
super(UtilTest, self).setUp()
self._output_dir = tempfile.mkdtemp(dir=absltest.get_default_test_tmpdir())
def | (self):
r"""Tests the output file.
The output file contains the following.
hello world
(hello) "world"
(hello) !!!!!!!!!!! @~#$%^&*()_+"world"
aaaaaaaa
bbbbbbbbbb
backslash\ backslash
backslash\ backslash
backslash\\ backslash
backslash\\\ backslash
backslash\\ backslash
"""
input_lines = ['hello world',
'(hello) "world"',
'(hello) !!!!!!!!!!! @~#$%^&*()_+"world"',
'aaaaaaaa\nbbbbbbbbbb',
r'backslash\ backslash',
'backslash\\ backslash',
r'backslash\\ backslash',
r'backslash\\\ backslash',
'backslash\\\\ backslash']
file_path = os.path.join(self._output_dir, 'test_append_line_to_file.txt')
for line in input_lines:
file_util.append_line_to_file(file_path, line)
self.assertTrue(file_util.f_exists(file_path))
# Note that the linebreak in the input_lines[3].
target_lines = ['hello world',
'(hello) "world"',
'(hello) !!!!!!!!!!! @~#$%^&*()_+"world"',
'aaaaaaaa',
'bbbbbbbbbb',
r'backslash\ backslash',
'backslash\\ backslash',
r'backslash\\ backslash',
r'backslash\\\ backslash',
'backslash\\\\ backslash']
with file_util.f_open(file_path, 'r') as actual_file:
line_counter = 0
read_lines = actual_file.readlines()
for line in read_lines:
# Linebreak is appended to the target string.
self.assertEqual(line, target_lines[line_counter] + '\n')
line_counter += 1
target_line_number = len(target_lines)
self.assertEqual(target_line_number, line_counter)
def test_save_load_variable(self):
file_path = os.path.join(self._output_dir, 'test_output_data.pkl')
# Case 1: Nested dictionary.
data = {'zz': 1, 'b': 234, 123: 'asdfa', 'dict': {'a': 123, 't': 123}}
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertEqual(data, actual_variable)
self.assertIsInstance(actual_variable, dict)
# Case 2: 2-level nested dictionary.
data = collections.defaultdict(
lambda: collections.defaultdict(list))
data['first']['A'] = [1, 2, 3]
data['first']['B'] = [1, 2, 3]
data['second']['B'] = [1, 2, 3]
data['second']['C'] = [1, 2, 3]
data['third']['C'] = [1, 2, 3]
data['third']['D'] = [1, 2, 3]
data['path'] = 'asdfas/asdf/asdfasdf/'
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertEqual(data, actual_variable)
self.assertIsInstance(actual_variable, dict)
# Case 3: Large array. If the size is too large, the test will timeout.
data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0] * 10000
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
self.assertListEqual(data, actual_variable)
self.assertIsInstance(actual_variable, list)
# Case 4: numpy array.
data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0] * 10
data = np.array(data)
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
np.testing.assert_array_equal(data, actual_variable)
self.assertIsInstance(actual_variable, np.ndarray)
# Case 5: A list of tuples.
x = [1, 2, 3]
y = ['a', 'b', 'c']
data = zip(x, y)
# Saving zip variable does not affect the iterative variable.
file_util.save_variable(file_path, data)
actual_variable = file_util.load_variable(file_path)
# python2 treats `actual_variable` as a list, however, python3 treats it as
# an iterative object.
self.assertListEqual(list(actual_variable), list(data))
# Case 6: In python2, the itertools.tee cannot be saved by cPickle. However,
# in python3, it can be saved.
x = [1, 2, 3]
y = ['a', 'b', 'c']
data = zip(x, y)
data_tee, _ = itertools.tee(data)
python_version = sys.version_info[0]
try:
file_util.save_variable(file_path, data_tee)
pickle_save_correctly = True
except cPickle.PicklingError:
pickle_save_correctly = False
self.assertTrue((pickle_save_correctly and python_version == 3) or
(not pickle_save_correctly and python_version == 2))
if __name__ == '__main__':
absltest.main()
| test_append_line_to_file | identifier_name |
account_connectivity.py | # ~*~ coding: utf-8 ~*~
from celery import shared_task
from django.utils.translation import ugettext as _, gettext_noop
from common.utils import get_logger
from orgs.utils import org_aware_func
from ..models import Connectivity
from . import const
from .utils import check_asset_can_run_ansible
logger = get_logger(__file__)
__all__ = [
'test_account_connectivity_util', 'test_accounts_connectivity_manual',
'get_test_account_connectivity_tasks', 'test_user_connectivity',
'run_adhoc',
]
def get_test_account_connectivity_tasks(asset):
if asset.is_unixlike():
tasks = const.PING_UNIXLIKE_TASKS
elif asset.is_windows():
tasks = const.PING_WINDOWS_TASKS
else:
msg = _(
"The asset {} system platform {} does not "
"support run Ansible tasks".format(asset.hostname, asset.platform)
)
logger.info(msg)
tasks = []
return tasks
def run_adhoc(task_name, tasks, inventory):
"""
:param task_name
:param tasks
:param inventory
"""
from ops.ansible.runner import AdHocRunner
runner = AdHocRunner(inventory, options=const.TASK_OPTIONS)
result = runner.run(tasks, 'all', task_name)
return result.results_raw, result.results_summary
def test_user_connectivity(task_name, asset, username, password=None, private_key=None):
"""
:param task_name
:param asset
:param username
:param password
:param private_key
"""
from ops.inventory import JMSCustomInventory
tasks = get_test_account_connectivity_tasks(asset)
if not tasks:
logger.debug("No tasks ")
return {}, {}
inventory = JMSCustomInventory(
assets=[asset], username=username, password=password,
private_key=private_key
)
raw, summary = run_adhoc(
task_name=task_name, tasks=tasks, inventory=inventory
)
return raw, summary
@org_aware_func("account")
def test_account_connectivity_util(account, task_name):
"""
:param account: <AuthBook>对象
:param task_name:
:return:
"""
if not check_asset_can_run_ansible(account.asset):
return
account.load_auth()
try:
raw, summary = test_user_connectivity(
task_name=task_name, asset=account.asset,
username=account.username, password=account.password,
private_key=account.private_key_file
)
except Exception as e:
logger.warn("Failed run adhoc {}, {}".format(task_name, e))
return
if summary.get('success'):
account.set_connectivity(Connectivity.ok)
else:
acco | shared_task(queue="ansible")
def test_accounts_connectivity_manual(accounts):
"""
:param accounts: <AuthBook>对象
"""
for account in accounts:
task_name = gettext_noop("Test account connectivity: ") + str(account)
test_account_connectivity_util(account, task_name)
print(".\n")
| unt.set_connectivity(Connectivity.failed)
@ | conditional_block |
account_connectivity.py | # ~*~ coding: utf-8 ~*~
from celery import shared_task
from django.utils.translation import ugettext as _, gettext_noop
from common.utils import get_logger
from orgs.utils import org_aware_func
from ..models import Connectivity
from . import const
from .utils import check_asset_can_run_ansible
logger = get_logger(__file__)
__all__ = [
'test_account_connectivity_util', 'test_accounts_connectivity_manual',
'get_test_account_connectivity_tasks', 'test_user_connectivity',
'run_adhoc',
]
def get_test_account_connectivity_tasks(asset):
if asset.is_unixlike():
tasks = const.PING_UNIXLIKE_TASKS
elif asset.is_windows():
tasks = const.PING_WINDOWS_TASKS
else:
msg = _(
"The asset {} system platform {} does not "
"support run Ansible tasks".format(asset.hostname, asset.platform)
)
logger.info(msg)
tasks = []
return tasks
def run_adhoc(task_name, tasks, inventory):
|
def test_user_connectivity(task_name, asset, username, password=None, private_key=None):
"""
:param task_name
:param asset
:param username
:param password
:param private_key
"""
from ops.inventory import JMSCustomInventory
tasks = get_test_account_connectivity_tasks(asset)
if not tasks:
logger.debug("No tasks ")
return {}, {}
inventory = JMSCustomInventory(
assets=[asset], username=username, password=password,
private_key=private_key
)
raw, summary = run_adhoc(
task_name=task_name, tasks=tasks, inventory=inventory
)
return raw, summary
@org_aware_func("account")
def test_account_connectivity_util(account, task_name):
"""
:param account: <AuthBook>对象
:param task_name:
:return:
"""
if not check_asset_can_run_ansible(account.asset):
return
account.load_auth()
try:
raw, summary = test_user_connectivity(
task_name=task_name, asset=account.asset,
username=account.username, password=account.password,
private_key=account.private_key_file
)
except Exception as e:
logger.warn("Failed run adhoc {}, {}".format(task_name, e))
return
if summary.get('success'):
account.set_connectivity(Connectivity.ok)
else:
account.set_connectivity(Connectivity.failed)
@shared_task(queue="ansible")
def test_accounts_connectivity_manual(accounts):
"""
:param accounts: <AuthBook>对象
"""
for account in accounts:
task_name = gettext_noop("Test account connectivity: ") + str(account)
test_account_connectivity_util(account, task_name)
print(".\n")
| """
:param task_name
:param tasks
:param inventory
"""
from ops.ansible.runner import AdHocRunner
runner = AdHocRunner(inventory, options=const.TASK_OPTIONS)
result = runner.run(tasks, 'all', task_name)
return result.results_raw, result.results_summary | identifier_body |
account_connectivity.py | # ~*~ coding: utf-8 ~*~
from celery import shared_task
from django.utils.translation import ugettext as _, gettext_noop
from common.utils import get_logger
from orgs.utils import org_aware_func
from ..models import Connectivity
from . import const
from .utils import check_asset_can_run_ansible
logger = get_logger(__file__)
__all__ = [
'test_account_connectivity_util', 'test_accounts_connectivity_manual',
'get_test_account_connectivity_tasks', 'test_user_connectivity',
'run_adhoc',
]
def get_test_account_connectivity_tasks(asset):
if asset.is_unixlike():
tasks = const.PING_UNIXLIKE_TASKS
elif asset.is_windows():
tasks = const.PING_WINDOWS_TASKS
else:
msg = _(
"The asset {} system platform {} does not "
"support run Ansible tasks".format(asset.hostname, asset.platform)
)
logger.info(msg)
tasks = []
return tasks
def run_adhoc(task_name, tasks, inventory):
"""
:param task_name
:param tasks
:param inventory
"""
from ops.ansible.runner import AdHocRunner
runner = AdHocRunner(inventory, options=const.TASK_OPTIONS)
result = runner.run(tasks, 'all', task_name)
return result.results_raw, result.results_summary
def test_user_connectivity(task_name, asset, username, password=None, private_key=None):
"""
:param task_name
:param asset
:param username
:param password
:param private_key
"""
from ops.inventory import JMSCustomInventory
tasks = get_test_account_connectivity_tasks(asset)
if not tasks:
logger.debug("No tasks ")
return {}, {}
inventory = JMSCustomInventory(
assets=[asset], username=username, password=password,
private_key=private_key
)
raw, summary = run_adhoc(
task_name=task_name, tasks=tasks, inventory=inventory
)
return raw, summary
@org_aware_func("account")
def | (account, task_name):
"""
:param account: <AuthBook>对象
:param task_name:
:return:
"""
if not check_asset_can_run_ansible(account.asset):
return
account.load_auth()
try:
raw, summary = test_user_connectivity(
task_name=task_name, asset=account.asset,
username=account.username, password=account.password,
private_key=account.private_key_file
)
except Exception as e:
logger.warn("Failed run adhoc {}, {}".format(task_name, e))
return
if summary.get('success'):
account.set_connectivity(Connectivity.ok)
else:
account.set_connectivity(Connectivity.failed)
@shared_task(queue="ansible")
def test_accounts_connectivity_manual(accounts):
"""
:param accounts: <AuthBook>对象
"""
for account in accounts:
task_name = gettext_noop("Test account connectivity: ") + str(account)
test_account_connectivity_util(account, task_name)
print(".\n")
| test_account_connectivity_util | identifier_name |
account_connectivity.py | # ~*~ coding: utf-8 ~*~
from celery import shared_task
from django.utils.translation import ugettext as _, gettext_noop
from common.utils import get_logger
from orgs.utils import org_aware_func
from ..models import Connectivity
from . import const
from .utils import check_asset_can_run_ansible
logger = get_logger(__file__)
__all__ = [
'test_account_connectivity_util', 'test_accounts_connectivity_manual',
'get_test_account_connectivity_tasks', 'test_user_connectivity',
'run_adhoc',
]
def get_test_account_connectivity_tasks(asset):
if asset.is_unixlike():
tasks = const.PING_UNIXLIKE_TASKS
elif asset.is_windows():
tasks = const.PING_WINDOWS_TASKS
else:
msg = _(
"The asset {} system platform {} does not "
"support run Ansible tasks".format(asset.hostname, asset.platform)
)
logger.info(msg)
tasks = []
return tasks
def run_adhoc(task_name, tasks, inventory):
"""
:param task_name
:param tasks
:param inventory
"""
from ops.ansible.runner import AdHocRunner
runner = AdHocRunner(inventory, options=const.TASK_OPTIONS)
result = runner.run(tasks, 'all', task_name)
return result.results_raw, result.results_summary
def test_user_connectivity(task_name, asset, username, password=None, private_key=None):
"""
:param task_name
:param asset
:param username
:param password
:param private_key
""" | logger.debug("No tasks ")
return {}, {}
inventory = JMSCustomInventory(
assets=[asset], username=username, password=password,
private_key=private_key
)
raw, summary = run_adhoc(
task_name=task_name, tasks=tasks, inventory=inventory
)
return raw, summary
@org_aware_func("account")
def test_account_connectivity_util(account, task_name):
"""
:param account: <AuthBook> object
:param task_name:
:return:
"""
if not check_asset_can_run_ansible(account.asset):
return
account.load_auth()
try:
raw, summary = test_user_connectivity(
task_name=task_name, asset=account.asset,
username=account.username, password=account.password,
private_key=account.private_key_file
)
except Exception as e:
logger.warn("Failed run adhoc {}, {}".format(task_name, e))
return
if summary.get('success'):
account.set_connectivity(Connectivity.ok)
else:
account.set_connectivity(Connectivity.failed)
@shared_task(queue="ansible")
def test_accounts_connectivity_manual(accounts):
"""
:param accounts: <AuthBook> objects
"""
for account in accounts:
task_name = gettext_noop("Test account connectivity: ") + str(account)
test_account_connectivity_util(account, task_name)
print(".\n") | from ops.inventory import JMSCustomInventory
tasks = get_test_account_connectivity_tasks(asset)
if not tasks: | random_line_split |
__init__.py | # -*- coding: utf-8 -*-
"""digitalocean API to manage droplets"""
__version__ = "1.16.0"
__author__ = "Lorenzo Setale ( http://who.is.lorenzo.setale.me/? )"
__author_email__ = "[email protected]"
__license__ = "LGPL v3"
__copyright__ = "Copyright (c) 2012-2020 Lorenzo Setale"
from .Manager import Manager
from .Droplet import Droplet, DropletError, BadKernelObject, BadSSHKeyFormat | from .Balance import Balance
from .Domain import Domain
from .Record import Record
from .SSHKey import SSHKey
from .Kernel import Kernel
from .FloatingIP import FloatingIP
from .Volume import Volume
from .baseapi import Error, EndPointError, TokenError, DataReadError, NotFoundError
from .Tag import Tag
from .LoadBalancer import LoadBalancer
from .LoadBalancer import StickySessions, ForwardingRule, HealthCheck
from .Certificate import Certificate
from .Snapshot import Snapshot
from .Project import Project
from .Firewall import Firewall, InboundRule, OutboundRule, Destinations, Sources
from .VPC import VPC | from .Region import Region
from .Size import Size
from .Image import Image
from .Action import Action
from .Account import Account | random_line_split |
cli.py | import logging
import sys
from cliff.app import App
from cliff.commandmanager import CommandManager
# from .utils import ColorLogFormatter
from nicelog.formatters import ColorLineFormatter
class HarvesterApp(App):
logger = logging.getLogger(__name__)
def __init__(self):
super(HarvesterApp, self).__init__(
description='Harvester application CLI',
version='0.1',
command_manager=CommandManager('harvester.commands'))
def configure_logging(self):
"""
Create logging handlers for any log output.
Modified version to set custom formatter for console
"""
root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)
# Set up logging to a file
if self.options.log_file:
file_handler = logging.FileHandler(
filename=self.options.log_file,
)
formatter = logging.Formatter(self.LOG_FILE_MESSAGE_FORMAT)
file_handler.setFormatter(formatter)
root_logger.addHandler(file_handler)
# Always send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {0: logging.WARNING,
1: logging.INFO,
2: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
console.setLevel(console_level)
# formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
formatter = ColorLineFormatter(
show_date=True, show_function=True, show_filename=True)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def main(argv=sys.argv[1:]):
|
if __name__ == '__main__':
sys.exit(main())
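# Editor's note: cliff looks up subcommands registered under the
# 'harvester.commands' entry-point namespace. A hypothetical command (names are
# illustrative, not taken from this project) would look like:
#
#     from cliff.command import Command
#
#     class ListSources(Command):
#         """List configured harvest sources."""
#
#         def take_action(self, parsed_args):
#             self.app.stdout.write('no sources configured\n')
#
# registered in setup.py as:
#
#     entry_points={
#         'harvester.commands': [
#             'list-sources = harvester.commands.sources:ListSources',
#         ],
#     }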
| myapp = HarvesterApp()
return myapp.run(argv) | identifier_body |
cli.py | import logging
import sys
from cliff.app import App
from cliff.commandmanager import CommandManager
# from .utils import ColorLogFormatter
from nicelog.formatters import ColorLineFormatter
class HarvesterApp(App):
logger = logging.getLogger(__name__)
def __init__(self):
super(HarvesterApp, self).__init__(
description='Harvester application CLI',
version='0.1',
command_manager=CommandManager('harvester.commands'))
def configure_logging(self):
"""
Create logging handlers for any log output.
Modified version to set custom formatter for console
"""
root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)
# Set up logging to a file
if self.options.log_file:
file_handler = logging.FileHandler(
filename=self.options.log_file,
)
formatter = logging.Formatter(self.LOG_FILE_MESSAGE_FORMAT) | # Always send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {0: logging.WARNING,
1: logging.INFO,
2: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
console.setLevel(console_level)
# formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
formatter = ColorLineFormatter(
show_date=True, show_function=True, show_filename=True)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def main(argv=sys.argv[1:]):
myapp = HarvesterApp()
return myapp.run(argv)
if __name__ == '__main__':
sys.exit(main()) | file_handler.setFormatter(formatter)
root_logger.addHandler(file_handler)
| random_line_split |
cli.py | import logging
import sys
from cliff.app import App
from cliff.commandmanager import CommandManager
# from .utils import ColorLogFormatter
from nicelog.formatters import ColorLineFormatter
class HarvesterApp(App):
logger = logging.getLogger(__name__)
def __init__(self):
super(HarvesterApp, self).__init__(
description='Harvester application CLI',
version='0.1',
command_manager=CommandManager('harvester.commands'))
def configure_logging(self):
"""
Create logging handlers for any log output.
Modified version to set custom formatter for console
"""
root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)
# Set up logging to a file
if self.options.log_file:
file_handler = logging.FileHandler(
filename=self.options.log_file,
)
formatter = logging.Formatter(self.LOG_FILE_MESSAGE_FORMAT)
file_handler.setFormatter(formatter)
root_logger.addHandler(file_handler)
# Always send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {0: logging.WARNING,
1: logging.INFO,
2: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
console.setLevel(console_level)
# formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
formatter = ColorLineFormatter(
show_date=True, show_function=True, show_filename=True)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def main(argv=sys.argv[1:]):
myapp = HarvesterApp()
return myapp.run(argv)
if __name__ == '__main__':
| sys.exit(main()) | conditional_block |
|
cli.py | import logging
import sys
from cliff.app import App
from cliff.commandmanager import CommandManager
# from .utils import ColorLogFormatter
from nicelog.formatters import ColorLineFormatter
class HarvesterApp(App):
logger = logging.getLogger(__name__)
def | (self):
super(HarvesterApp, self).__init__(
description='Harvester application CLI',
version='0.1',
command_manager=CommandManager('harvester.commands'))
def configure_logging(self):
"""
Create logging handlers for any log output.
Modified version to set custom formatter for console
"""
root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)
# Set up logging to a file
if self.options.log_file:
file_handler = logging.FileHandler(
filename=self.options.log_file,
)
formatter = logging.Formatter(self.LOG_FILE_MESSAGE_FORMAT)
file_handler.setFormatter(formatter)
root_logger.addHandler(file_handler)
# Always send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {0: logging.WARNING,
1: logging.INFO,
2: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
console.setLevel(console_level)
# formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
formatter = ColorLineFormatter(
show_date=True, show_function=True, show_filename=True)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def main(argv=sys.argv[1:]):
myapp = HarvesterApp()
return myapp.run(argv)
if __name__ == '__main__':
sys.exit(main())
| __init__ | identifier_name |
MergeObserver.ts | import { Log } from "wonder-commonlib/dist/es2015/Log";
import { Observer } from "../core/Observer";
import { IObserver } from "./IObserver";
import { Stream } from "../core/Stream";
import { GroupDisposable } from "../Disposable/GroupDisposable";
import { JudgeUtils } from "../JudgeUtils";
import { fromPromise } from "../global/Operator";
import { requireCheck, assert } from "../definition/typescript/decorator/contract";
import { SingleDisposable } from "../Disposable/SingleDisposable";
export class MergeObserver extends Observer {
public static create(currentObserver: IObserver, maxConcurrent: number, groupDisposable: GroupDisposable) {
return new this(currentObserver, maxConcurrent, groupDisposable);
}
constructor(currentObserver: IObserver, maxConcurrent: number, groupDisposable: GroupDisposable) {
super(null, null, null);
this.currentObserver = currentObserver;
this._maxConcurrent = maxConcurrent;
this._groupDisposable = groupDisposable;
}
public done: boolean = false;
public currentObserver: IObserver = null;
public activeCount: number = 0;
public q: Array<Stream> = [];
private _maxConcurrent: number = null;
private _groupDisposable: GroupDisposable = null;
public handleSubscribe(innerSource: any) {
if (JudgeUtils.isPromise(innerSource)) {
innerSource = fromPromise(innerSource);
}
let disposable = SingleDisposable.create(),
innerObserver = InnerObserver.create(this, innerSource, this._groupDisposable);
this._groupDisposable.add(disposable);
innerObserver.disposable = disposable;
disposable.setDispose(innerSource.buildStream(innerObserver));
}
@requireCheck(function(innerSource: any) {
assert(innerSource instanceof Stream || JudgeUtils.isPromise(innerSource), Log.info.FUNC_MUST_BE("innerSource", "Stream or Promise"));
})
protected onNext(innerSource: any) {
if (this._isNotReachMaxConcurrent()) {
this.activeCount++;
this.handleSubscribe(innerSource);
return;
}
this.q.push(innerSource);
}
protected onError(error) {
this.currentObserver.error(error);
}
protected onCompleted() {
this.done = true;
if (this.activeCount === 0) {
this.currentObserver.completed();
}
}
private _isNotReachMaxConcurrent() {
return this.activeCount < this._maxConcurrent;
}
}
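/*
 * Editor's note (illustrative sketch, not part of the original file): this
 * observer backs a merge-with-max-concurrency operator. Conceptually it is
 * driven like the hypothetical snippet below (operator names are assumed):
 *
 *     // at most 2 inner streams are subscribed at once; the rest wait in `q`
 *     fromArray([streamA, streamB, streamC])
 *         .mergeAll(2)
 *         .subscribe(v => console.log(v));
 *
 * When an inner stream completes it either subscribes the next queued stream
 * or decrements `activeCount`; the outer observer completes once `done` is
 * true and no inner streams remain active.
 */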
class InnerObserver extends Observer {
public static create(parent: MergeObserver, currentStream: Stream, groupDisposable: GroupDisposable) {
var obj = new this(parent, currentStream, groupDisposable);
return obj;
}
constructor(parent: MergeObserver, currentStream: Stream, groupDisposable: GroupDisposable) {
super(null, null, null);
this._parent = parent;
this._currentStream = currentStream;
this._groupDisposable = groupDisposable;
}
public disposable: SingleDisposable = null;
private _parent: MergeObserver = null;
private _currentStream: Stream = null;
private _groupDisposable: GroupDisposable = null;
protected onNext(value) {
this._parent.currentObserver.next(value);
}
protected onError(error) {
this._parent.currentObserver.error(error);
}
protected onCompleted() {
var parent = this._parent;
if (!!this.disposable) |
if (parent.q.length > 0) {
parent.handleSubscribe(parent.q.shift());
}
else {
parent.activeCount -= 1;
if (this._isAsync() && parent.activeCount === 0) {
parent.currentObserver.completed();
}
}
}
private _isAsync() {
return this._parent.done;
}
} | {
this.disposable.dispose();
this._groupDisposable.remove(this.disposable);
} | conditional_block |
MergeObserver.ts | import { Log } from "wonder-commonlib/dist/es2015/Log";
import { Observer } from "../core/Observer";
import { IObserver } from "./IObserver";
import { Stream } from "../core/Stream";
import { GroupDisposable } from "../Disposable/GroupDisposable";
import { JudgeUtils } from "../JudgeUtils";
import { fromPromise } from "../global/Operator";
import { requireCheck, assert } from "../definition/typescript/decorator/contract";
import { SingleDisposable } from "../Disposable/SingleDisposable";
export class MergeObserver extends Observer {
public static create(currentObserver: IObserver, maxConcurrent: number, groupDisposable: GroupDisposable) {
return new this(currentObserver, maxConcurrent, groupDisposable);
}
constructor(currentObserver: IObserver, maxConcurrent: number, groupDisposable: GroupDisposable) {
super(null, null, null);
this.currentObserver = currentObserver;
this._maxConcurrent = maxConcurrent;
this._groupDisposable = groupDisposable;
}
public done: boolean = false;
public currentObserver: IObserver = null;
public activeCount: number = 0;
public q: Array<Stream> = [];
private _maxConcurrent: number = null;
private _groupDisposable: GroupDisposable = null;
public handleSubscribe(innerSource: any) {
if (JudgeUtils.isPromise(innerSource)) {
innerSource = fromPromise(innerSource);
}
let disposable = SingleDisposable.create(),
innerObserver = InnerObserver.create(this, innerSource, this._groupDisposable);
this._groupDisposable.add(disposable);
innerObserver.disposable = disposable;
disposable.setDispose(innerSource.buildStream(innerObserver));
}
@requireCheck(function(innerSource: any) {
assert(innerSource instanceof Stream || JudgeUtils.isPromise(innerSource), Log.info.FUNC_MUST_BE("innerSource", "Stream or Promise"));
})
protected onNext(innerSource: any) {
if (this._isNotReachMaxConcurrent()) {
this.activeCount++;
this.handleSubscribe(innerSource);
return;
}
this.q.push(innerSource);
}
protected onError(error) {
this.currentObserver.error(error);
}
protected onCompleted() {
this.done = true;
if (this.activeCount === 0) {
this.currentObserver.completed();
}
}
private _isNotReachMaxConcurrent() {
return this.activeCount < this._maxConcurrent;
}
}
class InnerObserver extends Observer {
public static create(parent: MergeObserver, currentStream: Stream, groupDisposable: GroupDisposable) {
var obj = new this(parent, currentStream, groupDisposable);
return obj;
}
constructor(parent: MergeObserver, currentStream: Stream, groupDisposable: GroupDisposable) {
super(null, null, null);
this._parent = parent;
this._currentStream = currentStream;
this._groupDisposable = groupDisposable;
}
public disposable: SingleDisposable = null;
private _parent: MergeObserver = null;
private _currentStream: Stream = null;
private _groupDisposable: GroupDisposable = null;
protected onNext(value) {
this._parent.currentObserver.next(value);
}
protected onError(error) {
this._parent.currentObserver.error(error);
}
protected onCompleted() |
private _isAsync() {
return this._parent.done;
}
} | {
var parent = this._parent;
if (!!this.disposable) {
this.disposable.dispose();
this._groupDisposable.remove(this.disposable);
}
if (parent.q.length > 0) {
parent.handleSubscribe(parent.q.shift());
}
else {
parent.activeCount -= 1;
if (this._isAsync() && parent.activeCount === 0) {
parent.currentObserver.completed();
}
}
} | identifier_body |
MergeObserver.ts | import { Log } from "wonder-commonlib/dist/es2015/Log";
import { Observer } from "../core/Observer";
import { IObserver } from "./IObserver";
import { Stream } from "../core/Stream";
import { GroupDisposable } from "../Disposable/GroupDisposable";
import { JudgeUtils } from "../JudgeUtils";
import { fromPromise } from "../global/Operator";
import { requireCheck, assert } from "../definition/typescript/decorator/contract";
import { SingleDisposable } from "../Disposable/SingleDisposable";
export class MergeObserver extends Observer {
public static create(currentObserver: IObserver, maxConcurrent: number, groupDisposable: GroupDisposable) {
return new this(currentObserver, maxConcurrent, groupDisposable);
}
constructor(currentObserver: IObserver, maxConcurrent: number, groupDisposable: GroupDisposable) {
super(null, null, null);
this.currentObserver = currentObserver;
this._maxConcurrent = maxConcurrent;
this._groupDisposable = groupDisposable;
}
public done: boolean = false;
public currentObserver: IObserver = null;
public activeCount: number = 0;
public q: Array<Stream> = [];
private _maxConcurrent: number = null;
private _groupDisposable: GroupDisposable = null;
public handleSubscribe(innerSource: any) {
if (JudgeUtils.isPromise(innerSource)) {
innerSource = fromPromise(innerSource);
}
let disposable = SingleDisposable.create(),
innerObserver = InnerObserver.create(this, innerSource, this._groupDisposable);
this._groupDisposable.add(disposable);
innerObserver.disposable = disposable;
disposable.setDispose(innerSource.buildStream(innerObserver));
}
@requireCheck(function(innerSource: any) {
assert(innerSource instanceof Stream || JudgeUtils.isPromise(innerSource), Log.info.FUNC_MUST_BE("innerSource", "Stream or Promise"));
})
protected onNext(innerSource: any) {
if (this._isNotReachMaxConcurrent()) {
this.activeCount++;
this.handleSubscribe(innerSource);
return;
}
this.q.push(innerSource);
}
protected onError(error) {
this.currentObserver.error(error);
}
protected onCompleted() {
this.done = true;
if (this.activeCount === 0) {
this.currentObserver.completed();
}
}
private _isNotReachMaxConcurrent() {
return this.activeCount < this._maxConcurrent;
}
}
class InnerObserver extends Observer {
public static create(parent: MergeObserver, currentStream: Stream, groupDisposable: GroupDisposable) {
var obj = new this(parent, currentStream, groupDisposable);
return obj;
}
constructor(parent: MergeObserver, currentStream: Stream, groupDisposable: GroupDisposable) {
super(null, null, null);
this._parent = parent;
this._currentStream = currentStream;
this._groupDisposable = groupDisposable;
}
public disposable: SingleDisposable = null;
private _parent: MergeObserver = null;
private _currentStream: Stream = null;
private _groupDisposable: GroupDisposable = null;
protected onNext(value) {
this._parent.currentObserver.next(value);
}
protected onError(error) {
this._parent.currentObserver.error(error);
}
protected onCompleted() {
var parent = this._parent;
if (!!this.disposable) {
this.disposable.dispose();
this._groupDisposable.remove(this.disposable);
}
if (parent.q.length > 0) {
parent.handleSubscribe(parent.q.shift());
}
else {
parent.activeCount -= 1;
if (this._isAsync() && parent.activeCount === 0) {
parent.currentObserver.completed();
}
}
}
private | () {
return this._parent.done;
}
} | _isAsync | identifier_name |
MergeObserver.ts | import { Log } from "wonder-commonlib/dist/es2015/Log";
import { Observer } from "../core/Observer";
import { IObserver } from "./IObserver";
import { Stream } from "../core/Stream";
import { GroupDisposable } from "../Disposable/GroupDisposable";
import { JudgeUtils } from "../JudgeUtils";
import { fromPromise } from "../global/Operator";
import { requireCheck, assert } from "../definition/typescript/decorator/contract";
import { SingleDisposable } from "../Disposable/SingleDisposable";
export class MergeObserver extends Observer {
public static create(currentObserver: IObserver, maxConcurrent: number, groupDisposable: GroupDisposable) {
return new this(currentObserver, maxConcurrent, groupDisposable);
| }
constructor(currentObserver: IObserver, maxConcurrent: number, groupDisposable: GroupDisposable) {
super(null, null, null);
this.currentObserver = currentObserver;
this._maxConcurrent = maxConcurrent;
this._groupDisposable = groupDisposable;
}
public done: boolean = false;
public currentObserver: IObserver = null;
public activeCount: number = 0;
public q: Array<Stream> = [];
private _maxConcurrent: number = null;
private _groupDisposable: GroupDisposable = null;
public handleSubscribe(innerSource: any) {
if (JudgeUtils.isPromise(innerSource)) {
innerSource = fromPromise(innerSource);
}
let disposable = SingleDisposable.create(),
innerObserver = InnerObserver.create(this, innerSource, this._groupDisposable);
this._groupDisposable.add(disposable);
innerObserver.disposable = disposable;
disposable.setDispose(innerSource.buildStream(innerObserver));
}
@requireCheck(function(innerSource: any) {
assert(innerSource instanceof Stream || JudgeUtils.isPromise(innerSource), Log.info.FUNC_MUST_BE("innerSource", "Stream or Promise"));
})
protected onNext(innerSource: any) {
if (this._isNotReachMaxConcurrent()) {
this.activeCount++;
this.handleSubscribe(innerSource);
return;
}
this.q.push(innerSource);
}
protected onError(error) {
this.currentObserver.error(error);
}
protected onCompleted() {
this.done = true;
if (this.activeCount === 0) {
this.currentObserver.completed();
}
}
private _isNotReachMaxConcurrent() {
return this.activeCount < this._maxConcurrent;
}
}
class InnerObserver extends Observer {
public static create(parent: MergeObserver, currentStream: Stream, groupDisposable: GroupDisposable) {
var obj = new this(parent, currentStream, groupDisposable);
return obj;
}
constructor(parent: MergeObserver, currentStream: Stream, groupDisposable: GroupDisposable) {
super(null, null, null);
this._parent = parent;
this._currentStream = currentStream;
this._groupDisposable = groupDisposable;
}
public disposable: SingleDisposable = null;
private _parent: MergeObserver = null;
private _currentStream: Stream = null;
private _groupDisposable: GroupDisposable = null;
protected onNext(value) {
this._parent.currentObserver.next(value);
}
protected onError(error) {
this._parent.currentObserver.error(error);
}
protected onCompleted() {
var parent = this._parent;
if (!!this.disposable) {
this.disposable.dispose();
this._groupDisposable.remove(this.disposable);
}
if (parent.q.length > 0) {
parent.handleSubscribe(parent.q.shift());
}
else {
parent.activeCount -= 1;
if (this._isAsync() && parent.activeCount === 0) {
parent.currentObserver.completed();
}
}
}
private _isAsync() {
return this._parent.done;
}
} | random_line_split |
|
tax-meta-clss.js | /**
* All Tax meta class
*
* JS used for the custom fields and other form items.
*
* Copyright 2012 Ohad Raz ([email protected])
* @since 1.0
*/
var $ =jQuery.noConflict();
function update_repeater_fields(){
/**
* Datepicker Field.
*
* @since 1.0
*/
$('.at-date').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.datepicker( { showButtonPanel: true, dateFormat: format } );
});
/**
* Timepicker Field.
*
* @since 1.0
*/
$('.at-time').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.timepicker( { showSecond: true, timeFormat: format } );
});
/**
* Colorpicker Field.
*
* @since 1.0
*/
/*
/**
* Select Color Field.
*
* @since 1.0
*/
$('.at-color-select').click( function(){
var $this = $(this);
var id = $this.attr('rel');
$(this).siblings('.at-color-picker').farbtastic("#" + id).toggle();
return false;
});
/**
* Add Files.
*
* @since 1.0
*/
$('.at-add-file').click( function() {
var $first = $(this).parent().find('.file-input:first');
$first.clone().insertAfter($first).show();
return false;
});
/**
* Delete File.
*
* @since 1.0
*/
$('.at-upload').delegate( '.at-delete-file', 'click' , function() {
var $this = $(this),
$parent = $this.parent(),
data = $this.attr('rel');
$.post( ajaxurl, { action: 'at_delete_file', data: data }, function(response) {
response == '0' ? ( alert( 'File has been successfully deleted.' ), $parent.remove() ) : alert( 'You do NOT have permission to delete this file.' );
});
return false;
});
/**
* Reorder Images.
*
* @since 1.0
*/
$('.at-images').each( function() {
var $this = $(this), order, data;
$this.sortable( {
placeholder: 'ui-state-highlight',
update: function (){
order = $this.sortable('serialize');
data = order + '|' + $this.siblings('.at-images-data').val();
$.post(ajaxurl, {action: 'at_reorder_images', data: data}, function(response){
response == '0' ? alert( 'Order saved!' ) : alert( "You don't have permission to reorder images." );
});
}
});
});
/**
* Thickbox Upload
*
* @since 1.0
*/
$('.at-upload-button').click( function() {
var data = $(this).attr('rel').split('|'),
post_id = data[0],
field_id = data[1],
backup = window.send_to_editor; // backup the original 'send_to_editor' function which adds images to the editor
// change the function to make it adds images to our section of uploaded images
window.send_to_editor = function(html) {
$('#at-images-' + field_id).append( $(html) );
tb_remove();
window.send_to_editor = backup;
};
// note that we pass the field_id and post_id here
tb_show('', 'media-upload.php?post_id=' + post_id + '&field_id=' + field_id + '&type=image&TB_iframe=true');
return false;
});
}
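/*
 * Editor's note: update_repeater_fields() re-binds the date/time/color pickers
 * and upload handlers above, so it is meant to be called whenever new repeater
 * rows are injected into the DOM. An illustrative (assumed) call site:
 *
 *     jQuery('.at-re-add-block').on('click', function () {
 *         var $block = jQuery(this).closest('.at-repater-block');
 *         $block.find('table:first').clone().appendTo($block);
 *         update_repeater_fields(); // re-initialize fields in the new row
 *     });
 */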
jQuery(document).ready(function($) {
/**
* repater Field
* @since 1.1
*/
/*$( ".at-repeater-item" ).live('click', function() {
var $this = $(this);
$this.siblings().toggle();
});
jQuery(".at-repater-block").click(function(){
jQuery(this).find('table').toggle();
});
*/
//edit
$(".at-re-toggle").live('click', function() {
$(this).prev().toggle('slow');
});
/**
* Datepicker Field.
*
* @since 1.0
*/
$('.at-date').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.datepicker( { showButtonPanel: true, dateFormat: format } );
});
/**
* Timepicker Field.
*
* @since 1.0
*/
$('.at-time').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.timepicker( { showSecond: true, timeFormat: format } );
});
/**
* Colorpicker Field.
*
* @since 1.0
* better handler for color picker with repeater fields support
* which now works both when button is clicked and when field gains focus.
*/
$('.at-color').live('focus', function() {
var $this = $(this);
$(this).siblings('.at-color-picker').farbtastic($this).toggle();
});
$('.at-color').live('focusout', function() {
var $this = $(this);
$(this).siblings('.at-color-picker').farbtastic($this).toggle();
});
/**
* Add Files.
*
* @since 1.0
*/
$('.at-add-file').click( function() {
var $first = $(this).parent().find('.file-input:first');
$first.clone().insertAfter($first).show();
return false;
});
/**
* Delete File.
*
* @since 1.0
*/
$('.at-upload').delegate( '.at-delete-file', 'click' , function() {
var $this = $(this),
$parent = $this.parent(),
data = $this.attr('rel');
$.post( ajaxurl, { action: 'at_delete_file', data: data }, function(response) {
response == '0' ? ( alert( 'File has been successfully deleted.' ), $parent.remove() ) : alert( 'You do NOT have permission to delete this file.' );
});
return false;
});
/**
* Thickbox Upload
*
* @since 1.0
*/
$('.at-upload-button').click( function() {
var data = $(this).attr('rel').split('|'),
post_id = data[0],
field_id = data[1],
backup = window.send_to_editor; // backup the original 'send_to_editor' function which adds images to the editor
// change the function to make it adds images to our section of uploaded images
window.send_to_editor = function(html) {
$('#at-images-' + field_id).append( $(html) );
tb_remove();
window.send_to_editor = backup;
};
// note that we pass the field_id and post_id here
tb_show('', 'media-upload.php?post_id=' + post_id + '&field_id=' + field_id + '&type=image&TB_iframe=true');
return false;
});
/**
* Helper Function
*
* Get Query string value by name.
*
* @since 1.0
*/
function get_query_var( name ) {
var match = RegExp('[?&]' + name + '=([^&#]*)').exec(location.href);
return match && decodeURIComponent(match[1].replace(/\+/g, ' '));
}
//new image upload field
function load_images_muploader(){
jQuery(".mupload_img_holder").each(function(i,v){
if (jQuery(this).next().next().val() != ''){
if (jQuery(this).children().length === 0){
jQuery(this).append('<img src="' + jQuery(this).next().next().val() + '" style="height: 150px;width: 150px;" />');
jQuery(this).next().next().next().val("Delete");
jQuery(this).next().next().next().removeClass('at-upload_image_button').addClass('at-delete_image_button'); | }
}
});
}
load_images_muploader();
//delete img button
jQuery('.at-delete_image_button').live('click', function(e){
var field_id = jQuery(this).attr("rel");
var at_id = jQuery(this).prev().prev();
var at_src = jQuery(this).prev();
var t_button = jQuery(this);
data = {
action: 'at_delete_mupload',
_wpnonce: $('#nonce-delete-mupload_' + field_id).val(),
post_id: get_query_var('tag_ID'),
field_id: field_id,
attachment_id: jQuery(at_id).val()
};
$.getJSON(ajaxurl, data, function(response) {
if ('success' == response.status){
jQuery(t_button).val("Upload Image");
jQuery(t_button).removeClass('at-delete_image_button').addClass('at-upload_image_button');
//clear html values
jQuery(at_id).val('');
jQuery(at_src).val('');
jQuery(at_id).prev().html('');
load_images_muploader();
}else{
alert(response.message);
}
});
return false;
});
//upload button
var formfield1;
var formfield2;
jQuery('.at-upload_image_button').live('click',function(e){
formfield1 = jQuery(this).prev();
formfield2 = jQuery(this).prev().prev();
tb_show('', 'media-upload.php?type=image&TB_iframe=true');
//store old send to editor function
window.restore_send_to_editor = window.send_to_editor;
//overwrite send to editor function
window.send_to_editor = function(html) {
imgurl = jQuery('img',html).attr('src');
img_calsses = jQuery('img',html).attr('class').split(" ");
att_id = '';
jQuery.each(img_calsses,function(i,val){
if (val.indexOf("wp-image") != -1){
att_id = val.replace('wp-image-', "");
}
});
jQuery(formfield2).val(att_id);
jQuery(formfield1).val(imgurl);
load_images_muploader();
tb_remove();
//restore old send to editor function
window.send_to_editor = window.restore_send_to_editor;
}
return false;
});
}); | random_line_split |
|
tax-meta-clss.js | /**
* All Tax meta class
*
* JS used for the custom fields and other form items.
*
* Copyright 2012 Ohad Raz ([email protected])
* @since 1.0
*/
var $ =jQuery.noConflict();
function update_repeater_fields(){
/**
* Datepicker Field.
*
* @since 1.0
*/
$('.at-date').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.datepicker( { showButtonPanel: true, dateFormat: format } );
});
/**
* Timepicker Field.
*
* @since 1.0
*/
$('.at-time').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.timepicker( { showSecond: true, timeFormat: format } );
});
/**
* Colorpicker Field.
*
* @since 1.0
*/
/*
/**
* Select Color Field.
*
* @since 1.0
*/
$('.at-color-select').click( function(){
var $this = $(this);
var id = $this.attr('rel');
$(this).siblings('.at-color-picker').farbtastic("#" + id).toggle();
return false;
});
/**
* Add Files.
*
* @since 1.0
*/
$('.at-add-file').click( function() {
var $first = $(this).parent().find('.file-input:first');
$first.clone().insertAfter($first).show();
return false;
});
/**
* Delete File.
*
* @since 1.0
*/
$('.at-upload').delegate( '.at-delete-file', 'click' , function() {
var $this = $(this),
$parent = $this.parent(),
data = $this.attr('rel');
$.post( ajaxurl, { action: 'at_delete_file', data: data }, function(response) {
response == '0' ? ( alert( 'File has been successfully deleted.' ), $parent.remove() ) : alert( 'You do NOT have permission to delete this file.' );
});
return false;
});
/**
* Reorder Images.
*
* @since 1.0
*/
$('.at-images').each( function() {
var $this = $(this), order, data;
$this.sortable( {
placeholder: 'ui-state-highlight',
update: function (){
order = $this.sortable('serialize');
data = order + '|' + $this.siblings('.at-images-data').val();
$.post(ajaxurl, {action: 'at_reorder_images', data: data}, function(response){
response == '0' ? alert( 'Order saved!' ) : alert( "You don't have permission to reorder images." );
});
}
});
});
/**
* Thickbox Upload
*
* @since 1.0
*/
$('.at-upload-button').click( function() {
var data = $(this).attr('rel').split('|'),
post_id = data[0],
field_id = data[1],
backup = window.send_to_editor; // backup the original 'send_to_editor' function which adds images to the editor
// change the function to make it adds images to our section of uploaded images
window.send_to_editor = function(html) {
$('#at-images-' + field_id).append( $(html) );
tb_remove();
window.send_to_editor = backup;
};
// note that we pass the field_id and post_id here
tb_show('', 'media-upload.php?post_id=' + post_id + '&field_id=' + field_id + '&type=image&TB_iframe=true');
return false;
});
}
jQuery(document).ready(function($) {
/**
* repater Field
* @since 1.1
*/
/*$( ".at-repeater-item" ).live('click', function() {
var $this = $(this);
$this.siblings().toggle();
});
jQuery(".at-repater-block").click(function(){
jQuery(this).find('table').toggle();
});
*/
//edit
$(".at-re-toggle").live('click', function() {
$(this).prev().toggle('slow');
});
/**
* Datepicker Field.
*
* @since 1.0
*/
$('.at-date').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.datepicker( { showButtonPanel: true, dateFormat: format } );
});
/**
* Timepicker Field.
*
* @since 1.0
*/
$('.at-time').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.timepicker( { showSecond: true, timeFormat: format } );
});
/**
* Colorpicker Field.
*
* @since 1.0
* better handler for color picker with repeater fields support
* which now works both when button is clicked and when field gains focus.
*/
$('.at-color').live('focus', function() {
var $this = $(this);
$(this).siblings('.at-color-picker').farbtastic($this).toggle();
});
$('.at-color').live('focusout', function() {
var $this = $(this);
$(this).siblings('.at-color-picker').farbtastic($this).toggle();
});
/**
* Add Files.
*
* @since 1.0
*/
$('.at-add-file').click( function() {
var $first = $(this).parent().find('.file-input:first');
$first.clone().insertAfter($first).show();
return false;
});
/**
* Delete File.
*
* @since 1.0
*/
$('.at-upload').delegate( '.at-delete-file', 'click' , function() {
var $this = $(this),
$parent = $this.parent(),
data = $this.attr('rel');
$.post( ajaxurl, { action: 'at_delete_file', data: data }, function(response) {
response == '0' ? ( alert( 'File has been successfully deleted.' ), $parent.remove() ) : alert( 'You do NOT have permission to delete this file.' );
});
return false;
});
/**
* Thickbox Upload
*
* @since 1.0
*/
$('.at-upload-button').click( function() {
var data = $(this).attr('rel').split('|'),
post_id = data[0],
field_id = data[1],
backup = window.send_to_editor; // backup the original 'send_to_editor' function which adds images to the editor
// change the function to make it adds images to our section of uploaded images
window.send_to_editor = function(html) {
$('#at-images-' + field_id).append( $(html) );
tb_remove();
window.send_to_editor = backup;
};
// note that we pass the field_id and post_id here
tb_show('', 'media-upload.php?post_id=' + post_id + '&field_id=' + field_id + '&type=image&TB_iframe=true');
return false;
});
/**
* Helper Function
*
* Get Query string value by name.
*
* @since 1.0
*/
function get_query_var( name ) |
//new image upload field
function load_images_muploader(){
jQuery(".mupload_img_holder").each(function(i,v){
if (jQuery(this).next().next().val() != ''){
if (jQuery(this).children().length === 0){
jQuery(this).append('<img src="' + jQuery(this).next().next().val() + '" style="height: 150px;width: 150px;" />');
jQuery(this).next().next().next().val("Delete");
jQuery(this).next().next().next().removeClass('at-upload_image_button').addClass('at-delete_image_button');
}
}
});
}
load_images_muploader();
//delete img button
jQuery('.at-delete_image_button').live('click', function(e){
var field_id = jQuery(this).attr("rel");
var at_id = jQuery(this).prev().prev();
var at_src = jQuery(this).prev();
var t_button = jQuery(this);
data = {
action: 'at_delete_mupload',
_wpnonce: $('#nonce-delete-mupload_' + field_id).val(),
post_id: get_query_var('tag_ID'),
field_id: field_id,
attachment_id: jQuery(at_id).val()
};
$.getJSON(ajaxurl, data, function(response) {
if ('success' == response.status){
jQuery(t_button).val("Upload Image");
jQuery(t_button).removeClass('at-delete_image_button').addClass('at-upload_image_button');
//clear html values
jQuery(at_id).val('');
jQuery(at_src).val('');
jQuery(at_id).prev().html('');
load_images_muploader();
}else{
alert(response.message);
}
});
return false;
});
//upload button
var formfield1;
var formfield2;
jQuery('.at-upload_image_button').live('click',function(e){
formfield1 = jQuery(this).prev();
formfield2 = jQuery(this).prev().prev();
tb_show('', 'media-upload.php?type=image&TB_iframe=true');
//store old send to editor function
window.restore_send_to_editor = window.send_to_editor;
//overwrite send to editor function
window.send_to_editor = function(html) {
imgurl = jQuery('img',html).attr('src');
img_calsses = jQuery('img',html).attr('class').split(" ");
att_id = '';
jQuery.each(img_calsses,function(i,val){
if (val.indexOf("wp-image") != -1){
att_id = val.replace('wp-image-', "");
}
});
jQuery(formfield2).val(att_id);
jQuery(formfield1).val(imgurl);
load_images_muploader();
tb_remove();
//restore old send to editor function
window.send_to_editor = window.restore_send_to_editor;
}
return false;
});
}); | {
var match = RegExp('[?&]' + name + '=([^&#]*)').exec(location.href);
return match && decodeURIComponent(match[1].replace(/\+/g, ' '));
} | identifier_body |
tax-meta-clss.js | /**
* All Tax meta class
*
* JS used for the custom fields and other form items.
*
* Copyright 2012 Ohad Raz ([email protected])
* @since 1.0
*/
var $ =jQuery.noConflict();
function update_repeater_fields(){
/**
* Datepicker Field.
*
* @since 1.0
*/
$('.at-date').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.datepicker( { showButtonPanel: true, dateFormat: format } );
});
/**
* Timepicker Field.
*
* @since 1.0
*/
$('.at-time').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.timepicker( { showSecond: true, timeFormat: format } );
});
/**
* Colorpicker Field.
*
* @since 1.0
*/
/*
/**
* Select Color Field.
*
* @since 1.0
*/
$('.at-color-select').click( function(){
var $this = $(this);
var id = $this.attr('rel');
$(this).siblings('.at-color-picker').farbtastic("#" + id).toggle();
return false;
});
/**
* Add Files.
*
* @since 1.0
*/
$('.at-add-file').click( function() {
var $first = $(this).parent().find('.file-input:first');
$first.clone().insertAfter($first).show();
return false;
});
/**
* Delete File.
*
* @since 1.0
*/
$('.at-upload').delegate( '.at-delete-file', 'click' , function() {
var $this = $(this),
$parent = $this.parent(),
data = $this.attr('rel');
$.post( ajaxurl, { action: 'at_delete_file', data: data }, function(response) {
response == '0' ? ( alert( 'File has been successfully deleted.' ), $parent.remove() ) : alert( 'You do NOT have permission to delete this file.' );
});
return false;
});
/**
* Reorder Images.
*
* @since 1.0
*/
$('.at-images').each( function() {
var $this = $(this), order, data;
$this.sortable( {
placeholder: 'ui-state-highlight',
update: function (){
order = $this.sortable('serialize');
data = order + '|' + $this.siblings('.at-images-data').val();
$.post(ajaxurl, {action: 'at_reorder_images', data: data}, function(response){
response == '0' ? alert( 'Order saved!' ) : alert( "You don't have permission to reorder images." );
});
}
});
});
/**
* Thickbox Upload
*
* @since 1.0
*/
$('.at-upload-button').click( function() {
var data = $(this).attr('rel').split('|'),
post_id = data[0],
field_id = data[1],
backup = window.send_to_editor; // backup the original 'send_to_editor' function which adds images to the editor
// change the function to make it adds images to our section of uploaded images
window.send_to_editor = function(html) {
$('#at-images-' + field_id).append( $(html) );
tb_remove();
window.send_to_editor = backup;
};
// note that we pass the field_id and post_id here
tb_show('', 'media-upload.php?post_id=' + post_id + '&field_id=' + field_id + '&type=image&TB_iframe=true');
return false;
});
}
jQuery(document).ready(function($) {
/**
* repater Field
* @since 1.1
*/
/*$( ".at-repeater-item" ).live('click', function() {
var $this = $(this);
$this.siblings().toggle();
});
jQuery(".at-repater-block").click(function(){
jQuery(this).find('table').toggle();
});
*/
//edit
$(".at-re-toggle").live('click', function() {
$(this).prev().toggle('slow');
});
/**
* Datepicker Field.
*
* @since 1.0
*/
$('.at-date').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.datepicker( { showButtonPanel: true, dateFormat: format } );
});
/**
* Timepicker Field.
*
* @since 1.0
*/
$('.at-time').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.timepicker( { showSecond: true, timeFormat: format } );
});
/**
* Colorpicker Field.
*
* @since 1.0
* better handler for color picker with repeater fields support
* which now works both when button is clicked and when field gains focus.
*/
$('.at-color').live('focus', function() {
var $this = $(this);
$(this).siblings('.at-color-picker').farbtastic($this).toggle();
});
$('.at-color').live('focusout', function() {
var $this = $(this);
$(this).siblings('.at-color-picker').farbtastic($this).toggle();
});
/**
* Add Files.
*
* @since 1.0
*/
$('.at-add-file').click( function() {
var $first = $(this).parent().find('.file-input:first');
$first.clone().insertAfter($first).show();
return false;
});
/**
* Delete File.
*
* @since 1.0
*/
$('.at-upload').delegate( '.at-delete-file', 'click' , function() {
var $this = $(this),
$parent = $this.parent(),
data = $this.attr('rel');
$.post( ajaxurl, { action: 'at_delete_file', data: data }, function(response) {
response == '0' ? ( alert( 'File has been successfully deleted.' ), $parent.remove() ) : alert( 'You do NOT have permission to delete this file.' );
});
return false;
});
/**
* Thickbox Upload
*
* @since 1.0
*/
$('.at-upload-button').click( function() {
var data = $(this).attr('rel').split('|'),
post_id = data[0],
field_id = data[1],
backup = window.send_to_editor; // backup the original 'send_to_editor' function which adds images to the editor
// change the function to make it adds images to our section of uploaded images
window.send_to_editor = function(html) {
$('#at-images-' + field_id).append( $(html) );
tb_remove();
window.send_to_editor = backup;
};
// note that we pass the field_id and post_id here
tb_show('', 'media-upload.php?post_id=' + post_id + '&field_id=' + field_id + '&type=image&TB_iframe=true');
return false;
});
/**
* Helper Function
*
* Get Query string value by name.
*
* @since 1.0
*/
function get_query_var( name ) {
var match = RegExp('[?&]' + name + '=([^&#]*)').exec(location.href);
return match && decodeURIComponent(match[1].replace(/\+/g, ' '));
}
//new image upload field
function | (){
jQuery(".mupload_img_holder").each(function(i,v){
if (jQuery(this).next().next().val() != ''){
if (jQuery(this).children().length === 0){
jQuery(this).append('<img src="' + jQuery(this).next().next().val() + '" style="height: 150px;width: 150px;" />');
jQuery(this).next().next().next().val("Delete");
jQuery(this).next().next().next().removeClass('at-upload_image_button').addClass('at-delete_image_button');
}
}
});
}
load_images_muploader();
//delete img button
jQuery('.at-delete_image_button').live('click', function(e){
var field_id = jQuery(this).attr("rel");
var at_id = jQuery(this).prev().prev();
var at_src = jQuery(this).prev();
var t_button = jQuery(this);
data = {
action: 'at_delete_mupload',
_wpnonce: $('#nonce-delete-mupload_' + field_id).val(),
post_id: get_query_var('tag_ID'),
field_id: field_id,
attachment_id: jQuery(at_id).val()
};
$.getJSON(ajaxurl, data, function(response) {
if ('success' == response.status){
jQuery(t_button).val("Upload Image");
jQuery(t_button).removeClass('at-delete_image_button').addClass('at-upload_image_button');
//clear html values
jQuery(at_id).val('');
jQuery(at_src).val('');
jQuery(at_id).prev().html('');
load_images_muploader();
}else{
alert(response.message);
}
});
return false;
});
//upload button
var formfield1;
var formfield2;
jQuery('.at-upload_image_button').live('click',function(e){
formfield1 = jQuery(this).prev();
formfield2 = jQuery(this).prev().prev();
tb_show('', 'media-upload.php?type=image&TB_iframe=true');
//store old send to editor function
window.restore_send_to_editor = window.send_to_editor;
//overwrite send to editor function
window.send_to_editor = function(html) {
imgurl = jQuery('img',html).attr('src');
img_calsses = jQuery('img',html).attr('class').split(" ");
att_id = '';
jQuery.each(img_calsses,function(i,val){
if (val.indexOf("wp-image") != -1){
att_id = val.replace('wp-image-', "");
}
});
jQuery(formfield2).val(att_id);
jQuery(formfield1).val(imgurl);
load_images_muploader();
tb_remove();
//restore old send to editor function
window.send_to_editor = window.restore_send_to_editor;
}
return false;
});
}); | load_images_muploader | identifier_name |
tax-meta-clss.js | /**
* All Tax meta class
*
* JS used for the custom fields and other form items.
*
* Copyright 2012 Ohad Raz ([email protected])
* @since 1.0
*/
var $ =jQuery.noConflict();
function update_repeater_fields(){
/**
* Datepicker Field.
*
* @since 1.0
*/
$('.at-date').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.datepicker( { showButtonPanel: true, dateFormat: format } );
});
/**
* Timepicker Field.
*
* @since 1.0
*/
$('.at-time').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.timepicker( { showSecond: true, timeFormat: format } );
});
/**
* Colorpicker Field.
*
* @since 1.0
*/
/*
/**
* Select Color Field.
*
* @since 1.0
*/
$('.at-color-select').click( function(){
var $this = $(this);
var id = $this.attr('rel');
$(this).siblings('.at-color-picker').farbtastic("#" + id).toggle();
return false;
});
/**
* Add Files.
*
* @since 1.0
*/
$('.at-add-file').click( function() {
var $first = $(this).parent().find('.file-input:first');
$first.clone().insertAfter($first).show();
return false;
});
/**
* Delete File.
*
* @since 1.0
*/
$('.at-upload').delegate( '.at-delete-file', 'click' , function() {
var $this = $(this),
$parent = $this.parent(),
data = $this.attr('rel');
$.post( ajaxurl, { action: 'at_delete_file', data: data }, function(response) {
response == '0' ? ( alert( 'File has been successfully deleted.' ), $parent.remove() ) : alert( 'You do NOT have permission to delete this file.' );
});
return false;
});
/**
* Reorder Images.
*
* @since 1.0
*/
$('.at-images').each( function() {
var $this = $(this), order, data;
$this.sortable( {
placeholder: 'ui-state-highlight',
update: function (){
order = $this.sortable('serialize');
data = order + '|' + $this.siblings('.at-images-data').val();
$.post(ajaxurl, {action: 'at_reorder_images', data: data}, function(response){
response == '0' ? alert( 'Order saved!' ) : alert( "You don't have permission to reorder images." );
});
}
});
});
/**
* Thickbox Upload
*
* @since 1.0
*/
$('.at-upload-button').click( function() {
var data = $(this).attr('rel').split('|'),
post_id = data[0],
field_id = data[1],
backup = window.send_to_editor; // backup the original 'send_to_editor' function which adds images to the editor
// change the function to make it adds images to our section of uploaded images
window.send_to_editor = function(html) {
$('#at-images-' + field_id).append( $(html) );
tb_remove();
window.send_to_editor = backup;
};
// note that we pass the field_id and post_id here
tb_show('', 'media-upload.php?post_id=' + post_id + '&field_id=' + field_id + '&type=image&TB_iframe=true');
return false;
});
}
jQuery(document).ready(function($) {
/**
* repater Field
* @since 1.1
*/
/*$( ".at-repeater-item" ).live('click', function() {
var $this = $(this);
$this.siblings().toggle();
});
jQuery(".at-repater-block").click(function(){
jQuery(this).find('table').toggle();
});
*/
//edit
$(".at-re-toggle").live('click', function() {
$(this).prev().toggle('slow');
});
/**
* Datepicker Field.
*
* @since 1.0
*/
$('.at-date').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.datepicker( { showButtonPanel: true, dateFormat: format } );
});
/**
* Timepicker Field.
*
* @since 1.0
*/
$('.at-time').each( function() {
var $this = $(this),
format = $this.attr('rel');
$this.timepicker( { showSecond: true, timeFormat: format } );
});
/**
* Colorpicker Field.
*
* @since 1.0
* better handler for color picker with repeater fields support
* which now works both when button is clicked and when field gains focus.
*/
$('.at-color').live('focus', function() {
var $this = $(this);
$(this).siblings('.at-color-picker').farbtastic($this).toggle();
});
$('.at-color').live('focusout', function() {
var $this = $(this);
$(this).siblings('.at-color-picker').farbtastic($this).toggle();
});
/**
* Add Files.
*
* @since 1.0
*/
$('.at-add-file').click( function() {
var $first = $(this).parent().find('.file-input:first');
$first.clone().insertAfter($first).show();
return false;
});
/**
* Delete File.
*
* @since 1.0
*/
$('.at-upload').delegate( '.at-delete-file', 'click' , function() {
var $this = $(this),
$parent = $this.parent(),
data = $this.attr('rel');
$.post( ajaxurl, { action: 'at_delete_file', data: data }, function(response) {
response == '0' ? ( alert( 'File has been successfully deleted.' ), $parent.remove() ) : alert( 'You do NOT have permission to delete this file.' );
});
return false;
});
/**
* Thickbox Upload
*
* @since 1.0
*/
$('.at-upload-button').click( function() {
var data = $(this).attr('rel').split('|'),
post_id = data[0],
field_id = data[1],
backup = window.send_to_editor; // backup the original 'send_to_editor' function which adds images to the editor
// change the function to make it adds images to our section of uploaded images
window.send_to_editor = function(html) {
$('#at-images-' + field_id).append( $(html) );
tb_remove();
window.send_to_editor = backup;
};
// note that we pass the field_id and post_id here
tb_show('', 'media-upload.php?post_id=' + post_id + '&field_id=' + field_id + '&type=image&TB_iframe=true');
return false;
});
/**
* Helper Function
*
* Get Query string value by name.
*
* @since 1.0
*/
function get_query_var( name ) {
var match = RegExp('[?&]' + name + '=([^&#]*)').exec(location.href);
return match && decodeURIComponent(match[1].replace(/\+/g, ' '));
}
//new image upload field
function load_images_muploader(){
jQuery(".mupload_img_holder").each(function(i,v){
if (jQuery(this).next().next().val() != ''){
if (!jQuery(this).children().size() > 0) |
}
});
}
load_images_muploader();
//delete img button
jQuery('.at-delete_image_button').live('click', function(e){
var field_id = jQuery(this).attr("rel");
var at_id = jQuery(this).prev().prev();
var at_src = jQuery(this).prev();
var t_button = jQuery(this);
data = {
action: 'at_delete_mupload',
_wpnonce: $('#nonce-delete-mupload_' + field_id).val(),
post_id: get_query_var('tag_ID'),
field_id: field_id,
attachment_id: jQuery(at_id).val()
};
$.getJSON(ajaxurl, data, function(response) {
if ('success' == response.status){
jQuery(t_button).val("Upload Image");
jQuery(t_button).removeClass('at-delete_image_button').addClass('at-upload_image_button');
//clear html values
jQuery(at_id).val('');
jQuery(at_src).val('');
jQuery(at_id).prev().html('');
load_images_muploader();
}else{
alert(response.message);
}
});
return false;
});
//upload button
var formfield1;
var formfield2;
jQuery('.at-upload_image_button').live('click',function(e){
formfield1 = jQuery(this).prev();
formfield2 = jQuery(this).prev().prev();
tb_show('', 'media-upload.php?type=image&TB_iframe=true');
//store old send to editor function
window.restore_send_to_editor = window.send_to_editor;
//overwrite send to editor function
window.send_to_editor = function(html) {
imgurl = jQuery('img',html).attr('src');
img_calsses = jQuery('img',html).attr('class').split(" ");
att_id = '';
jQuery.each(img_calsses,function(i,val){
if (val.indexOf("wp-image") != -1){
att_id = val.replace('wp-image-', "");
}
});
jQuery(formfield2).val(att_id);
jQuery(formfield1).val(imgurl);
load_images_muploader();
tb_remove();
//restore old send to editor function
window.send_to_editor = window.restore_send_to_editor;
}
return false;
});
}); | {
jQuery(this).append('<img src="' + jQuery(this).next().next().val() + '" style="height: 150px;width: 150px;" />');
jQuery(this).next().next().next().val("Delete");
jQuery(this).next().next().next().removeClass('at-upload_image_button').addClass('at-delete_image_button');
} | conditional_block |
page_hinkley.py | """
The Tornado Framework
By Ali Pesaranghader
University of Ottawa, Ontario, Canada
E-mail: apesaran -at- uottawa -dot- ca / alipsgh -at- gmail -dot- com
---
*** The Page Hinkley (PH) Method Implementation ***
Paper: Page, Ewan S. "Continuous inspection schemes."
Published in: Biometrika 41.1/2 (1954): 100-115.
URL: http://www.jstor.org/stable/2333009
"""
from dictionary.tornado_dictionary import TornadoDic
from drift_detection.detector import SuperDetector
class PH(SuperDetector):
"""The Page Hinkley (PH) drift detection method class."""
DETECTOR_NAME = TornadoDic.PH
def __init__(self, min_instance=30, delta=0.005, lambda_=50, alpha=1 - 0.0001):
super().__init__()
self.MINIMUM_NUM_INSTANCES = min_instance
self.m_n = 1
self.x_mean = 0.0
self.sum = 0.0
self.delta = delta
self.lambda_ = lambda_
self.alpha = alpha
def run(self, pr):
pr = 1 if pr is False else 0
warning_status = False
drift_status = False
# 1. UPDATING STATS
self.x_mean = self.x_mean + (pr - self.x_mean) / self.m_n
self.sum = self.alpha * self.sum + (pr - self.x_mean - self.delta)
self.m_n += 1
# 2. UPDATING WARNING AND DRIFT STATUSES
if self.m_n >= self.MINIMUM_NUM_INSTANCES:
if self.sum > self.lambda_:
drift_status = True
return warning_status, drift_status
def reset(self):
super().reset()
self.m_n = 1
self.x_mean = 0.0
self.sum = 0.0
def get_settings(self):
return [str(self.MINIMUM_NUM_INSTANCES) + "." + str(self.delta) + "." +
str(self.lambda_) + "." + str(self.alpha),
"$n_{min}$:" + str(self.MINIMUM_NUM_INSTANCES) + ", " +
| "$\lambda$:" + str(self.lambda_).upper() + ", " +
"$\\alpha$:" + str(self.alpha).upper()] | "$\delta$:" + str(self.delta).upper() + ", " +
| random_line_split |
page_hinkley.py | """
The Tornado Framework
By Ali Pesaranghader
University of Ottawa, Ontario, Canada
E-mail: apesaran -at- uottawa -dot- ca / alipsgh -at- gmail -dot- com
---
*** The Page Hinkley (PH) Method Implementation ***
Paper: Page, Ewan S. "Continuous inspection schemes."
Published in: Biometrika 41.1/2 (1954): 100-115.
URL: http://www.jstor.org/stable/2333009
"""
from dictionary.tornado_dictionary import TornadoDic
from drift_detection.detector import SuperDetector
class | (SuperDetector):
"""The Page Hinkley (PH) drift detection method class."""
DETECTOR_NAME = TornadoDic.PH
def __init__(self, min_instance=30, delta=0.005, lambda_=50, alpha=1 - 0.0001):
super().__init__()
self.MINIMUM_NUM_INSTANCES = min_instance
self.m_n = 1
self.x_mean = 0.0
self.sum = 0.0
self.delta = delta
self.lambda_ = lambda_
self.alpha = alpha
def run(self, pr):
pr = 1 if pr is False else 0
warning_status = False
drift_status = False
# 1. UPDATING STATS
self.x_mean = self.x_mean + (pr - self.x_mean) / self.m_n
self.sum = self.alpha * self.sum + (pr - self.x_mean - self.delta)
self.m_n += 1
# 2. UPDATING WARNING AND DRIFT STATUSES
if self.m_n >= self.MINIMUM_NUM_INSTANCES:
if self.sum > self.lambda_:
drift_status = True
return warning_status, drift_status
def reset(self):
super().reset()
self.m_n = 1
self.x_mean = 0.0
self.sum = 0.0
def get_settings(self):
return [str(self.MINIMUM_NUM_INSTANCES) + "." + str(self.delta) + "." +
str(self.lambda_) + "." + str(self.alpha),
"$n_{min}$:" + str(self.MINIMUM_NUM_INSTANCES) + ", " +
"$\delta$:" + str(self.delta).upper() + ", " +
"$\lambda$:" + str(self.lambda_).upper() + ", " +
"$\\alpha$:" + str(self.alpha).upper()]
| PH | identifier_name |
page_hinkley.py | """
The Tornado Framework
By Ali Pesaranghader
University of Ottawa, Ontario, Canada
E-mail: apesaran -at- uottawa -dot- ca / alipsgh -at- gmail -dot- com
---
*** The Page Hinkley (PH) Method Implementation ***
Paper: Page, Ewan S. "Continuous inspection schemes."
Published in: Biometrika 41.1/2 (1954): 100-115.
URL: http://www.jstor.org/stable/2333009
"""
from dictionary.tornado_dictionary import TornadoDic
from drift_detection.detector import SuperDetector
class PH(SuperDetector):
"""The Page Hinkley (PH) drift detection method class."""
DETECTOR_NAME = TornadoDic.PH
def __init__(self, min_instance=30, delta=0.005, lambda_=50, alpha=1 - 0.0001):
super().__init__()
self.MINIMUM_NUM_INSTANCES = min_instance
self.m_n = 1
self.x_mean = 0.0
self.sum = 0.0
self.delta = delta
self.lambda_ = lambda_
self.alpha = alpha
def run(self, pr):
pr = 1 if pr is False else 0
warning_status = False
drift_status = False
# 1. UPDATING STATS
self.x_mean = self.x_mean + (pr - self.x_mean) / self.m_n
self.sum = self.alpha * self.sum + (pr - self.x_mean - self.delta)
self.m_n += 1
# 2. UPDATING WARNING AND DRIFT STATUSES
if self.m_n >= self.MINIMUM_NUM_INSTANCES:
if self.sum > self.lambda_:
|
return warning_status, drift_status
def reset(self):
super().reset()
self.m_n = 1
self.x_mean = 0.0
self.sum = 0.0
def get_settings(self):
return [str(self.MINIMUM_NUM_INSTANCES) + "." + str(self.delta) + "." +
str(self.lambda_) + "." + str(self.alpha),
"$n_{min}$:" + str(self.MINIMUM_NUM_INSTANCES) + ", " +
"$\delta$:" + str(self.delta).upper() + ", " +
"$\lambda$:" + str(self.lambda_).upper() + ", " +
"$\\alpha$:" + str(self.alpha).upper()]
| drift_status = True | conditional_block |
page_hinkley.py | """
The Tornado Framework
By Ali Pesaranghader
University of Ottawa, Ontario, Canada
E-mail: apesaran -at- uottawa -dot- ca / alipsgh -at- gmail -dot- com
---
*** The Page Hinkley (PH) Method Implementation ***
Paper: Page, Ewan S. "Continuous inspection schemes."
Published in: Biometrika 41.1/2 (1954): 100-115.
URL: http://www.jstor.org/stable/2333009
"""
from dictionary.tornado_dictionary import TornadoDic
from drift_detection.detector import SuperDetector
class PH(SuperDetector):
"""The Page Hinkley (PH) drift detection method class."""
DETECTOR_NAME = TornadoDic.PH
def __init__(self, min_instance=30, delta=0.005, lambda_=50, alpha=1 - 0.0001):
|
def run(self, pr):
pr = 1 if pr is False else 0
warning_status = False
drift_status = False
# 1. UPDATING STATS
self.x_mean = self.x_mean + (pr - self.x_mean) / self.m_n
self.sum = self.alpha * self.sum + (pr - self.x_mean - self.delta)
self.m_n += 1
# 2. UPDATING WARNING AND DRIFT STATUSES
if self.m_n >= self.MINIMUM_NUM_INSTANCES:
if self.sum > self.lambda_:
drift_status = True
return warning_status, drift_status
def reset(self):
super().reset()
self.m_n = 1
self.x_mean = 0.0
self.sum = 0.0
def get_settings(self):
return [str(self.MINIMUM_NUM_INSTANCES) + "." + str(self.delta) + "." +
str(self.lambda_) + "." + str(self.alpha),
"$n_{min}$:" + str(self.MINIMUM_NUM_INSTANCES) + ", " +
"$\delta$:" + str(self.delta).upper() + ", " +
"$\lambda$:" + str(self.lambda_).upper() + ", " +
"$\\alpha$:" + str(self.alpha).upper()]
| super().__init__()
self.MINIMUM_NUM_INSTANCES = min_instance
self.m_n = 1
self.x_mean = 0.0
self.sum = 0.0
self.delta = delta
self.lambda_ = lambda_
self.alpha = alpha | identifier_body |
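The page_hinkley.py rows above all mask spans of the same Tornado PH detector, whose docstring cites Page (1954) but shows no usage. The short Python sketch below is illustrative only: the import path and the boolean prediction stream are assumptions made for the example, not values taken from the rows.

# Illustrative usage sketch (not part of any dataset row). Assumes the Tornado
# sources are importable and this file is reachable as drift_detection.page_hinkley.
from drift_detection.page_hinkley import PH

detector = PH(min_instance=30, delta=0.005, lambda_=50, alpha=1 - 0.0001)

# Hypothetical stream of per-instance results: True means the model predicted correctly.
stream = [True] * 200 + [False] * 100

for i, correct in enumerate(stream):
    warning, drift = detector.run(correct)   # run() maps False (an error) to 1 internally
    if drift:
        print("drift signalled at instance", i)
        detector.reset()
        break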
paginator-demo-module.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {CommonModule} from '@angular/common';
import {NgModule} from '@angular/core';
import {FormsModule} from '@angular/forms';
import {MatCardModule} from '@angular/material/card';
import {MatFormFieldModule} from '@angular/material/form-field';
import {MatInputModule} from '@angular/material/input';
import {MatPaginatorModule} from '@angular/material/paginator';
import {MatSlideToggleModule} from '@angular/material/slide-toggle';
import {RouterModule} from '@angular/router';
import {PaginatorDemo} from './paginator-demo';
@NgModule({
imports: [
CommonModule,
FormsModule,
MatCardModule,
MatFormFieldModule,
MatInputModule,
MatPaginatorModule,
MatSlideToggleModule,
RouterModule.forChild([{path: '', component: PaginatorDemo}]),
],
declarations: [PaginatorDemo],
})
export class | {
}
| PaginatorDemoModule | identifier_name |
paginator-demo-module.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {CommonModule} from '@angular/common';
import {NgModule} from '@angular/core';
import {FormsModule} from '@angular/forms';
import {MatCardModule} from '@angular/material/card';
import {MatFormFieldModule} from '@angular/material/form-field';
import {MatInputModule} from '@angular/material/input';
import {MatPaginatorModule} from '@angular/material/paginator';
import {MatSlideToggleModule} from '@angular/material/slide-toggle';
import {RouterModule} from '@angular/router';
import {PaginatorDemo} from './paginator-demo';
@NgModule({
imports: [
CommonModule, | MatPaginatorModule,
MatSlideToggleModule,
RouterModule.forChild([{path: '', component: PaginatorDemo}]),
],
declarations: [PaginatorDemo],
})
export class PaginatorDemoModule {
} | FormsModule,
MatCardModule,
MatFormFieldModule,
MatInputModule, | random_line_split |
test_flow_action.py | # -*- coding:utf-8 -*-
# Copyright 2015 NEC Corporation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
from org.o3project.odenos.core.component.network.flow.basic.flow_action\
import FlowAction
import unittest
class FlowActionTest(unittest.TestCase):
Type = "FlowActionOutput"
def setUp(self):
self.target = FlowAction(self.Type)
def tearDown(self):
self.target = None
def test_constructor(self):
|
def test_type(self):
self.assertEqual(self.target.type, self.Type)
if __name__ == '__main__':
unittest.main() | self.assertEqual(self.target._body[self.target.TYPE], self.Type) | identifier_body |
test_flow_action.py | # -*- coding:utf-8 -*-
# Copyright 2015 NEC Corporation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
from org.o3project.odenos.core.component.network.flow.basic.flow_action\
import FlowAction
import unittest
class FlowActionTest(unittest.TestCase):
Type = "FlowActionOutput"
def setUp(self):
self.target = FlowAction(self.Type)
def tearDown(self):
self.target = None
def | (self):
self.assertEqual(self.target._body[self.target.TYPE], self.Type)
def test_type(self):
self.assertEqual(self.target.type, self.Type)
if __name__ == '__main__':
unittest.main() | test_constructor | identifier_name |
test_flow_action.py | # -*- coding:utf-8 -*-
# Copyright 2015 NEC Corporation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
from org.o3project.odenos.core.component.network.flow.basic.flow_action\
import FlowAction
import unittest
class FlowActionTest(unittest.TestCase):
Type = "FlowActionOutput"
def setUp(self):
self.target = FlowAction(self.Type)
def tearDown(self):
self.target = None
def test_constructor(self):
self.assertEqual(self.target._body[self.target.TYPE], self.Type)
def test_type(self):
self.assertEqual(self.target.type, self.Type)
if __name__ == '__main__':
| unittest.main() | conditional_block |
|
test_flow_action.py | # -*- coding:utf-8 -*- | # Copyright 2015 NEC Corporation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
from org.o3project.odenos.core.component.network.flow.basic.flow_action\
import FlowAction
import unittest
class FlowActionTest(unittest.TestCase):
Type = "FlowActionOutput"
def setUp(self):
self.target = FlowAction(self.Type)
def tearDown(self):
self.target = None
def test_constructor(self):
self.assertEqual(self.target._body[self.target.TYPE], self.Type)
def test_type(self):
self.assertEqual(self.target.type, self.Type)
if __name__ == '__main__':
unittest.main() | random_line_split |
|
put_doc.js | var superagent = require('superagent')
var env = process.env
/**
* put_doc
* initialize with the couchdb to save to
*
* expects that the url, port, username, password are in environment
* variables. If not, add these to the options object.
*
* var cuser = env.COUCHDB_USER ;
* var cpass = env.COUCHDB_PASS ;
* var chost = env.COUCHDB_HOST || '127.0.0.1';
* var cport = env.COUCHDB_PORT || 5984;
*
* Options:
*
* {"cuser":"somerthineg",
* "cpass":"password",
* "chost":"couchdb host",
* "cport":"couchdb port", // must be a number
* "cdb" :"the%2fcouchdb%2fto%2fuse" // be sure to properly escape your db names
* }
*
* If you don't need user/pass to create docs, feel free to skip
* these. I only try to use credentials if these are truthy
*
* returns a function that will save new entries
*
* to create a new doc in couchdb, call with the
* object that is the doc, plus a callback
*
 * The object should be a couchdb doc, but the _id field is optional.
*
* but highly recommended
*
* The first argument to the callback is whether there is an error in
 * the request, the second is the json object returned from couchdb,
 * which should have the save state of the document (ok or rejected)
*
*/
function put_doc(opts){
if(opts.cdb === undefined)
throw new Error('must define the {"cdb":"dbname"} option')
var cuser = env.COUCHDB_USER
var cpass = env.COUCHDB_PASS
var chost = env.COUCHDB_HOST || '127.0.0.1'
var cport = env.COUCHDB_PORT || 5984
// override env. vars
if(opts.cuser !== undefined) cuser = opts.cuser
if(opts.cpass !== undefined) cpass = opts.cpass
if(opts.chost !== undefined) chost = opts.chost
if(opts.cport !== undefined) cport = +opts.cport
var couch = 'http://'+chost+':'+cport
if(/http/.test(chost)) couch = chost+':'+cport
var overwrite = false
function | (doc,next){
var uri = couch+'/'+opts.cdb
var req
if(overwrite && doc._id !== undefined){
uri += '/'+doc._id
superagent.head(uri)
.end(function(err,res){
if(res.header.etag){
doc._rev=JSON.parse(res.headers.etag)
}
var req = superagent.put(uri)
.type('json')
.set('accept','application/json')
if(cuser && cpass){
req.auth(cuser,cpass)
}
req.send(doc)
req.end(function(e,r){
if(e) return next(e)
return next(null,r.body)
})
})
}else{
if(doc._id !== undefined){
uri += '/'+doc._id
req = superagent.put(uri)
}else{
req = superagent.post(uri)
}
req.type('json')
.set('accept','application/json')
if(cuser && cpass){
req.auth(cuser,cpass)
}
req.send(doc)
req.end(function(e,r){
if(e) return next(e)
return next(null,r.body)
})
}
return null
}
put.overwrite = function(setting){
if(setting === undefined){
return overwrite
}
overwrite = setting
return overwrite
}
return put
}
module.exports=put_doc
| put | identifier_name |
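The header comment in the put_doc.js rows documents a create-or-update flow against CouchDB: POST when the doc has no _id, PUT when it does, and a HEAD request to copy the current revision out of the ETag when overwriting. The Python sketch below only illustrates that flow; the host, credentials and database name are placeholders, and the use of the requests library is an assumption, not part of the module.

# Rough Python illustration of the flow described in the comment above; all
# names here (host, credentials, database) are placeholders, not row content.
import json
import requests

COUCH = "http://127.0.0.1:5984"
DB = "my%2Fdb"            # database name, URL-escaped as the comment advises
AUTH = ("user", "pass")   # only needed if the server requires credentials

def put_doc(doc, overwrite=False):
    uri = COUCH + "/" + DB
    if "_id" in doc:
        uri = uri + "/" + doc["_id"]
        if overwrite:
            # CouchDB returns the current revision as a JSON-quoted ETag header.
            head = requests.head(uri, auth=AUTH)
            etag = head.headers.get("ETag")
            if etag:
                doc["_rev"] = json.loads(etag)
        resp = requests.put(uri, json=doc, auth=AUTH)
    else:
        # Without an _id, POST to the database and let CouchDB assign one.
        resp = requests.post(uri, json=doc, auth=AUTH)
    return resp.json()    # {"ok": true, "id": ..., "rev": ...} on success

print(put_doc({"_id": "demo-doc", "value": 42}, overwrite=True))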
put_doc.js | var superagent = require('superagent')
var env = process.env
/**
* put_doc
* initialize with the couchdb to save to
*
* expects that the url, port, username, password are in environment
* variables. If not, add these to the options object.
*
* var cuser = env.COUCHDB_USER ;
* var cpass = env.COUCHDB_PASS ;
* var chost = env.COUCHDB_HOST || '127.0.0.1';
* var cport = env.COUCHDB_PORT || 5984;
*
* Options:
*
* {"cuser":"somerthineg",
* "cpass":"password",
* "chost":"couchdb host",
* "cport":"couchdb port", // must be a number
* "cdb" :"the%2fcouchdb%2fto%2fuse" // be sure to properly escape your db names
* }
*
* If you don't need user/pass to create docs, feel free to skip
* these. I only try to use credentials if these are truthy
*
* returns a function that will save new entries
*
* to create a new doc in couchdb, call with the
* object that is the doc, plus a callback
*
 * The object should be a couchdb doc, but the _id field is optional.
*
* but highly recommended
*
* The first argument to the callback is whether there is an error in
 * the request, the second is the json object returned from couchdb,
 * which should have the save state of the document (ok or rejected)
*
*/
function put_doc(opts) |
module.exports=put_doc
| {
if(opts.cdb === undefined)
throw new Error('must define the {"cdb":"dbname"} option')
var cuser = env.COUCHDB_USER
var cpass = env.COUCHDB_PASS
var chost = env.COUCHDB_HOST || '127.0.0.1'
var cport = env.COUCHDB_PORT || 5984
// override env. vars
if(opts.cuser !== undefined) cuser = opts.cuser
if(opts.cpass !== undefined) cpass = opts.cpass
if(opts.chost !== undefined) chost = opts.chost
if(opts.cport !== undefined) cport = +opts.cport
var couch = 'http://'+chost+':'+cport
if(/http/.test(chost)) couch = chost+':'+cport
var overwrite = false
function put(doc,next){
var uri = couch+'/'+opts.cdb
var req
if(overwrite && doc._id !== undefined){
uri += '/'+doc._id
superagent.head(uri)
.end(function(err,res){
if(res.header.etag){
doc._rev=JSON.parse(res.headers.etag)
}
var req = superagent.put(uri)
.type('json')
.set('accept','application/json')
if(cuser && cpass){
req.auth(cuser,cpass)
}
req.send(doc)
req.end(function(e,r){
if(e) return next(e)
return next(null,r.body)
})
})
}else{
if(doc._id !== undefined){
uri += '/'+doc._id
req = superagent.put(uri)
}else{
req = superagent.post(uri)
}
req.type('json')
.set('accept','application/json')
if(cuser && cpass){
req.auth(cuser,cpass)
}
req.send(doc)
req.end(function(e,r){
if(e) return next(e)
return next(null,r.body)
})
}
return null
}
put.overwrite = function(setting){
if(setting === undefined){
return overwrite
}
overwrite = setting
return overwrite
}
return put
} | identifier_body |
put_doc.js | var superagent = require('superagent')
var env = process.env
/**
* put_doc
* initialize with the couchdb to save to | * var cpass = env.COUCHDB_PASS ;
* var chost = env.COUCHDB_HOST || '127.0.0.1';
* var cport = env.COUCHDB_PORT || 5984;
*
* Options:
*
* {"cuser":"somerthineg",
* "cpass":"password",
* "chost":"couchdb host",
* "cport":"couchdb port", // must be a number
* "cdb" :"the%2fcouchdb%2fto%2fuse" // be sure to properly escape your db names
* }
*
* If you don't need user/pass to create docs, feel free to skip
* these. I only try to use credentials if these are truthy
*
* returns a function that will save new entries
*
* to create a new doc in couchdb, call with the
* object that is the doc, plus a callback
*
* The object should be a couchdb doc, but th _id field is optional.
*
* but highly recommended
*
* The first argument to the callback is whether there is an error in
* the requqest, the second is the json object returned from couchdb,
* whcih should have the save state of the document (ok or rejected)
*
*/
function put_doc(opts){
if(opts.cdb === undefined)
throw new Error('must define the {"cdb":"dbname"} option')
var cuser = env.COUCHDB_USER
var cpass = env.COUCHDB_PASS
var chost = env.COUCHDB_HOST || '127.0.0.1'
var cport = env.COUCHDB_PORT || 5984
// override env. vars
if(opts.cuser !== undefined) cuser = opts.cuser
if(opts.cpass !== undefined) cpass = opts.cpass
if(opts.chost !== undefined) chost = opts.chost
if(opts.cport !== undefined) cport = +opts.cport
var couch = 'http://'+chost+':'+cport
if(/http/.test(chost)) couch = chost+':'+cport
var overwrite = false
function put(doc,next){
var uri = couch+'/'+opts.cdb
var req
if(overwrite && doc._id !== undefined){
uri += '/'+doc._id
superagent.head(uri)
.end(function(err,res){
if(res.header.etag){
doc._rev=JSON.parse(res.headers.etag)
}
var req = superagent.put(uri)
.type('json')
.set('accept','application/json')
if(cuser && cpass){
req.auth(cuser,cpass)
}
req.send(doc)
req.end(function(e,r){
if(e) return next(e)
return next(null,r.body)
})
})
}else{
if(doc._id !== undefined){
uri += '/'+doc._id
req = superagent.put(uri)
}else{
req = superagent.post(uri)
}
req.type('json')
.set('accept','application/json')
if(cuser && cpass){
req.auth(cuser,cpass)
}
req.send(doc)
req.end(function(e,r){
if(e) return next(e)
return next(null,r.body)
})
}
return null
}
put.overwrite = function(setting){
if(setting === undefined){
return overwrite
}
overwrite = setting
return overwrite
}
return put
}
module.exports=put_doc | *
* expects that the url, port, username, password are in environment
* variables. If not, add these to the options object.
*
* var cuser = env.COUCHDB_USER ; | random_line_split |
subst.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type substitutions.
use middle::ty;
use syntax::opt_vec::OptVec;
use util::ppaux::Repr;
///////////////////////////////////////////////////////////////////////////
// Public trait `Subst`
//
// Just call `foo.subst(tcx, substs)` to perform a substitution across
// `foo`.
pub trait Subst {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Self;
}
///////////////////////////////////////////////////////////////////////////
// Substitution over types
//
// Because this is so common, we make a special optimization to avoid
// doing anything if `substs` is a no-op. I tried to generalize these
// to all subst methods but ran into trouble due to the limitations of
// our current method/trait matching algorithm. - Niko
trait EffectfulSubst {
fn effectfulSubst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Self;
}
impl Subst for ty::t {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::t {
if ty::substs_is_noop(substs) {
return *self;
} else {
return self.effectfulSubst(tcx, substs);
}
}
}
impl EffectfulSubst for ty::t {
fn effectfulSubst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::t {
if !ty::type_needs_subst(*self) {
return *self;
}
match ty::get(*self).sty {
ty::ty_param(p) => {
substs.tps[p.idx]
}
ty::ty_self(_) => {
substs.self_ty.expect("ty_self not found in substs")
}
_ => {
ty::fold_regions_and_ty(
tcx, *self,
|r| r.subst(tcx, substs),
|t| t.effectfulSubst(tcx, substs),
|t| t.effectfulSubst(tcx, substs))
}
}
}
}
///////////////////////////////////////////////////////////////////////////
// Other types
impl<T:Subst> Subst for ~[T] {
fn | (&self, tcx: ty::ctxt, substs: &ty::substs) -> ~[T] {
self.map(|t| t.subst(tcx, substs))
}
}
impl<T:Subst> Subst for OptVec<T> {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> OptVec<T> {
self.map(|t| t.subst(tcx, substs))
}
}
impl<T:Subst + 'static> Subst for @T {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> @T {
match self {
&@ref t => @t.subst(tcx, substs)
}
}
}
impl<T:Subst> Subst for Option<T> {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Option<T> {
self.as_ref().map(|t| t.subst(tcx, substs))
}
}
impl Subst for ty::TraitRef {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::TraitRef {
ty::TraitRef {
def_id: self.def_id,
substs: self.substs.subst(tcx, substs)
}
}
}
impl Subst for ty::substs {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::substs {
ty::substs {
regions: self.regions.subst(tcx, substs),
self_ty: self.self_ty.map(|typ| typ.subst(tcx, substs)),
tps: self.tps.map(|typ| typ.subst(tcx, substs))
}
}
}
impl Subst for ty::RegionSubsts {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::RegionSubsts {
match *self {
ty::ErasedRegions => {
ty::ErasedRegions
}
ty::NonerasedRegions(ref regions) => {
ty::NonerasedRegions(regions.subst(tcx, substs))
}
}
}
}
impl Subst for ty::BareFnTy {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::BareFnTy {
ty::fold_bare_fn_ty(self, |t| t.subst(tcx, substs))
}
}
impl Subst for ty::ParamBounds {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::ParamBounds {
ty::ParamBounds {
builtin_bounds: self.builtin_bounds,
trait_bounds: self.trait_bounds.subst(tcx, substs)
}
}
}
impl Subst for ty::TypeParameterDef {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::TypeParameterDef {
ty::TypeParameterDef {
ident: self.ident,
def_id: self.def_id,
bounds: self.bounds.subst(tcx, substs)
}
}
}
impl Subst for ty::Generics {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::Generics {
ty::Generics {
type_param_defs: self.type_param_defs.subst(tcx, substs),
region_param: self.region_param
}
}
}
impl Subst for ty::Region {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::Region {
// Note: This routine only handles the self region, because it
// is only concerned with substitutions of regions that appear
// in types. Region substitution of the bound regions that
// appear in a function signature is done using the
// specialized routine
// `middle::typeck::check::regionmanip::replace_bound_regions_in_fn_sig()`.
// As we transition to the new region syntax this distinction
// will most likely disappear.
match self {
&ty::re_bound(ty::br_self) => {
match substs.regions {
ty::ErasedRegions => ty::re_static,
ty::NonerasedRegions(ref regions) => {
if regions.len() != 1 {
tcx.sess.bug(
format!("ty::Region\\#subst(): \
Reference to self region when \
given substs with no self region: {}",
substs.repr(tcx)));
}
*regions.get(0)
}
}
}
_ => *self
}
}
}
impl Subst for ty::ty_param_bounds_and_ty {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty {
generics: self.generics.subst(tcx, substs),
ty: self.ty.subst(tcx, substs)
}
}
}
| subst | identifier_name |
subst.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type substitutions.
use middle::ty;
use syntax::opt_vec::OptVec;
use util::ppaux::Repr;
///////////////////////////////////////////////////////////////////////////
// Public trait `Subst`
//
// Just call `foo.subst(tcx, substs)` to perform a substitution across
// `foo`.
pub trait Subst {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Self;
}
///////////////////////////////////////////////////////////////////////////
// Substitution over types
//
// Because this is so common, we make a special optimization to avoid
// doing anything if `substs` is a no-op. I tried to generalize these
// to all subst methods but ran into trouble due to the limitations of
// our current method/trait matching algorithm. - Niko
trait EffectfulSubst {
fn effectfulSubst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Self;
}
impl Subst for ty::t {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::t {
if ty::substs_is_noop(substs) {
return *self;
} else {
return self.effectfulSubst(tcx, substs);
}
}
}
impl EffectfulSubst for ty::t {
fn effectfulSubst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::t {
if !ty::type_needs_subst(*self) {
return *self;
}
match ty::get(*self).sty {
ty::ty_param(p) => {
substs.tps[p.idx]
}
ty::ty_self(_) => {
substs.self_ty.expect("ty_self not found in substs")
}
_ => {
ty::fold_regions_and_ty(
tcx, *self,
|r| r.subst(tcx, substs),
|t| t.effectfulSubst(tcx, substs),
|t| t.effectfulSubst(tcx, substs)) |
///////////////////////////////////////////////////////////////////////////
// Other types
impl<T:Subst> Subst for ~[T] {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ~[T] {
self.map(|t| t.subst(tcx, substs))
}
}
impl<T:Subst> Subst for OptVec<T> {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> OptVec<T> {
self.map(|t| t.subst(tcx, substs))
}
}
impl<T:Subst + 'static> Subst for @T {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> @T {
match self {
&@ref t => @t.subst(tcx, substs)
}
}
}
impl<T:Subst> Subst for Option<T> {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Option<T> {
self.as_ref().map(|t| t.subst(tcx, substs))
}
}
impl Subst for ty::TraitRef {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::TraitRef {
ty::TraitRef {
def_id: self.def_id,
substs: self.substs.subst(tcx, substs)
}
}
}
impl Subst for ty::substs {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::substs {
ty::substs {
regions: self.regions.subst(tcx, substs),
self_ty: self.self_ty.map(|typ| typ.subst(tcx, substs)),
tps: self.tps.map(|typ| typ.subst(tcx, substs))
}
}
}
impl Subst for ty::RegionSubsts {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::RegionSubsts {
match *self {
ty::ErasedRegions => {
ty::ErasedRegions
}
ty::NonerasedRegions(ref regions) => {
ty::NonerasedRegions(regions.subst(tcx, substs))
}
}
}
}
impl Subst for ty::BareFnTy {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::BareFnTy {
ty::fold_bare_fn_ty(self, |t| t.subst(tcx, substs))
}
}
impl Subst for ty::ParamBounds {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::ParamBounds {
ty::ParamBounds {
builtin_bounds: self.builtin_bounds,
trait_bounds: self.trait_bounds.subst(tcx, substs)
}
}
}
impl Subst for ty::TypeParameterDef {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::TypeParameterDef {
ty::TypeParameterDef {
ident: self.ident,
def_id: self.def_id,
bounds: self.bounds.subst(tcx, substs)
}
}
}
impl Subst for ty::Generics {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::Generics {
ty::Generics {
type_param_defs: self.type_param_defs.subst(tcx, substs),
region_param: self.region_param
}
}
}
impl Subst for ty::Region {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::Region {
// Note: This routine only handles the self region, because it
// is only concerned with substitutions of regions that appear
// in types. Region substitution of the bound regions that
// appear in a function signature is done using the
// specialized routine
// `middle::typeck::check::regionmanip::replace_bound_regions_in_fn_sig()`.
// As we transition to the new region syntax this distinction
// will most likely disappear.
match self {
&ty::re_bound(ty::br_self) => {
match substs.regions {
ty::ErasedRegions => ty::re_static,
ty::NonerasedRegions(ref regions) => {
if regions.len() != 1 {
tcx.sess.bug(
format!("ty::Region\\#subst(): \
Reference to self region when \
given substs with no self region: {}",
substs.repr(tcx)));
}
*regions.get(0)
}
}
}
_ => *self
}
}
}
impl Subst for ty::ty_param_bounds_and_ty {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty {
generics: self.generics.subst(tcx, substs),
ty: self.ty.subst(tcx, substs)
}
}
} | }
}
}
} | random_line_split |
subst.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type substitutions.
use middle::ty;
use syntax::opt_vec::OptVec;
use util::ppaux::Repr;
///////////////////////////////////////////////////////////////////////////
// Public trait `Subst`
//
// Just call `foo.subst(tcx, substs)` to perform a substitution across
// `foo`.
pub trait Subst {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Self;
}
///////////////////////////////////////////////////////////////////////////
// Substitution over types
//
// Because this is so common, we make a special optimization to avoid
// doing anything if `substs` is a no-op. I tried to generalize these
// to all subst methods but ran into trouble due to the limitations of
// our current method/trait matching algorithm. - Niko
trait EffectfulSubst {
fn effectfulSubst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Self;
}
impl Subst for ty::t {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::t {
if ty::substs_is_noop(substs) {
return *self;
} else {
return self.effectfulSubst(tcx, substs);
}
}
}
impl EffectfulSubst for ty::t {
fn effectfulSubst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::t {
if !ty::type_needs_subst(*self) {
return *self;
}
match ty::get(*self).sty {
ty::ty_param(p) => {
substs.tps[p.idx]
}
ty::ty_self(_) => {
substs.self_ty.expect("ty_self not found in substs")
}
_ => {
ty::fold_regions_and_ty(
tcx, *self,
|r| r.subst(tcx, substs),
|t| t.effectfulSubst(tcx, substs),
|t| t.effectfulSubst(tcx, substs))
}
}
}
}
///////////////////////////////////////////////////////////////////////////
// Other types
impl<T:Subst> Subst for ~[T] {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ~[T] {
self.map(|t| t.subst(tcx, substs))
}
}
impl<T:Subst> Subst for OptVec<T> {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> OptVec<T> {
self.map(|t| t.subst(tcx, substs))
}
}
impl<T:Subst + 'static> Subst for @T {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> @T {
match self {
&@ref t => @t.subst(tcx, substs)
}
}
}
impl<T:Subst> Subst for Option<T> {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Option<T> {
self.as_ref().map(|t| t.subst(tcx, substs))
}
}
impl Subst for ty::TraitRef {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::TraitRef {
ty::TraitRef {
def_id: self.def_id,
substs: self.substs.subst(tcx, substs)
}
}
}
impl Subst for ty::substs {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::substs {
ty::substs {
regions: self.regions.subst(tcx, substs),
self_ty: self.self_ty.map(|typ| typ.subst(tcx, substs)),
tps: self.tps.map(|typ| typ.subst(tcx, substs))
}
}
}
impl Subst for ty::RegionSubsts {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::RegionSubsts {
match *self {
ty::ErasedRegions => |
ty::NonerasedRegions(ref regions) => {
ty::NonerasedRegions(regions.subst(tcx, substs))
}
}
}
}
impl Subst for ty::BareFnTy {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::BareFnTy {
ty::fold_bare_fn_ty(self, |t| t.subst(tcx, substs))
}
}
impl Subst for ty::ParamBounds {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::ParamBounds {
ty::ParamBounds {
builtin_bounds: self.builtin_bounds,
trait_bounds: self.trait_bounds.subst(tcx, substs)
}
}
}
impl Subst for ty::TypeParameterDef {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::TypeParameterDef {
ty::TypeParameterDef {
ident: self.ident,
def_id: self.def_id,
bounds: self.bounds.subst(tcx, substs)
}
}
}
impl Subst for ty::Generics {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::Generics {
ty::Generics {
type_param_defs: self.type_param_defs.subst(tcx, substs),
region_param: self.region_param
}
}
}
impl Subst for ty::Region {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::Region {
// Note: This routine only handles the self region, because it
// is only concerned with substitutions of regions that appear
// in types. Region substitution of the bound regions that
// appear in a function signature is done using the
// specialized routine
// `middle::typeck::check::regionmanip::replace_bound_regions_in_fn_sig()`.
// As we transition to the new region syntax this distinction
// will most likely disappear.
match self {
&ty::re_bound(ty::br_self) => {
match substs.regions {
ty::ErasedRegions => ty::re_static,
ty::NonerasedRegions(ref regions) => {
if regions.len() != 1 {
tcx.sess.bug(
format!("ty::Region\\#subst(): \
Reference to self region when \
given substs with no self region: {}",
substs.repr(tcx)));
}
*regions.get(0)
}
}
}
_ => *self
}
}
}
impl Subst for ty::ty_param_bounds_and_ty {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty {
generics: self.generics.subst(tcx, substs),
ty: self.ty.subst(tcx, substs)
}
}
}
| {
ty::ErasedRegions
} | conditional_block |
subst.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type substitutions.
use middle::ty;
use syntax::opt_vec::OptVec;
use util::ppaux::Repr;
///////////////////////////////////////////////////////////////////////////
// Public trait `Subst`
//
// Just call `foo.subst(tcx, substs)` to perform a substitution across
// `foo`.
pub trait Subst {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Self;
}
///////////////////////////////////////////////////////////////////////////
// Substitution over types
//
// Because this is so common, we make a special optimization to avoid
// doing anything if `substs` is a no-op. I tried to generalize these
// to all subst methods but ran into trouble due to the limitations of
// our current method/trait matching algorithm. - Niko
trait EffectfulSubst {
fn effectfulSubst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Self;
}
impl Subst for ty::t {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::t {
if ty::substs_is_noop(substs) {
return *self;
} else {
return self.effectfulSubst(tcx, substs);
}
}
}
impl EffectfulSubst for ty::t {
fn effectfulSubst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::t {
if !ty::type_needs_subst(*self) {
return *self;
}
match ty::get(*self).sty {
ty::ty_param(p) => {
substs.tps[p.idx]
}
ty::ty_self(_) => {
substs.self_ty.expect("ty_self not found in substs")
}
_ => {
ty::fold_regions_and_ty(
tcx, *self,
|r| r.subst(tcx, substs),
|t| t.effectfulSubst(tcx, substs),
|t| t.effectfulSubst(tcx, substs))
}
}
}
}
///////////////////////////////////////////////////////////////////////////
// Other types
impl<T:Subst> Subst for ~[T] {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ~[T] {
self.map(|t| t.subst(tcx, substs))
}
}
impl<T:Subst> Subst for OptVec<T> {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> OptVec<T> {
self.map(|t| t.subst(tcx, substs))
}
}
impl<T:Subst + 'static> Subst for @T {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> @T {
match self {
&@ref t => @t.subst(tcx, substs)
}
}
}
impl<T:Subst> Subst for Option<T> {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Option<T> {
self.as_ref().map(|t| t.subst(tcx, substs))
}
}
impl Subst for ty::TraitRef {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::TraitRef {
ty::TraitRef {
def_id: self.def_id,
substs: self.substs.subst(tcx, substs)
}
}
}
impl Subst for ty::substs {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::substs {
ty::substs {
regions: self.regions.subst(tcx, substs),
self_ty: self.self_ty.map(|typ| typ.subst(tcx, substs)),
tps: self.tps.map(|typ| typ.subst(tcx, substs))
}
}
}
impl Subst for ty::RegionSubsts {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::RegionSubsts {
match *self {
ty::ErasedRegions => {
ty::ErasedRegions
}
ty::NonerasedRegions(ref regions) => {
ty::NonerasedRegions(regions.subst(tcx, substs))
}
}
}
}
impl Subst for ty::BareFnTy {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::BareFnTy {
ty::fold_bare_fn_ty(self, |t| t.subst(tcx, substs))
}
}
impl Subst for ty::ParamBounds {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::ParamBounds {
ty::ParamBounds {
builtin_bounds: self.builtin_bounds,
trait_bounds: self.trait_bounds.subst(tcx, substs)
}
}
}
impl Subst for ty::TypeParameterDef {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::TypeParameterDef {
ty::TypeParameterDef {
ident: self.ident,
def_id: self.def_id,
bounds: self.bounds.subst(tcx, substs)
}
}
}
impl Subst for ty::Generics {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::Generics |
}
impl Subst for ty::Region {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::Region {
// Note: This routine only handles the self region, because it
// is only concerned with substitutions of regions that appear
// in types. Region substitution of the bound regions that
// appear in a function signature is done using the
// specialized routine
// `middle::typeck::check::regionmanip::replace_bound_regions_in_fn_sig()`.
// As we transition to the new region syntax this distinction
// will most likely disappear.
match self {
&ty::re_bound(ty::br_self) => {
match substs.regions {
ty::ErasedRegions => ty::re_static,
ty::NonerasedRegions(ref regions) => {
if regions.len() != 1 {
tcx.sess.bug(
format!("ty::Region\\#subst(): \
Reference to self region when \
given substs with no self region: {}",
substs.repr(tcx)));
}
*regions.get(0)
}
}
}
_ => *self
}
}
}
impl Subst for ty::ty_param_bounds_and_ty {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty {
generics: self.generics.subst(tcx, substs),
ty: self.ty.subst(tcx, substs)
}
}
}
| {
ty::Generics {
type_param_defs: self.type_param_defs.subst(tcx, substs),
region_param: self.region_param
}
} | identifier_body |
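The subst.rs rows above carry a comment explaining why ty::t gets a special fast path: substitution is so common that subst returns the value untouched when the substitution is a no-op, and only falls back to the recursive fold otherwise. The Python sketch below is a conceptual analogue of that pattern on a toy type, not the rustc API.

# Conceptual analogue of the no-op fast path described in subst.rs (toy types only).
from dataclasses import dataclass

@dataclass
class Param:
    idx: int          # a type parameter, replaced by substs[idx]

@dataclass
class Fn:
    arg: object       # a toy compound type that must be folded through
    ret: object

def subst(ty, substs):
    if not substs:    # the "substs_is_noop" check: skip all work
        return ty
    return _effectful_subst(ty, substs)

def _effectful_subst(ty, substs):
    if isinstance(ty, Param):
        return substs[ty.idx]
    if isinstance(ty, Fn):
        return Fn(_effectful_subst(ty.arg, substs),
                  _effectful_subst(ty.ret, substs))
    return ty         # leaf types contain nothing to substitute

print(subst(Fn(Param(0), Param(1)), ["int", "bool"]))  # Fn(arg='int', ret='bool')
print(subst(Fn(Param(0), Param(1)), []))               # returned unchanged via the fast path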
constants.ts | /**
* @license
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tab names for primary tabs on change view page.
*/
import {DiffViewMode} from '../api/diff';
import {DiffPreferencesInfo} from '../types/diff';
import {EditPreferencesInfo, PreferencesInfo} from '../types/common';
import {
AuthType,
ChangeStatus,
ConfigParameterInfoType,
DefaultDisplayNameConfig,
EditableAccountField,
FileInfoStatus,
GpgKeyInfoStatus,
HttpMethod,
InheritedBooleanInfoConfiguredValue,
MergeabilityComputationBehavior,
ProblemInfoStatus,
ProjectState,
RequirementStatus,
ReviewerState,
RevisionKind,
SubmitType,
} from '../api/rest-api';
export {
AuthType,
ChangeStatus,
ConfigParameterInfoType,
DefaultDisplayNameConfig,
EditableAccountField,
FileInfoStatus,
GpgKeyInfoStatus,
HttpMethod,
InheritedBooleanInfoConfiguredValue,
MergeabilityComputationBehavior,
ProblemInfoStatus,
ProjectState,
RequirementStatus,
ReviewerState,
RevisionKind,
SubmitType,
};
export enum AccountTag {
SERVICE_USER = 'SERVICE_USER',
}
export enum PrimaryTab {
FILES = 'files',
/**
* When renaming 'comments' or 'findings', UrlFormatter.java must be updated.
*/
COMMENT_THREADS = 'comments',
FINDINGS = 'findings',
CHECKS = 'checks',
}
/**
* Tab names for secondary tabs on change view page.
*/
export enum SecondaryTab {
CHANGE_LOG = '_changeLog',
}
/**
* Tag names of change log messages.
*/
export enum MessageTag {
TAG_DELETE_REVIEWER = 'autogenerated:gerrit:deleteReviewer',
TAG_NEW_PATCHSET = 'autogenerated:gerrit:newPatchSet',
TAG_NEW_WIP_PATCHSET = 'autogenerated:gerrit:newWipPatchSet',
TAG_REVIEWER_UPDATE = 'autogenerated:gerrit:reviewerUpdate',
TAG_SET_PRIVATE = 'autogenerated:gerrit:setPrivate',
TAG_UNSET_PRIVATE = 'autogenerated:gerrit:unsetPrivate',
TAG_SET_READY = 'autogenerated:gerrit:setReadyForReview',
TAG_SET_WIP = 'autogenerated:gerrit:setWorkInProgress',
TAG_MERGED = 'autogenerated:gerrit:merged',
TAG_REVERT = 'autogenerated:gerrit:revert',
}
/**
* Modes for gr-diff-cursor
* The scroll behavior for the cursor. Values are 'never' and
* 'keep-visible'. 'keep-visible' will only scroll if the cursor is beyond
* the viewport.
*/
export enum ScrollMode {
KEEP_VISIBLE = 'keep-visible',
NEVER = 'never',
}
/**
* Special file paths
*/
export enum SpecialFilePath {
PATCHSET_LEVEL_COMMENTS = '/PATCHSET_LEVEL',
COMMIT_MESSAGE = '/COMMIT_MSG',
MERGE_LIST = '/MERGE_LIST',
}
export {Side} from '../api/diff';
/**
* https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#mergeable-info
*/
export enum MergeStrategy {
RECURSIVE = 'recursive',
RESOLVE = 'resolve',
SIMPLE_TWO_WAY_IN_CORE = 'simple-two-way-in-core',
OURS = 'ours',
THEIRS = 'theirs',
}
/**
* Enum for possible PermissionRuleInfo actions
* https://gerrit-review.googlesource.com/Documentation/rest-api-access.html#permission-info
*/
export enum PermissionAction {
ALLOW = 'ALLOW',
DENY = 'DENY',
BLOCK = 'BLOCK',
// Special values for global capabilities
INTERACTIVE = 'INTERACTIVE',
BATCH = 'BATCH',
}
/**
* This capability allows users to use the thread pool reserved for 'Non-Interactive Users'.
* https://gerrit-review.googlesource.com/Documentation/access-control.html#capability_priority
*/
export enum UserPriority {
BATCH = 'BATCH',
INTERACTIVE = 'INTERACTIVE',
}
/**
* The side on which the comment was added
* https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#comment-info
*/
export enum CommentSide {
REVISION = 'REVISION',
PARENT = 'PARENT',
}
/**
* Allowed app themes
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum AppTheme {
DARK = 'DARK',
LIGHT = 'LIGHT',
}
/**
* Date formats in preferences
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum DateFormat {
STD = 'STD',
US = 'US',
ISO = 'ISO',
EURO = 'EURO',
UK = 'UK',
}
/**
* Time formats in preferences
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum TimeFormat {
HHMM_12 = 'HHMM_12',
HHMM_24 = 'HHMM_24',
}
export {DiffViewMode};
/**
* The type of email strategy to use.
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum EmailStrategy {
ENABLED = 'ENABLED',
CC_ON_OWN_COMMENTS = 'CC_ON_OWN_COMMENTS',
ATTENTION_SET_ONLY = 'ATTENTION_SET_ONLY',
DISABLED = 'DISABLED',
}
/**
* The type of email format to use.
 * Not mentioned in the doc, but exists in Java class GeneralPreferencesInfo.
*/
export enum EmailFormat { | /**
* The base which should be pre-selected in the 'Diff Against' drop-down list when the change screen is opened for a merge commit
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum DefaultBase {
AUTO_MERGE = 'AUTO_MERGE',
FIRST_PARENT = 'FIRST_PARENT',
}
/**
* how draft comments are handled
*/
export enum DraftsAction {
PUBLISH = 'PUBLISH',
PUBLISH_ALL_REVISIONS = 'PUBLISH_ALL_REVISIONS',
KEEP = 'KEEP',
}
export enum NotifyType {
NONE = 'NONE',
OWNER = 'OWNER',
OWNER_REVIEWERS = 'OWNER_REVIEWERS',
ALL = 'ALL',
}
/**
* Controls visibility of other users' dashboard pages and completion suggestions to web users
* https://gerrit-review.googlesource.com/Documentation/config-gerrit.html#accounts.visibility
*/
export enum AccountsVisibility {
ALL = 'ALL',
SAME_GROUP = 'SAME_GROUP',
VISIBLE_GROUP = 'VISIBLE_GROUP',
NONE = 'NONE',
}
// TODO(TS): Many properties are omitted here, but they are required.
// Add default values for missing properties.
export function createDefaultPreferences() {
return {
changes_per_page: 25,
diff_view: DiffViewMode.SIDE_BY_SIDE,
size_bar_in_change_table: true,
} as PreferencesInfo;
}
// These defaults should match the defaults in
// java/com/google/gerrit/extensions/client/DiffPreferencesInfo.java
// NOTE: There are some settings that don't apply to PolyGerrit
// (Render mode being at least one of them).
export function createDefaultDiffPrefs(): DiffPreferencesInfo {
return {
context: 10,
cursor_blink_rate: 0,
font_size: 12,
ignore_whitespace: 'IGNORE_NONE',
line_length: 100,
line_wrapping: false,
show_line_endings: true,
show_tabs: true,
show_whitespace_errors: true,
syntax_highlighting: true,
tab_size: 8,
};
}
// These defaults should match the defaults in
// java/com/google/gerrit/extensions/client/EditPreferencesInfo.java
export function createDefaultEditPrefs(): EditPreferencesInfo {
return {
auto_close_brackets: false,
cursor_blink_rate: 0,
hide_line_numbers: false,
hide_top_menu: false,
indent_unit: 2,
indent_with_tabs: false,
key_map_type: 'DEFAULT',
line_length: 100,
line_wrapping: false,
match_brackets: true,
show_base: false,
show_tabs: true,
show_whitespace_errors: true,
syntax_highlighting: true,
tab_size: 8,
theme: 'DEFAULT',
};
}
export const RELOAD_DASHBOARD_INTERVAL_MS = 10 * 1000;
export const SHOWN_ITEMS_COUNT = 25;
export const WAITING = 'Waiting'; | PLAINTEXT = 'PLAINTEXT',
HTML_PLAINTEXT = 'HTML_PLAINTEXT',
}
| random_line_split |
constants.ts | /**
* @license
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tab names for primary tabs on change view page.
*/
import {DiffViewMode} from '../api/diff';
import {DiffPreferencesInfo} from '../types/diff';
import {EditPreferencesInfo, PreferencesInfo} from '../types/common';
import {
AuthType,
ChangeStatus,
ConfigParameterInfoType,
DefaultDisplayNameConfig,
EditableAccountField,
FileInfoStatus,
GpgKeyInfoStatus,
HttpMethod,
InheritedBooleanInfoConfiguredValue,
MergeabilityComputationBehavior,
ProblemInfoStatus,
ProjectState,
RequirementStatus,
ReviewerState,
RevisionKind,
SubmitType,
} from '../api/rest-api';
export {
AuthType,
ChangeStatus,
ConfigParameterInfoType,
DefaultDisplayNameConfig,
EditableAccountField,
FileInfoStatus,
GpgKeyInfoStatus,
HttpMethod,
InheritedBooleanInfoConfiguredValue,
MergeabilityComputationBehavior,
ProblemInfoStatus,
ProjectState,
RequirementStatus,
ReviewerState,
RevisionKind,
SubmitType,
};
export enum AccountTag {
SERVICE_USER = 'SERVICE_USER',
}
export enum PrimaryTab {
FILES = 'files',
/**
* When renaming 'comments' or 'findings', UrlFormatter.java must be updated.
*/
COMMENT_THREADS = 'comments',
FINDINGS = 'findings',
CHECKS = 'checks',
}
/**
* Tab names for secondary tabs on change view page.
*/
export enum SecondaryTab {
CHANGE_LOG = '_changeLog',
}
/**
* Tag names of change log messages.
*/
export enum MessageTag {
TAG_DELETE_REVIEWER = 'autogenerated:gerrit:deleteReviewer',
TAG_NEW_PATCHSET = 'autogenerated:gerrit:newPatchSet',
TAG_NEW_WIP_PATCHSET = 'autogenerated:gerrit:newWipPatchSet',
TAG_REVIEWER_UPDATE = 'autogenerated:gerrit:reviewerUpdate',
TAG_SET_PRIVATE = 'autogenerated:gerrit:setPrivate',
TAG_UNSET_PRIVATE = 'autogenerated:gerrit:unsetPrivate',
TAG_SET_READY = 'autogenerated:gerrit:setReadyForReview',
TAG_SET_WIP = 'autogenerated:gerrit:setWorkInProgress',
TAG_MERGED = 'autogenerated:gerrit:merged',
TAG_REVERT = 'autogenerated:gerrit:revert',
}
/**
* Modes for gr-diff-cursor
* The scroll behavior for the cursor. Values are 'never' and
* 'keep-visible'. 'keep-visible' will only scroll if the cursor is beyond
* the viewport.
*/
export enum ScrollMode {
KEEP_VISIBLE = 'keep-visible',
NEVER = 'never',
}
/**
* Special file paths
*/
export enum SpecialFilePath {
PATCHSET_LEVEL_COMMENTS = '/PATCHSET_LEVEL',
COMMIT_MESSAGE = '/COMMIT_MSG',
MERGE_LIST = '/MERGE_LIST',
}
export {Side} from '../api/diff';
/**
* https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#mergeable-info
*/
export enum MergeStrategy {
RECURSIVE = 'recursive',
RESOLVE = 'resolve',
SIMPLE_TWO_WAY_IN_CORE = 'simple-two-way-in-core',
OURS = 'ours',
THEIRS = 'theirs',
}
/**
* Enum for possible PermissionRuleInfo actions
* https://gerrit-review.googlesource.com/Documentation/rest-api-access.html#permission-info
*/
export enum PermissionAction {
ALLOW = 'ALLOW',
DENY = 'DENY',
BLOCK = 'BLOCK',
// Special values for global capabilities
INTERACTIVE = 'INTERACTIVE',
BATCH = 'BATCH',
}
/**
* This capability allows users to use the thread pool reserved for 'Non-Interactive Users'.
* https://gerrit-review.googlesource.com/Documentation/access-control.html#capability_priority
*/
export enum UserPriority {
BATCH = 'BATCH',
INTERACTIVE = 'INTERACTIVE',
}
/**
* The side on which the comment was added
* https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#comment-info
*/
export enum CommentSide {
REVISION = 'REVISION',
PARENT = 'PARENT',
}
/**
* Allowed app themes
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum AppTheme {
DARK = 'DARK',
LIGHT = 'LIGHT',
}
/**
* Date formats in preferences
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum DateFormat {
STD = 'STD',
US = 'US',
ISO = 'ISO',
EURO = 'EURO',
UK = 'UK',
}
/**
* Time formats in preferences
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum TimeFormat {
HHMM_12 = 'HHMM_12',
HHMM_24 = 'HHMM_24',
}
export {DiffViewMode};
/**
* The type of email strategy to use.
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum EmailStrategy {
ENABLED = 'ENABLED',
CC_ON_OWN_COMMENTS = 'CC_ON_OWN_COMMENTS',
ATTENTION_SET_ONLY = 'ATTENTION_SET_ONLY',
DISABLED = 'DISABLED',
}
/**
* The type of email format to use.
 * Not mentioned in the doc, but exists in Java class GeneralPreferencesInfo.
*/
export enum EmailFormat {
PLAINTEXT = 'PLAINTEXT',
HTML_PLAINTEXT = 'HTML_PLAINTEXT',
}
/**
* The base which should be pre-selected in the 'Diff Against' drop-down list when the change screen is opened for a merge commit
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum DefaultBase {
AUTO_MERGE = 'AUTO_MERGE',
FIRST_PARENT = 'FIRST_PARENT',
}
/**
* how draft comments are handled
*/
export enum DraftsAction {
PUBLISH = 'PUBLISH',
PUBLISH_ALL_REVISIONS = 'PUBLISH_ALL_REVISIONS',
KEEP = 'KEEP',
}
export enum NotifyType {
NONE = 'NONE',
OWNER = 'OWNER',
OWNER_REVIEWERS = 'OWNER_REVIEWERS',
ALL = 'ALL',
}
/**
* Controls visibility of other users' dashboard pages and completion suggestions to web users
* https://gerrit-review.googlesource.com/Documentation/config-gerrit.html#accounts.visibility
*/
export enum AccountsVisibility {
ALL = 'ALL',
SAME_GROUP = 'SAME_GROUP',
VISIBLE_GROUP = 'VISIBLE_GROUP',
NONE = 'NONE',
}
// TODO(TS): Many properties are omitted here, but they are required.
// Add default values for missing properties.
export function createDefaultPreferences() {
return {
changes_per_page: 25,
diff_view: DiffViewMode.SIDE_BY_SIDE,
size_bar_in_change_table: true,
} as PreferencesInfo;
}
// These defaults should match the defaults in
// java/com/google/gerrit/extensions/client/DiffPreferencesInfo.java
// NOTE: There are some settings that don't apply to PolyGerrit
// (Render mode being at least one of them).
export function createDefaultDiffPrefs(): DiffPreferencesInfo |
// These defaults should match the defaults in
// java/com/google/gerrit/extensions/client/EditPreferencesInfo.java
export function createDefaultEditPrefs(): EditPreferencesInfo {
return {
auto_close_brackets: false,
cursor_blink_rate: 0,
hide_line_numbers: false,
hide_top_menu: false,
indent_unit: 2,
indent_with_tabs: false,
key_map_type: 'DEFAULT',
line_length: 100,
line_wrapping: false,
match_brackets: true,
show_base: false,
show_tabs: true,
show_whitespace_errors: true,
syntax_highlighting: true,
tab_size: 8,
theme: 'DEFAULT',
};
}
export const RELOAD_DASHBOARD_INTERVAL_MS = 10 * 1000;
export const SHOWN_ITEMS_COUNT = 25;
export const WAITING = 'Waiting';
| {
return {
context: 10,
cursor_blink_rate: 0,
font_size: 12,
ignore_whitespace: 'IGNORE_NONE',
line_length: 100,
line_wrapping: false,
show_line_endings: true,
show_tabs: true,
show_whitespace_errors: true,
syntax_highlighting: true,
tab_size: 8,
};
} | identifier_body |
constants.ts | /**
* @license
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tab names for primary tabs on change view page.
*/
import {DiffViewMode} from '../api/diff';
import {DiffPreferencesInfo} from '../types/diff';
import {EditPreferencesInfo, PreferencesInfo} from '../types/common';
import {
AuthType,
ChangeStatus,
ConfigParameterInfoType,
DefaultDisplayNameConfig,
EditableAccountField,
FileInfoStatus,
GpgKeyInfoStatus,
HttpMethod,
InheritedBooleanInfoConfiguredValue,
MergeabilityComputationBehavior,
ProblemInfoStatus,
ProjectState,
RequirementStatus,
ReviewerState,
RevisionKind,
SubmitType,
} from '../api/rest-api';
export {
AuthType,
ChangeStatus,
ConfigParameterInfoType,
DefaultDisplayNameConfig,
EditableAccountField,
FileInfoStatus,
GpgKeyInfoStatus,
HttpMethod,
InheritedBooleanInfoConfiguredValue,
MergeabilityComputationBehavior,
ProblemInfoStatus,
ProjectState,
RequirementStatus,
ReviewerState,
RevisionKind,
SubmitType,
};
export enum AccountTag {
SERVICE_USER = 'SERVICE_USER',
}
export enum PrimaryTab {
FILES = 'files',
/**
* When renaming 'comments' or 'findings', UrlFormatter.java must be updated.
*/
COMMENT_THREADS = 'comments',
FINDINGS = 'findings',
CHECKS = 'checks',
}
/**
* Tab names for secondary tabs on change view page.
*/
export enum SecondaryTab {
CHANGE_LOG = '_changeLog',
}
/**
* Tag names of change log messages.
*/
export enum MessageTag {
TAG_DELETE_REVIEWER = 'autogenerated:gerrit:deleteReviewer',
TAG_NEW_PATCHSET = 'autogenerated:gerrit:newPatchSet',
TAG_NEW_WIP_PATCHSET = 'autogenerated:gerrit:newWipPatchSet',
TAG_REVIEWER_UPDATE = 'autogenerated:gerrit:reviewerUpdate',
TAG_SET_PRIVATE = 'autogenerated:gerrit:setPrivate',
TAG_UNSET_PRIVATE = 'autogenerated:gerrit:unsetPrivate',
TAG_SET_READY = 'autogenerated:gerrit:setReadyForReview',
TAG_SET_WIP = 'autogenerated:gerrit:setWorkInProgress',
TAG_MERGED = 'autogenerated:gerrit:merged',
TAG_REVERT = 'autogenerated:gerrit:revert',
}
/**
* Modes for gr-diff-cursor
* The scroll behavior for the cursor. Values are 'never' and
* 'keep-visible'. 'keep-visible' will only scroll if the cursor is beyond
* the viewport.
*/
export enum ScrollMode {
KEEP_VISIBLE = 'keep-visible',
NEVER = 'never',
}
/**
* Special file paths
*/
export enum SpecialFilePath {
PATCHSET_LEVEL_COMMENTS = '/PATCHSET_LEVEL',
COMMIT_MESSAGE = '/COMMIT_MSG',
MERGE_LIST = '/MERGE_LIST',
}
export {Side} from '../api/diff';
/**
* https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#mergeable-info
*/
export enum MergeStrategy {
RECURSIVE = 'recursive',
RESOLVE = 'resolve',
SIMPLE_TWO_WAY_IN_CORE = 'simple-two-way-in-core',
OURS = 'ours',
THEIRS = 'theirs',
}
/**
* Enum for possible PermissionRuleInfo actions
* https://gerrit-review.googlesource.com/Documentation/rest-api-access.html#permission-info
*/
export enum PermissionAction {
ALLOW = 'ALLOW',
DENY = 'DENY',
BLOCK = 'BLOCK',
// Special values for global capabilities
INTERACTIVE = 'INTERACTIVE',
BATCH = 'BATCH',
}
/**
* This capability allows users to use the thread pool reserved for 'Non-Interactive Users'.
* https://gerrit-review.googlesource.com/Documentation/access-control.html#capability_priority
*/
export enum UserPriority {
BATCH = 'BATCH',
INTERACTIVE = 'INTERACTIVE',
}
/**
* The side on which the comment was added
* https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#comment-info
*/
export enum CommentSide {
REVISION = 'REVISION',
PARENT = 'PARENT',
}
/**
* Allowed app themes
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum AppTheme {
DARK = 'DARK',
LIGHT = 'LIGHT',
}
/**
* Date formats in preferences
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum DateFormat {
STD = 'STD',
US = 'US',
ISO = 'ISO',
EURO = 'EURO',
UK = 'UK',
}
/**
* Time formats in preferences
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum TimeFormat {
HHMM_12 = 'HHMM_12',
HHMM_24 = 'HHMM_24',
}
export {DiffViewMode};
/**
* The type of email strategy to use.
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum EmailStrategy {
ENABLED = 'ENABLED',
CC_ON_OWN_COMMENTS = 'CC_ON_OWN_COMMENTS',
ATTENTION_SET_ONLY = 'ATTENTION_SET_ONLY',
DISABLED = 'DISABLED',
}
/**
* The type of email format to use.
 * Not mentioned in the docs, but it exists in the Java class GeneralPreferencesInfo.
*/
export enum EmailFormat {
PLAINTEXT = 'PLAINTEXT',
HTML_PLAINTEXT = 'HTML_PLAINTEXT',
}
/**
* The base which should be pre-selected in the 'Diff Against' drop-down list when the change screen is opened for a merge commit
* https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#preferences-input
*/
export enum DefaultBase {
AUTO_MERGE = 'AUTO_MERGE',
FIRST_PARENT = 'FIRST_PARENT',
}
/**
* how draft comments are handled
*/
export enum DraftsAction {
PUBLISH = 'PUBLISH',
PUBLISH_ALL_REVISIONS = 'PUBLISH_ALL_REVISIONS',
KEEP = 'KEEP',
}
export enum NotifyType {
NONE = 'NONE',
OWNER = 'OWNER',
OWNER_REVIEWERS = 'OWNER_REVIEWERS',
ALL = 'ALL',
}
/**
* Controls visibility of other users' dashboard pages and completion suggestions to web users
* https://gerrit-review.googlesource.com/Documentation/config-gerrit.html#accounts.visibility
*/
export enum AccountsVisibility {
ALL = 'ALL',
SAME_GROUP = 'SAME_GROUP',
VISIBLE_GROUP = 'VISIBLE_GROUP',
NONE = 'NONE',
}
// TODO(TS): Many properties are omitted here, but they are required.
// Add default values for missing properties.
export function createDefaultPreferences() {
return {
changes_per_page: 25,
diff_view: DiffViewMode.SIDE_BY_SIDE,
size_bar_in_change_table: true,
} as PreferencesInfo;
}
// These defaults should match the defaults in
// java/com/google/gerrit/extensions/client/DiffPreferencesInfo.java
// NOTE: There are some settings that don't apply to PolyGerrit
// (Render mode being at least one of them).
export function | (): DiffPreferencesInfo {
return {
context: 10,
cursor_blink_rate: 0,
font_size: 12,
ignore_whitespace: 'IGNORE_NONE',
line_length: 100,
line_wrapping: false,
show_line_endings: true,
show_tabs: true,
show_whitespace_errors: true,
syntax_highlighting: true,
tab_size: 8,
};
}
// These defaults should match the defaults in
// java/com/google/gerrit/extensions/client/EditPreferencesInfo.java
export function createDefaultEditPrefs(): EditPreferencesInfo {
return {
auto_close_brackets: false,
cursor_blink_rate: 0,
hide_line_numbers: false,
hide_top_menu: false,
indent_unit: 2,
indent_with_tabs: false,
key_map_type: 'DEFAULT',
line_length: 100,
line_wrapping: false,
match_brackets: true,
show_base: false,
show_tabs: true,
show_whitespace_errors: true,
syntax_highlighting: true,
tab_size: 8,
theme: 'DEFAULT',
};
}
export const RELOAD_DASHBOARD_INTERVAL_MS = 10 * 1000;
export const SHOWN_ITEMS_COUNT = 25;
export const WAITING = 'Waiting';
| createDefaultDiffPrefs | identifier_name |
util.ts | import {stub} from "sinon"
import {logger} from "@bokehjs/core/logging"
export type TrapOutput = {
log: string
trace: string
debug: string
info: string
warn: string
error: string
}
export function trap(fn: () => void): TrapOutput {
const result = {
log: "", | error: "",
}
function join(args: unknown[]): string {
return args.map((arg) => `${arg}`).join(" ") + "\n"
}
  // XXX: stubbing both console and logger, and including logger's name manually is a hack,
  // but that's the best we can do (at least for now) while preserving logger's ability
  // to reference the original location from where a logging method was called.
const log = stub(console, "log").callsFake((...args) => {result.log += join(args)})
const ctrace = stub(console, "trace").callsFake((...args) => {result.trace += join(args)})
const ltrace = stub(logger, "trace").callsFake((...args) => {result.trace += join(["[bokeh]", ...args])})
const cdebug = stub(console, "debug").callsFake((...args) => {result.debug += join(args)})
const ldebug = stub(logger, "debug").callsFake((...args) => {result.debug += join(["[bokeh]", ...args])})
const cinfo = stub(console, "info").callsFake((...args) => {result.info += join(args)})
const linfo = stub(logger, "info").callsFake((...args) => {result.info += join(["[bokeh]", ...args])})
const cwarn = stub(console, "warn").callsFake((...args) => {result.warn += join(args)})
const lwarn = stub(logger, "warn").callsFake((...args) => {result.warn += join(["[bokeh]", ...args])})
const cerror = stub(console, "error").callsFake((...args) => {result.error += join(args)})
const lerror = stub(logger, "error").callsFake((...args) => {result.error += join(["[bokeh]", ...args])})
try {
fn()
} finally {
log.restore()
ctrace.restore()
ltrace.restore()
cdebug.restore()
ldebug.restore()
cinfo.restore()
linfo.restore()
cwarn.restore()
lwarn.restore()
cerror.restore()
lerror.restore()
}
return result
} | trace: "",
debug: "",
info: "",
warn: "", | random_line_split |
util.ts | import {stub} from "sinon"
import {logger} from "@bokehjs/core/logging"
export type TrapOutput = {
log: string
trace: string
debug: string
info: string
warn: string
error: string
}
export function trap(fn: () => void): TrapOutput {
const result = {
log: "",
trace: "",
debug: "",
info: "",
warn: "",
error: "",
}
function | (args: unknown[]): string {
return args.map((arg) => `${arg}`).join(" ") + "\n"
}
  // XXX: stubbing both console and logger, and including logger's name manually is a hack,
  // but that's the best we can do (at least for now) while preserving logger's ability
  // to reference the original location from where a logging method was called.
const log = stub(console, "log").callsFake((...args) => {result.log += join(args)})
const ctrace = stub(console, "trace").callsFake((...args) => {result.trace += join(args)})
const ltrace = stub(logger, "trace").callsFake((...args) => {result.trace += join(["[bokeh]", ...args])})
const cdebug = stub(console, "debug").callsFake((...args) => {result.debug += join(args)})
const ldebug = stub(logger, "debug").callsFake((...args) => {result.debug += join(["[bokeh]", ...args])})
const cinfo = stub(console, "info").callsFake((...args) => {result.info += join(args)})
const linfo = stub(logger, "info").callsFake((...args) => {result.info += join(["[bokeh]", ...args])})
const cwarn = stub(console, "warn").callsFake((...args) => {result.warn += join(args)})
const lwarn = stub(logger, "warn").callsFake((...args) => {result.warn += join(["[bokeh]", ...args])})
const cerror = stub(console, "error").callsFake((...args) => {result.error += join(args)})
const lerror = stub(logger, "error").callsFake((...args) => {result.error += join(["[bokeh]", ...args])})
try {
fn()
} finally {
log.restore()
ctrace.restore()
ltrace.restore()
cdebug.restore()
ldebug.restore()
cinfo.restore()
linfo.restore()
cwarn.restore()
lwarn.restore()
cerror.restore()
lerror.restore()
}
return result
}
| join | identifier_name |
util.ts | import {stub} from "sinon"
import {logger} from "@bokehjs/core/logging"
export type TrapOutput = {
log: string
trace: string
debug: string
info: string
warn: string
error: string
}
export function trap(fn: () => void): TrapOutput | {
const result = {
log: "",
trace: "",
debug: "",
info: "",
warn: "",
error: "",
}
function join(args: unknown[]): string {
return args.map((arg) => `${arg}`).join(" ") + "\n"
}
  // XXX: stubbing both console and logger, and including logger's name manually is a hack,
  // but that's the best we can do (at least for now) while preserving logger's ability
  // to reference the original location from where a logging method was called.
const log = stub(console, "log").callsFake((...args) => {result.log += join(args)})
const ctrace = stub(console, "trace").callsFake((...args) => {result.trace += join(args)})
const ltrace = stub(logger, "trace").callsFake((...args) => {result.trace += join(["[bokeh]", ...args])})
const cdebug = stub(console, "debug").callsFake((...args) => {result.debug += join(args)})
const ldebug = stub(logger, "debug").callsFake((...args) => {result.debug += join(["[bokeh]", ...args])})
const cinfo = stub(console, "info").callsFake((...args) => {result.info += join(args)})
const linfo = stub(logger, "info").callsFake((...args) => {result.info += join(["[bokeh]", ...args])})
const cwarn = stub(console, "warn").callsFake((...args) => {result.warn += join(args)})
const lwarn = stub(logger, "warn").callsFake((...args) => {result.warn += join(["[bokeh]", ...args])})
const cerror = stub(console, "error").callsFake((...args) => {result.error += join(args)})
const lerror = stub(logger, "error").callsFake((...args) => {result.error += join(["[bokeh]", ...args])})
try {
fn()
} finally {
log.restore()
ctrace.restore()
ltrace.restore()
cdebug.restore()
ldebug.restore()
cinfo.restore()
linfo.restore()
cwarn.restore()
lwarn.restore()
cerror.restore()
lerror.restore()
}
return result
} | identifier_body |
|
remove_lines_from_a_file.rs | // http://rosettacode.org/wiki/Remove_lines_from_a_file
extern crate rustc_serialize;
extern crate docopt;
use docopt::Docopt;
use std::io::{BufReader,BufRead};
use std::fs::File;
const USAGE: &'static str = "
Usage: remove_lines_from_a_file <start> <count> <file>
";
#[derive(Debug, RustcDecodable)]
struct | {
arg_start: usize,
arg_count: usize,
arg_file: String,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
let file = BufReader::new(File::open(args.arg_file).unwrap());
for (i, line) in file.lines().enumerate() {
let cur = i + 1;
if cur < args.arg_start || cur >= (args.arg_start + args.arg_count) {
println!("{}", line.unwrap());
}
}
}
| Args | identifier_name |
remove_lines_from_a_file.rs | // http://rosettacode.org/wiki/Remove_lines_from_a_file
extern crate rustc_serialize;
extern crate docopt;
use docopt::Docopt;
use std::io::{BufReader,BufRead};
use std::fs::File;
const USAGE: &'static str = "
Usage: remove_lines_from_a_file <start> <count> <file>
";
#[derive(Debug, RustcDecodable)]
struct Args {
arg_start: usize,
arg_count: usize,
arg_file: String,
}
fn main() | {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
let file = BufReader::new(File::open(args.arg_file).unwrap());
for (i, line) in file.lines().enumerate() {
let cur = i + 1;
if cur < args.arg_start || cur >= (args.arg_start + args.arg_count) {
println!("{}", line.unwrap());
}
}
} | identifier_body |
|
remove_lines_from_a_file.rs | // http://rosettacode.org/wiki/Remove_lines_from_a_file
extern crate rustc_serialize;
extern crate docopt;
use docopt::Docopt;
use std::io::{BufReader,BufRead};
use std::fs::File;
const USAGE: &'static str = "
Usage: remove_lines_from_a_file <start> <count> <file>
";
#[derive(Debug, RustcDecodable)]
struct Args {
arg_start: usize, | arg_count: usize,
arg_file: String,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
let file = BufReader::new(File::open(args.arg_file).unwrap());
for (i, line) in file.lines().enumerate() {
let cur = i + 1;
if cur < args.arg_start || cur >= (args.arg_start + args.arg_count) {
println!("{}", line.unwrap());
}
}
} | random_line_split |
|
remove_lines_from_a_file.rs | // http://rosettacode.org/wiki/Remove_lines_from_a_file
extern crate rustc_serialize;
extern crate docopt;
use docopt::Docopt;
use std::io::{BufReader,BufRead};
use std::fs::File;
const USAGE: &'static str = "
Usage: remove_lines_from_a_file <start> <count> <file>
";
#[derive(Debug, RustcDecodable)]
struct Args {
arg_start: usize,
arg_count: usize,
arg_file: String,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
let file = BufReader::new(File::open(args.arg_file).unwrap());
for (i, line) in file.lines().enumerate() {
let cur = i + 1;
if cur < args.arg_start || cur >= (args.arg_start + args.arg_count) |
}
}
| {
println!("{}", line.unwrap());
} | conditional_block |
lib.rs | //! This crate provides generic implementations of clustering
//! algorithms, allowing them to work with any back-end "point
//! database" that implements the required operations, e.g. one might
//! be happy with using the naive collection `BruteScan` from this
//! crate, or go all out and implement a specialised R*-tree for
//! optimised performance.
//!
//! Density-based clustering algorithms:
//!
//! - DBSCAN (`Dbscan`)
//! - OPTICS (`Optics`)
//!
//! Others:
//!
//! - *k*-means (`Kmeans`)
//!
//! [Source](https://github.com/huonw/cogset).
//!
//! # Installation
//!
//! Add the following to your `Cargo.toml` file:
//!
//! ```toml
//! [dependencies]
//! cogset = "0.2"
//! ```
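//!
//! # Example
//!
//! A minimal sketch of clustering with DBSCAN on top of the naive `BruteScan`
//! collection. The constructor signatures (`BruteScan::new`, `Dbscan::new`), the
//! shape of the yielded clusters, and `noise_points` are assumptions made for
//! illustration from the names re-exported below, not a verbatim copy of this
//! crate's documented API.
//!
//! ```ignore
//! use cogset::{BruteScan, Dbscan, Euclid};
//!
//! // Four 2-D points wrapped in `Euclid` so Euclidean distance is used.
//! let points = [Euclid([0.0, 0.0]), Euclid([0.1, 0.0]),
//!               Euclid([1.0, 1.0]), Euclid([1.1, 1.0])];
//!
//! // Assumed API: a brute-force scanner over a slice of points.
//! let scanner = BruteScan::new(&points);
//!
//! // Assumed API: neighbourhood radius eps = 0.2, min_points = 2.
//! let mut dbscan = Dbscan::new(scanner, 0.2, 2);
//!
//! // Each item yielded by the iterator is assumed to be one cluster of point indices.
//! let clusters: Vec<_> = dbscan.by_ref().collect();
//! println!("clusters: {:?}", clusters);
//!
//! // Points that ended up in no cluster (assumed accessor).
//! println!("noise: {:?}", dbscan.noise_points());
//! ```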
#![cfg_attr(all(test, feature = "unstable"), feature(test))]
#[cfg(all(test, feature = "unstable"))] extern crate test;
#[cfg(test)] extern crate rand;
extern crate order_stat;
#[cfg(all(test, feature = "unstable"))]
#[macro_use]
mod benches;
#[cfg(not(all(test, feature = "unstable")))]
macro_rules! make_benches {
($($_x: tt)*) => {}
}
mod dbscan;
pub use dbscan::Dbscan;
mod optics;
pub use optics::{Optics, OpticsDbscanClustering};
mod point;
pub use point::{Point, RegionQuery, Points, ListPoints, BruteScan, BruteScanNeighbours,
Euclid, Euclidean};
mod kmeans;
pub use kmeans::{Kmeans, KmeansBuilder}; | //! Clustering algorithms.
//!
//! 
//! | random_line_split |
|
streaming.py | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Client for streaming based WPS.
It exploits asynchronous capabilities of WPS and QGIS for visualizing
intermediate results from a WPS
-------------------
copyright : (C) 2012 by Germán Carrillo (GeoTux)
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import QColor, QMessageBox
from PyQt4.QtNetwork import QNetworkRequest, QNetworkAccessManager
from qgis.core import (QgsNetworkAccessManager, QgsVectorLayer, QgsRasterLayer,
QgsMapLayerRegistry, QgsFeature, QgsGeometry)
from qgis.gui import QgsRubberBand, QgsVertexMarker
from wpslib.processdescription import getFileExtension,isMimeTypeVector,isMimeTypeRaster
from wpslib.executionresult import decodeBase64
from functools import partial
import apicompat
import tempfile
import os, platform
import glob
class Streaming(QObject):
""" Class for keeping track of stream chunks and
providing methods for handling and visualizing them
"""
# Define SIGNALS/SLOTS
playlistHandled = pyqtSignal(dict)
urlReady = pyqtSignal(str, int, str)
dataReady = pyqtSignal(str, int)
def __init__(self, parent, iface, chunks, playlistUrl, mimeType, encoding):
super(Streaming, self).__init__()
self.DEBUG = True
# Variables from other classes
self.parent = parent # For GUI access
self.iface = iface
self.chunks = chunks
self.playlistUrl = playlistUrl
self.mimeType = mimeType
self.encoding = encoding
# Internal variables
self.__endTag = "#PLAYLIST-END"
self.__exceptionTag = "#EXCEPTION"
self.__exceptionUrl = ""
self.__exceptionFound = False
        self.__playlistFinished = False # Did the end tag appear?
self.__bytesInlastReply = 0 # To compare last and current reply sizes
self.__loadedChunks = 0 # For keeping track of # of loaded (to local vars) chunks
self.__deliveredChunks = 0 # For keeping track of # of loaded (to the map) chunks
self.__bFirstChunk = True
self.__features = {} # {0:[f0,f1,f2], 1:[f0,f1]}
self.__bGeomMulti = False # Is the geometry multi{point|line|polygon}
self.__geometryType = "" # Values: "Point","LineString","Polygon","Unknown", "NoGeometry"
self.__tmpGeometry = {} # For visualization purposes {chunkId1: rb1, chunkId2: rb2 }
self.__memoryLayer = None # The whole merged data
# For rasters only
self.__legend = self.iface.legendInterface()
self.__groupIndex = 0
self.__chunksDir = None
self.__virtualFile = "" # Virtual raster file path
if isMimeTypeRaster(self.mimeType, True) != None:
self.__chunksDir = tempfile.mkdtemp(prefix="tmpChunks")
# Other objects
self.timer = QTimer()
self.timer.setInterval(1 * 1000) # 1 second
self.QNAM4Playlist = QNetworkAccessManager()
self.QNAM4Chunks = QNetworkAccessManager()
self.QNAM4Exception = QNetworkAccessManager()
# SIGNAL/SLOT connections
self.playlistHandled.connect(self.fetchChunks)
self.urlReady.connect(self.fetchResult)
self.dataReady.connect(self.loadData)
self.timer.timeout.connect(partial(self.fetchPlaylist, self.playlistUrl))
self.QNAM4Playlist.finished.connect(self.handlePlaylist)
self.QNAM4Chunks.finished.connect(self.handleChunk)
self.QNAM4Exception.finished.connect(self.handleException)
#self.QNAM4Playlist = QgsNetworkAccessManager.instance()
#theReply2.error.connect(self.handleErrors)
# GUI
self.parent.progressBar.setRange(0,0)
self.parent.lblProcess.setText("Reading output playlist...")
def start(self):
""" Start fetching """
self.fetchPlaylist(self.playlistUrl) # First call
def stop(self):
""" Stop fetching """
self.timer.stop()
self.QNAM4Playlist.finished.disconnect(self.handlePlaylist)
self.QNAM4Chunks.finished.disconnect(self.handleChunk)
self.removeTempGeometry(self.__geometryType)
if self.DEBUG: print "Stop streaming!"
def validateCompletedStream(self):
""" Is the stream complete (Did the end tag appeared?) """
#return (self.__loadedChunks >= self.chunks and self.chunks != 0)
return self.__playlistFinished
def allChunksDelivered(self):
" |
def fetchPlaylist(self, playlistLink):
url = QUrl(playlistLink)
self.QNAM4Playlist.get(QNetworkRequest(url)) # SLOT: handlePlaylist
def handlePlaylist(self, reply):
""" Parse the chunk URLs and update the loadedChunks counter """
# Check if there is redirection
reDir = reply.attribute(QNetworkRequest.RedirectionTargetAttribute).toUrl()
if not reDir.isEmpty():
self.fetchPlaylist(reDir.toString())
return
# Parse URLs only if there is new data in the reply
if reply.bytesAvailable() > self.__bytesInlastReply:
if self.DEBUG: print " Parsing the playlist..."
startFrom = reply.bytesAvailable() - self.__bytesInlastReply # Delta in bytes
self.__bytesInlastReply = reply.bytesAvailable()
newURLs = self.parseURLs(reply, startFrom)
else:
if self.DEBUG: print " No new data in the playlist..."
newURLs = {}
# Store new URLs
if len(newURLs) > 0:
self.__loadedChunks += len(newURLs)
if self.chunks:
self.parent.progressBar.setRange(0,self.chunks)
if self.DEBUG: print str(self.__loadedChunks) + " chunks loaded" + ((" out of " + str(self.chunks)) if self.chunks else "")
# If not complete, make additional calls
if not self.validateCompletedStream():
if not self.timer.isActive():
self.timer.start()
if self.DEBUG: print "Timer started..."
else:
self.timer.stop()
self.QNAM4Playlist.finished.disconnect(self.handlePlaylist)
if self.DEBUG: print "Playlist finished!"
if self.allChunksDelivered():
self.finishLoading()
if self.__exceptionFound:
self.fetchException()
if len(newURLs) > 0:
self.playlistHandled.emit(newURLs) # SLOT: fetchChunks
def parseURLs(self, reply, startFrom):
""" Get a dict of new IDs:URLs from the current playlist (newURLs) """
newURLs = {} # {0:URL0, 1:URL1, ...}
count = 0
#Get the delta and start reading it
allData = reply.readAll()
allData = allData.right(startFrom) # Get rid of old data
response = QTextStream(allData, QIODevice.ReadOnly)
data = response.readLine()
# Parse
while (data):
data = str(data.split("\n")[0])
if data:
if "#" in data: # It's a playlist comment
if self.__endTag in data:
self.__playlistFinished = True
elif self.__exceptionTag in data:
if self.DEBUG: print "Exception found!"
self.__exceptionFound = True
self.__exceptionUrl = data.split(":",1)[1].strip()
else:
newURLs[count+self.__loadedChunks] = data
count += 1
data = response.readLine()
return newURLs
def fetchChunks(self, newURLs):
""" Fetch each url """
for chunkId in newURLs:
self.urlReady.emit(self.encoding, chunkId, newURLs[chunkId]) # SLOT: fetchResult
def fetchResult(self, encoding, chunkId, fileLink):
""" Send the GET request """
url = QUrl(fileLink)
theReply2 = self.QNAM4Chunks.get(QNetworkRequest(url))
theReply2.setProperty("chunkId", pystring(chunkId))
theReply2.setProperty("encoding", pystring(encoding))
def handleErrors(self, error): # TODO connect it
if self.DEBUG: print "ERROR!!!", error
def fetchException(self):
""" Send the GET request for the exception """
url = QUrl(self.__exceptionUrl)
theReply3 = self.QNAM4Exception.get(QNetworkRequest(url))
def handleException(self, reply):
""" Display the exception """
# Check if there is redirection
reDir = reply.attribute(QNetworkRequest.RedirectionTargetAttribute).toUrl()
if not reDir.isEmpty():
self.__exceptionUrl = reDir.toString()
self.fetchException()
return
resultXML = reply.readAll().data()
self.parent.setStatusLabel('error')
self.parent.progressBar.setMinimum(0)
self.parent.progressBar.setMaximum(100)
self.parent.errorHandler(resultXML)
def handleChunk(self, reply):
""" Store the file received """
#reply.deleteLater() # Recommended way to delete the reply
chunkId = reply.property("chunkId").toInt()[0]
encoding = reply.property("encoding").toString()
# Check if there is redirection
reDir = reply.attribute(QNetworkRequest.RedirectionTargetAttribute).toUrl()
if not reDir.isEmpty():
self.urlReady.emit(encoding, chunkId, reDir.toString())
return
if self.DEBUG: print "GET chunk", chunkId
# Update progressBar
if self.chunks:
self.parent.progressBar.setValue(self.__deliveredChunks + 1)
self.parent.lblProcess.setText("Downloading chunks... ("+str(self.__deliveredChunks + 1)+"/"+str(self.chunks)+")")
# Get a unique temporary file name
tmpFile = tempfile.NamedTemporaryFile(prefix="base64",
suffix=getFileExtension(self.mimeType), dir=self.__chunksDir, delete=False )
# TODO: Check if the file name already exists!!!
# Write the data to the temporary file
outFile = QFile(tmpFile.name)
outFile.open(QIODevice.WriteOnly)
outFile.write(reply.readAll())
outFile.close()
# Decode?
if encoding == "base64":
resultFile = decodeBase64(tmpFile.name, self.mimeType, self.__chunksDir)
else:
resultFile = tmpFile.name
# Finally, load the data
if self.DEBUG: print "READY to be loaded (", resultFile, ", chunkId:", chunkId, ")"
self.dataReady.emit(resultFile, chunkId) # SLOT: loadData
def loadData(self, resultFile, chunkId):
""" Load data to the map """
if isMimeTypeVector(self.mimeType, True) != None:
# Memory layer:
geometryTypes = ["Point","LineString","Polygon","Unknown", "NoGeometry"]
vlayer = QgsVectorLayer(resultFile, "chunk", "ogr")
if self.__bFirstChunk:
self.__bFirstChunk = False
self.__geometryType = geometryTypes[vlayer.geometryType()]
self.__bGeomMulti = vlayer.wkbType() in [4,5,6,11,12,13]
self.__memoryLayer = QgsVectorLayer(self.__geometryType,"Streamed data","memory")
self.__memoryLayer.dataProvider().addAttributes(vlayer.pendingFields().values())
self.__memoryLayer.updateFieldMap()
provider = vlayer.dataProvider()
allAttrs = provider.attributeIndexes()
vlayer.select(allAttrs)
# Visualize temporal geometries during the downloading process
# Don't add temporal geometries if last chunk
if self.DEBUG: print "Loaded chunkId:",chunkId
res = self.__memoryLayer.dataProvider().addFeatures( [feat for feat in vlayer] )
self.__deliveredChunks += 1
if not self.allChunksDelivered():
inFeat = QgsFeature()
inGeom = QgsGeometry()
self.createTempGeometry(chunkId, self.__geometryType)
while provider.nextFeature( inFeat ):
inGeom = inFeat.geometry()
featList = self.extractAsSingle(self.__geometryType, inGeom) if self.__bGeomMulti else [inGeom]
for geom in featList:
self.addTempGeometry(chunkId, self.__geometryType, geom)
else:
self.finishLoading()
# Raster data
elif isMimeTypeRaster(self.mimeType, True) != None:
# We can directly attach the new layer
if self.__bFirstChunk:
self.__bFirstChunk = False
self.__groupIndex = self.__legend.addGroup("Streamed-raster")
rLayer = QgsRasterLayer(resultFile, "raster_"+str(chunkId))
bLoaded = QgsMapLayerRegistry.instance().addMapLayer(rLayer)
self.stretchRaster(rLayer)
self.__legend.moveLayer(rLayer, self.__groupIndex + 1)
self.__deliveredChunks += 1
if self.allChunksDelivered():
self.finishLoading()
def finishLoading(self):
""" Finish the loading process, load the definite assembled layer """
if self.DEBUG: print "DONE!"
if not self.__bFirstChunk:
if isMimeTypeVector(self.mimeType, True) != None:
self.removeTempGeometry(self.__geometryType)
QgsMapLayerRegistry.instance().addMapLayer(self.__memoryLayer)
elif isMimeTypeRaster(self.mimeType, True) != None:
self.parent.lblProcess.setText("All tiles are loaded. Merging them...")
# Generate gdal virtual raster
# Code adapted from GdalTools (C) 2009 by L. Masini and G. Sucameli (Faunalia)
self.process = QProcess(self)
self.connect(self.process, SIGNAL("finished(int, QProcess::ExitStatus)"),
self.loadVirtualRaster)
#self.setProcessEnvironment(self.process) Required in Windows?
cmd = "gdalbuildvrt"
arguments = pystringlist()
if platform.system() == "Windows" and cmd[-3:] == ".py":
command = cmd[:-3] + ".bat"
else:
command = cmd
tmpFile = tempfile.NamedTemporaryFile(prefix="virtual",
suffix=".vrt")
self.__virtualFile = tmpFile.name
arguments.append(self.__virtualFile)
rasters = self.getRasterFiles(self.__chunksDir,
getFileExtension(self.mimeType))
for raster in rasters:
arguments.append(raster)
self.process.start(command, arguments, QIODevice.ReadOnly)
if not self.__exceptionFound:
self.parent.setStatusLabel('finished')
self.parent.progressBar.setRange(0,100)
self.parent.progressBar.setValue(100)
def createTempGeometry(self, chunkId, geometryType):
""" Create rubber bands for rapid visualization of geometries """
if geometryType == "Polygon":
self.__tmpGeometry[chunkId] = QgsRubberBand(self.iface.mapCanvas(), True)
self.__tmpGeometry[chunkId].setColor( QColor( 0,255,0,255 ) )
self.__tmpGeometry[chunkId].setWidth( 2 )
if self.DEBUG: print "rubberBand created"
elif geometryType == "LineString":
self.__tmpGeometry[chunkId] = QgsRubberBand(self.iface.mapCanvas(), False)
self.__tmpGeometry[chunkId].setColor( QColor( 255,121,48,255 ) )
self.__tmpGeometry[chunkId].setWidth( 3 )
elif geometryType == "Point":
# In the case of points, they will be added as vertex objects later
self.__tmpGeometry[chunkId] = []
def addTempGeometry(self, chunkId, geometryType, geometry):
""" Add geometries as rubber bands or vertex objects """
if geometryType == "Polygon" or geometryType == "LineString":
self.__tmpGeometry[chunkId].addGeometry(geometry, None)
elif geometryType == "Point":
vertex = QgsVertexMarker(self.iface.mapCanvas())
vertex.setCenter(geometry.asPoint())
vertex.setColor(QColor(0,255,0))
vertex.setIconSize(6)
vertex.setIconType(QgsVertexMarker.ICON_BOX) # or ICON_CROSS, ICON_X
vertex.setPenWidth(3)
self.__tmpGeometry[chunkId].append(vertex)
def removeTempGeometry(self, geometryType):
""" Remove rubber bands or vertex objects from the map """
if geometryType == "Polygon" or geometryType == "LineString":
for chunkId in self.__tmpGeometry.keys():
self.iface.mapCanvas().scene().removeItem(self.__tmpGeometry[chunkId])
del self.__tmpGeometry[chunkId]
elif geometryType == "Point":
for chunkId in self.__tmpGeometry.keys():
if len( self.__tmpGeometry[chunkId] ) > 0:
for vertex in self.__tmpGeometry[chunkId]:
self.iface.mapCanvas().scene().removeItem(vertex)
del vertex
def extractAsSingle(self, geometryType, geom):
""" Extract multi geometries as single ones.
Required because of a QGIS bug regarding multipolygons and rubber bands
"""
# Code adapted from QGIS fTools plugin, (C) 2008-2011 Carson Farmer
multi_geom = QgsGeometry()
temp_geom = []
if geometryType == "Point":
multi_geom = geom.asMultiPoint()
for i in multi_geom:
temp_geom.append( QgsGeometry().fromPoint ( i ) )
elif geometryType == "LineString":
multi_geom = geom.asMultiPolyline()
for i in multi_geom:
temp_geom.append( QgsGeometry().fromPolyline( i ) )
elif geometryType == "Polygon":
multi_geom = geom.asMultiPolygon()
for i in multi_geom:
temp_geom.append( QgsGeometry().fromPolygon( i ) )
return temp_geom
def loadVirtualRaster(self, exitCode, status):
""" Load a virtual raster to QGIS """
if exitCode == 0:
self.__legend.setGroupVisible( self.__groupIndex, False )
rLayer = QgsRasterLayer(self.__virtualFile, "virtual")
bLoaded = QgsMapLayerRegistry.instance().addMapLayer(rLayer)
self.stretchRaster(rLayer)
self.process.kill()
def stretchRaster(self, raster):
raster.setMinimumMaximumUsingLastExtent()
raster.setContrastEnhancementAlgorithm(1)
raster.triggerRepaint()
def setProcessEnvironment(self, process):
""" From GdalTools. Set environment variables for running gdalbuildvrt """
envvar_list = {
"PATH" : self.getGdalBinPath(),
"PYTHONPATH" : self.getGdalPymodPath()
}
if self.DEBUG: print envvar_list
sep = os.pathsep
for name, val in envvar_list.iteritems():
if val == None or val == "":
continue
envval = os.getenv(name)
if envval == None or envval == "":
envval = str(val)
elif not pystring( envval ).split( sep ).contains( val, Qt.CaseInsensitive ):
envval += "%s%s" % (sep, str(val))
else:
envval = None
if envval != None:
os.putenv( name, envval )
if False: # not needed because os.putenv() has already updated the environment for new child processes
env = QProcess.systemEnvironment()
if env.contains( QRegExp( "^%s=(.*)" % name, Qt.CaseInsensitive ) ):
env.replaceInStrings( QRegExp( "^%s=(.*)" % name, Qt.CaseInsensitive ), "%s=\\1%s%s" % (name, sep, gdalPath) )
else:
env << "%s=%s" % (name, val)
process.setEnvironment( env )
def getRasterFiles(self, dir, extension):
rasters = pystringlist()
for name in glob.glob(dir + '/*' + extension):
rasters.append(name)
return rasters
def getGdalBinPath(self):
""" Retrieves GDAL binaries location """
settings = QSettings()
return settings.value( "/GdalTools/gdalPath", pystring( "" ) ).toString()
def getGdalPymodPath(self):
""" Retrieves GDAL python modules location """
settings = QSettings()
return settings.value( "/GdalTools/gdalPymodPath", pystring( "" ) ).toString()
| "" Are all chunks already loaded into the map? """
return ((self.__loadedChunks == self.__deliveredChunks and
self.__playlistFinished) or self.__exceptionFound)
| identifier_body |
streaming.py | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Client for streaming based WPS.
It exploits asynchronous capabilities of WPS and QGIS for visualizing
intermediate results from a WPS
-------------------
copyright : (C) 2012 by Germán Carrillo (GeoTux)
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import QColor, QMessageBox
from PyQt4.QtNetwork import QNetworkRequest, QNetworkAccessManager
from qgis.core import (QgsNetworkAccessManager, QgsVectorLayer, QgsRasterLayer,
QgsMapLayerRegistry, QgsFeature, QgsGeometry)
from qgis.gui import QgsRubberBand, QgsVertexMarker
from wpslib.processdescription import getFileExtension,isMimeTypeVector,isMimeTypeRaster
from wpslib.executionresult import decodeBase64
from functools import partial
import apicompat
import tempfile
import os, platform
import glob
class Streaming(QObject):
""" Class for keeping track of stream chunks and
providing methods for handling and visualizing them
"""
# Define SIGNALS/SLOTS
playlistHandled = pyqtSignal(dict)
urlReady = pyqtSignal(str, int, str)
dataReady = pyqtSignal(str, int)
def __init__(self, parent, iface, chunks, playlistUrl, mimeType, encoding):
super(Streaming, self).__init__()
self.DEBUG = True
# Variables from other classes
self.parent = parent # For GUI access
self.iface = iface
self.chunks = chunks
self.playlistUrl = playlistUrl
self.mimeType = mimeType
self.encoding = encoding
# Internal variables
self.__endTag = "#PLAYLIST-END"
self.__exceptionTag = "#EXCEPTION"
self.__exceptionUrl = ""
self.__exceptionFound = False
        self.__playlistFinished = False # Did the end tag appear?
self.__bytesInlastReply = 0 # To compare last and current reply sizes
self.__loadedChunks = 0 # For keeping track of # of loaded (to local vars) chunks
self.__deliveredChunks = 0 # For keeping track of # of loaded (to the map) chunks
self.__bFirstChunk = True
self.__features = {} # {0:[f0,f1,f2], 1:[f0,f1]}
self.__bGeomMulti = False # Is the geometry multi{point|line|polygon}
self.__geometryType = "" # Values: "Point","LineString","Polygon","Unknown", "NoGeometry"
self.__tmpGeometry = {} # For visualization purposes {chunkId1: rb1, chunkId2: rb2 }
self.__memoryLayer = None # The whole merged data
# For rasters only
self.__legend = self.iface.legendInterface()
self.__groupIndex = 0
self.__chunksDir = None
self.__virtualFile = "" # Virtual raster file path
if isMimeTypeRaster(self.mimeType, True) != None:
self.__chunksDir = tempfile.mkdtemp(prefix="tmpChunks")
# Other objects
self.timer = QTimer()
self.timer.setInterval(1 * 1000) # 1 second
self.QNAM4Playlist = QNetworkAccessManager()
self.QNAM4Chunks = QNetworkAccessManager()
self.QNAM4Exception = QNetworkAccessManager()
# SIGNAL/SLOT connections
self.playlistHandled.connect(self.fetchChunks)
self.urlReady.connect(self.fetchResult)
self.dataReady.connect(self.loadData)
self.timer.timeout.connect(partial(self.fetchPlaylist, self.playlistUrl))
self.QNAM4Playlist.finished.connect(self.handlePlaylist)
self.QNAM4Chunks.finished.connect(self.handleChunk)
self.QNAM4Exception.finished.connect(self.handleException)
#self.QNAM4Playlist = QgsNetworkAccessManager.instance()
#theReply2.error.connect(self.handleErrors)
# GUI
self.parent.progressBar.setRange(0,0)
self.parent.lblProcess.setText("Reading output playlist...")
def start(self):
""" Start fetching """
self.fetchPlaylist(self.playlistUrl) # First call
def stop(self):
""" Stop fetching """
self.timer.stop()
self.QNAM4Playlist.finished.disconnect(self.handlePlaylist)
self.QNAM4Chunks.finished.disconnect(self.handleChunk)
self.removeTempGeometry(self.__geometryType)
if self.DEBUG: print "Stop streaming!"
def validateCompletedStream(self):
""" Is the stream complete (Did the end tag appeared?) """
#return (self.__loadedChunks >= self.chunks and self.chunks != 0)
return self.__playlistFinished
def allChunksDelivered(self):
""" Are all chunks already loaded into the map? """
return ((self.__loadedChunks == self.__deliveredChunks and
self.__playlistFinished) or self.__exceptionFound)
def fetchPlaylist(self, playlistLink):
url = QUrl(playlistLink)
self.QNAM4Playlist.get(QNetworkRequest(url)) # SLOT: handlePlaylist
def handlePlaylist(self, reply):
""" Parse the chunk URLs and update the loadedChunks counter """
# Check if there is redirection
reDir = reply.attribute(QNetworkRequest.RedirectionTargetAttribute).toUrl()
if not reDir.isEmpty():
self.fetchPlaylist(reDir.toString())
return
# Parse URLs only if there is new data in the reply
if reply.bytesAvailable() > self.__bytesInlastReply:
if self.DEBUG: print " Parsing the playlist..."
startFrom = reply.bytesAvailable() - self.__bytesInlastReply # Delta in bytes
self.__bytesInlastReply = reply.bytesAvailable()
newURLs = self.parseURLs(reply, startFrom)
else:
if self.DEBUG: print " No new data in the playlist..."
newURLs = {}
# Store new URLs
if len(newURLs) > 0:
self.__loadedChunks += len(newURLs)
if self.chunks:
self.parent.progressBar.setRange(0,self.chunks)
if self.DEBUG: print str(self.__loadedChunks) + " chunks loaded" + ((" out of " + str(self.chunks)) if self.chunks else "")
# If not complete, make additional calls
if not self.validateCompletedStream():
if not self.timer.isActive():
self.timer.start()
if self.DEBUG: print "Timer started..."
else:
self.timer.stop()
self.QNAM4Playlist.finished.disconnect(self.handlePlaylist)
if self.DEBUG: print "Playlist finished!"
if self.allChunksDelivered():
self.finishLoading()
if self.__exceptionFound:
self.fetchException()
if len(newURLs) > 0:
self.playlistHandled.emit(newURLs) # SLOT: fetchChunks
def parseURLs(self, reply, startFrom):
""" Get a dict of new IDs:URLs from the current playlist (newURLs) """
newURLs = {} # {0:URL0, 1:URL1, ...}
count = 0
#Get the delta and start reading it
allData = reply.readAll()
allData = allData.right(startFrom) # Get rid of old data
response = QTextStream(allData, QIODevice.ReadOnly)
data = response.readLine()
# Parse
while (data):
data = str(data.split("\n")[0])
if data:
if "#" in data: # It's a playlist comment
if self.__endTag in data:
self.__playlistFinished = True
elif self.__exceptionTag in data:
if self.DEBUG: print "Exception found!"
self.__exceptionFound = True
self.__exceptionUrl = data.split(":",1)[1].strip()
else:
newURLs[count+self.__loadedChunks] = data
count += 1
data = response.readLine()
return newURLs
def fetchChunks(self, newURLs):
""" Fetch each url """
for chunkId in newURLs:
self.urlReady.emit(self.encoding, chunkId, newURLs[chunkId]) # SLOT: fetchResult
def fetchResult(self, encoding, chunkId, fileLink):
""" Send the GET request """
url = QUrl(fileLink)
theReply2 = self.QNAM4Chunks.get(QNetworkRequest(url))
theReply2.setProperty("chunkId", pystring(chunkId))
theReply2.setProperty("encoding", pystring(encoding))
def handleErrors(self, error): # TODO connect it
if self.DEBUG: print "ERROR!!!", error
def fetchException(self):
""" Send the GET request for the exception """
url = QUrl(self.__exceptionUrl)
theReply3 = self.QNAM4Exception.get(QNetworkRequest(url))
def handleException(self, reply):
""" Display the exception """
# Check if there is redirection
reDir = reply.attribute(QNetworkRequest.RedirectionTargetAttribute).toUrl()
if not reDir.isEmpty():
self.__exceptionUrl = reDir.toString()
self.fetchException()
return
resultXML = reply.readAll().data()
self.parent.setStatusLabel('error')
self.parent.progressBar.setMinimum(0)
self.parent.progressBar.setMaximum(100)
self.parent.errorHandler(resultXML)
def handleChunk(self, reply):
""" Store the file received """
#reply.deleteLater() # Recommended way to delete the reply
chunkId = reply.property("chunkId").toInt()[0]
encoding = reply.property("encoding").toString()
# Check if there is redirection
reDir = reply.attribute(QNetworkRequest.RedirectionTargetAttribute).toUrl()
if not reDir.isEmpty():
self.urlReady.emit(encoding, chunkId, reDir.toString())
return
if self.DEBUG: print "GET chunk", chunkId
# Update progressBar
if self.chunks:
self.parent.progressBar.setValue(self.__deliveredChunks + 1)
self.parent.lblProcess.setText("Downloading chunks... ("+str(self.__deliveredChunks + 1)+"/"+str(self.chunks)+")")
# Get a unique temporary file name
tmpFile = tempfile.NamedTemporaryFile(prefix="base64",
suffix=getFileExtension(self.mimeType), dir=self.__chunksDir, delete=False )
# TODO: Check if the file name already exists!!!
# Write the data to the temporary file
outFile = QFile(tmpFile.name)
outFile.open(QIODevice.WriteOnly)
outFile.write(reply.readAll())
outFile.close()
# Decode?
if encoding == "base64":
resultFile = decodeBase64(tmpFile.name, self.mimeType, self.__chunksDir)
else:
resultFile = tmpFile.name
# Finally, load the data
if self.DEBUG: print "READY to be loaded (", resultFile, ", chunkId:", chunkId, ")"
self.dataReady.emit(resultFile, chunkId) # SLOT: loadData
def loadData(self, resultFile, chunkId):
""" Load data to the map """
if isMimeTypeVector(self.mimeType, True) != None:
# Memory layer:
geometryTypes = ["Point","LineString","Polygon","Unknown", "NoGeometry"]
vlayer = QgsVectorLayer(resultFile, "chunk", "ogr")
if self.__bFirstChunk:
self.__bFirstChunk = False
self.__geometryType = geometryTypes[vlayer.geometryType()]
self.__bGeomMulti = vlayer.wkbType() in [4,5,6,11,12,13]
self.__memoryLayer = QgsVectorLayer(self.__geometryType,"Streamed data","memory")
self.__memoryLayer.dataProvider().addAttributes(vlayer.pendingFields().values())
self.__memoryLayer.updateFieldMap()
provider = vlayer.dataProvider()
allAttrs = provider.attributeIndexes()
vlayer.select(allAttrs)
# Visualize temporal geometries during the downloading process
# Don't add temporal geometries if last chunk
if self.DEBUG: print "Loaded chunkId:",chunkId
res = self.__memoryLayer.dataProvider().addFeatures( [feat for feat in vlayer] )
self.__deliveredChunks += 1
if not self.allChunksDelivered():
inFeat = QgsFeature()
inGeom = QgsGeometry()
self.createTempGeometry(chunkId, self.__geometryType)
while provider.nextFeature( inFeat ):
inGeom = inFeat.geometry()
featList = self.extractAsSingle(self.__geometryType, inGeom) if self.__bGeomMulti else [inGeom]
for geom in featList:
self.addTempGeometry(chunkId, self.__geometryType, geom)
else:
self.finishLoading()
# Raster data
elif isMimeTypeRaster(self.mimeType, True) != None:
# We can directly attach the new layer
if self.__bFirstChunk:
self.__bFirstChunk = False
self.__groupIndex = self.__legend.addGroup("Streamed-raster")
rLayer = QgsRasterLayer(resultFile, "raster_"+str(chunkId))
bLoaded = QgsMapLayerRegistry.instance().addMapLayer(rLayer)
self.stretchRaster(rLayer)
self.__legend.moveLayer(rLayer, self.__groupIndex + 1)
self.__deliveredChunks += 1
if self.allChunksDelivered():
self.finishLoading()
def finishLoading(self):
""" Finish the loading process, load the definite assembled layer """
if self.DEBUG: p |
if not self.__bFirstChunk:
if isMimeTypeVector(self.mimeType, True) != None:
self.removeTempGeometry(self.__geometryType)
QgsMapLayerRegistry.instance().addMapLayer(self.__memoryLayer)
elif isMimeTypeRaster(self.mimeType, True) != None:
self.parent.lblProcess.setText("All tiles are loaded. Merging them...")
# Generate gdal virtual raster
# Code adapted from GdalTools (C) 2009 by L. Masini and G. Sucameli (Faunalia)
self.process = QProcess(self)
self.connect(self.process, SIGNAL("finished(int, QProcess::ExitStatus)"),
self.loadVirtualRaster)
#self.setProcessEnvironment(self.process) Required in Windows?
cmd = "gdalbuildvrt"
arguments = pystringlist()
if platform.system() == "Windows" and cmd[-3:] == ".py":
command = cmd[:-3] + ".bat"
else:
command = cmd
tmpFile = tempfile.NamedTemporaryFile(prefix="virtual",
suffix=".vrt")
self.__virtualFile = tmpFile.name
arguments.append(self.__virtualFile)
rasters = self.getRasterFiles(self.__chunksDir,
getFileExtension(self.mimeType))
for raster in rasters:
arguments.append(raster)
self.process.start(command, arguments, QIODevice.ReadOnly)
if not self.__exceptionFound:
self.parent.setStatusLabel('finished')
self.parent.progressBar.setRange(0,100)
self.parent.progressBar.setValue(100)
def createTempGeometry(self, chunkId, geometryType):
""" Create rubber bands for rapid visualization of geometries """
if geometryType == "Polygon":
self.__tmpGeometry[chunkId] = QgsRubberBand(self.iface.mapCanvas(), True)
self.__tmpGeometry[chunkId].setColor( QColor( 0,255,0,255 ) )
self.__tmpGeometry[chunkId].setWidth( 2 )
if self.DEBUG: print "rubberBand created"
elif geometryType == "LineString":
self.__tmpGeometry[chunkId] = QgsRubberBand(self.iface.mapCanvas(), False)
self.__tmpGeometry[chunkId].setColor( QColor( 255,121,48,255 ) )
self.__tmpGeometry[chunkId].setWidth( 3 )
elif geometryType == "Point":
# In the case of points, they will be added as vertex objects later
self.__tmpGeometry[chunkId] = []
def addTempGeometry(self, chunkId, geometryType, geometry):
""" Add geometries as rubber bands or vertex objects """
if geometryType == "Polygon" or geometryType == "LineString":
self.__tmpGeometry[chunkId].addGeometry(geometry, None)
elif geometryType == "Point":
vertex = QgsVertexMarker(self.iface.mapCanvas())
vertex.setCenter(geometry.asPoint())
vertex.setColor(QColor(0,255,0))
vertex.setIconSize(6)
vertex.setIconType(QgsVertexMarker.ICON_BOX) # or ICON_CROSS, ICON_X
vertex.setPenWidth(3)
self.__tmpGeometry[chunkId].append(vertex)
def removeTempGeometry(self, geometryType):
""" Remove rubber bands or vertex objects from the map """
if geometryType == "Polygon" or geometryType == "LineString":
for chunkId in self.__tmpGeometry.keys():
self.iface.mapCanvas().scene().removeItem(self.__tmpGeometry[chunkId])
del self.__tmpGeometry[chunkId]
elif geometryType == "Point":
for chunkId in self.__tmpGeometry.keys():
if len( self.__tmpGeometry[chunkId] ) > 0:
for vertex in self.__tmpGeometry[chunkId]:
self.iface.mapCanvas().scene().removeItem(vertex)
del vertex
def extractAsSingle(self, geometryType, geom):
""" Extract multi geometries as single ones.
Required because of a QGIS bug regarding multipolygons and rubber bands
"""
# Code adapted from QGIS fTools plugin, (C) 2008-2011 Carson Farmer
multi_geom = QgsGeometry()
temp_geom = []
if geometryType == "Point":
multi_geom = geom.asMultiPoint()
for i in multi_geom:
temp_geom.append( QgsGeometry().fromPoint ( i ) )
elif geometryType == "LineString":
multi_geom = geom.asMultiPolyline()
for i in multi_geom:
temp_geom.append( QgsGeometry().fromPolyline( i ) )
elif geometryType == "Polygon":
multi_geom = geom.asMultiPolygon()
for i in multi_geom:
temp_geom.append( QgsGeometry().fromPolygon( i ) )
return temp_geom
def loadVirtualRaster(self, exitCode, status):
""" Load a virtual raster to QGIS """
if exitCode == 0:
self.__legend.setGroupVisible( self.__groupIndex, False )
rLayer = QgsRasterLayer(self.__virtualFile, "virtual")
bLoaded = QgsMapLayerRegistry.instance().addMapLayer(rLayer)
self.stretchRaster(rLayer)
self.process.kill()
def stretchRaster(self, raster):
raster.setMinimumMaximumUsingLastExtent()
raster.setContrastEnhancementAlgorithm(1)
raster.triggerRepaint()
def setProcessEnvironment(self, process):
""" From GdalTools. Set environment variables for running gdalbuildvrt """
envvar_list = {
"PATH" : self.getGdalBinPath(),
"PYTHONPATH" : self.getGdalPymodPath()
}
if self.DEBUG: print envvar_list
sep = os.pathsep
for name, val in envvar_list.iteritems():
if val == None or val == "":
continue
envval = os.getenv(name)
if envval == None or envval == "":
envval = str(val)
elif not pystring( envval ).split( sep ).contains( val, Qt.CaseInsensitive ):
envval += "%s%s" % (sep, str(val))
else:
envval = None
if envval != None:
os.putenv( name, envval )
if False: # not needed because os.putenv() has already updated the environment for new child processes
env = QProcess.systemEnvironment()
if env.contains( QRegExp( "^%s=(.*)" % name, Qt.CaseInsensitive ) ):
env.replaceInStrings( QRegExp( "^%s=(.*)" % name, Qt.CaseInsensitive ), "%s=\\1%s%s" % (name, sep, gdalPath) )
else:
env << "%s=%s" % (name, val)
process.setEnvironment( env )
def getRasterFiles(self, dir, extension):
rasters = pystringlist()
for name in glob.glob(dir + '/*' + extension):
rasters.append(name)
return rasters
def getGdalBinPath(self):
""" Retrieves GDAL binaries location """
settings = QSettings()
return settings.value( "/GdalTools/gdalPath", pystring( "" ) ).toString()
def getGdalPymodPath(self):
""" Retrieves GDAL python modules location """
settings = QSettings()
return settings.value( "/GdalTools/gdalPymodPath", pystring( "" ) ).toString()
| rint "DONE!"
| conditional_block |