file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
HypothermicPresence.tsx | import React from "react";
import Analyzer, { Options } from "parser/core/Analyzer";
import SPELLS from "common/SPELLS";
import Statistic from "parser/ui/Statistic";
import { STATISTIC_ORDER } from "parser/ui/StatisticBox"; | import BoringSpellValue from "parser/ui/BoringSpellValue";
import RunicPowerTracker from "../runicpower/RunicPowerTracker";
/** reduces the Runic Power cost of your abilities by 35% for 8 sec */
/**
 * Hypothermic Presence (Death Knight talent): reduces the Runic Power cost
 * of abilities by 35% for 8 sec.  This analyzer is active only when the
 * talent is selected and surfaces the total Runic Power saved (as tracked
 * by RunicPowerTracker) in a statistic box.
 */
class HypothermicPresence extends Analyzer {
  static dependencies = {
    runicPowerTracker: RunicPowerTracker,
  };

  protected runicPowerTracker!: RunicPowerTracker;

  constructor(options: Options) {
    super(options);
    // Only run this module when the talent is actually taken.
    // (The original trailing `if (!this.active) { return; }` was a no-op —
    // a bare return as the last statement of the constructor — removed.)
    this.active = this.selectedCombatant.hasTalent(SPELLS.HYPOTHERMIC_PRESENCE_TALENT.id);
  }

  statistic() {
    return (
      <Statistic
        position={STATISTIC_ORDER.OPTIONAL(50)}
        size="flexible"
      >
        <BoringSpellValue
          spell={SPELLS.HYPOTHERMIC_PRESENCE_TALENT}
          value={`${this.runicPowerTracker.totalHypothermicPresenceReduction}`}
          label="Runic Power saved"
        />
      </Statistic>
    );
  }
}
export default HypothermicPresence; | random_line_split |
|
HypothermicPresence.tsx | import React from "react";
import Analyzer, { Options } from "parser/core/Analyzer";
import SPELLS from "common/SPELLS";
import Statistic from "parser/ui/Statistic";
import { STATISTIC_ORDER } from "parser/ui/StatisticBox";
import BoringSpellValue from "parser/ui/BoringSpellValue";
import RunicPowerTracker from "../runicpower/RunicPowerTracker";
/** reduces the Runic Power cost of your abilities by 35% for 8 sec */
/**
 * Hypothermic Presence (Death Knight talent): reduces the Runic Power cost
 * of abilities by 35% for 8 sec.  Active only when the talent is selected;
 * shows the total Runic Power saved (tracked by RunicPowerTracker).
 *
 * Note: the `constructor` keyword was masked in this dump (the row's
 * identifier column names it `constructor`); restored here.
 */
class HypothermicPresence extends Analyzer {
  static dependencies = {
    runicPowerTracker: RunicPowerTracker,
  };

  protected runicPowerTracker!: RunicPowerTracker;

  constructor(options: Options) {
    super(options);
    // Only run this module when the talent is actually taken.
    this.active = this.selectedCombatant.hasTalent(SPELLS.HYPOTHERMIC_PRESENCE_TALENT.id);
  }

  statistic() {
    return (
      <Statistic
        position={STATISTIC_ORDER.OPTIONAL(50)}
        size="flexible"
      >
        <BoringSpellValue
          spell={SPELLS.HYPOTHERMIC_PRESENCE_TALENT}
          value={`${this.runicPowerTracker.totalHypothermicPresenceReduction}`}
          label="Runic Power saved"
        />
      </Statistic>
    );
  }
}
export default HypothermicPresence;
| constructor | identifier_name |
non_ts_pseudo_class_list.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* This file contains a helper macro includes all supported non-tree-structural
* pseudo-classes.
*
* FIXME: Find a way to autogenerate this file.
*
* Expected usage is as follows:
* ```
* macro_rules! pseudo_class_macro{
* (bare: [$(($css:expr, $name:ident, $gecko_type:tt, $state:tt, $flags:tt),)*],
* string: [$(($s_css:expr, $s_name:ident, $s_gecko_type:tt, $s_state:tt, $s_flags:tt),)*],
* keyword: [$(($k_css:expr, $k_name:ident, $k_gecko_type:tt, $k_state:tt, $k_flags:tt),)*]) => {
* // do stuff
* }
* }
* apply_non_ts_list!(pseudo_class_macro)
* ```
*
* The `string` and `keyword` variables will be applied to pseudoclasses that are of the form of
* functions with string or keyword arguments.
*
* Pending pseudo-classes:
*
* :scope -> <style scoped>, pending discussion.
*
* This follows the order defined in layout/style/nsCSSPseudoClassList.h when
* possible.
*
* $gecko_type can be either "_" or an ident in Gecko's CSSPseudoClassType.
* $state can be either "_" or an expression of type ElementState. If present,
* the semantics are that the pseudo-class matches if any of the bits in
* $state are set on the element.
* $flags can be either "_" or an expression of type NonTSPseudoClassFlag,
* see selector_parser.rs for more details.
*/
macro_rules! apply_non_ts_list {
($apply_macro:ident) => {
$apply_macro! {
bare: [
("-moz-table-border-nonzero", MozTableBorderNonzero, mozTableBorderNonzero, _, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS),
("-moz-browser-frame", MozBrowserFrame, mozBrowserFrame, _, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("link", Link, link, IN_UNVISITED_STATE, _),
("any-link", AnyLink, anyLink, IN_VISITED_OR_UNVISITED_STATE, _), | ("focus", Focus, focus, IN_FOCUS_STATE, _),
("focus-within", FocusWithin, focusWithin, IN_FOCUS_WITHIN_STATE, _),
("hover", Hover, hover, IN_HOVER_STATE, _),
("-moz-drag-over", MozDragOver, mozDragOver, IN_DRAGOVER_STATE, _),
("target", Target, target, IN_TARGET_STATE, _),
("indeterminate", Indeterminate, indeterminate, IN_INDETERMINATE_STATE, _),
("-moz-devtools-highlighted", MozDevtoolsHighlighted, mozDevtoolsHighlighted, IN_DEVTOOLS_HIGHLIGHTED_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS),
("-moz-styleeditor-transitioning", MozStyleeditorTransitioning, mozStyleeditorTransitioning, IN_STYLEEDITOR_TRANSITIONING_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS),
("fullscreen", Fullscreen, fullscreen, IN_FULLSCREEN_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("-moz-full-screen", MozFullScreen, mozFullScreen, IN_FULLSCREEN_STATE, _),
// TODO(emilio): This is inconsistently named (the capital R).
("-moz-focusring", MozFocusRing, mozFocusRing, IN_FOCUSRING_STATE, _),
("-moz-broken", MozBroken, mozBroken, IN_BROKEN_STATE, _),
("-moz-loading", MozLoading, mozLoading, IN_LOADING_STATE, _),
("-moz-suppressed", MozSuppressed, mozSuppressed, IN_SUPPRESSED_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("-moz-has-dir-attr", MozHasDirAttr, mozHasDirAttr, IN_HAS_DIR_ATTR_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS),
("-moz-dir-attr-ltr", MozDirAttrLTR, mozDirAttrLTR, IN_HAS_DIR_ATTR_LTR_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS),
("-moz-dir-attr-rtl", MozDirAttrRTL, mozDirAttrRTL, IN_HAS_DIR_ATTR_RTL_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS),
("-moz-dir-attr-like-auto", MozDirAttrLikeAuto, mozDirAttrLikeAuto, IN_HAS_DIR_ATTR_LIKE_AUTO_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS),
("-moz-autofill", MozAutofill, mozAutofill, IN_AUTOFILL_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("-moz-autofill-preview", MozAutofillPreview, mozAutofillPreview, IN_AUTOFILL_PREVIEW_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("-moz-handler-clicktoplay", MozHandlerClickToPlay, mozHandlerClickToPlay, IN_HANDLER_CLICK_TO_PLAY_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("-moz-handler-vulnerable-updatable", MozHandlerVulnerableUpdatable, mozHandlerVulnerableUpdatable, IN_HANDLER_VULNERABLE_UPDATABLE_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("-moz-handler-vulnerable-no-update", MozHandlerVulnerableNoUpdate, mozHandlerVulnerableNoUpdate, IN_HANDLER_VULNERABLE_NO_UPDATE_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("-moz-handler-disabled", MozHandlerDisabled, mozHandlerDisabled, IN_HANDLER_DISABLED_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("-moz-handler-blocked", MozHandlerBlocked, mozHandlerBlocked, IN_HANDLER_BLOCKED_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("-moz-handler-crashed", MozHandlerCrashed, mozHandlerCrashed, IN_HANDLER_CRASHED_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("-moz-math-increment-script-level", MozMathIncrementScriptLevel, mozMathIncrementScriptLevel, IN_INCREMENT_SCRIPT_LEVEL_STATE, _),
("required", Required, required, IN_REQUIRED_STATE, _),
("optional", Optional, optional, IN_OPTIONAL_STATE, _),
("valid", Valid, valid, IN_VALID_STATE, _),
("invalid", Invalid, invalid, IN_INVALID_STATE, _),
("in-range", InRange, inRange, IN_INRANGE_STATE, _),
("out-of-range", OutOfRange, outOfRange, IN_OUTOFRANGE_STATE, _),
("default", Default, defaultPseudo, IN_DEFAULT_STATE, _),
("placeholder-shown", PlaceholderShown, placeholderShown, IN_PLACEHOLDER_SHOWN_STATE, _),
("-moz-read-only", MozReadOnly, mozReadOnly, IN_MOZ_READONLY_STATE, _),
("-moz-read-write", MozReadWrite, mozReadWrite, IN_MOZ_READWRITE_STATE, _),
("-moz-submit-invalid", MozSubmitInvalid, mozSubmitInvalid, IN_MOZ_SUBMITINVALID_STATE, _),
("-moz-ui-valid", MozUIValid, mozUIValid, IN_MOZ_UI_VALID_STATE, _),
("-moz-ui-invalid", MozUIInvalid, mozUIInvalid, IN_MOZ_UI_INVALID_STATE, _),
("-moz-meter-optimum", MozMeterOptimum, mozMeterOptimum, IN_OPTIMUM_STATE, _),
("-moz-meter-sub-optimum", MozMeterSubOptimum, mozMeterSubOptimum, IN_SUB_OPTIMUM_STATE, _),
("-moz-meter-sub-sub-optimum", MozMeterSubSubOptimum, mozMeterSubSubOptimum, IN_SUB_SUB_OPTIMUM_STATE, _),
("-moz-user-disabled", MozUserDisabled, mozUserDisabled, IN_USER_DISABLED_STATE, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS_AND_CHROME),
("-moz-first-node", MozFirstNode, firstNode, _, _),
("-moz-last-node", MozLastNode, lastNode, _, _),
("-moz-only-whitespace", MozOnlyWhitespace, mozOnlyWhitespace, _, _),
("-moz-native-anonymous", MozNativeAnonymous, mozNativeAnonymous, _, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS),
("-moz-use-shadow-tree-root", MozUseShadowTreeRoot, mozUseShadowTreeRoot, _, PSEUDO_CLASS_ENABLED_IN_UA_SHEETS),
("-moz-is-html", MozIsHTML, mozIsHTML, _, _),
("-moz-placeholder", MozPlaceholder, mozPlaceholder, _, _),
("-moz-lwtheme", MozLWTheme, mozLWTheme, _, _),
("-moz-lwtheme-brighttext", MozLWThemeBrightText, mozLWThemeBrightText, _, _),
("-moz-lwtheme-darktext", MozLWThemeDarkText, mozLWThemeDarkText, _, _),
("-moz-window-inactive", MozWindowInactive, mozWindowInactive, _, _),
],
string: [
("lang", Lang, lang, _, _),
]
}
}
} | ("visited", Visited, visited, IN_VISITED_STATE, _),
("active", Active, active, IN_ACTIVE_STATE, _),
("checked", Checked, checked, IN_CHECKED_STATE, _),
("disabled", Disabled, disabled, IN_DISABLED_STATE, _),
("enabled", Enabled, enabled, IN_ENABLED_STATE, _), | random_line_split |
view-pulse.js | var Gelato_Pulse = {
form : null,
// Wire up the pulse submission form: inject a hidden "ss_synctime" field,
// pause playback while the user types, and stamp the current (rounded)
// media time into the hidden field when the form is submitted.
init: function() {
Gelato_Pulse.form = jQuery( '.pulse-form' );
// Hidden input that carries the media timestamp with the form post.
jQuery('<input />')
.attr( 'type', 'hidden' )
.attr( 'class', 'ss_synctime' )
.attr( 'name', 'ss_synctime' )
.appendTo( Gelato_Pulse.form );
// Typing a pulse pauses playback so the captured timestamp stays meaningful.
Gelato_Pulse.form.find( 'textarea').keyup( function() {
Gelato_Media.pauseForModule();
} );
Gelato_Pulse.form.submit( function( e ) {
console.log( Gelato_Media.media.roundTime()); // NOTE(review): debug leftover?
jQuery('.ss_synctime').val( Gelato_Media.media.roundTime() );
Gelato_Media.playForModule();
} );
},
// DOMContentLoaded hook (registered at the bottom of this file): subscribe
// to live stream pushes and defer pulse/bookmark rendering until the media
// metadata (duration) is available.
onContentLoad: function() {
if ( typeof CTLT_Stream != 'undefined' ) { // Check for stream activity
CTLT_Stream.on( 'server-push', Gelato_Pulse.listen );
}
// Both renderers call Gelato_Media.media.duration(), hence loadedmetadata.
Gelato_Media.media.on( 'loadedmetadata', Gelato_Pulse.loadPulses );
Gelato_Media.media.on( 'loadedmetadata', Gelato_Pulse.loadMarkers );
},
// Stream handler: render pulses pushed by the server.  `data.data` is a
// JSON-encoded pulse object; pass sort=true so it is inserted in time order.
listen: function( data ) {
if ( data.type == 'pulse' ) { // We are interested
var pulse_data = jQuery.parseJSON(data.data);
Gelato_Pulse.addPulse( pulse_data, pulse_data.synctime, true );
}
},
loadMarkers: function() {
if( typeof gelatoScoop.bookmarks != 'undefined') {
for ( index in gelatoScoop.bookmarks.list ) {
var bookmark = gelatoScoop.bookmarks.list[index];
Gelato_Media.media.pulse( {
start: bookmark.synctime,
end: Gelato_Media.media.duration(),
text: '<a class="bookmark" onclick="Gelato_Media.skipTo('+bookmark.synctime+');">'+bookmark.title+'<span class="time">'+bookmark.time+'</span></a>',
sort: true,
target: "pulse-list",
} );
}
}
},
loadPulses: function() {
var list = gelatoScoop.pulse;
console.log(list); | for ( index in list ) {
Gelato_Pulse.addPulse( list[index], list[index].synctime, false );
}
},
// Render a single pulse from `start` until the end of the media.  `sort`
// controls whether it is inserted in time order (live pushes) or appended
// as-is (initial page load).
addPulse: function( data, start, sort ) {
var new_pulse = Pulse_CPT_Form.single_pulse_template( data );
Gelato_Media.media.pulse( {
start: start,
end: Gelato_Media.media.duration(),
text: new_pulse,
sort: sort,
target: "pulse-list",
} );
},
}
Gelato_Pulse.init();
document.addEventListener( "DOMContentLoaded", Gelato_Pulse.onContentLoad, false ); | random_line_split |
|
view-pulse.js | var Gelato_Pulse = {
form : null,
init: function() {
Gelato_Pulse.form = jQuery( '.pulse-form' );
jQuery('<input />')
.attr( 'type', 'hidden' )
.attr( 'class', 'ss_synctime' )
.attr( 'name', 'ss_synctime' )
.appendTo( Gelato_Pulse.form );
Gelato_Pulse.form.find( 'textarea').keyup( function() {
Gelato_Media.pauseForModule();
} );
Gelato_Pulse.form.submit( function( e ) {
console.log( Gelato_Media.media.roundTime());
jQuery('.ss_synctime').val( Gelato_Media.media.roundTime() );
Gelato_Media.playForModule();
} );
},
onContentLoad: function() {
if ( typeof CTLT_Stream != 'undefined' ) { // Check for stream activity
CTLT_Stream.on( 'server-push', Gelato_Pulse.listen );
}
Gelato_Media.media.on( 'loadedmetadata', Gelato_Pulse.loadPulses );
Gelato_Media.media.on( 'loadedmetadata', Gelato_Pulse.loadMarkers );
},
listen: function( data ) {
if ( data.type == 'pulse' ) |
},
loadMarkers: function() {
if( typeof gelatoScoop.bookmarks != 'undefined') {
for ( index in gelatoScoop.bookmarks.list ) {
var bookmark = gelatoScoop.bookmarks.list[index];
Gelato_Media.media.pulse( {
start: bookmark.synctime,
end: Gelato_Media.media.duration(),
text: '<a class="bookmark" onclick="Gelato_Media.skipTo('+bookmark.synctime+');">'+bookmark.title+'<span class="time">'+bookmark.time+'</span></a>',
sort: true,
target: "pulse-list",
} );
}
}
},
loadPulses: function() {
var list = gelatoScoop.pulse;
console.log(list);
for ( index in list ) {
Gelato_Pulse.addPulse( list[index], list[index].synctime, false );
}
},
addPulse: function( data, start, sort ) {
var new_pulse = Pulse_CPT_Form.single_pulse_template( data );
Gelato_Media.media.pulse( {
start: start,
end: Gelato_Media.media.duration(),
text: new_pulse,
sort: sort,
target: "pulse-list",
} );
},
}
Gelato_Pulse.init();
document.addEventListener( "DOMContentLoaded", Gelato_Pulse.onContentLoad, false ); | { // We are interested
var pulse_data = jQuery.parseJSON(data.data);
Gelato_Pulse.addPulse( pulse_data, pulse_data.synctime, true );
} | conditional_block |
aurelia-loader.d.ts | declare module 'aurelia-loader' {
import * as core from 'core-js';
import { relativeToFile } from 'aurelia-path';
import { Origin } from 'aurelia-metadata';
/*eslint no-unused-vars:0*/
/**
 * Contract for a loader plugin: given a resource address, produce its
 * contents asynchronously.
 */
export interface LoaderPlugin {
fetch(address: string): Promise<any>;
}
/**
 * A dependency of a template, identified by its source path and an
 * optional local name.
 */
export class TemplateDependency {
constructor(src: string, name?: string);
}
/**
 * Registry entry tracking the loading state of a template together with
 * its dependencies, resources and view factory.
 */
export class TemplateRegistryEntry {
constructor(address: string);
/** True once the template element itself has been loaded. */
templateIsLoaded(): boolean;
/** True once the entry is fully ready for use. */
isReady(): boolean;
setTemplate(template: Element): void;
/** Register a dependency by source path or constructor function. */
addDependency(src: string | Function, name?: string): void;
setResources(resources: any): void;
setFactory(factory: any): void;
}
export class Loader { | loadAllModules(ids: string[]): Promise<any[]>;
loadTemplate(url: string): Promise<TemplateRegistryEntry>;
loadText(url: string): Promise<string>;
applyPluginToUrl(url: string, pluginName: string): string;
addPlugin(pluginName: string, implementation: LoaderPlugin): void;
getOrCreateTemplateRegistryEntry(id: string): TemplateRegistryEntry;
}
} | constructor();
loadModule(id: string): Promise<any>; | random_line_split |
aurelia-loader.d.ts | declare module 'aurelia-loader' {
import * as core from 'core-js';
import { relativeToFile } from 'aurelia-path';
import { Origin } from 'aurelia-metadata';
/*eslint no-unused-vars:0*/
export interface LoaderPlugin {
fetch(address: string): Promise<any>;
}
export class | {
constructor(src: string, name?: string);
}
export class TemplateRegistryEntry {
constructor(address: string);
templateIsLoaded(): boolean;
isReady(): boolean;
setTemplate(template: Element): void;
addDependency(src: string | Function, name?: string): void;
setResources(resources: any): void;
setFactory(factory: any): void;
}
/**
 * Abstraction over the module/template loading mechanism.  Resolves module
 * ids and template URLs, optionally routing them through named plugins.
 */
export class Loader {
constructor();
loadModule(id: string): Promise<any>;
loadAllModules(ids: string[]): Promise<any[]>;
loadTemplate(url: string): Promise<TemplateRegistryEntry>;
loadText(url: string): Promise<string>;
/** Rewrite `url` so it will be handled by the named plugin. */
applyPluginToUrl(url: string, pluginName: string): string;
addPlugin(pluginName: string, implementation: LoaderPlugin): void;
getOrCreateTemplateRegistryEntry(id: string): TemplateRegistryEntry;
}
} | TemplateDependency | identifier_name |
setup.py | from distutils.core import setup, Extension
# Build configuration for the C extension: header/library search paths and
# the system libjpeg the module links against.
include_dirs = ['/usr/include', '/usr/local/include']
library_dirs = ['/usr/lib', '/usr/local/lib']
libraries = ['jpeg']
runtime_library_dirs = []
extra_objects = []
define_macros = []
setup(name = "pyjpegoptim",
version = "0.1.1",
author = "Guangming Li",
author_email = "[email protected]",
license = "GPL",
description = 'a utility for optimizing JPEG files',
url = "https://github.com/cute/pyjpegoptim",
keywords = ['JpegOptim', 'TinyJpeg'],
packages = ["pyjpegoptim"],
ext_package = "pyjpegoptim",
# The accelerated core: a thin C wrapper around libjpeg.
ext_modules = [Extension( name = "jpegoptim",
sources = ["src/jpegoptim.c"],
include_dirs = include_dirs,
library_dirs = library_dirs,
runtime_library_dirs = runtime_library_dirs,
libraries = libraries,
extra_objects = extra_objects,
define_macros = define_macros
)],
) | random_line_split |
|
update.rs | #![feature(test)]
extern crate protocoll;
extern crate test;
use test::Bencher;
use protocoll::Map;
use std::collections::HashMap;
#[bench]
fn imperative(b: &mut Bencher) {
let sent = "a short treatise on fungi";
b.iter(|| {
let mut letters = HashMap::new();
for ch in sent.chars() {
let counter = letters.entry(ch).or_insert(0);
*counter += 1;
}
letters
})
}
#[bench]
fn functional(b: &mut Bencher) {
let sent = "a short treatise on fungi";
b.iter(|| sent.chars().fold(HashMap::new(), |m,c| m.update(c, |n| 1 + n.unwrap_or(0))))
} | let sent = "a short treatise on fungi";
b.iter(|| sent.chars().fold(HashMap::new(), |m,c| m.update_in_place(c, 0, |n| *n += 1)))
} |
#[bench]
fn in_place(b: &mut Bencher) { | random_line_split |
update.rs | #![feature(test)]
extern crate protocoll;
extern crate test;
use test::Bencher;
use protocoll::Map;
use std::collections::HashMap;
#[bench]
fn imperative(b: &mut Bencher) {
let sent = "a short treatise on fungi";
b.iter(|| {
let mut letters = HashMap::new();
for ch in sent.chars() {
let counter = letters.entry(ch).or_insert(0);
*counter += 1;
}
letters
})
}
#[bench]
fn functional(b: &mut Bencher) |
#[bench]
fn in_place(b: &mut Bencher) {
let sent = "a short treatise on fungi";
b.iter(|| sent.chars().fold(HashMap::new(), |m,c| m.update_in_place(c, 0, |n| *n += 1)))
}
| {
let sent = "a short treatise on fungi";
b.iter(|| sent.chars().fold(HashMap::new(), |m,c| m.update(c, |n| 1 + n.unwrap_or(0))))
} | identifier_body |
update.rs | #![feature(test)]
extern crate protocoll;
extern crate test;
use test::Bencher;
use protocoll::Map;
use std::collections::HashMap;
/// Benchmark: character-frequency count written imperatively with a
/// mutable `HashMap` and the entry API.
#[bench]
fn imperative(b: &mut Bencher) {
    let text = "a short treatise on fungi";
    b.iter(|| {
        let mut counts = HashMap::new();
        for c in text.chars() {
            *counts.entry(c).or_insert(0) += 1;
        }
        counts
    })
}
#[bench]
fn | (b: &mut Bencher) {
let sent = "a short treatise on fungi";
b.iter(|| sent.chars().fold(HashMap::new(), |m,c| m.update(c, |n| 1 + n.unwrap_or(0))))
}
/// Benchmark: counting via `Map::update_in_place`, mutating each entry's
/// counter directly instead of rebuilding the value.
#[bench]
fn in_place(b: &mut Bencher) {
let sent = "a short treatise on fungi";
b.iter(|| sent.chars().fold(HashMap::new(), |m,c| m.update_in_place(c, 0, |n| *n += 1)))
}
| functional | identifier_name |
module_geokick.py | # -*- coding: utf-8 -*-
import sys
import pygeoip
import os.path
import socket
import sqlite3
import time
import re
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def init(botconfig):
open_DB(True)
def open_DB(createTable=False, db="module_geokick.db"):
    """Open the module's sqlite database and return (connection, cursor).

    When createTable is true the 'exceptions' table is created if missing.
    (The function name was masked in this dump; restored to open_DB per the
    row's identifier column and every caller in this file.)
    """
    conn = sqlite3.connect(db)
    c = conn.cursor()
    if createTable:
        c.execute('CREATE TABLE IF NOT EXISTS exceptions (hostmask);')
        conn.commit()
    return conn, c
def command_geo_exempt(bot, user, channel, args):
""".geo_exempt nick!ident@hostname | Supports wildcards, for example *!*@*site.com (! and @ are required)"""
if get_op_status(user):
if not get_exempt_status(args):
if len(args) < 4:
conn, c = open_DB()
insert = "INSERT INTO exceptions VALUES ('" + args + "');"
c.execute(insert)
conn.commit()
conn.close()
bot.say(channel, "Success: " + args.encode('utf-8') + " added to exempt list.")
return True
else:
return bot.say(channel, "Error: invalid exempt. See .help geo_exempt")
else:
return bot.say(channel, "Error: exempt exists already!")
def command_geo_list(bot, user, channel, args):
if get_op_status(user):
conn, c = open_DB()
c.execute('SELECT hostmask FROM exceptions;')
rows = c.fetchall()
conn.close()
if rows:
excepts = str("")
for i in rows:
excepts += "[" + i[0] + "] "
return bot.say(channel, "Exceptions: " + excepts)
else:
return bot.say(channel, "Error: no exceptions added. See .help geo_exempt")
def command_geo_remove(bot, user, channel, args):
""".geo_remove hostname"""
if get_op_status(user):
conn, c = open_DB()
c.execute("SELECT hostmask FROM exceptions WHERE hostmask = '" + args + "'")
if c.fetchone():
conn, c = open_DB()
c.execute("DELETE FROM exceptions WHERE hostmask = '" + args + "'")
conn.commit()
conn.close()
bot.say(channel, "Success: exception removed.")
else:
bot.say(channel, "Error: hostmask not found. Check .geo_list for broader exempts that would override what you are trying to add.")
def get_op_status(user):
if isAdmin(user):
return True
else:
# käytetään authentikointiin qban_moduulin adminlistaa
conn, c = open_DB(db="module_qban_ops.db")
c.execute("SELECT hostmask FROM ops WHERE hostmask = '" + user + "' ")
if c.fetchone():
retval = True
else:
retval = False
conn.close()
return retval
# try to split user string as dictionary with nick, ident and hostname
def get_data(user):
try:
temp = user.split('@')[0]
data = {'nick':getNick(user), 'ident':temp.split('!')[1], 'host':user.split('@')[1] }
return data
except:
return False
#@todo blacklist = ['elisa-mobile.fi', 'nat-elisa-mobile.fi']
def get_exempt_status(user):
if isAdmin(user):
return True
else:
data = get_data(user)
if data:
conn, c = open_DB()
c.execute('SELECT hostmask FROM exceptions;')
rows = c.fetchall()
conn.close()
# iterate all hostmasks
for i in rows:
row = get_data(i[0])
j = 0
# check current row data against that of the user data
for row_value in row.values():
for data_value in data.values():
# if a wildcard or exact match
if row_value == "*" or ( row_value in data_value and "*" not in row_value ):
j += 1
break
# if contains a wildcard, we have to regex
elif "*" in row_value:
regex = re.escape(row_value)
regex = row_value.replace("*",".*")
if re.search(regex, data_value):
j += 1
break
# if counter reaches three, user matches exception list
if j == 3:
return True
return False
def handle_userJoined(bot, user, channel):
# if tested user is in exception list
if not get_exempt_status(user):
host = user.split('@')[1]
# attempt to get location data from the geoip database
try:
country = gi4.country_name_by_name(host)
except socket.gaierror:
country = None
# if country information was found & if it wasn't Finland
if country != "Finland" and country != "":
# grab nickname and hostname of the user
nick = getNick(user)
banmask = "*!*@" + host
banmask = banmask.encode('utf-8')
# ban & kick
bot.mode(channel, True, 'b', mask=banmask)
bot.kick(channel, nick, "Hosted from a banned country (" + country + ") or host (" + host + "). If you think you should have access, /msg lolfi .request_exempt")
# unban after 300s to avoid filling the banlist
time.sleep(300)
bot.mode(channel, False, 'b', mask=banmask)
def command_request_exempt(bot, user, channel, args):
if channel != "#projekti_lol":
nick = getNick(user)
bot.say("#projekti_lol".encode('utf-8'), "Notification: " + nick + " (" + user + ") requested and exempt.")
| open_DB | identifier_name |
module_geokick.py | # -*- coding: utf-8 -*-
import sys
import pygeoip
import os.path
import socket
import sqlite3
import time
import re
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def init(botconfig):
    # Bot-framework startup hook: ensure the exceptions table exists.
    open_DB(True)
def open_DB(createTable=False, db="module_geokick.db"):
    """Open the module's sqlite database and return (connection, cursor).

    When createTable is true the 'exceptions' table is created if it does
    not exist yet.
    """
    connection = sqlite3.connect(db)
    cursor = connection.cursor()
    if createTable:
        cursor.execute('CREATE TABLE IF NOT EXISTS exceptions (hostmask);')
        connection.commit()
    return connection, cursor
def command_geo_exempt(bot, user, channel, args):
    """.geo_exempt nick!ident@hostname | Supports wildcards, for example *!*@*site.com (! and @ are required)"""
    if get_op_status(user):
        if not get_exempt_status(args):
            # BUGFIX: the old check was `len(args) < 4`, which rejected every
            # documented mask (e.g. "*!*@*site.com", len 13) and accepted only
            # strings too short to contain "!" and "@".  Validate the
            # documented nick!ident@host format instead.
            if "!" in args and "@" in args:
                conn, c = open_DB()
                # Parameterized query: `args` is untrusted IRC input (the old
                # string-concatenated SQL was injectable).
                c.execute("INSERT INTO exceptions VALUES (?);", (args,))
                conn.commit()
                conn.close()
                bot.say(channel, "Success: " + args.encode('utf-8') + " added to exempt list.")
                return True
            else:
                return bot.say(channel, "Error: invalid exempt. See .help geo_exempt")
        else:
            return bot.say(channel, "Error: exempt exists already!")
def command_geo_list(bot, user, channel, args):
|
def command_geo_remove(bot, user, channel, args):
    """.geo_remove hostname"""
    if get_op_status(user):
        # One connection for both statements: the old code opened a second
        # connection for the DELETE and leaked the first.  Parameterized
        # queries because `args` is untrusted IRC input.
        conn, c = open_DB()
        c.execute("SELECT hostmask FROM exceptions WHERE hostmask = ?", (args,))
        if c.fetchone():
            c.execute("DELETE FROM exceptions WHERE hostmask = ?", (args,))
            conn.commit()
            conn.close()
            bot.say(channel, "Success: exception removed.")
        else:
            conn.close()
            bot.say(channel, "Error: hostmask not found. Check .geo_list for broader exempts that would override what you are trying to add.")
def get_op_status(user):
    """Return True when `user` is a bot admin or on the qban module's op list."""
    if isAdmin(user):
        return True
    # Authentication reuses the qban module's op list
    # (originally: "käytetään authentikointiin qban_moduulin adminlistaa").
    conn, c = open_DB(db="module_qban_ops.db")
    # Parameterized query: `user` comes straight from IRC.
    c.execute("SELECT hostmask FROM ops WHERE hostmask = ?", (user,))
    retval = c.fetchone() is not None
    conn.close()
    return retval
def get_data(user):
    """Try to split a "nick!ident@host" user string into a dict.

    Returns {'nick', 'ident', 'host'} on success, or False when the string
    is not in that form.
    """
    try:
        temp = user.split('@')[0]
        data = {'nick': getNick(user), 'ident': temp.split('!')[1], 'host': user.split('@')[1]}
        return data
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.  Malformed input still simply yields False.
        return False
#@todo blacklist = ['elisa-mobile.fi', 'nat-elisa-mobile.fi']
def get_exempt_status(user):
    """Return True when `user` (nick!ident@host) matches the exempt list.

    Admins are always exempt.  Each stored hostmask is split into
    nick/ident/host parts and all three parts must match some part of
    `user` — either as a bare "*", an exact substring, or a "*" wildcard
    pattern.
    """
    if isAdmin(user):
        return True
    data = get_data(user)
    if data:
        conn, c = open_DB()
        c.execute('SELECT hostmask FROM exceptions;')
        rows = c.fetchall()
        conn.close()
        # iterate all stored hostmasks
        for i in rows:
            # NOTE(review): assumes every stored mask parses; get_data()
            # returning False here would raise — confirm masks are validated
            # on insert.
            row = get_data(i[0])
            j = 0
            # check each part of the stored mask against the user's parts
            for row_value in row.values():
                for data_value in data.values():
                    # bare wildcard, or exact (substring) match
                    if row_value == "*" or (row_value in data_value and "*" not in row_value):
                        j += 1
                        break
                    # a pattern containing a wildcard: match via regex
                    elif "*" in row_value:
                        # BUGFIX: the escaped pattern was immediately
                        # overwritten by the raw replacement, so regex
                        # metacharacters in masks (e.g. ".") matched any
                        # character.  Escape first, then turn the escaped
                        # "*" into ".*".
                        regex = re.escape(row_value).replace('\\*', '.*')
                        if re.search(regex, data_value):
                            j += 1
                            break
            # all three parts (nick, ident, host) matched -> exempt
            if j == 3:
                return True
    return False
def handle_userJoined(bot, user, channel):
    """Ban and kick joining users whose host geolocates outside Finland.

    Users matching the exempt list are left alone.  The ban is lifted after
    300 seconds to keep the channel ban list short.
    """
    if not get_exempt_status(user):
        host = user.split('@')[1]
        # attempt to get location data from the geoip database
        try:
            country = gi4.country_name_by_name(host)
        except socket.gaierror:
            country = None
        # BUGFIX: `country` can be None (lookup failure); the old condition
        # (`country != "Finland" and country != ""`) then fell through and
        # crashed on the string concatenation in bot.kick() below.  Only act
        # on a real, non-Finnish country.
        if country and country != "Finland":
            nick = getNick(user)
            banmask = "*!*@" + host
            banmask = banmask.encode('utf-8')
            # ban & kick
            bot.mode(channel, True, 'b', mask=banmask)
            bot.kick(channel, nick, "Hosted from a banned country (" + country + ") or host (" + host + "). If you think you should have access, /msg lolfi .request_exempt")
            # unban after 300s to avoid filling the banlist
            # NOTE(review): this sleep blocks the handler for 5 minutes;
            # consider a scheduled callback instead — confirm the bot
            # framework runs handlers off the main loop.
            time.sleep(300)
            bot.mode(channel, False, 'b', mask=banmask)
def command_request_exempt(bot, user, channel, args):
    """Relay an exempt request from any other channel to #projekti_lol."""
    if channel == "#projekti_lol":
        return
    requester = getNick(user)
    bot.say("#projekti_lol".encode('utf-8'), "Notification: " + requester + " (" + user + ") requested and exempt.")
| if get_op_status(user):
conn, c = open_DB()
c.execute('SELECT hostmask FROM exceptions;')
rows = c.fetchall()
conn.close()
if rows:
excepts = str("")
for i in rows:
excepts += "[" + i[0] + "] "
return bot.say(channel, "Exceptions: " + excepts)
else:
return bot.say(channel, "Error: no exceptions added. See .help geo_exempt") | identifier_body |
module_geokick.py | # -*- coding: utf-8 -*-
import sys
import pygeoip
import os.path
import socket
import sqlite3
import time
import re
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def init(botconfig):
open_DB(True)
def open_DB(createTable=False, db="module_geokick.db"):
conn = sqlite3.connect(db)
c = conn.cursor()
if createTable:
c.execute('CREATE TABLE IF NOT EXISTS exceptions (hostmask);')
conn.commit()
return conn, c
def command_geo_exempt(bot, user, channel, args):
""".geo_exempt nick!ident@hostname | Supports wildcards, for example *!*@*site.com (! and @ are required)"""
if get_op_status(user):
if not get_exempt_status(args):
if len(args) < 4:
conn, c = open_DB()
insert = "INSERT INTO exceptions VALUES ('" + args + "');" | bot.say(channel, "Success: " + args.encode('utf-8') + " added to exempt list.")
return True
else:
return bot.say(channel, "Error: invalid exempt. See .help geo_exempt")
else:
return bot.say(channel, "Error: exempt exists already!")
def command_geo_list(bot, user, channel, args):
if get_op_status(user):
conn, c = open_DB()
c.execute('SELECT hostmask FROM exceptions;')
rows = c.fetchall()
conn.close()
if rows:
excepts = str("")
for i in rows:
excepts += "[" + i[0] + "] "
return bot.say(channel, "Exceptions: " + excepts)
else:
return bot.say(channel, "Error: no exceptions added. See .help geo_exempt")
def command_geo_remove(bot, user, channel, args):
""".geo_remove hostname"""
if get_op_status(user):
conn, c = open_DB()
c.execute("SELECT hostmask FROM exceptions WHERE hostmask = '" + args + "'")
if c.fetchone():
conn, c = open_DB()
c.execute("DELETE FROM exceptions WHERE hostmask = '" + args + "'")
conn.commit()
conn.close()
bot.say(channel, "Success: exception removed.")
else:
bot.say(channel, "Error: hostmask not found. Check .geo_list for broader exempts that would override what you are trying to add.")
def get_op_status(user):
if isAdmin(user):
return True
else:
# käytetään authentikointiin qban_moduulin adminlistaa
conn, c = open_DB(db="module_qban_ops.db")
c.execute("SELECT hostmask FROM ops WHERE hostmask = '" + user + "' ")
if c.fetchone():
retval = True
else:
retval = False
conn.close()
return retval
# try to split user string as dictionary with nick, ident and hostname
def get_data(user):
try:
temp = user.split('@')[0]
data = {'nick':getNick(user), 'ident':temp.split('!')[1], 'host':user.split('@')[1] }
return data
except:
return False
#@todo blacklist = ['elisa-mobile.fi', 'nat-elisa-mobile.fi']
def get_exempt_status(user):
if isAdmin(user):
return True
else:
data = get_data(user)
if data:
conn, c = open_DB()
c.execute('SELECT hostmask FROM exceptions;')
rows = c.fetchall()
conn.close()
# iterate all hostmasks
for i in rows:
row = get_data(i[0])
j = 0
# check current row data against that of the user data
for row_value in row.values():
for data_value in data.values():
# if a wildcard or exact match
if row_value == "*" or ( row_value in data_value and "*" not in row_value ):
j += 1
break
# if contains a wildcard, we have to regex
elif "*" in row_value:
regex = re.escape(row_value)
regex = row_value.replace("*",".*")
if re.search(regex, data_value):
j += 1
break
# if counter reaches three, user matches exception list
if j == 3:
return True
return False
def handle_userJoined(bot, user, channel):
# if tested user is in exception list
if not get_exempt_status(user):
host = user.split('@')[1]
# attempt to get location data from the geoip database
try:
country = gi4.country_name_by_name(host)
except socket.gaierror:
country = None
# if country information was found & if it wasn't Finland
if country != "Finland" and country != "":
# grab nickname and hostname of the user
nick = getNick(user)
banmask = "*!*@" + host
banmask = banmask.encode('utf-8')
# ban & kick
bot.mode(channel, True, 'b', mask=banmask)
bot.kick(channel, nick, "Hosted from a banned country (" + country + ") or host (" + host + "). If you think you should have access, /msg lolfi .request_exempt")
# unban after 300s to avoid filling the banlist
time.sleep(300)
bot.mode(channel, False, 'b', mask=banmask)
def command_request_exempt(bot, user, channel, args):
if channel != "#projekti_lol":
nick = getNick(user)
bot.say("#projekti_lol".encode('utf-8'), "Notification: " + nick + " (" + user + ") requested and exempt.") | c.execute(insert)
conn.commit()
conn.close() | random_line_split |
module_geokick.py | # -*- coding: utf-8 -*-
import sys
import pygeoip
import os.path
import socket
import sqlite3
import time
import re
DATAFILE = os.path.join(sys.path[0], "GeoIP.dat")
# STANDARD = reload from disk
# MEMORY_CACHE = load to memory
# MMAP_CACHE = memory using mmap
gi4 = pygeoip.GeoIP(DATAFILE, pygeoip.MEMORY_CACHE)
def init(botconfig):
open_DB(True)
def open_DB(createTable=False, db="module_geokick.db"):
conn = sqlite3.connect(db)
c = conn.cursor()
if createTable:
c.execute('CREATE TABLE IF NOT EXISTS exceptions (hostmask);')
conn.commit()
return conn, c
def command_geo_exempt(bot, user, channel, args):
""".geo_exempt nick!ident@hostname | Supports wildcards, for example *!*@*site.com (! and @ are required)"""
if get_op_status(user):
if not get_exempt_status(args):
if len(args) < 4:
conn, c = open_DB()
insert = "INSERT INTO exceptions VALUES ('" + args + "');"
c.execute(insert)
conn.commit()
conn.close()
bot.say(channel, "Success: " + args.encode('utf-8') + " added to exempt list.")
return True
else:
return bot.say(channel, "Error: invalid exempt. See .help geo_exempt")
else:
return bot.say(channel, "Error: exempt exists already!")
def command_geo_list(bot, user, channel, args):
if get_op_status(user):
|
def command_geo_remove(bot, user, channel, args):
""".geo_remove hostname"""
if get_op_status(user):
conn, c = open_DB()
c.execute("SELECT hostmask FROM exceptions WHERE hostmask = '" + args + "'")
if c.fetchone():
conn, c = open_DB()
c.execute("DELETE FROM exceptions WHERE hostmask = '" + args + "'")
conn.commit()
conn.close()
bot.say(channel, "Success: exception removed.")
else:
bot.say(channel, "Error: hostmask not found. Check .geo_list for broader exempts that would override what you are trying to add.")
def get_op_status(user):
if isAdmin(user):
return True
else:
# käytetään authentikointiin qban_moduulin adminlistaa
conn, c = open_DB(db="module_qban_ops.db")
c.execute("SELECT hostmask FROM ops WHERE hostmask = '" + user + "' ")
if c.fetchone():
retval = True
else:
retval = False
conn.close()
return retval
# try to split user string as dictionary with nick, ident and hostname
def get_data(user):
try:
temp = user.split('@')[0]
data = {'nick':getNick(user), 'ident':temp.split('!')[1], 'host':user.split('@')[1] }
return data
except:
return False
#@todo blacklist = ['elisa-mobile.fi', 'nat-elisa-mobile.fi']
def get_exempt_status(user):
if isAdmin(user):
return True
else:
data = get_data(user)
if data:
conn, c = open_DB()
c.execute('SELECT hostmask FROM exceptions;')
rows = c.fetchall()
conn.close()
# iterate all hostmasks
for i in rows:
row = get_data(i[0])
j = 0
# check current row data against that of the user data
for row_value in row.values():
for data_value in data.values():
# if a wildcard or exact match
if row_value == "*" or ( row_value in data_value and "*" not in row_value ):
j += 1
break
# if contains a wildcard, we have to regex
elif "*" in row_value:
regex = re.escape(row_value)
regex = row_value.replace("*",".*")
if re.search(regex, data_value):
j += 1
break
# if counter reaches three, user matches exception list
if j == 3:
return True
return False
def handle_userJoined(bot, user, channel):
# if tested user is in exception list
if not get_exempt_status(user):
host = user.split('@')[1]
# attempt to get location data from the geoip database
try:
country = gi4.country_name_by_name(host)
except socket.gaierror:
country = None
# if country information was found & if it wasn't Finland
if country != "Finland" and country != "":
# grab nickname and hostname of the user
nick = getNick(user)
banmask = "*!*@" + host
banmask = banmask.encode('utf-8')
# ban & kick
bot.mode(channel, True, 'b', mask=banmask)
bot.kick(channel, nick, "Hosted from a banned country (" + country + ") or host (" + host + "). If you think you should have access, /msg lolfi .request_exempt")
# unban after 300s to avoid filling the banlist
time.sleep(300)
bot.mode(channel, False, 'b', mask=banmask)
def command_request_exempt(bot, user, channel, args):
if channel != "#projekti_lol":
nick = getNick(user)
bot.say("#projekti_lol".encode('utf-8'), "Notification: " + nick + " (" + user + ") requested and exempt.")
| conn, c = open_DB()
c.execute('SELECT hostmask FROM exceptions;')
rows = c.fetchall()
conn.close()
if rows:
excepts = str("")
for i in rows:
excepts += "[" + i[0] + "] "
return bot.say(channel, "Exceptions: " + excepts)
else:
return bot.say(channel, "Error: no exceptions added. See .help geo_exempt") | conditional_block |
notebook-repos.component.ts | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ChangeDetectionStrategy, ChangeDetectorRef, Component, OnInit } from '@angular/core';
import { NotebookRepo } from '@zeppelin/interfaces';
import { NotebookRepoService } from '@zeppelin/services';
@Component({
selector: 'zeppelin-notebook-repos',
templateUrl: './notebook-repos.component.html',
styleUrls: ['./notebook-repos.component.less'],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class NotebookReposComponent implements OnInit {
repositories: NotebookRepo[] = [];
constructor(private notebookRepoService: NotebookRepoService, private cdr: ChangeDetectorRef) {}
ngOnInit() {
this.getRepos();
}
getRepos() {
this.notebookRepoService.getRepos().subscribe(data => {
this.repositories = data.sort((a, b) => a.name.charCodeAt(0) - b.name.charCodeAt(0));
this.cdr.markForCheck();
});
}
updateRepoSetting(repo: NotebookRepo) {
const data = {
name: repo.className, | data.settings[name] = selected;
});
this.notebookRepoService.updateRepo(data).subscribe(() => {
this.getRepos();
});
}
} | settings: {}
};
repo.settings.forEach(({ name, selected }) => { | random_line_split |
notebook-repos.component.ts | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ChangeDetectionStrategy, ChangeDetectorRef, Component, OnInit } from '@angular/core';
import { NotebookRepo } from '@zeppelin/interfaces';
import { NotebookRepoService } from '@zeppelin/services';
@Component({
selector: 'zeppelin-notebook-repos',
templateUrl: './notebook-repos.component.html',
styleUrls: ['./notebook-repos.component.less'],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class | implements OnInit {
repositories: NotebookRepo[] = [];
constructor(private notebookRepoService: NotebookRepoService, private cdr: ChangeDetectorRef) {}
ngOnInit() {
this.getRepos();
}
getRepos() {
this.notebookRepoService.getRepos().subscribe(data => {
this.repositories = data.sort((a, b) => a.name.charCodeAt(0) - b.name.charCodeAt(0));
this.cdr.markForCheck();
});
}
updateRepoSetting(repo: NotebookRepo) {
const data = {
name: repo.className,
settings: {}
};
repo.settings.forEach(({ name, selected }) => {
data.settings[name] = selected;
});
this.notebookRepoService.updateRepo(data).subscribe(() => {
this.getRepos();
});
}
}
| NotebookReposComponent | identifier_name |
notebook-repos.component.ts | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ChangeDetectionStrategy, ChangeDetectorRef, Component, OnInit } from '@angular/core';
import { NotebookRepo } from '@zeppelin/interfaces';
import { NotebookRepoService } from '@zeppelin/services';
@Component({
selector: 'zeppelin-notebook-repos',
templateUrl: './notebook-repos.component.html',
styleUrls: ['./notebook-repos.component.less'],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class NotebookReposComponent implements OnInit {
repositories: NotebookRepo[] = [];
constructor(private notebookRepoService: NotebookRepoService, private cdr: ChangeDetectorRef) {}
ngOnInit() |
getRepos() {
this.notebookRepoService.getRepos().subscribe(data => {
this.repositories = data.sort((a, b) => a.name.charCodeAt(0) - b.name.charCodeAt(0));
this.cdr.markForCheck();
});
}
updateRepoSetting(repo: NotebookRepo) {
const data = {
name: repo.className,
settings: {}
};
repo.settings.forEach(({ name, selected }) => {
data.settings[name] = selected;
});
this.notebookRepoService.updateRepo(data).subscribe(() => {
this.getRepos();
});
}
}
| {
this.getRepos();
} | identifier_body |
main.py | """A guestbook sample with sqlite3."""
import logging
import os
import jinja2
import sqlite3
import webapp2
from google.appengine.api import app_identity
from google.appengine.api import modules
from google.appengine.api import runtime
from google.appengine.api import users
from google.appengine.ext import ndb
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions=['jinja2.ext.autoescape'])
DB_FILENAME = os.path.join('/tmp', 'guestbook.sqlite')
CREATE_TABLE_SQL = """\
CREATE TABLE IF NOT EXISTS guestbook
(id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR, content VARCHAR)"""
SELECT_SQL = 'SELECT * FROM guestbook ORDER BY id DESC LIMIT {}'
INSERT_SQL = 'INSERT INTO guestbook (name, content) VALUES (?, ?)'
POST_PER_PAGE = 20
def shutdown_hook():
"""A hook function for de-registering myself."""
logging.info('shutdown_hook called.')
instance_id = modules.get_current_instance_id()
ndb.transaction(
lambda: ActiveServer.get_instance_key(instance_id).delete())
def get_connection():
"""A function to get sqlite connection.
Returns:
An sqlite connection object.
"""
logging.info('Opening a sqlite db.')
return sqlite3.connect(DB_FILENAME)
def get_url_for_instance(instance_id):
"""Return a full url of the guestbook running on a particular instance.
Args:
A string to represent an VM instance.
Returns:
URL string for the guestbook form on the instance.
"""
hostname = app_identity.get_default_version_hostname()
return 'https://{}-dot-{}-dot-{}/guestbook'.format(
instance_id, modules.get_current_version_name(), hostname)
def get_signin_navigation(original_url):
"""Return a pair of a link text and a link for sign in/out operation.
Args:
An original URL.
Returns:
Two value tuple; a url and a link text.
"""
if users.get_current_user():
url = users.create_logout_url(original_url)
url_linktext = 'Logout'
else:
url = users.create_login_url(original_url)
url_linktext = 'Login'
return url, url_linktext
class ActiveServer(ndb.Model):
"""A model to store active servers.
We use the instance id as the key name, and there are no properties.
"""
@classmethod
def get_instance_key(cls, instance_id):
"""Return a key for the given instance_id.
Args:
An instance id for the server.
Returns:
A Key object which has a common parent key with the name 'Root'.
"""
return ndb.Key(cls, 'Root', cls, instance_id)
class ListServers(webapp2.RequestHandler):
"""A handler for listing active servers."""
def get(self):
"""A get handler for listing active servers."""
key = ndb.Key(ActiveServer, 'Root')
query = ActiveServer.query(ancestor=key)
servers = []
for key in query.iter(keys_only=True):
|
template = JINJA_ENVIRONMENT.get_template('index.html')
url, url_linktext = get_signin_navigation(self.request.uri)
self.response.out.write(template.render(servers=servers,
url=url,
url_linktext=url_linktext))
class MainPage(webapp2.RequestHandler):
"""A handler for showing the guestbook form."""
def get(self):
"""Guestbook main page."""
con = get_connection()
con.row_factory = sqlite3.Row
cur = con.cursor()
cur.execute(SELECT_SQL.format(POST_PER_PAGE))
greetings = cur.fetchall()
con.close()
template = JINJA_ENVIRONMENT.get_template('guestbook.html')
url, url_linktext = get_signin_navigation(self.request.uri)
self.response.write(template.render(greetings=greetings,
url=url,
url_linktext=url_linktext))
class Guestbook(webapp2.RequestHandler):
"""A handler for storing a message."""
def post(self):
"""A handler for storing a message."""
author = ''
if users.get_current_user():
author = users.get_current_user().nickname()
con = get_connection()
with con:
con.execute(INSERT_SQL, (author, self.request.get('content')))
self.redirect('/guestbook')
class Start(webapp2.RequestHandler):
"""A handler for /_ah/start."""
def get(self):
"""A handler for /_ah/start, registering myself."""
runtime.set_shutdown_hook(shutdown_hook)
con = get_connection()
with con:
con.execute(CREATE_TABLE_SQL)
instance_id = modules.get_current_instance_id()
server = ActiveServer(key=ActiveServer.get_instance_key(instance_id))
server.put()
class Stop(webapp2.RequestHandler):
"""A handler for /_ah/stop."""
def get(self):
"""Just call shutdown_hook now for a temporary workaround.
With the initial version of the VM Runtime, a call to
/_ah/stop hits this handler, without invoking the shutdown
hook we registered in the start handler. We're working on the
fix to make it a consistent behavior same as the traditional
App Engine backends. After the fix is out, this stop handler
won't be necessary any more.
"""
shutdown_hook()
APPLICATION = webapp2.WSGIApplication([
('/', ListServers),
('/guestbook', MainPage),
('/sign', Guestbook),
('/_ah/start', Start),
('/_ah/stop', Stop),
], debug=True)
| instance_id = key.string_id()
servers.append((instance_id, get_url_for_instance(instance_id))) | conditional_block |
main.py | """A guestbook sample with sqlite3."""
import logging
import os
import jinja2
import sqlite3
import webapp2
from google.appengine.api import app_identity
from google.appengine.api import modules
from google.appengine.api import runtime
from google.appengine.api import users
from google.appengine.ext import ndb
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions=['jinja2.ext.autoescape'])
DB_FILENAME = os.path.join('/tmp', 'guestbook.sqlite')
CREATE_TABLE_SQL = """\
CREATE TABLE IF NOT EXISTS guestbook
(id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR, content VARCHAR)"""
SELECT_SQL = 'SELECT * FROM guestbook ORDER BY id DESC LIMIT {}'
INSERT_SQL = 'INSERT INTO guestbook (name, content) VALUES (?, ?)'
POST_PER_PAGE = 20
def shutdown_hook():
"""A hook function for de-registering myself."""
logging.info('shutdown_hook called.')
instance_id = modules.get_current_instance_id()
ndb.transaction(
lambda: ActiveServer.get_instance_key(instance_id).delete())
def get_connection():
"""A function to get sqlite connection.
Returns:
An sqlite connection object.
"""
logging.info('Opening a sqlite db.')
return sqlite3.connect(DB_FILENAME)
def get_url_for_instance(instance_id):
"""Return a full url of the guestbook running on a particular instance.
Args:
A string to represent an VM instance.
Returns:
URL string for the guestbook form on the instance.
"""
hostname = app_identity.get_default_version_hostname()
return 'https://{}-dot-{}-dot-{}/guestbook'.format(
instance_id, modules.get_current_version_name(), hostname)
def get_signin_navigation(original_url):
"""Return a pair of a link text and a link for sign in/out operation.
Args:
An original URL.
Returns:
Two value tuple; a url and a link text.
"""
if users.get_current_user():
url = users.create_logout_url(original_url)
url_linktext = 'Logout'
else:
url = users.create_login_url(original_url)
url_linktext = 'Login'
return url, url_linktext
class ActiveServer(ndb.Model):
"""A model to store active servers.
We use the instance id as the key name, and there are no properties.
"""
@classmethod
def | (cls, instance_id):
"""Return a key for the given instance_id.
Args:
An instance id for the server.
Returns:
A Key object which has a common parent key with the name 'Root'.
"""
return ndb.Key(cls, 'Root', cls, instance_id)
class ListServers(webapp2.RequestHandler):
"""A handler for listing active servers."""
def get(self):
"""A get handler for listing active servers."""
key = ndb.Key(ActiveServer, 'Root')
query = ActiveServer.query(ancestor=key)
servers = []
for key in query.iter(keys_only=True):
instance_id = key.string_id()
servers.append((instance_id, get_url_for_instance(instance_id)))
template = JINJA_ENVIRONMENT.get_template('index.html')
url, url_linktext = get_signin_navigation(self.request.uri)
self.response.out.write(template.render(servers=servers,
url=url,
url_linktext=url_linktext))
class MainPage(webapp2.RequestHandler):
"""A handler for showing the guestbook form."""
def get(self):
"""Guestbook main page."""
con = get_connection()
con.row_factory = sqlite3.Row
cur = con.cursor()
cur.execute(SELECT_SQL.format(POST_PER_PAGE))
greetings = cur.fetchall()
con.close()
template = JINJA_ENVIRONMENT.get_template('guestbook.html')
url, url_linktext = get_signin_navigation(self.request.uri)
self.response.write(template.render(greetings=greetings,
url=url,
url_linktext=url_linktext))
class Guestbook(webapp2.RequestHandler):
"""A handler for storing a message."""
def post(self):
"""A handler for storing a message."""
author = ''
if users.get_current_user():
author = users.get_current_user().nickname()
con = get_connection()
with con:
con.execute(INSERT_SQL, (author, self.request.get('content')))
self.redirect('/guestbook')
class Start(webapp2.RequestHandler):
"""A handler for /_ah/start."""
def get(self):
"""A handler for /_ah/start, registering myself."""
runtime.set_shutdown_hook(shutdown_hook)
con = get_connection()
with con:
con.execute(CREATE_TABLE_SQL)
instance_id = modules.get_current_instance_id()
server = ActiveServer(key=ActiveServer.get_instance_key(instance_id))
server.put()
class Stop(webapp2.RequestHandler):
"""A handler for /_ah/stop."""
def get(self):
"""Just call shutdown_hook now for a temporary workaround.
With the initial version of the VM Runtime, a call to
/_ah/stop hits this handler, without invoking the shutdown
hook we registered in the start handler. We're working on the
fix to make it a consistent behavior same as the traditional
App Engine backends. After the fix is out, this stop handler
won't be necessary any more.
"""
shutdown_hook()
APPLICATION = webapp2.WSGIApplication([
('/', ListServers),
('/guestbook', MainPage),
('/sign', Guestbook),
('/_ah/start', Start),
('/_ah/stop', Stop),
], debug=True)
| get_instance_key | identifier_name |
main.py | """A guestbook sample with sqlite3."""
import logging
import os
import jinja2
import sqlite3
import webapp2
from google.appengine.api import app_identity
from google.appengine.api import modules
from google.appengine.api import runtime
from google.appengine.api import users
from google.appengine.ext import ndb
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions=['jinja2.ext.autoescape'])
DB_FILENAME = os.path.join('/tmp', 'guestbook.sqlite')
CREATE_TABLE_SQL = """\
CREATE TABLE IF NOT EXISTS guestbook
(id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR, content VARCHAR)"""
SELECT_SQL = 'SELECT * FROM guestbook ORDER BY id DESC LIMIT {}'
INSERT_SQL = 'INSERT INTO guestbook (name, content) VALUES (?, ?)'
POST_PER_PAGE = 20
def shutdown_hook():
"""A hook function for de-registering myself."""
logging.info('shutdown_hook called.')
instance_id = modules.get_current_instance_id()
ndb.transaction(
lambda: ActiveServer.get_instance_key(instance_id).delete())
def get_connection():
"""A function to get sqlite connection.
Returns:
An sqlite connection object.
"""
logging.info('Opening a sqlite db.')
return sqlite3.connect(DB_FILENAME)
def get_url_for_instance(instance_id):
"""Return a full url of the guestbook running on a particular instance.
Args:
A string to represent an VM instance.
Returns:
URL string for the guestbook form on the instance.
"""
hostname = app_identity.get_default_version_hostname()
return 'https://{}-dot-{}-dot-{}/guestbook'.format(
instance_id, modules.get_current_version_name(), hostname)
def get_signin_navigation(original_url):
"""Return a pair of a link text and a link for sign in/out operation.
Args:
An original URL.
Returns:
Two value tuple; a url and a link text.
"""
if users.get_current_user():
url = users.create_logout_url(original_url)
url_linktext = 'Logout'
else:
url = users.create_login_url(original_url)
url_linktext = 'Login'
return url, url_linktext
class ActiveServer(ndb.Model):
"""A model to store active servers.
We use the instance id as the key name, and there are no properties.
"""
@classmethod
def get_instance_key(cls, instance_id):
"""Return a key for the given instance_id.
Args:
An instance id for the server.
Returns:
A Key object which has a common parent key with the name 'Root'.
"""
return ndb.Key(cls, 'Root', cls, instance_id)
class ListServers(webapp2.RequestHandler):
"""A handler for listing active servers."""
def get(self):
"""A get handler for listing active servers."""
key = ndb.Key(ActiveServer, 'Root')
query = ActiveServer.query(ancestor=key)
servers = []
for key in query.iter(keys_only=True):
instance_id = key.string_id()
servers.append((instance_id, get_url_for_instance(instance_id)))
template = JINJA_ENVIRONMENT.get_template('index.html')
url, url_linktext = get_signin_navigation(self.request.uri)
self.response.out.write(template.render(servers=servers,
url=url,
url_linktext=url_linktext))
class MainPage(webapp2.RequestHandler):
"""A handler for showing the guestbook form."""
def get(self):
"""Guestbook main page."""
con = get_connection()
con.row_factory = sqlite3.Row
cur = con.cursor()
cur.execute(SELECT_SQL.format(POST_PER_PAGE))
greetings = cur.fetchall()
con.close()
template = JINJA_ENVIRONMENT.get_template('guestbook.html')
url, url_linktext = get_signin_navigation(self.request.uri)
self.response.write(template.render(greetings=greetings,
url=url,
url_linktext=url_linktext))
class Guestbook(webapp2.RequestHandler):
|
class Start(webapp2.RequestHandler):
"""A handler for /_ah/start."""
def get(self):
"""A handler for /_ah/start, registering myself."""
runtime.set_shutdown_hook(shutdown_hook)
con = get_connection()
with con:
con.execute(CREATE_TABLE_SQL)
instance_id = modules.get_current_instance_id()
server = ActiveServer(key=ActiveServer.get_instance_key(instance_id))
server.put()
class Stop(webapp2.RequestHandler):
"""A handler for /_ah/stop."""
def get(self):
"""Just call shutdown_hook now for a temporary workaround.
With the initial version of the VM Runtime, a call to
/_ah/stop hits this handler, without invoking the shutdown
hook we registered in the start handler. We're working on the
fix to make it a consistent behavior same as the traditional
App Engine backends. After the fix is out, this stop handler
won't be necessary any more.
"""
shutdown_hook()
APPLICATION = webapp2.WSGIApplication([
('/', ListServers),
('/guestbook', MainPage),
('/sign', Guestbook),
('/_ah/start', Start),
('/_ah/stop', Stop),
], debug=True)
| """A handler for storing a message."""
def post(self):
"""A handler for storing a message."""
author = ''
if users.get_current_user():
author = users.get_current_user().nickname()
con = get_connection()
with con:
con.execute(INSERT_SQL, (author, self.request.get('content')))
self.redirect('/guestbook') | identifier_body |
main.py | """A guestbook sample with sqlite3."""
import logging
import os
import jinja2
import sqlite3
import webapp2
from google.appengine.api import app_identity
from google.appengine.api import modules
from google.appengine.api import runtime
from google.appengine.api import users
from google.appengine.ext import ndb
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions=['jinja2.ext.autoescape'])
DB_FILENAME = os.path.join('/tmp', 'guestbook.sqlite')
CREATE_TABLE_SQL = """\
CREATE TABLE IF NOT EXISTS guestbook
(id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR, content VARCHAR)"""
SELECT_SQL = 'SELECT * FROM guestbook ORDER BY id DESC LIMIT {}'
INSERT_SQL = 'INSERT INTO guestbook (name, content) VALUES (?, ?)'
POST_PER_PAGE = 20
def shutdown_hook():
"""A hook function for de-registering myself."""
logging.info('shutdown_hook called.')
instance_id = modules.get_current_instance_id()
ndb.transaction(
lambda: ActiveServer.get_instance_key(instance_id).delete())
def get_connection():
"""A function to get sqlite connection.
Returns:
An sqlite connection object.
"""
logging.info('Opening a sqlite db.')
return sqlite3.connect(DB_FILENAME)
def get_url_for_instance(instance_id):
"""Return a full url of the guestbook running on a particular instance.
Args:
A string to represent an VM instance.
Returns:
URL string for the guestbook form on the instance.
"""
hostname = app_identity.get_default_version_hostname()
return 'https://{}-dot-{}-dot-{}/guestbook'.format(
instance_id, modules.get_current_version_name(), hostname)
def get_signin_navigation(original_url):
"""Return a pair of a link text and a link for sign in/out operation.
Args:
An original URL.
Returns:
Two value tuple; a url and a link text.
"""
if users.get_current_user():
url = users.create_logout_url(original_url)
url_linktext = 'Logout'
else:
url = users.create_login_url(original_url)
url_linktext = 'Login'
return url, url_linktext
class ActiveServer(ndb.Model):
"""A model to store active servers.
We use the instance id as the key name, and there are no properties.
"""
@classmethod
def get_instance_key(cls, instance_id):
"""Return a key for the given instance_id.
Args:
An instance id for the server.
Returns:
A Key object which has a common parent key with the name 'Root'.
"""
return ndb.Key(cls, 'Root', cls, instance_id)
class ListServers(webapp2.RequestHandler):
"""A handler for listing active servers."""
def get(self):
"""A get handler for listing active servers."""
key = ndb.Key(ActiveServer, 'Root')
query = ActiveServer.query(ancestor=key)
servers = []
for key in query.iter(keys_only=True):
instance_id = key.string_id()
servers.append((instance_id, get_url_for_instance(instance_id))) | self.response.out.write(template.render(servers=servers,
url=url,
url_linktext=url_linktext))
class MainPage(webapp2.RequestHandler):
"""A handler for showing the guestbook form."""
def get(self):
"""Guestbook main page."""
con = get_connection()
con.row_factory = sqlite3.Row
cur = con.cursor()
cur.execute(SELECT_SQL.format(POST_PER_PAGE))
greetings = cur.fetchall()
con.close()
template = JINJA_ENVIRONMENT.get_template('guestbook.html')
url, url_linktext = get_signin_navigation(self.request.uri)
self.response.write(template.render(greetings=greetings,
url=url,
url_linktext=url_linktext))
class Guestbook(webapp2.RequestHandler):
"""A handler for storing a message."""
def post(self):
"""A handler for storing a message."""
author = ''
if users.get_current_user():
author = users.get_current_user().nickname()
con = get_connection()
with con:
con.execute(INSERT_SQL, (author, self.request.get('content')))
self.redirect('/guestbook')
class Start(webapp2.RequestHandler):
"""A handler for /_ah/start."""
def get(self):
"""A handler for /_ah/start, registering myself."""
runtime.set_shutdown_hook(shutdown_hook)
con = get_connection()
with con:
con.execute(CREATE_TABLE_SQL)
instance_id = modules.get_current_instance_id()
server = ActiveServer(key=ActiveServer.get_instance_key(instance_id))
server.put()
class Stop(webapp2.RequestHandler):
"""A handler for /_ah/stop."""
def get(self):
"""Just call shutdown_hook now for a temporary workaround.
With the initial version of the VM Runtime, a call to
/_ah/stop hits this handler, without invoking the shutdown
hook we registered in the start handler. We're working on the
fix to make it a consistent behavior same as the traditional
App Engine backends. After the fix is out, this stop handler
won't be necessary any more.
"""
shutdown_hook()
APPLICATION = webapp2.WSGIApplication([
('/', ListServers),
('/guestbook', MainPage),
('/sign', Guestbook),
('/_ah/start', Start),
('/_ah/stop', Stop),
], debug=True) | template = JINJA_ENVIRONMENT.get_template('index.html')
url, url_linktext = get_signin_navigation(self.request.uri) | random_line_split |
ImagePlugin.tsx | import * as React from 'react';
import * as draftjs from 'draft-js';
import { IContentStateConverter, HtmlEditorController, HtmlEditorPlugin } from "../HtmlEditor"
import { HtmlContentStateConverter } from '../HtmlContentStateConverter';
export interface ImageConverter<T extends object> {
uploadData(blob: Blob): Promise<T>;
renderImage(val: T): React.ReactElement;
toHtml(val: T): string | undefined;
fromElement(val: HTMLElement): T | undefined;
}
export default class ImagePlugin implements HtmlEditorPlugin{
constructor(public imageConverter: ImageConverter<any>) {
}
addImage(editorState: draftjs.EditorState, data: Object): draftjs.EditorState {
const contentState = editorState.getCurrentContent();
const contentStateWithEntity = contentState.createEntity(
'IMAGE',
'IMMUTABLE',
data
);
const entityKey = contentStateWithEntity.getLastCreatedEntityKey();
const newEditorState = draftjs.AtomicBlockUtils.insertAtomicBlock(
editorState,
entityKey,
' '
);
return draftjs.EditorState.forceSelection(
newEditorState,
newEditorState.getCurrentContent().getSelectionAfter()
);
}
expandConverter(converter: IContentStateConverter) {
if (converter instanceof HtmlContentStateConverter) {
const { draftToHtmlOptions, htmlToDraftOptions } = converter;
//@ts-ignore
var oldCustomEntityTransformer = draftToHtmlOptions.customEntityTransform;
draftToHtmlOptions.customEntityTransform = (entity, text) => {
if (oldCustomEntityTransformer) {
var result = oldCustomEntityTransformer(entity, text);
if (result)
return result;
}
return this.imageConverter.toHtml(entity.data);
};
var oldCustomChunkRenderer = htmlToDraftOptions.customChunkRenderer;
htmlToDraftOptions.customChunkRenderer = (nodeName, node) => {
if (oldCustomChunkRenderer) {
var result = oldCustomChunkRenderer(nodeName, node);
if (result != null)
return result;
}
var data = this.imageConverter.fromElement(node);
if (data != null) {
return {
type: "IMAGE",
data: data,
mutability: "IMMUTABLE"
};
}
return undefined;
}
}
}
expandEditorProps(props: draftjs.EditorProps, controller: HtmlEditorController) {
var oldRenderer = props.blockRendererFn;
props.blockRendererFn = (block) => {
if (oldRenderer) {
const result = oldRenderer(block);
if (result)
return result;
}
//if (block.getType() === 'atomic') {
const contentState = controller.editorState.getCurrentContent();
const entity = block.getEntityAt(0);
if (!entity)
return null;
const type = contentState.getEntity(entity).getType();
if (type === 'IMAGE') {
return {
component: ImageComponent,
editable: false,
props: { imageConverter: this.imageConverter }
};
}
//}
return null;
};
props.handlePastedFiles = files => {
const imageFiles = files.filter(a => a.type.startsWith("image/"));
if (imageFiles.length == 0)
return "not-handled";
Promise.all(imageFiles.map(blob => this.imageConverter.uploadData(blob)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), controller.editorState);
controller.setEditorState(newState);
}).done();
return "handled"
}
var oldPasteText = props.handlePastedText;
props.handlePastedText = (text, html, editorState) => {
if (html) {
var node = document.createElement('html')
node.innerHTML = html;
var array = Array.from(node.getElementsByTagName("img"));
if (array.length && array.every(a => a.src.startsWith("data:"))) {
var blobs = array.map(a => dataURItoBlob(a.src));
Promise.all(blobs.map(img => this.imageConverter.uploadData(img)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), controller.editorState);
controller.setEditorState(newState);
}).done();
return "handled";
}
}
if (oldPasteText)
return oldPasteText(text, html, editorState);
return "not-handled";
};
props.handleDroppedFiles = (selection, files) => {
const imageFiles = files.filter(a => a.type.startsWith("image/"));
if (imageFiles.length == 0)
return "not-handled";
const editorStateWithSelection = draftjs.EditorState.acceptSelection(controller.editorState, selection);
Promise.all(imageFiles.map(blob => this.imageConverter.uploadData(blob)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), editorStateWithSelection);
controller.setEditorState(newState);
}).done();
return "handled"
}
}
}
function dataURItoBlob(dataURI: string) {
// convert base64 to raw binary data held in a string
// doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does this
var byteString = atob(dataURI.after(','));
// separate out the mime component
var mimeString = dataURI.between('data:', ";");
// write the bytes of the string to an ArrayBuffer
var ab = new ArrayBuffer(byteString.length);
// create a view into the buffer
var ia = new Uint8Array(ab);
// set the bytes of the buffer to the correct values
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
// write the ArrayBuffer to a blob, and you're done
| return blob;
}
function ImageComponent(p: { contentState: draftjs.ContentState, block: draftjs.ContentBlock, blockProps: { imageConverter: ImageConverter<any> } }) {
const data = p.contentState.getEntity(p.block.getEntityAt(0)).getData();
return p.blockProps.imageConverter!.renderImage(data);
} | var blob = new Blob([ab], { type: mimeString });
| random_line_split |
ImagePlugin.tsx | import * as React from 'react';
import * as draftjs from 'draft-js';
import { IContentStateConverter, HtmlEditorController, HtmlEditorPlugin } from "../HtmlEditor"
import { HtmlContentStateConverter } from '../HtmlContentStateConverter';
export interface ImageConverter<T extends object> {
uploadData(blob: Blob): Promise<T>;
renderImage(val: T): React.ReactElement;
toHtml(val: T): string | undefined;
fromElement(val: HTMLElement): T | undefined;
}
export default class ImagePlugin implements HtmlEditorPlugin{
constructor(public imageConverter: ImageConverter<any>) {
}
addImage(editorState: draftjs.EditorState, data: Object): draftjs.EditorState {
const contentState = editorState.getCurrentContent();
const contentStateWithEntity = contentState.createEntity(
'IMAGE',
'IMMUTABLE',
data
);
const entityKey = contentStateWithEntity.getLastCreatedEntityKey();
const newEditorState = draftjs.AtomicBlockUtils.insertAtomicBlock(
editorState,
entityKey,
' '
);
return draftjs.EditorState.forceSelection(
newEditorState,
newEditorState.getCurrentContent().getSelectionAfter()
);
}
expandConverter(converter: IContentStateConverter) {
if (converter instanceof HtmlContentStateConverter) {
const { draftToHtmlOptions, htmlToDraftOptions } = converter;
//@ts-ignore
var oldCustomEntityTransformer = draftToHtmlOptions.customEntityTransform;
draftToHtmlOptions.customEntityTransform = (entity, text) => {
if (oldCustomEntityTransformer) {
var result = oldCustomEntityTransformer(entity, text);
if (result)
return result;
}
return this.imageConverter.toHtml(entity.data);
};
var oldCustomChunkRenderer = htmlToDraftOptions.customChunkRenderer;
htmlToDraftOptions.customChunkRenderer = (nodeName, node) => {
if (oldCustomChunkRenderer) {
var result = oldCustomChunkRenderer(nodeName, node);
if (result != null)
return result;
}
var data = this.imageConverter.fromElement(node);
if (data != null) {
return {
type: "IMAGE",
data: data,
mutability: "IMMUTABLE"
};
}
return undefined;
}
}
}
expandEditorProps(props: draftjs.EditorProps, controller: HtmlEditorController) {
var oldRenderer = props.blockRendererFn;
props.blockRendererFn = (block) => {
if (oldRenderer) {
const result = oldRenderer(block);
if (result)
return result;
}
//if (block.getType() === 'atomic') {
const contentState = controller.editorState.getCurrentContent();
const entity = block.getEntityAt(0);
if (!entity)
return null;
const type = contentState.getEntity(entity).getType();
if (type === 'IMAGE') {
return {
component: ImageComponent,
editable: false,
props: { imageConverter: this.imageConverter }
};
}
//}
return null;
};
props.handlePastedFiles = files => {
const imageFiles = files.filter(a => a.type.startsWith("image/"));
if (imageFiles.length == 0)
return "not-handled";
Promise.all(imageFiles.map(blob => this.imageConverter.uploadData(blob)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), controller.editorState);
controller.setEditorState(newState);
}).done();
return "handled"
}
var oldPasteText = props.handlePastedText;
props.handlePastedText = (text, html, editorState) => {
if (html) |
if (oldPasteText)
return oldPasteText(text, html, editorState);
return "not-handled";
};
props.handleDroppedFiles = (selection, files) => {
const imageFiles = files.filter(a => a.type.startsWith("image/"));
if (imageFiles.length == 0)
return "not-handled";
const editorStateWithSelection = draftjs.EditorState.acceptSelection(controller.editorState, selection);
Promise.all(imageFiles.map(blob => this.imageConverter.uploadData(blob)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), editorStateWithSelection);
controller.setEditorState(newState);
}).done();
return "handled"
}
}
}
function dataURItoBlob(dataURI: string) {
// convert base64 to raw binary data held in a string
// doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does this
var byteString = atob(dataURI.after(','));
// separate out the mime component
var mimeString = dataURI.between('data:', ";");
// write the bytes of the string to an ArrayBuffer
var ab = new ArrayBuffer(byteString.length);
// create a view into the buffer
var ia = new Uint8Array(ab);
// set the bytes of the buffer to the correct values
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
// write the ArrayBuffer to a blob, and you're done
var blob = new Blob([ab], { type: mimeString });
return blob;
}
function ImageComponent(p: { contentState: draftjs.ContentState, block: draftjs.ContentBlock, blockProps: { imageConverter: ImageConverter<any> } }) {
const data = p.contentState.getEntity(p.block.getEntityAt(0)).getData();
return p.blockProps.imageConverter!.renderImage(data);
}
| {
var node = document.createElement('html')
node.innerHTML = html;
var array = Array.from(node.getElementsByTagName("img"));
if (array.length && array.every(a => a.src.startsWith("data:"))) {
var blobs = array.map(a => dataURItoBlob(a.src));
Promise.all(blobs.map(img => this.imageConverter.uploadData(img)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), controller.editorState);
controller.setEditorState(newState);
}).done();
return "handled";
}
} | conditional_block |
ImagePlugin.tsx | import * as React from 'react';
import * as draftjs from 'draft-js';
import { IContentStateConverter, HtmlEditorController, HtmlEditorPlugin } from "../HtmlEditor"
import { HtmlContentStateConverter } from '../HtmlContentStateConverter';
export interface ImageConverter<T extends object> {
uploadData(blob: Blob): Promise<T>;
renderImage(val: T): React.ReactElement;
toHtml(val: T): string | undefined;
fromElement(val: HTMLElement): T | undefined;
}
export default class ImagePlugin implements HtmlEditorPlugin{
constructor(public imageConverter: ImageConverter<any>) {
}
addImage(editorState: draftjs.EditorState, data: Object): draftjs.EditorState {
const contentState = editorState.getCurrentContent();
const contentStateWithEntity = contentState.createEntity(
'IMAGE',
'IMMUTABLE',
data
);
const entityKey = contentStateWithEntity.getLastCreatedEntityKey();
const newEditorState = draftjs.AtomicBlockUtils.insertAtomicBlock(
editorState,
entityKey,
' '
);
return draftjs.EditorState.forceSelection(
newEditorState,
newEditorState.getCurrentContent().getSelectionAfter()
);
}
expandConverter(converter: IContentStateConverter) {
if (converter instanceof HtmlContentStateConverter) {
const { draftToHtmlOptions, htmlToDraftOptions } = converter;
//@ts-ignore
var oldCustomEntityTransformer = draftToHtmlOptions.customEntityTransform;
draftToHtmlOptions.customEntityTransform = (entity, text) => {
if (oldCustomEntityTransformer) {
var result = oldCustomEntityTransformer(entity, text);
if (result)
return result;
}
return this.imageConverter.toHtml(entity.data);
};
var oldCustomChunkRenderer = htmlToDraftOptions.customChunkRenderer;
htmlToDraftOptions.customChunkRenderer = (nodeName, node) => {
if (oldCustomChunkRenderer) {
var result = oldCustomChunkRenderer(nodeName, node);
if (result != null)
return result;
}
var data = this.imageConverter.fromElement(node);
if (data != null) {
return {
type: "IMAGE",
data: data,
mutability: "IMMUTABLE"
};
}
return undefined;
}
}
}
expandEditorProps(props: draftjs.EditorProps, controller: HtmlEditorController) {
var oldRenderer = props.blockRendererFn;
props.blockRendererFn = (block) => {
if (oldRenderer) {
const result = oldRenderer(block);
if (result)
return result;
}
//if (block.getType() === 'atomic') {
const contentState = controller.editorState.getCurrentContent();
const entity = block.getEntityAt(0);
if (!entity)
return null;
const type = contentState.getEntity(entity).getType();
if (type === 'IMAGE') {
return {
component: ImageComponent,
editable: false,
props: { imageConverter: this.imageConverter }
};
}
//}
return null;
};
props.handlePastedFiles = files => {
const imageFiles = files.filter(a => a.type.startsWith("image/"));
if (imageFiles.length == 0)
return "not-handled";
Promise.all(imageFiles.map(blob => this.imageConverter.uploadData(blob)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), controller.editorState);
controller.setEditorState(newState);
}).done();
return "handled"
}
var oldPasteText = props.handlePastedText;
props.handlePastedText = (text, html, editorState) => {
if (html) {
var node = document.createElement('html')
node.innerHTML = html;
var array = Array.from(node.getElementsByTagName("img"));
if (array.length && array.every(a => a.src.startsWith("data:"))) {
var blobs = array.map(a => dataURItoBlob(a.src));
Promise.all(blobs.map(img => this.imageConverter.uploadData(img)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), controller.editorState);
controller.setEditorState(newState);
}).done();
return "handled";
}
}
if (oldPasteText)
return oldPasteText(text, html, editorState);
return "not-handled";
};
props.handleDroppedFiles = (selection, files) => {
const imageFiles = files.filter(a => a.type.startsWith("image/"));
if (imageFiles.length == 0)
return "not-handled";
const editorStateWithSelection = draftjs.EditorState.acceptSelection(controller.editorState, selection);
Promise.all(imageFiles.map(blob => this.imageConverter.uploadData(blob)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), editorStateWithSelection);
controller.setEditorState(newState);
}).done();
return "handled"
}
}
}
function dataURItoBlob(dataURI: string) {
// convert base64 to raw binary data held in a string
// doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does this
var byteString = atob(dataURI.after(','));
// separate out the mime component
var mimeString = dataURI.between('data:', ";");
// write the bytes of the string to an ArrayBuffer
var ab = new ArrayBuffer(byteString.length);
// create a view into the buffer
var ia = new Uint8Array(ab);
// set the bytes of the buffer to the correct values
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
// write the ArrayBuffer to a blob, and you're done
var blob = new Blob([ab], { type: mimeString });
return blob;
}
function ImageComponent(p: { contentState: draftjs.ContentState, block: draftjs.ContentBlock, blockProps: { imageConverter: ImageConverter<any> } }) | {
const data = p.contentState.getEntity(p.block.getEntityAt(0)).getData();
return p.blockProps.imageConverter!.renderImage(data);
} | identifier_body |
|
ImagePlugin.tsx | import * as React from 'react';
import * as draftjs from 'draft-js';
import { IContentStateConverter, HtmlEditorController, HtmlEditorPlugin } from "../HtmlEditor"
import { HtmlContentStateConverter } from '../HtmlContentStateConverter';
export interface ImageConverter<T extends object> {
uploadData(blob: Blob): Promise<T>;
renderImage(val: T): React.ReactElement;
toHtml(val: T): string | undefined;
fromElement(val: HTMLElement): T | undefined;
}
export default class ImagePlugin implements HtmlEditorPlugin{
constructor(public imageConverter: ImageConverter<any>) {
}
| (editorState: draftjs.EditorState, data: Object): draftjs.EditorState {
const contentState = editorState.getCurrentContent();
const contentStateWithEntity = contentState.createEntity(
'IMAGE',
'IMMUTABLE',
data
);
const entityKey = contentStateWithEntity.getLastCreatedEntityKey();
const newEditorState = draftjs.AtomicBlockUtils.insertAtomicBlock(
editorState,
entityKey,
' '
);
return draftjs.EditorState.forceSelection(
newEditorState,
newEditorState.getCurrentContent().getSelectionAfter()
);
}
expandConverter(converter: IContentStateConverter) {
if (converter instanceof HtmlContentStateConverter) {
const { draftToHtmlOptions, htmlToDraftOptions } = converter;
//@ts-ignore
var oldCustomEntityTransformer = draftToHtmlOptions.customEntityTransform;
draftToHtmlOptions.customEntityTransform = (entity, text) => {
if (oldCustomEntityTransformer) {
var result = oldCustomEntityTransformer(entity, text);
if (result)
return result;
}
return this.imageConverter.toHtml(entity.data);
};
var oldCustomChunkRenderer = htmlToDraftOptions.customChunkRenderer;
htmlToDraftOptions.customChunkRenderer = (nodeName, node) => {
if (oldCustomChunkRenderer) {
var result = oldCustomChunkRenderer(nodeName, node);
if (result != null)
return result;
}
var data = this.imageConverter.fromElement(node);
if (data != null) {
return {
type: "IMAGE",
data: data,
mutability: "IMMUTABLE"
};
}
return undefined;
}
}
}
expandEditorProps(props: draftjs.EditorProps, controller: HtmlEditorController) {
var oldRenderer = props.blockRendererFn;
props.blockRendererFn = (block) => {
if (oldRenderer) {
const result = oldRenderer(block);
if (result)
return result;
}
//if (block.getType() === 'atomic') {
const contentState = controller.editorState.getCurrentContent();
const entity = block.getEntityAt(0);
if (!entity)
return null;
const type = contentState.getEntity(entity).getType();
if (type === 'IMAGE') {
return {
component: ImageComponent,
editable: false,
props: { imageConverter: this.imageConverter }
};
}
//}
return null;
};
props.handlePastedFiles = files => {
const imageFiles = files.filter(a => a.type.startsWith("image/"));
if (imageFiles.length == 0)
return "not-handled";
Promise.all(imageFiles.map(blob => this.imageConverter.uploadData(blob)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), controller.editorState);
controller.setEditorState(newState);
}).done();
return "handled"
}
var oldPasteText = props.handlePastedText;
props.handlePastedText = (text, html, editorState) => {
if (html) {
var node = document.createElement('html')
node.innerHTML = html;
var array = Array.from(node.getElementsByTagName("img"));
if (array.length && array.every(a => a.src.startsWith("data:"))) {
var blobs = array.map(a => dataURItoBlob(a.src));
Promise.all(blobs.map(img => this.imageConverter.uploadData(img)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), controller.editorState);
controller.setEditorState(newState);
}).done();
return "handled";
}
}
if (oldPasteText)
return oldPasteText(text, html, editorState);
return "not-handled";
};
props.handleDroppedFiles = (selection, files) => {
const imageFiles = files.filter(a => a.type.startsWith("image/"));
if (imageFiles.length == 0)
return "not-handled";
const editorStateWithSelection = draftjs.EditorState.acceptSelection(controller.editorState, selection);
Promise.all(imageFiles.map(blob => this.imageConverter.uploadData(blob)))
.then(datas => {
var newState = datas.reduce<draftjs.EditorState>((state, data) => this.addImage(state, data), editorStateWithSelection);
controller.setEditorState(newState);
}).done();
return "handled"
}
}
}
function dataURItoBlob(dataURI: string) {
// convert base64 to raw binary data held in a string
// doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does this
var byteString = atob(dataURI.after(','));
// separate out the mime component
var mimeString = dataURI.between('data:', ";");
// write the bytes of the string to an ArrayBuffer
var ab = new ArrayBuffer(byteString.length);
// create a view into the buffer
var ia = new Uint8Array(ab);
// set the bytes of the buffer to the correct values
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
// write the ArrayBuffer to a blob, and you're done
var blob = new Blob([ab], { type: mimeString });
return blob;
}
function ImageComponent(p: { contentState: draftjs.ContentState, block: draftjs.ContentBlock, blockProps: { imageConverter: ImageConverter<any> } }) {
const data = p.contentState.getEntity(p.block.getEntityAt(0)).getData();
return p.blockProps.imageConverter!.renderImage(data);
}
| addImage | identifier_name |
main.js | var $animText = $("#anim_text");
$animText.html( $animText.html().replace(/./g, "<span>$&</span>").replace(/\s/g, " "));
//TweenMax.staggerFromTo( $animText.find("span"), 0.1, {autoAlpha:0}, {autoAlpha:1}, 0.1 );
$animText.find("span").each(function(){
TweenMax.fromTo(this, 2.5, {autoAlpha:0, rotation:randomNum(-360, 360), rotationX:randomNum(-360, 360), rotationY:randomNum(-360, 360), rotationZ:randomNum(-360, 360), scale:0}, {autoAlpha:1, rotation:0, rotationX:0, rotationY:0, rotationZ:0, scale:1});
});
//helper random function
function randomNum (min, max) | {
return Math.random() * (max - min) + min;
} | identifier_body |
|
main.js | var $animText = $("#anim_text");
$animText.html( $animText.html().replace(/./g, "<span>$&</span>").replace(/\s/g, " "));
//TweenMax.staggerFromTo( $animText.find("span"), 0.1, {autoAlpha:0}, {autoAlpha:1}, 0.1 );
$animText.find("span").each(function(){
TweenMax.fromTo(this, 2.5, {autoAlpha:0, rotation:randomNum(-360, 360), rotationX:randomNum(-360, 360), rotationY:randomNum(-360, 360), rotationZ:randomNum(-360, 360), scale:0}, {autoAlpha:1, rotation:0, rotationX:0, rotationY:0, rotationZ:0, scale:1});
});
//helper random function
function | (min, max) {
return Math.random() * (max - min) + min;
}
| randomNum | identifier_name |
main.js | var $animText = $("#anim_text");
$animText.html( $animText.html().replace(/./g, "<span>$&</span>").replace(/\s/g, " "));
//TweenMax.staggerFromTo( $animText.find("span"), 0.1, {autoAlpha:0}, {autoAlpha:1}, 0.1 );
$animText.find("span").each(function(){
TweenMax.fromTo(this, 2.5, {autoAlpha:0, rotation:randomNum(-360, 360), rotationX:randomNum(-360, 360), rotationY:randomNum(-360, 360), rotationZ:randomNum(-360, 360), scale:0}, {autoAlpha:1, rotation:0, rotationX:0, rotationY:0, rotationZ:0, scale:1});
});
//helper random function | } | function randomNum (min, max) {
return Math.random() * (max - min) + min; | random_line_split |
browser_tree.ts | 'use strict';
var Funnel = require('broccoli-funnel');
var htmlReplace = require('../html-replace');
var jsReplace = require("../js-replace");
var path = require('path');
var stew = require('broccoli-stew');
import compileWithTypescript from '../broccoli-typescript';
import destCopy from '../broccoli-dest-copy';
import flatten from '../broccoli-flatten';
import mergeTrees from '../broccoli-merge-trees';
import replace from '../broccoli-replace';
import {default as transpileWithTraceur, TRACEUR_RUNTIME_PATH} from '../traceur/index';
var projectRootDir = path.normalize(path.join(__dirname, '..', '..', '..', '..'));
const kServedPaths = [
// Relative (to /modules) paths to benchmark directories
'benchmarks/src',
'benchmarks/src/change_detection',
'benchmarks/src/compiler',
'benchmarks/src/costs',
'benchmarks/src/di',
'benchmarks/src/element_injector',
'benchmarks/src/largetable',
'benchmarks/src/naive_infinite_scroll',
'benchmarks/src/tree',
'benchmarks/src/static_tree',
// Relative (to /modules) paths to external benchmark directories
'benchmarks_external/src',
'benchmarks_external/src/compiler',
'benchmarks_external/src/largetable',
'benchmarks_external/src/naive_infinite_scroll',
'benchmarks_external/src/tree',
'benchmarks_external/src/tree/react',
'benchmarks_external/src/static_tree',
// Relative (to /modules) paths to example directories
'examples/src/benchpress',
'examples/src/model_driven_forms',
'examples/src/template_driven_forms',
'examples/src/person_management',
'examples/src/order_management',
'examples/src/gestures',
'examples/src/hello_world',
'examples/src/http',
'examples/src/jsonp',
'examples/src/key_events',
'examples/src/routing',
'examples/src/sourcemap',
'examples/src/todo',
'examples/src/zippy_component',
'examples/src/async',
'examples/src/material/button',
'examples/src/material/checkbox',
'examples/src/material/dialog',
'examples/src/material/grid_list',
'examples/src/material/input',
'examples/src/material/progress-linear',
'examples/src/material/radio',
'examples/src/material/switcher',
'examples/src/message_broker',
'examples/src/web_workers/kitchen_sink',
'examples/src/web_workers/todo',
'examples/src/web_workers/images'
];
module.exports = function makeBrowserTree(options, destinationPath) {
var modulesTree = new Funnel('modules', {
include: ['**/**'],
exclude: [
'**/*.cjs',
'benchmarks/e2e_test/**',
// Exclude ES6 polyfill typings when tsc target=ES6
'angular2/traceur-runtime.d.ts',
'angular2/typings/es6-promise/**'
],
destDir: '/'
});
var scriptPathPatternReplacement = {
match: '@@FILENAME_NO_EXT',
replacement: function(replacement, relativePath) {
return relativePath.replace(/\.\w+$/, '').replace(/\\/g, '/');
}
};
modulesTree = replace(modulesTree, {
files: ["examples*/**/*.js"],
patterns: [{match: /\$SCRIPTS\$/, replacement: jsReplace('SCRIPTS')}]
});
// Use TypeScript to transpile the *.ts files to ES6
var es6Tree = compileWithTypescript(modulesTree, {
allowNonTsExtensions: false,
declaration: true,
emitDecoratorMetadata: true,
mapRoot: '', // force sourcemaps to use relative path
noEmitOnError: false,
rootDir: '.',
sourceMap: true,
sourceRoot: '.',
target: 'ES6'
});
// Call Traceur to lower the ES6 build tree to ES5
var es5Tree = transpileWithTraceur(es6Tree, {
destExtension: '.js',
destSourceMapExtension: '.js.map',
traceurOptions: {modules: 'instantiate', sourceMaps: true}
});
// Now we add a few more files to the es6 tree that Traceur should not see
['angular2', 'rtts_assert'].forEach(function(destDir) {
var extras = new Funnel('tools/build', {files: ['es5build.js'], destDir: destDir});
es6Tree = mergeTrees([es6Tree, extras]);
});
var vendorScriptsTree = flatten(new Funnel('.', {
files: [
'node_modules/zone.js/dist/zone-microtask.js',
'node_modules/zone.js/dist/long-stack-trace-zone.js',
'node_modules/es6-module-loader/dist/es6-module-loader-sans-promises.src.js',
'node_modules/systemjs/dist/system.src.js',
'node_modules/systemjs/lib/extension-register.js',
'node_modules/systemjs/lib/extension-cjs.js',
'node_modules/rx/dist/rx.js',
'node_modules/base64-js/lib/b64.js',
'node_modules/reflect-metadata/Reflect.js',
'tools/build/snippets/runtime_paths.js',
path.relative(projectRootDir, TRACEUR_RUNTIME_PATH)
]
}));
var vendorScripts_benchmark =
new Funnel('tools/build/snippets', {files: ['url_params_to_form.js'], destDir: '/'});
var vendorScripts_benchmarks_external =
new Funnel('node_modules/angular', {files: ['angular.js'], destDir: '/'});
// Get scripts for each benchmark or example
let servingTrees = kServedPaths.reduce(getServedFunnels, []);
function getServedFunnels(funnels, destDir) |
var htmlTree = new Funnel(modulesTree, {include: ['*/src/**/*.html'], destDir: '/'});
htmlTree = replace(htmlTree, {
files: ['examples*/**/*.html'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS')},
scriptPathPatternReplacement
]
});
htmlTree = replace(htmlTree, {
files: ['benchmarks/**'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS_benchmarks')},
scriptPathPatternReplacement
]
});
htmlTree = replace(htmlTree, {
files: ['benchmarks_external/**'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS_benchmarks_external')},
scriptPathPatternReplacement
]
});
var assetsTree =
new Funnel(modulesTree, {include: ['**/*'], exclude: ['**/*.{html,ts,dart}'], destDir: '/'});
var scripts = mergeTrees(servingTrees);
var polymerFiles = new Funnel('.', {
files: [
'bower_components/polymer/lib/polymer.html',
'tools/build/snippets/url_params_to_form.js'
]
});
var polymer = stew.mv(flatten(polymerFiles), 'benchmarks_external/src/tree/polymer');
var reactFiles = new Funnel('.', {files: ['node_modules/react/dist/react.min.js']});
var react = stew.mv(flatten(reactFiles), 'benchmarks_external/src/tree/react');
htmlTree = mergeTrees([htmlTree, scripts, polymer, react]);
es5Tree = mergeTrees([es5Tree, htmlTree, assetsTree]);
es6Tree = mergeTrees([es6Tree, htmlTree, assetsTree]);
var mergedTree = mergeTrees([stew.mv(es6Tree, '/es6'), stew.mv(es5Tree, '/es5')]);
return destCopy(mergedTree, destinationPath);
};
| {
let options = {srcDir: '/', destDir: destDir};
funnels.push(new Funnel(vendorScriptsTree, options));
if (destDir.indexOf('benchmarks') > -1) {
funnels.push(new Funnel(vendorScripts_benchmark, options));
}
if (destDir.indexOf('benchmarks_external') > -1) {
funnels.push(new Funnel(vendorScripts_benchmarks_external, options));
}
return funnels;
} | identifier_body |
browser_tree.ts | 'use strict';
var Funnel = require('broccoli-funnel');
var htmlReplace = require('../html-replace');
var jsReplace = require("../js-replace");
var path = require('path');
var stew = require('broccoli-stew');
import compileWithTypescript from '../broccoli-typescript';
import destCopy from '../broccoli-dest-copy';
import flatten from '../broccoli-flatten';
import mergeTrees from '../broccoli-merge-trees';
import replace from '../broccoli-replace';
import {default as transpileWithTraceur, TRACEUR_RUNTIME_PATH} from '../traceur/index';
var projectRootDir = path.normalize(path.join(__dirname, '..', '..', '..', '..'));
const kServedPaths = [
// Relative (to /modules) paths to benchmark directories
'benchmarks/src',
'benchmarks/src/change_detection',
'benchmarks/src/compiler',
'benchmarks/src/costs',
'benchmarks/src/di',
'benchmarks/src/element_injector',
'benchmarks/src/largetable',
'benchmarks/src/naive_infinite_scroll',
'benchmarks/src/tree',
'benchmarks/src/static_tree',
// Relative (to /modules) paths to external benchmark directories
'benchmarks_external/src',
'benchmarks_external/src/compiler',
'benchmarks_external/src/largetable',
'benchmarks_external/src/naive_infinite_scroll',
'benchmarks_external/src/tree',
'benchmarks_external/src/tree/react',
'benchmarks_external/src/static_tree',
// Relative (to /modules) paths to example directories
'examples/src/benchpress',
'examples/src/model_driven_forms',
'examples/src/template_driven_forms',
'examples/src/person_management',
'examples/src/order_management',
'examples/src/gestures',
'examples/src/hello_world',
'examples/src/http',
'examples/src/jsonp',
'examples/src/key_events',
'examples/src/routing',
'examples/src/sourcemap',
'examples/src/todo',
'examples/src/zippy_component',
'examples/src/async',
'examples/src/material/button',
'examples/src/material/checkbox',
'examples/src/material/dialog',
'examples/src/material/grid_list',
'examples/src/material/input',
'examples/src/material/progress-linear',
'examples/src/material/radio',
'examples/src/material/switcher',
'examples/src/message_broker',
'examples/src/web_workers/kitchen_sink',
'examples/src/web_workers/todo',
'examples/src/web_workers/images'
];
module.exports = function makeBrowserTree(options, destinationPath) {
var modulesTree = new Funnel('modules', {
include: ['**/**'],
exclude: [
'**/*.cjs',
'benchmarks/e2e_test/**',
// Exclude ES6 polyfill typings when tsc target=ES6
'angular2/traceur-runtime.d.ts',
'angular2/typings/es6-promise/**'
],
destDir: '/'
});
var scriptPathPatternReplacement = {
match: '@@FILENAME_NO_EXT',
replacement: function(replacement, relativePath) {
return relativePath.replace(/\.\w+$/, '').replace(/\\/g, '/');
}
};
modulesTree = replace(modulesTree, {
files: ["examples*/**/*.js"],
patterns: [{match: /\$SCRIPTS\$/, replacement: jsReplace('SCRIPTS')}]
});
// Use TypeScript to transpile the *.ts files to ES6
var es6Tree = compileWithTypescript(modulesTree, {
allowNonTsExtensions: false,
declaration: true,
emitDecoratorMetadata: true,
mapRoot: '', // force sourcemaps to use relative path
noEmitOnError: false,
rootDir: '.',
sourceMap: true,
sourceRoot: '.',
target: 'ES6'
});
// Call Traceur to lower the ES6 build tree to ES5
var es5Tree = transpileWithTraceur(es6Tree, {
destExtension: '.js',
destSourceMapExtension: '.js.map',
traceurOptions: {modules: 'instantiate', sourceMaps: true}
});
// Now we add a few more files to the es6 tree that Traceur should not see
['angular2', 'rtts_assert'].forEach(function(destDir) {
var extras = new Funnel('tools/build', {files: ['es5build.js'], destDir: destDir});
es6Tree = mergeTrees([es6Tree, extras]); | 'node_modules/zone.js/dist/zone-microtask.js',
'node_modules/zone.js/dist/long-stack-trace-zone.js',
'node_modules/es6-module-loader/dist/es6-module-loader-sans-promises.src.js',
'node_modules/systemjs/dist/system.src.js',
'node_modules/systemjs/lib/extension-register.js',
'node_modules/systemjs/lib/extension-cjs.js',
'node_modules/rx/dist/rx.js',
'node_modules/base64-js/lib/b64.js',
'node_modules/reflect-metadata/Reflect.js',
'tools/build/snippets/runtime_paths.js',
path.relative(projectRootDir, TRACEUR_RUNTIME_PATH)
]
}));
var vendorScripts_benchmark =
new Funnel('tools/build/snippets', {files: ['url_params_to_form.js'], destDir: '/'});
var vendorScripts_benchmarks_external =
new Funnel('node_modules/angular', {files: ['angular.js'], destDir: '/'});
// Get scripts for each benchmark or example
let servingTrees = kServedPaths.reduce(getServedFunnels, []);
function getServedFunnels(funnels, destDir) {
let options = {srcDir: '/', destDir: destDir};
funnels.push(new Funnel(vendorScriptsTree, options));
if (destDir.indexOf('benchmarks') > -1) {
funnels.push(new Funnel(vendorScripts_benchmark, options));
}
if (destDir.indexOf('benchmarks_external') > -1) {
funnels.push(new Funnel(vendorScripts_benchmarks_external, options));
}
return funnels;
}
var htmlTree = new Funnel(modulesTree, {include: ['*/src/**/*.html'], destDir: '/'});
htmlTree = replace(htmlTree, {
files: ['examples*/**/*.html'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS')},
scriptPathPatternReplacement
]
});
htmlTree = replace(htmlTree, {
files: ['benchmarks/**'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS_benchmarks')},
scriptPathPatternReplacement
]
});
htmlTree = replace(htmlTree, {
files: ['benchmarks_external/**'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS_benchmarks_external')},
scriptPathPatternReplacement
]
});
var assetsTree =
new Funnel(modulesTree, {include: ['**/*'], exclude: ['**/*.{html,ts,dart}'], destDir: '/'});
var scripts = mergeTrees(servingTrees);
var polymerFiles = new Funnel('.', {
files: [
'bower_components/polymer/lib/polymer.html',
'tools/build/snippets/url_params_to_form.js'
]
});
var polymer = stew.mv(flatten(polymerFiles), 'benchmarks_external/src/tree/polymer');
var reactFiles = new Funnel('.', {files: ['node_modules/react/dist/react.min.js']});
var react = stew.mv(flatten(reactFiles), 'benchmarks_external/src/tree/react');
htmlTree = mergeTrees([htmlTree, scripts, polymer, react]);
es5Tree = mergeTrees([es5Tree, htmlTree, assetsTree]);
es6Tree = mergeTrees([es6Tree, htmlTree, assetsTree]);
var mergedTree = mergeTrees([stew.mv(es6Tree, '/es6'), stew.mv(es5Tree, '/es5')]);
return destCopy(mergedTree, destinationPath);
}; | });
var vendorScriptsTree = flatten(new Funnel('.', {
files: [ | random_line_split |
browser_tree.ts | 'use strict';
var Funnel = require('broccoli-funnel');
var htmlReplace = require('../html-replace');
var jsReplace = require("../js-replace");
var path = require('path');
var stew = require('broccoli-stew');
import compileWithTypescript from '../broccoli-typescript';
import destCopy from '../broccoli-dest-copy';
import flatten from '../broccoli-flatten';
import mergeTrees from '../broccoli-merge-trees';
import replace from '../broccoli-replace';
import {default as transpileWithTraceur, TRACEUR_RUNTIME_PATH} from '../traceur/index';
var projectRootDir = path.normalize(path.join(__dirname, '..', '..', '..', '..'));
const kServedPaths = [
// Relative (to /modules) paths to benchmark directories
'benchmarks/src',
'benchmarks/src/change_detection',
'benchmarks/src/compiler',
'benchmarks/src/costs',
'benchmarks/src/di',
'benchmarks/src/element_injector',
'benchmarks/src/largetable',
'benchmarks/src/naive_infinite_scroll',
'benchmarks/src/tree',
'benchmarks/src/static_tree',
// Relative (to /modules) paths to external benchmark directories
'benchmarks_external/src',
'benchmarks_external/src/compiler',
'benchmarks_external/src/largetable',
'benchmarks_external/src/naive_infinite_scroll',
'benchmarks_external/src/tree',
'benchmarks_external/src/tree/react',
'benchmarks_external/src/static_tree',
// Relative (to /modules) paths to example directories
'examples/src/benchpress',
'examples/src/model_driven_forms',
'examples/src/template_driven_forms',
'examples/src/person_management',
'examples/src/order_management',
'examples/src/gestures',
'examples/src/hello_world',
'examples/src/http',
'examples/src/jsonp',
'examples/src/key_events',
'examples/src/routing',
'examples/src/sourcemap',
'examples/src/todo',
'examples/src/zippy_component',
'examples/src/async',
'examples/src/material/button',
'examples/src/material/checkbox',
'examples/src/material/dialog',
'examples/src/material/grid_list',
'examples/src/material/input',
'examples/src/material/progress-linear',
'examples/src/material/radio',
'examples/src/material/switcher',
'examples/src/message_broker',
'examples/src/web_workers/kitchen_sink',
'examples/src/web_workers/todo',
'examples/src/web_workers/images'
];
module.exports = function makeBrowserTree(options, destinationPath) {
var modulesTree = new Funnel('modules', {
include: ['**/**'],
exclude: [
'**/*.cjs',
'benchmarks/e2e_test/**',
// Exclude ES6 polyfill typings when tsc target=ES6
'angular2/traceur-runtime.d.ts',
'angular2/typings/es6-promise/**'
],
destDir: '/'
});
var scriptPathPatternReplacement = {
match: '@@FILENAME_NO_EXT',
replacement: function(replacement, relativePath) {
return relativePath.replace(/\.\w+$/, '').replace(/\\/g, '/');
}
};
modulesTree = replace(modulesTree, {
files: ["examples*/**/*.js"],
patterns: [{match: /\$SCRIPTS\$/, replacement: jsReplace('SCRIPTS')}]
});
// Use TypeScript to transpile the *.ts files to ES6
var es6Tree = compileWithTypescript(modulesTree, {
allowNonTsExtensions: false,
declaration: true,
emitDecoratorMetadata: true,
mapRoot: '', // force sourcemaps to use relative path
noEmitOnError: false,
rootDir: '.',
sourceMap: true,
sourceRoot: '.',
target: 'ES6'
});
// Call Traceur to lower the ES6 build tree to ES5
var es5Tree = transpileWithTraceur(es6Tree, {
destExtension: '.js',
destSourceMapExtension: '.js.map',
traceurOptions: {modules: 'instantiate', sourceMaps: true}
});
// Now we add a few more files to the es6 tree that Traceur should not see
['angular2', 'rtts_assert'].forEach(function(destDir) {
var extras = new Funnel('tools/build', {files: ['es5build.js'], destDir: destDir});
es6Tree = mergeTrees([es6Tree, extras]);
});
var vendorScriptsTree = flatten(new Funnel('.', {
files: [
'node_modules/zone.js/dist/zone-microtask.js',
'node_modules/zone.js/dist/long-stack-trace-zone.js',
'node_modules/es6-module-loader/dist/es6-module-loader-sans-promises.src.js',
'node_modules/systemjs/dist/system.src.js',
'node_modules/systemjs/lib/extension-register.js',
'node_modules/systemjs/lib/extension-cjs.js',
'node_modules/rx/dist/rx.js',
'node_modules/base64-js/lib/b64.js',
'node_modules/reflect-metadata/Reflect.js',
'tools/build/snippets/runtime_paths.js',
path.relative(projectRootDir, TRACEUR_RUNTIME_PATH)
]
}));
var vendorScripts_benchmark =
new Funnel('tools/build/snippets', {files: ['url_params_to_form.js'], destDir: '/'});
var vendorScripts_benchmarks_external =
new Funnel('node_modules/angular', {files: ['angular.js'], destDir: '/'});
// Get scripts for each benchmark or example
let servingTrees = kServedPaths.reduce(getServedFunnels, []);
function | (funnels, destDir) {
let options = {srcDir: '/', destDir: destDir};
funnels.push(new Funnel(vendorScriptsTree, options));
if (destDir.indexOf('benchmarks') > -1) {
funnels.push(new Funnel(vendorScripts_benchmark, options));
}
if (destDir.indexOf('benchmarks_external') > -1) {
funnels.push(new Funnel(vendorScripts_benchmarks_external, options));
}
return funnels;
}
var htmlTree = new Funnel(modulesTree, {include: ['*/src/**/*.html'], destDir: '/'});
htmlTree = replace(htmlTree, {
files: ['examples*/**/*.html'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS')},
scriptPathPatternReplacement
]
});
htmlTree = replace(htmlTree, {
files: ['benchmarks/**'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS_benchmarks')},
scriptPathPatternReplacement
]
});
htmlTree = replace(htmlTree, {
files: ['benchmarks_external/**'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS_benchmarks_external')},
scriptPathPatternReplacement
]
});
var assetsTree =
new Funnel(modulesTree, {include: ['**/*'], exclude: ['**/*.{html,ts,dart}'], destDir: '/'});
var scripts = mergeTrees(servingTrees);
var polymerFiles = new Funnel('.', {
files: [
'bower_components/polymer/lib/polymer.html',
'tools/build/snippets/url_params_to_form.js'
]
});
var polymer = stew.mv(flatten(polymerFiles), 'benchmarks_external/src/tree/polymer');
var reactFiles = new Funnel('.', {files: ['node_modules/react/dist/react.min.js']});
var react = stew.mv(flatten(reactFiles), 'benchmarks_external/src/tree/react');
htmlTree = mergeTrees([htmlTree, scripts, polymer, react]);
es5Tree = mergeTrees([es5Tree, htmlTree, assetsTree]);
es6Tree = mergeTrees([es6Tree, htmlTree, assetsTree]);
var mergedTree = mergeTrees([stew.mv(es6Tree, '/es6'), stew.mv(es5Tree, '/es5')]);
return destCopy(mergedTree, destinationPath);
};
| getServedFunnels | identifier_name |
browser_tree.ts | 'use strict';
var Funnel = require('broccoli-funnel');
var htmlReplace = require('../html-replace');
var jsReplace = require("../js-replace");
var path = require('path');
var stew = require('broccoli-stew');
import compileWithTypescript from '../broccoli-typescript';
import destCopy from '../broccoli-dest-copy';
import flatten from '../broccoli-flatten';
import mergeTrees from '../broccoli-merge-trees';
import replace from '../broccoli-replace';
import {default as transpileWithTraceur, TRACEUR_RUNTIME_PATH} from '../traceur/index';
var projectRootDir = path.normalize(path.join(__dirname, '..', '..', '..', '..'));
const kServedPaths = [
// Relative (to /modules) paths to benchmark directories
'benchmarks/src',
'benchmarks/src/change_detection',
'benchmarks/src/compiler',
'benchmarks/src/costs',
'benchmarks/src/di',
'benchmarks/src/element_injector',
'benchmarks/src/largetable',
'benchmarks/src/naive_infinite_scroll',
'benchmarks/src/tree',
'benchmarks/src/static_tree',
// Relative (to /modules) paths to external benchmark directories
'benchmarks_external/src',
'benchmarks_external/src/compiler',
'benchmarks_external/src/largetable',
'benchmarks_external/src/naive_infinite_scroll',
'benchmarks_external/src/tree',
'benchmarks_external/src/tree/react',
'benchmarks_external/src/static_tree',
// Relative (to /modules) paths to example directories
'examples/src/benchpress',
'examples/src/model_driven_forms',
'examples/src/template_driven_forms',
'examples/src/person_management',
'examples/src/order_management',
'examples/src/gestures',
'examples/src/hello_world',
'examples/src/http',
'examples/src/jsonp',
'examples/src/key_events',
'examples/src/routing',
'examples/src/sourcemap',
'examples/src/todo',
'examples/src/zippy_component',
'examples/src/async',
'examples/src/material/button',
'examples/src/material/checkbox',
'examples/src/material/dialog',
'examples/src/material/grid_list',
'examples/src/material/input',
'examples/src/material/progress-linear',
'examples/src/material/radio',
'examples/src/material/switcher',
'examples/src/message_broker',
'examples/src/web_workers/kitchen_sink',
'examples/src/web_workers/todo',
'examples/src/web_workers/images'
];
module.exports = function makeBrowserTree(options, destinationPath) {
var modulesTree = new Funnel('modules', {
include: ['**/**'],
exclude: [
'**/*.cjs',
'benchmarks/e2e_test/**',
// Exclude ES6 polyfill typings when tsc target=ES6
'angular2/traceur-runtime.d.ts',
'angular2/typings/es6-promise/**'
],
destDir: '/'
});
var scriptPathPatternReplacement = {
match: '@@FILENAME_NO_EXT',
replacement: function(replacement, relativePath) {
return relativePath.replace(/\.\w+$/, '').replace(/\\/g, '/');
}
};
modulesTree = replace(modulesTree, {
files: ["examples*/**/*.js"],
patterns: [{match: /\$SCRIPTS\$/, replacement: jsReplace('SCRIPTS')}]
});
// Use TypeScript to transpile the *.ts files to ES6
var es6Tree = compileWithTypescript(modulesTree, {
allowNonTsExtensions: false,
declaration: true,
emitDecoratorMetadata: true,
mapRoot: '', // force sourcemaps to use relative path
noEmitOnError: false,
rootDir: '.',
sourceMap: true,
sourceRoot: '.',
target: 'ES6'
});
// Call Traceur to lower the ES6 build tree to ES5
var es5Tree = transpileWithTraceur(es6Tree, {
destExtension: '.js',
destSourceMapExtension: '.js.map',
traceurOptions: {modules: 'instantiate', sourceMaps: true}
});
// Now we add a few more files to the es6 tree that Traceur should not see
['angular2', 'rtts_assert'].forEach(function(destDir) {
var extras = new Funnel('tools/build', {files: ['es5build.js'], destDir: destDir});
es6Tree = mergeTrees([es6Tree, extras]);
});
var vendorScriptsTree = flatten(new Funnel('.', {
files: [
'node_modules/zone.js/dist/zone-microtask.js',
'node_modules/zone.js/dist/long-stack-trace-zone.js',
'node_modules/es6-module-loader/dist/es6-module-loader-sans-promises.src.js',
'node_modules/systemjs/dist/system.src.js',
'node_modules/systemjs/lib/extension-register.js',
'node_modules/systemjs/lib/extension-cjs.js',
'node_modules/rx/dist/rx.js',
'node_modules/base64-js/lib/b64.js',
'node_modules/reflect-metadata/Reflect.js',
'tools/build/snippets/runtime_paths.js',
path.relative(projectRootDir, TRACEUR_RUNTIME_PATH)
]
}));
var vendorScripts_benchmark =
new Funnel('tools/build/snippets', {files: ['url_params_to_form.js'], destDir: '/'});
var vendorScripts_benchmarks_external =
new Funnel('node_modules/angular', {files: ['angular.js'], destDir: '/'});
// Get scripts for each benchmark or example
let servingTrees = kServedPaths.reduce(getServedFunnels, []);
function getServedFunnels(funnels, destDir) {
let options = {srcDir: '/', destDir: destDir};
funnels.push(new Funnel(vendorScriptsTree, options));
if (destDir.indexOf('benchmarks') > -1) |
if (destDir.indexOf('benchmarks_external') > -1) {
funnels.push(new Funnel(vendorScripts_benchmarks_external, options));
}
return funnels;
}
var htmlTree = new Funnel(modulesTree, {include: ['*/src/**/*.html'], destDir: '/'});
htmlTree = replace(htmlTree, {
files: ['examples*/**/*.html'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS')},
scriptPathPatternReplacement
]
});
htmlTree = replace(htmlTree, {
files: ['benchmarks/**'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS_benchmarks')},
scriptPathPatternReplacement
]
});
htmlTree = replace(htmlTree, {
files: ['benchmarks_external/**'],
patterns: [
{match: /\$SCRIPTS\$/, replacement: htmlReplace('SCRIPTS_benchmarks_external')},
scriptPathPatternReplacement
]
});
var assetsTree =
new Funnel(modulesTree, {include: ['**/*'], exclude: ['**/*.{html,ts,dart}'], destDir: '/'});
var scripts = mergeTrees(servingTrees);
var polymerFiles = new Funnel('.', {
files: [
'bower_components/polymer/lib/polymer.html',
'tools/build/snippets/url_params_to_form.js'
]
});
var polymer = stew.mv(flatten(polymerFiles), 'benchmarks_external/src/tree/polymer');
var reactFiles = new Funnel('.', {files: ['node_modules/react/dist/react.min.js']});
var react = stew.mv(flatten(reactFiles), 'benchmarks_external/src/tree/react');
htmlTree = mergeTrees([htmlTree, scripts, polymer, react]);
es5Tree = mergeTrees([es5Tree, htmlTree, assetsTree]);
es6Tree = mergeTrees([es6Tree, htmlTree, assetsTree]);
var mergedTree = mergeTrees([stew.mv(es6Tree, '/es6'), stew.mv(es5Tree, '/es5')]);
return destCopy(mergedTree, destinationPath);
};
| {
funnels.push(new Funnel(vendorScripts_benchmark, options));
} | conditional_block |
models.py | import base64
import cPickle as pickle
import datetime
from email import message_from_string
from email.utils import getaddresses
from django import forms
from django.contrib.auth.models import User, Group
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.db import models
from django.db.models import Q, F
from django.utils import simplejson as json
from django.utils.encoding import smart_str
from kiki.message import KikiMessage
from kiki.validators import validate_local_part, validate_not_command
class ListUserMetadata(models.Model):
UNCONFIRMED = 0
SUBSCRIBER = 1
MODERATOR = 2
BLACKLISTED = 3
STATUS_CHOICES = (
(UNCONFIRMED, u'Unconfirmed'),
(SUBSCRIBER, u'Subscriber'),
(MODERATOR, u'Moderator'),
(BLACKLISTED, u'Blacklisted'),
)
user = models.ForeignKey(User)
mailing_list = models.ForeignKey('MailingList')
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, default=UNCONFIRMED, db_index=True)
def __unicode__(self):
return u"%s - %s - %s" % (self.user, self.mailing_list, self.get_status_display())
class Meta:
unique_together = ('user', 'mailing_list')
class MailingListManager(models.Manager):
|
class MailingList(models.Model):
"""
This model contains all options for a mailing list, as well as some helpful
methods for accessing subscribers, moderators, etc.
"""
objects = MailingListManager()
MODERATORS = "mod"
SUBSCRIBERS = "sub"
ANYONE = "all"
PERMISSION_CHOICES = (
(MODERATORS, 'Moderators',),
(SUBSCRIBERS, 'Subscribers',),
(ANYONE, 'Anyone',),
)
name = models.CharField(max_length=50)
subject_prefix = models.CharField(max_length=10, blank=True)
local_part = models.CharField(max_length=64, validators=[validate_local_part, validate_not_command])
domain = models.ForeignKey(Site)
description = models.TextField(blank=True)
who_can_post = models.CharField(max_length=3, choices=PERMISSION_CHOICES, default=SUBSCRIBERS)
self_subscribe_enabled = models.BooleanField(verbose_name='self-subscribe enabled', default=True)
moderation_enabled = models.BooleanField(help_text="If enabled, messages that would be rejected will be marked ``Requires Moderation`` and an email will be sent to the list's moderators.", default=False)
# If is_anonymous becomes an option, the precooker will need to handle some anonymizing.
#is_anonymous = models.BooleanField()
users = models.ManyToManyField(
User,
related_name = 'mailinglists',
blank = True,
null = True,
through = ListUserMetadata
)
messages = models.ManyToManyField(
'Message',
related_name = 'mailinglists',
blank = True,
null = True,
through = 'ListMessage'
)
@property
def address(self):
return "%s@%s" % (self.local_part, self.domain.domain)
def _list_id_header(self):
# Does this need to be a byte string?
return smart_str(u"%s <%s.%s>" % (self.name, self.local_part, self.domain.domain))
def __unicode__(self):
return self.name
def clean(self):
validate_email(self.address)
# As per RFC 2919, the list_id_header has a max length of 255 octets.
if len(self._list_id_header()) > 254:
# Allow 4 extra spaces: the delimiters, the space, and the period.
raise ValidationError("The list name, local part, and site domain name can be at most 250 characters long together.")
def get_recipients(self):
"""Returns a queryset of :class:`User`\ s that should receive this message."""
qs = User.objects.filter(is_active=True)
qs = qs.filter(listusermetadata__mailing_list=self, listusermetadata__status__in=[ListUserMetadata.SUBSCRIBER, ListUserMetadata.MODERATOR])
return qs.distinct()
def _is_email_with_status(self, email, status):
if isinstance(email, basestring):
kwargs = {'user__email__iexact': email}
elif isinstance(email, User):
kwargs = {'user': email}
else:
return False
try:
self.listusermetadata_set.get(status=status, **kwargs)
except ListUserMetadata.DoesNotExist:
return False
return True
def is_subscriber(self, email):
return self._is_email_with_status(email, ListUserMetadata.SUBCRIBER)
def is_moderator(self, email):
return self._is_email_with_status(email, ListUserMetadata.MODERATOR)
def can_post(self, email):
if self.who_can_post == MailingList.ANYONE:
return True
if self.who_can_post == MailingList.SUBSCRIBERS and self.is_subscriber(email):
return True
if self.is_moderator(email):
return True
return False
class ProcessedMessageModel(models.Model):
"""
Encapsulates the logic required for storing and fetching pickled EmailMessage objects. This should eventually be replaced with a custom model field.
"""
processed_message = models.TextField(help_text="The processed form of the message at the current stage (pickled).", blank=True)
# Store the message as a base64-encoded pickle dump a la django-mailer.
def set_processed(self, msg):
self.processed_message = base64.encodestring(pickle.dumps(msg, pickle.HIGHEST_PROTOCOL))
self._processed = msg
def get_processed(self):
if not hasattr(self, '_processed'):
self._processed = pickle.loads(base64.decodestring(self.processed_message))
return self._processed
class Meta:
abstract = True
class Message(ProcessedMessageModel):
"""
Represents an email received by Kiki. Stores the original received message as well as a pickled version of the processed message.
"""
UNPROCESSED = 'u'
PROCESSED = 'p'
FAILED = 'f'
STATUS_CHOICES = (
(UNPROCESSED, 'Unprocessed'),
(PROCESSED, 'Processed'),
(FAILED, 'Failed'),
)
message_id = models.CharField(max_length=255, unique=True)
#: The message_id of the email this is in reply to.
# in_reply_to = models.CharField(max_length=255, db_index=True, blank=True)
from_email = models.EmailField()
received = models.DateTimeField()
status = models.CharField(max_length=1, choices=STATUS_CHOICES, db_index=True, default=UNPROCESSED)
original_message = models.TextField(help_text="The original raw text of the message.")
class ListMessage(ProcessedMessageModel):
"""
Represents the relationship between a :class:`Message` and a :class:`MailingList`. This is what is processed to handle the sending of a message to a list rather than the original message.
"""
ACCEPTED = 1
REQUIRES_MODERATION = 2
PREPPED = 3
SENT = 4
FAILED = 5
STATUS_CHOICES = (
(ACCEPTED, 'Accepted'),
(REQUIRES_MODERATION, 'Requires Moderation'),
(PREPPED, 'Prepped'),
(SENT, 'Sent'),
(FAILED, 'Failed'),
)
message = models.ForeignKey(Message)
mailing_list = models.ForeignKey(MailingList)
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, db_index=True)
class Meta:
unique_together = ('message', 'mailing_list',)
class ListCommand(models.Model):
#: The ListCommand has not been processed.
UNPROCESSED = 1
#: The ListCommand has been rejected (e.g. for permissioning reasons.)
REJECTED = 2
#: Ths ListCommand has been processed completely.
PROCESSED = 3
#: An error occurred while processing the ListCommand.
FAILED = 4
STATUS_CHOICES = (
(UNPROCESSED, 'Unprocessed'),
(REJECTED, 'Rejected'),
(PROCESSED, 'Processed'),
(FAILED, 'Failed'),
)
message = models.ForeignKey(Message)
mailing_list = models.ForeignKey(MailingList)
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, db_index=True, default=UNPROCESSED)
command = models.CharField(max_length=20) | def for_site(self, site):
return self.filter(site=site)
def for_addresses(self, addresses):
"""
Takes a an iterable of email addresses and returns a queryset of mailinglists attached to the current site with matching local parts.
"""
site = Site.objects.get_current()
local_parts = []
for addr in addresses:
addr = addr.rsplit('@', 1)
if addr[1] == site.domain:
local_parts.append(addr[0])
if not local_parts:
return self.none()
return self.filter(domain=site, local_part__in=local_parts) | identifier_body |
models.py | import base64
import cPickle as pickle
import datetime
from email import message_from_string
from email.utils import getaddresses
from django import forms
from django.contrib.auth.models import User, Group
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.db import models
from django.db.models import Q, F
from django.utils import simplejson as json
from django.utils.encoding import smart_str
from kiki.message import KikiMessage
from kiki.validators import validate_local_part, validate_not_command
class ListUserMetadata(models.Model):
UNCONFIRMED = 0
SUBSCRIBER = 1
MODERATOR = 2
BLACKLISTED = 3
STATUS_CHOICES = (
(UNCONFIRMED, u'Unconfirmed'),
(SUBSCRIBER, u'Subscriber'),
(MODERATOR, u'Moderator'),
(BLACKLISTED, u'Blacklisted'),
)
user = models.ForeignKey(User)
mailing_list = models.ForeignKey('MailingList')
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, default=UNCONFIRMED, db_index=True)
def __unicode__(self):
return u"%s - %s - %s" % (self.user, self.mailing_list, self.get_status_display())
class Meta:
unique_together = ('user', 'mailing_list')
class MailingListManager(models.Manager):
def for_site(self, site):
return self.filter(site=site)
def for_addresses(self, addresses):
"""
Takes a an iterable of email addresses and returns a queryset of mailinglists attached to the current site with matching local parts.
"""
site = Site.objects.get_current()
local_parts = []
for addr in addresses:
addr = addr.rsplit('@', 1)
if addr[1] == site.domain:
local_parts.append(addr[0])
if not local_parts:
return self.none()
return self.filter(domain=site, local_part__in=local_parts)
class MailingList(models.Model):
"""
This model contains all options for a mailing list, as well as some helpful
methods for accessing subscribers, moderators, etc.
"""
objects = MailingListManager()
MODERATORS = "mod"
SUBSCRIBERS = "sub"
ANYONE = "all"
PERMISSION_CHOICES = (
(MODERATORS, 'Moderators',),
(SUBSCRIBERS, 'Subscribers',),
(ANYONE, 'Anyone',),
)
name = models.CharField(max_length=50)
subject_prefix = models.CharField(max_length=10, blank=True)
local_part = models.CharField(max_length=64, validators=[validate_local_part, validate_not_command])
domain = models.ForeignKey(Site)
description = models.TextField(blank=True)
who_can_post = models.CharField(max_length=3, choices=PERMISSION_CHOICES, default=SUBSCRIBERS)
self_subscribe_enabled = models.BooleanField(verbose_name='self-subscribe enabled', default=True)
moderation_enabled = models.BooleanField(help_text="If enabled, messages that would be rejected will be marked ``Requires Moderation`` and an email will be sent to the list's moderators.", default=False)
# If is_anonymous becomes an option, the precooker will need to handle some anonymizing.
#is_anonymous = models.BooleanField()
users = models.ManyToManyField(
User,
related_name = 'mailinglists',
blank = True,
null = True,
through = ListUserMetadata
)
messages = models.ManyToManyField(
'Message',
related_name = 'mailinglists',
blank = True,
null = True,
through = 'ListMessage'
)
@property
def address(self):
return "%s@%s" % (self.local_part, self.domain.domain)
def | (self):
# Does this need to be a byte string?
return smart_str(u"%s <%s.%s>" % (self.name, self.local_part, self.domain.domain))
def __unicode__(self):
return self.name
def clean(self):
validate_email(self.address)
# As per RFC 2919, the list_id_header has a max length of 255 octets.
if len(self._list_id_header()) > 254:
# Allow 4 extra spaces: the delimiters, the space, and the period.
raise ValidationError("The list name, local part, and site domain name can be at most 250 characters long together.")
def get_recipients(self):
"""Returns a queryset of :class:`User`\ s that should receive this message."""
qs = User.objects.filter(is_active=True)
qs = qs.filter(listusermetadata__mailing_list=self, listusermetadata__status__in=[ListUserMetadata.SUBSCRIBER, ListUserMetadata.MODERATOR])
return qs.distinct()
def _is_email_with_status(self, email, status):
if isinstance(email, basestring):
kwargs = {'user__email__iexact': email}
elif isinstance(email, User):
kwargs = {'user': email}
else:
return False
try:
self.listusermetadata_set.get(status=status, **kwargs)
except ListUserMetadata.DoesNotExist:
return False
return True
def is_subscriber(self, email):
return self._is_email_with_status(email, ListUserMetadata.SUBCRIBER)
def is_moderator(self, email):
return self._is_email_with_status(email, ListUserMetadata.MODERATOR)
def can_post(self, email):
if self.who_can_post == MailingList.ANYONE:
return True
if self.who_can_post == MailingList.SUBSCRIBERS and self.is_subscriber(email):
return True
if self.is_moderator(email):
return True
return False
class ProcessedMessageModel(models.Model):
"""
Encapsulates the logic required for storing and fetching pickled EmailMessage objects. This should eventually be replaced with a custom model field.
"""
processed_message = models.TextField(help_text="The processed form of the message at the current stage (pickled).", blank=True)
# Store the message as a base64-encoded pickle dump a la django-mailer.
def set_processed(self, msg):
self.processed_message = base64.encodestring(pickle.dumps(msg, pickle.HIGHEST_PROTOCOL))
self._processed = msg
def get_processed(self):
if not hasattr(self, '_processed'):
self._processed = pickle.loads(base64.decodestring(self.processed_message))
return self._processed
class Meta:
abstract = True
class Message(ProcessedMessageModel):
"""
Represents an email received by Kiki. Stores the original received message as well as a pickled version of the processed message.
"""
UNPROCESSED = 'u'
PROCESSED = 'p'
FAILED = 'f'
STATUS_CHOICES = (
(UNPROCESSED, 'Unprocessed'),
(PROCESSED, 'Processed'),
(FAILED, 'Failed'),
)
message_id = models.CharField(max_length=255, unique=True)
#: The message_id of the email this is in reply to.
# in_reply_to = models.CharField(max_length=255, db_index=True, blank=True)
from_email = models.EmailField()
received = models.DateTimeField()
status = models.CharField(max_length=1, choices=STATUS_CHOICES, db_index=True, default=UNPROCESSED)
original_message = models.TextField(help_text="The original raw text of the message.")
class ListMessage(ProcessedMessageModel):
"""
Represents the relationship between a :class:`Message` and a :class:`MailingList`. This is what is processed to handle the sending of a message to a list rather than the original message.
"""
ACCEPTED = 1
REQUIRES_MODERATION = 2
PREPPED = 3
SENT = 4
FAILED = 5
STATUS_CHOICES = (
(ACCEPTED, 'Accepted'),
(REQUIRES_MODERATION, 'Requires Moderation'),
(PREPPED, 'Prepped'),
(SENT, 'Sent'),
(FAILED, 'Failed'),
)
message = models.ForeignKey(Message)
mailing_list = models.ForeignKey(MailingList)
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, db_index=True)
class Meta:
unique_together = ('message', 'mailing_list',)
class ListCommand(models.Model):
#: The ListCommand has not been processed.
UNPROCESSED = 1
#: The ListCommand has been rejected (e.g. for permissioning reasons.)
REJECTED = 2
#: Ths ListCommand has been processed completely.
PROCESSED = 3
#: An error occurred while processing the ListCommand.
FAILED = 4
STATUS_CHOICES = (
(UNPROCESSED, 'Unprocessed'),
(REJECTED, 'Rejected'),
(PROCESSED, 'Processed'),
(FAILED, 'Failed'),
)
message = models.ForeignKey(Message)
mailing_list = models.ForeignKey(MailingList)
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, db_index=True, default=UNPROCESSED)
command = models.CharField(max_length=20) | _list_id_header | identifier_name |
models.py | import base64
import cPickle as pickle
import datetime
from email import message_from_string
from email.utils import getaddresses
from django import forms
from django.contrib.auth.models import User, Group
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.db import models
from django.db.models import Q, F
from django.utils import simplejson as json
from django.utils.encoding import smart_str
from kiki.message import KikiMessage
from kiki.validators import validate_local_part, validate_not_command
class ListUserMetadata(models.Model):
UNCONFIRMED = 0
SUBSCRIBER = 1
MODERATOR = 2
BLACKLISTED = 3
STATUS_CHOICES = (
(UNCONFIRMED, u'Unconfirmed'),
(SUBSCRIBER, u'Subscriber'),
(MODERATOR, u'Moderator'),
(BLACKLISTED, u'Blacklisted'),
)
user = models.ForeignKey(User)
mailing_list = models.ForeignKey('MailingList')
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, default=UNCONFIRMED, db_index=True)
def __unicode__(self):
return u"%s - %s - %s" % (self.user, self.mailing_list, self.get_status_display())
class Meta:
unique_together = ('user', 'mailing_list')
class MailingListManager(models.Manager):
def for_site(self, site):
return self.filter(site=site)
def for_addresses(self, addresses):
"""
Takes a an iterable of email addresses and returns a queryset of mailinglists attached to the current site with matching local parts.
"""
site = Site.objects.get_current()
local_parts = []
for addr in addresses:
addr = addr.rsplit('@', 1)
if addr[1] == site.domain:
local_parts.append(addr[0])
if not local_parts:
return self.none()
return self.filter(domain=site, local_part__in=local_parts)
class MailingList(models.Model):
"""
This model contains all options for a mailing list, as well as some helpful
methods for accessing subscribers, moderators, etc.
"""
objects = MailingListManager()
MODERATORS = "mod"
SUBSCRIBERS = "sub"
ANYONE = "all"
PERMISSION_CHOICES = (
(MODERATORS, 'Moderators',),
(SUBSCRIBERS, 'Subscribers',),
(ANYONE, 'Anyone',),
)
name = models.CharField(max_length=50)
subject_prefix = models.CharField(max_length=10, blank=True)
local_part = models.CharField(max_length=64, validators=[validate_local_part, validate_not_command])
domain = models.ForeignKey(Site)
description = models.TextField(blank=True)
who_can_post = models.CharField(max_length=3, choices=PERMISSION_CHOICES, default=SUBSCRIBERS)
self_subscribe_enabled = models.BooleanField(verbose_name='self-subscribe enabled', default=True)
moderation_enabled = models.BooleanField(help_text="If enabled, messages that would be rejected will be marked ``Requires Moderation`` and an email will be sent to the list's moderators.", default=False)
# If is_anonymous becomes an option, the precooker will need to handle some anonymizing.
#is_anonymous = models.BooleanField()
users = models.ManyToManyField(
User,
related_name = 'mailinglists', | 'Message',
related_name = 'mailinglists',
blank = True,
null = True,
through = 'ListMessage'
)
@property
def address(self):
return "%s@%s" % (self.local_part, self.domain.domain)
def _list_id_header(self):
# Does this need to be a byte string?
return smart_str(u"%s <%s.%s>" % (self.name, self.local_part, self.domain.domain))
def __unicode__(self):
return self.name
def clean(self):
validate_email(self.address)
# As per RFC 2919, the list_id_header has a max length of 255 octets.
if len(self._list_id_header()) > 254:
# Allow 4 extra spaces: the delimiters, the space, and the period.
raise ValidationError("The list name, local part, and site domain name can be at most 250 characters long together.")
def get_recipients(self):
"""Returns a queryset of :class:`User`\ s that should receive this message."""
qs = User.objects.filter(is_active=True)
qs = qs.filter(listusermetadata__mailing_list=self, listusermetadata__status__in=[ListUserMetadata.SUBSCRIBER, ListUserMetadata.MODERATOR])
return qs.distinct()
def _is_email_with_status(self, email, status):
if isinstance(email, basestring):
kwargs = {'user__email__iexact': email}
elif isinstance(email, User):
kwargs = {'user': email}
else:
return False
try:
self.listusermetadata_set.get(status=status, **kwargs)
except ListUserMetadata.DoesNotExist:
return False
return True
def is_subscriber(self, email):
return self._is_email_with_status(email, ListUserMetadata.SUBCRIBER)
def is_moderator(self, email):
return self._is_email_with_status(email, ListUserMetadata.MODERATOR)
def can_post(self, email):
if self.who_can_post == MailingList.ANYONE:
return True
if self.who_can_post == MailingList.SUBSCRIBERS and self.is_subscriber(email):
return True
if self.is_moderator(email):
return True
return False
class ProcessedMessageModel(models.Model):
"""
Encapsulates the logic required for storing and fetching pickled EmailMessage objects. This should eventually be replaced with a custom model field.
"""
processed_message = models.TextField(help_text="The processed form of the message at the current stage (pickled).", blank=True)
# Store the message as a base64-encoded pickle dump a la django-mailer.
def set_processed(self, msg):
self.processed_message = base64.encodestring(pickle.dumps(msg, pickle.HIGHEST_PROTOCOL))
self._processed = msg
def get_processed(self):
if not hasattr(self, '_processed'):
self._processed = pickle.loads(base64.decodestring(self.processed_message))
return self._processed
class Meta:
abstract = True
class Message(ProcessedMessageModel):
"""
Represents an email received by Kiki. Stores the original received message as well as a pickled version of the processed message.
"""
UNPROCESSED = 'u'
PROCESSED = 'p'
FAILED = 'f'
STATUS_CHOICES = (
(UNPROCESSED, 'Unprocessed'),
(PROCESSED, 'Processed'),
(FAILED, 'Failed'),
)
message_id = models.CharField(max_length=255, unique=True)
#: The message_id of the email this is in reply to.
# in_reply_to = models.CharField(max_length=255, db_index=True, blank=True)
from_email = models.EmailField()
received = models.DateTimeField()
status = models.CharField(max_length=1, choices=STATUS_CHOICES, db_index=True, default=UNPROCESSED)
original_message = models.TextField(help_text="The original raw text of the message.")
class ListMessage(ProcessedMessageModel):
"""
Represents the relationship between a :class:`Message` and a :class:`MailingList`. This is what is processed to handle the sending of a message to a list rather than the original message.
"""
ACCEPTED = 1
REQUIRES_MODERATION = 2
PREPPED = 3
SENT = 4
FAILED = 5
STATUS_CHOICES = (
(ACCEPTED, 'Accepted'),
(REQUIRES_MODERATION, 'Requires Moderation'),
(PREPPED, 'Prepped'),
(SENT, 'Sent'),
(FAILED, 'Failed'),
)
message = models.ForeignKey(Message)
mailing_list = models.ForeignKey(MailingList)
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, db_index=True)
class Meta:
unique_together = ('message', 'mailing_list',)
class ListCommand(models.Model):
#: The ListCommand has not been processed.
UNPROCESSED = 1
#: The ListCommand has been rejected (e.g. for permissioning reasons.)
REJECTED = 2
#: Ths ListCommand has been processed completely.
PROCESSED = 3
#: An error occurred while processing the ListCommand.
FAILED = 4
STATUS_CHOICES = (
(UNPROCESSED, 'Unprocessed'),
(REJECTED, 'Rejected'),
(PROCESSED, 'Processed'),
(FAILED, 'Failed'),
)
message = models.ForeignKey(Message)
mailing_list = models.ForeignKey(MailingList)
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, db_index=True, default=UNPROCESSED)
command = models.CharField(max_length=20) | blank = True,
null = True,
through = ListUserMetadata
)
messages = models.ManyToManyField( | random_line_split |
models.py | import base64
import cPickle as pickle
import datetime
from email import message_from_string
from email.utils import getaddresses
from django import forms
from django.contrib.auth.models import User, Group
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.db import models
from django.db.models import Q, F
from django.utils import simplejson as json
from django.utils.encoding import smart_str
from kiki.message import KikiMessage
from kiki.validators import validate_local_part, validate_not_command
class ListUserMetadata(models.Model):
UNCONFIRMED = 0
SUBSCRIBER = 1
MODERATOR = 2
BLACKLISTED = 3
STATUS_CHOICES = (
(UNCONFIRMED, u'Unconfirmed'),
(SUBSCRIBER, u'Subscriber'),
(MODERATOR, u'Moderator'),
(BLACKLISTED, u'Blacklisted'),
)
user = models.ForeignKey(User)
mailing_list = models.ForeignKey('MailingList')
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, default=UNCONFIRMED, db_index=True)
def __unicode__(self):
return u"%s - %s - %s" % (self.user, self.mailing_list, self.get_status_display())
class Meta:
unique_together = ('user', 'mailing_list')
class MailingListManager(models.Manager):
def for_site(self, site):
return self.filter(site=site)
def for_addresses(self, addresses):
"""
Takes a an iterable of email addresses and returns a queryset of mailinglists attached to the current site with matching local parts.
"""
site = Site.objects.get_current()
local_parts = []
for addr in addresses:
addr = addr.rsplit('@', 1)
if addr[1] == site.domain:
local_parts.append(addr[0])
if not local_parts:
return self.none()
return self.filter(domain=site, local_part__in=local_parts)
class MailingList(models.Model):
"""
This model contains all options for a mailing list, as well as some helpful
methods for accessing subscribers, moderators, etc.
"""
objects = MailingListManager()
MODERATORS = "mod"
SUBSCRIBERS = "sub"
ANYONE = "all"
PERMISSION_CHOICES = (
(MODERATORS, 'Moderators',),
(SUBSCRIBERS, 'Subscribers',),
(ANYONE, 'Anyone',),
)
name = models.CharField(max_length=50)
subject_prefix = models.CharField(max_length=10, blank=True)
local_part = models.CharField(max_length=64, validators=[validate_local_part, validate_not_command])
domain = models.ForeignKey(Site)
description = models.TextField(blank=True)
who_can_post = models.CharField(max_length=3, choices=PERMISSION_CHOICES, default=SUBSCRIBERS)
self_subscribe_enabled = models.BooleanField(verbose_name='self-subscribe enabled', default=True)
moderation_enabled = models.BooleanField(help_text="If enabled, messages that would be rejected will be marked ``Requires Moderation`` and an email will be sent to the list's moderators.", default=False)
# If is_anonymous becomes an option, the precooker will need to handle some anonymizing.
#is_anonymous = models.BooleanField()
users = models.ManyToManyField(
User,
related_name = 'mailinglists',
blank = True,
null = True,
through = ListUserMetadata
)
messages = models.ManyToManyField(
'Message',
related_name = 'mailinglists',
blank = True,
null = True,
through = 'ListMessage'
)
@property
def address(self):
return "%s@%s" % (self.local_part, self.domain.domain)
def _list_id_header(self):
# Does this need to be a byte string?
return smart_str(u"%s <%s.%s>" % (self.name, self.local_part, self.domain.domain))
def __unicode__(self):
return self.name
def clean(self):
validate_email(self.address)
# As per RFC 2919, the list_id_header has a max length of 255 octets.
if len(self._list_id_header()) > 254:
# Allow 4 extra spaces: the delimiters, the space, and the period.
raise ValidationError("The list name, local part, and site domain name can be at most 250 characters long together.")
def get_recipients(self):
"""Returns a queryset of :class:`User`\ s that should receive this message."""
qs = User.objects.filter(is_active=True)
qs = qs.filter(listusermetadata__mailing_list=self, listusermetadata__status__in=[ListUserMetadata.SUBSCRIBER, ListUserMetadata.MODERATOR])
return qs.distinct()
def _is_email_with_status(self, email, status):
if isinstance(email, basestring):
|
elif isinstance(email, User):
kwargs = {'user': email}
else:
return False
try:
self.listusermetadata_set.get(status=status, **kwargs)
except ListUserMetadata.DoesNotExist:
return False
return True
def is_subscriber(self, email):
return self._is_email_with_status(email, ListUserMetadata.SUBCRIBER)
def is_moderator(self, email):
return self._is_email_with_status(email, ListUserMetadata.MODERATOR)
def can_post(self, email):
if self.who_can_post == MailingList.ANYONE:
return True
if self.who_can_post == MailingList.SUBSCRIBERS and self.is_subscriber(email):
return True
if self.is_moderator(email):
return True
return False
class ProcessedMessageModel(models.Model):
"""
Encapsulates the logic required for storing and fetching pickled EmailMessage objects. This should eventually be replaced with a custom model field.
"""
processed_message = models.TextField(help_text="The processed form of the message at the current stage (pickled).", blank=True)
# Store the message as a base64-encoded pickle dump a la django-mailer.
def set_processed(self, msg):
self.processed_message = base64.encodestring(pickle.dumps(msg, pickle.HIGHEST_PROTOCOL))
self._processed = msg
def get_processed(self):
if not hasattr(self, '_processed'):
self._processed = pickle.loads(base64.decodestring(self.processed_message))
return self._processed
class Meta:
abstract = True
class Message(ProcessedMessageModel):
"""
Represents an email received by Kiki. Stores the original received message as well as a pickled version of the processed message.
"""
UNPROCESSED = 'u'
PROCESSED = 'p'
FAILED = 'f'
STATUS_CHOICES = (
(UNPROCESSED, 'Unprocessed'),
(PROCESSED, 'Processed'),
(FAILED, 'Failed'),
)
message_id = models.CharField(max_length=255, unique=True)
#: The message_id of the email this is in reply to.
# in_reply_to = models.CharField(max_length=255, db_index=True, blank=True)
from_email = models.EmailField()
received = models.DateTimeField()
status = models.CharField(max_length=1, choices=STATUS_CHOICES, db_index=True, default=UNPROCESSED)
original_message = models.TextField(help_text="The original raw text of the message.")
class ListMessage(ProcessedMessageModel):
"""
Represents the relationship between a :class:`Message` and a :class:`MailingList`. This is what is processed to handle the sending of a message to a list rather than the original message.
"""
ACCEPTED = 1
REQUIRES_MODERATION = 2
PREPPED = 3
SENT = 4
FAILED = 5
STATUS_CHOICES = (
(ACCEPTED, 'Accepted'),
(REQUIRES_MODERATION, 'Requires Moderation'),
(PREPPED, 'Prepped'),
(SENT, 'Sent'),
(FAILED, 'Failed'),
)
message = models.ForeignKey(Message)
mailing_list = models.ForeignKey(MailingList)
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, db_index=True)
class Meta:
unique_together = ('message', 'mailing_list',)
class ListCommand(models.Model):
#: The ListCommand has not been processed.
UNPROCESSED = 1
#: The ListCommand has been rejected (e.g. for permissioning reasons.)
REJECTED = 2
#: Ths ListCommand has been processed completely.
PROCESSED = 3
#: An error occurred while processing the ListCommand.
FAILED = 4
STATUS_CHOICES = (
(UNPROCESSED, 'Unprocessed'),
(REJECTED, 'Rejected'),
(PROCESSED, 'Processed'),
(FAILED, 'Failed'),
)
message = models.ForeignKey(Message)
mailing_list = models.ForeignKey(MailingList)
status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES, db_index=True, default=UNPROCESSED)
command = models.CharField(max_length=20) | kwargs = {'user__email__iexact': email} | conditional_block |
test_natural.py | from __future__ import unicode_literals
from django.core import serializers
from django.db import connection
from django.test import TestCase
from .models import FKDataNaturalKey, NaturalKeyAnchor
from .tests import register_tests
class NaturalKeySerializerTests(TestCase):
|
def natural_key_serializer_test(format, self):
# Create all the objects defined in the test data
with connection.constraint_checks_disabled():
objects = [
NaturalKeyAnchor.objects.create(id=1100, data="Natural Key Anghor"),
FKDataNaturalKey.objects.create(id=1101, data_id=1100),
FKDataNaturalKey.objects.create(id=1102, data_id=None),
]
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2, use_natural_foreign_keys=True)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for obj in objects:
instance = obj.__class__.objects.get(id=obj.pk)
self.assertEqual(
obj.data, instance.data,
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
obj.pk, obj.data, type(obj.data), instance, type(instance.data),
)
)
def natural_key_test(format, self):
book1 = {
'data': '978-1590597255',
'title': 'The Definitive Guide to Django: Web Development Done Right',
}
book2 = {'data': '978-1590599969', 'title': 'Practical Django Projects'}
# Create the books.
adrian = NaturalKeyAnchor.objects.create(**book1)
james = NaturalKeyAnchor.objects.create(**book2)
# Serialize the books.
string_data = serializers.serialize(
format, NaturalKeyAnchor.objects.all(), indent=2,
use_natural_foreign_keys=True, use_natural_primary_keys=True,
)
# Delete one book (to prove that the natural key generation will only
# restore the primary keys of books found in the database via the
# get_natural_key manager method).
james.delete()
# Deserialize and test.
books = list(serializers.deserialize(format, string_data))
self.assertEqual(len(books), 2)
self.assertEqual(books[0].object.title, book1['title'])
self.assertEqual(books[0].object.pk, adrian.pk)
self.assertEqual(books[1].object.title, book2['title'])
self.assertIsNone(books[1].object.pk)
# Dynamically register tests for each serializer
register_tests(NaturalKeySerializerTests, 'test_%s_natural_key_serializer', natural_key_serializer_test)
register_tests(NaturalKeySerializerTests, 'test_%s_serializer_natural_keys', natural_key_test)
| pass | identifier_body |
test_natural.py | from __future__ import unicode_literals
from django.core import serializers
from django.db import connection
from django.test import TestCase
from .models import FKDataNaturalKey, NaturalKeyAnchor
from .tests import register_tests
class NaturalKeySerializerTests(TestCase):
pass
def | (format, self):
# Create all the objects defined in the test data
with connection.constraint_checks_disabled():
objects = [
NaturalKeyAnchor.objects.create(id=1100, data="Natural Key Anghor"),
FKDataNaturalKey.objects.create(id=1101, data_id=1100),
FKDataNaturalKey.objects.create(id=1102, data_id=None),
]
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2, use_natural_foreign_keys=True)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for obj in objects:
instance = obj.__class__.objects.get(id=obj.pk)
self.assertEqual(
obj.data, instance.data,
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
obj.pk, obj.data, type(obj.data), instance, type(instance.data),
)
)
def natural_key_test(format, self):
book1 = {
'data': '978-1590597255',
'title': 'The Definitive Guide to Django: Web Development Done Right',
}
book2 = {'data': '978-1590599969', 'title': 'Practical Django Projects'}
# Create the books.
adrian = NaturalKeyAnchor.objects.create(**book1)
james = NaturalKeyAnchor.objects.create(**book2)
# Serialize the books.
string_data = serializers.serialize(
format, NaturalKeyAnchor.objects.all(), indent=2,
use_natural_foreign_keys=True, use_natural_primary_keys=True,
)
# Delete one book (to prove that the natural key generation will only
# restore the primary keys of books found in the database via the
# get_natural_key manager method).
james.delete()
# Deserialize and test.
books = list(serializers.deserialize(format, string_data))
self.assertEqual(len(books), 2)
self.assertEqual(books[0].object.title, book1['title'])
self.assertEqual(books[0].object.pk, adrian.pk)
self.assertEqual(books[1].object.title, book2['title'])
self.assertIsNone(books[1].object.pk)
# Dynamically register tests for each serializer
register_tests(NaturalKeySerializerTests, 'test_%s_natural_key_serializer', natural_key_serializer_test)
register_tests(NaturalKeySerializerTests, 'test_%s_serializer_natural_keys', natural_key_test)
| natural_key_serializer_test | identifier_name |
test_natural.py | from __future__ import unicode_literals
from django.core import serializers
from django.db import connection
from django.test import TestCase
from .models import FKDataNaturalKey, NaturalKeyAnchor
from .tests import register_tests
class NaturalKeySerializerTests(TestCase):
pass
def natural_key_serializer_test(format, self):
# Create all the objects defined in the test data
with connection.constraint_checks_disabled():
objects = [
NaturalKeyAnchor.objects.create(id=1100, data="Natural Key Anghor"),
FKDataNaturalKey.objects.create(id=1101, data_id=1100),
FKDataNaturalKey.objects.create(id=1102, data_id=None),
]
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2, use_natural_foreign_keys=True)
for obj in serializers.deserialize(format, serialized_data):
|
# Assert that the deserialized data is the same
# as the original source
for obj in objects:
instance = obj.__class__.objects.get(id=obj.pk)
self.assertEqual(
obj.data, instance.data,
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
obj.pk, obj.data, type(obj.data), instance, type(instance.data),
)
)
def natural_key_test(format, self):
book1 = {
'data': '978-1590597255',
'title': 'The Definitive Guide to Django: Web Development Done Right',
}
book2 = {'data': '978-1590599969', 'title': 'Practical Django Projects'}
# Create the books.
adrian = NaturalKeyAnchor.objects.create(**book1)
james = NaturalKeyAnchor.objects.create(**book2)
# Serialize the books.
string_data = serializers.serialize(
format, NaturalKeyAnchor.objects.all(), indent=2,
use_natural_foreign_keys=True, use_natural_primary_keys=True,
)
# Delete one book (to prove that the natural key generation will only
# restore the primary keys of books found in the database via the
# get_natural_key manager method).
james.delete()
# Deserialize and test.
books = list(serializers.deserialize(format, string_data))
self.assertEqual(len(books), 2)
self.assertEqual(books[0].object.title, book1['title'])
self.assertEqual(books[0].object.pk, adrian.pk)
self.assertEqual(books[1].object.title, book2['title'])
self.assertIsNone(books[1].object.pk)
# Dynamically register tests for each serializer
register_tests(NaturalKeySerializerTests, 'test_%s_natural_key_serializer', natural_key_serializer_test)
register_tests(NaturalKeySerializerTests, 'test_%s_serializer_natural_keys', natural_key_test)
| obj.save() | conditional_block |
test_natural.py | from __future__ import unicode_literals
from django.core import serializers
from django.db import connection
from django.test import TestCase
from .models import FKDataNaturalKey, NaturalKeyAnchor
from .tests import register_tests
class NaturalKeySerializerTests(TestCase):
pass
def natural_key_serializer_test(format, self):
# Create all the objects defined in the test data
with connection.constraint_checks_disabled():
objects = [
NaturalKeyAnchor.objects.create(id=1100, data="Natural Key Anghor"),
FKDataNaturalKey.objects.create(id=1101, data_id=1100),
FKDataNaturalKey.objects.create(id=1102, data_id=None),
]
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2, use_natural_foreign_keys=True)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for obj in objects:
instance = obj.__class__.objects.get(id=obj.pk)
self.assertEqual(
obj.data, instance.data,
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
obj.pk, obj.data, type(obj.data), instance, type(instance.data),
)
)
def natural_key_test(format, self):
book1 = {
'data': '978-1590597255',
'title': 'The Definitive Guide to Django: Web Development Done Right',
}
book2 = {'data': '978-1590599969', 'title': 'Practical Django Projects'}
# Create the books.
adrian = NaturalKeyAnchor.objects.create(**book1)
james = NaturalKeyAnchor.objects.create(**book2)
|
# Delete one book (to prove that the natural key generation will only
# restore the primary keys of books found in the database via the
# get_natural_key manager method).
james.delete()
# Deserialize and test.
books = list(serializers.deserialize(format, string_data))
self.assertEqual(len(books), 2)
self.assertEqual(books[0].object.title, book1['title'])
self.assertEqual(books[0].object.pk, adrian.pk)
self.assertEqual(books[1].object.title, book2['title'])
self.assertIsNone(books[1].object.pk)
# Dynamically register tests for each serializer
register_tests(NaturalKeySerializerTests, 'test_%s_natural_key_serializer', natural_key_serializer_test)
register_tests(NaturalKeySerializerTests, 'test_%s_serializer_natural_keys', natural_key_test) | # Serialize the books.
string_data = serializers.serialize(
format, NaturalKeyAnchor.objects.all(), indent=2,
use_natural_foreign_keys=True, use_natural_primary_keys=True,
) | random_line_split |
unittest.py | # (c) 2014, Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
Compat module for Python2.7's unittest module
'''
import sys
# Python 2.6
if sys.version_info < (2, 7):
try:
# Need unittest2 on python2.6
from unittest2 import *
except ImportError:
print('You need unittest2 installed on python2.6.x to run tests')
else:
| from unittest import * | conditional_block |
|
unittest.py | # (c) 2014, Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
Compat module for Python2.7's unittest module
'''
import sys
# Python 2.6
if sys.version_info < (2, 7): | except ImportError:
print('You need unittest2 installed on python2.6.x to run tests')
else:
from unittest import * | try:
# Need unittest2 on python2.6
from unittest2 import * | random_line_split |
base.ts | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import { Data } from '../data';
import { AbstractVector, Vector } from '../vector';
import { DataType } from '../type';
import { Chunked } from './chunked';
import { clampRange } from '../util/vector';
import { Vector as VType } from '../interfaces';
import { Clonable, Sliceable, Applicative } from '../vector';
export interface BaseVector<T extends DataType = any> extends Clonable<VType<T>>, Sliceable<VType<T>>, Applicative<T, Chunked<T>> {
slice(begin?: number, end?: number): VType<T>;
concat(...others: Vector<T>[]): Chunked<T>;
clone<R extends DataType = T>(data: Data<R>, children?: Vector<R>[]): VType<R>;
}
export abstract class BaseVector<T extends DataType = any> extends AbstractVector<T>
implements Clonable<VType<T>>, Sliceable<VType<T>>, Applicative<T, Chunked<T>> {
protected _children?: Vector[];
constructor(data: Data<T>, children?: Vector[]) {
super();
this._children = children;
this.numChildren = data.childData.length;
this._bindDataAccessors(this.data = data);
}
public readonly data: Data<T>;
public readonly numChildren: number;
public get type() { return this.data.type; }
public get typeId() { return this.data.typeId; }
public get length() { return this.data.length; }
public get offset() { return this.data.offset; }
public get stride() { return this.data.stride; }
public get nullCount() |
public get VectorName() { return this.constructor.name; }
public get ArrayType(): T['ArrayType'] { return this.data.ArrayType; }
public get values() { return this.data.values; }
public get typeIds() { return this.data.typeIds; }
public get nullBitmap() { return this.data.nullBitmap; }
public get valueOffsets() { return this.data.valueOffsets; }
public get [Symbol.toStringTag]() { return `${this.VectorName}<${this.type[Symbol.toStringTag]}>`; }
public clone<R extends DataType = T>(data: Data<R>, children = this._children) {
return Vector.new<R>(data, children) as any;
}
public concat(...others: Vector<T>[]) {
return Chunked.concat<T>(this, ...others);
}
public slice(begin?: number, end?: number) {
// Adjust args similar to Array.prototype.slice. Normalize begin/end to
// clamp between 0 and length, and wrap around on negative indices, e.g.
// slice(-1, 5) or slice(5, -1)
return clampRange(this, begin, end, this._sliceInternal);
}
public isValid(index: number): boolean {
if (this.nullCount > 0) {
const idx = this.offset + index;
const val = this.nullBitmap[idx >> 3];
const mask = (val & (1 << (idx % 8)));
return mask !== 0;
}
return true;
}
public getChildAt<R extends DataType = any>(index: number): Vector<R> | null {
return index < 0 || index >= this.numChildren ? null : (
(this._children || (this._children = []))[index] ||
(this._children[index] = Vector.new<R>(this.data.childData[index] as Data<R>))
) as Vector<R>;
}
// @ts-ignore
public toJSON(): any { return [...this]; }
protected _sliceInternal(self: this, begin: number, end: number) {
return self.clone(self.data.slice(begin, end - begin));
}
// @ts-ignore
protected _bindDataAccessors(data: Data<T>) {
// Implementation in src/vectors/index.ts due to circular dependency/packaging shenanigans
}
}
(BaseVector.prototype as any)[Symbol.isConcatSpreadable] = true;
| { return this.data.nullCount; } | identifier_body |
base.ts | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import { Data } from '../data';
import { AbstractVector, Vector } from '../vector';
import { DataType } from '../type';
import { Chunked } from './chunked';
import { clampRange } from '../util/vector';
import { Vector as VType } from '../interfaces';
import { Clonable, Sliceable, Applicative } from '../vector';
export interface BaseVector<T extends DataType = any> extends Clonable<VType<T>>, Sliceable<VType<T>>, Applicative<T, Chunked<T>> {
slice(begin?: number, end?: number): VType<T>;
concat(...others: Vector<T>[]): Chunked<T>;
clone<R extends DataType = T>(data: Data<R>, children?: Vector<R>[]): VType<R>;
}
export abstract class BaseVector<T extends DataType = any> extends AbstractVector<T>
implements Clonable<VType<T>>, Sliceable<VType<T>>, Applicative<T, Chunked<T>> {
protected _children?: Vector[];
| (data: Data<T>, children?: Vector[]) {
super();
this._children = children;
this.numChildren = data.childData.length;
this._bindDataAccessors(this.data = data);
}
public readonly data: Data<T>;
public readonly numChildren: number;
public get type() { return this.data.type; }
public get typeId() { return this.data.typeId; }
public get length() { return this.data.length; }
public get offset() { return this.data.offset; }
public get stride() { return this.data.stride; }
public get nullCount() { return this.data.nullCount; }
public get VectorName() { return this.constructor.name; }
public get ArrayType(): T['ArrayType'] { return this.data.ArrayType; }
public get values() { return this.data.values; }
public get typeIds() { return this.data.typeIds; }
public get nullBitmap() { return this.data.nullBitmap; }
public get valueOffsets() { return this.data.valueOffsets; }
public get [Symbol.toStringTag]() { return `${this.VectorName}<${this.type[Symbol.toStringTag]}>`; }
public clone<R extends DataType = T>(data: Data<R>, children = this._children) {
return Vector.new<R>(data, children) as any;
}
public concat(...others: Vector<T>[]) {
return Chunked.concat<T>(this, ...others);
}
public slice(begin?: number, end?: number) {
// Adjust args similar to Array.prototype.slice. Normalize begin/end to
// clamp between 0 and length, and wrap around on negative indices, e.g.
// slice(-1, 5) or slice(5, -1)
return clampRange(this, begin, end, this._sliceInternal);
}
public isValid(index: number): boolean {
if (this.nullCount > 0) {
const idx = this.offset + index;
const val = this.nullBitmap[idx >> 3];
const mask = (val & (1 << (idx % 8)));
return mask !== 0;
}
return true;
}
public getChildAt<R extends DataType = any>(index: number): Vector<R> | null {
return index < 0 || index >= this.numChildren ? null : (
(this._children || (this._children = []))[index] ||
(this._children[index] = Vector.new<R>(this.data.childData[index] as Data<R>))
) as Vector<R>;
}
// @ts-ignore
public toJSON(): any { return [...this]; }
protected _sliceInternal(self: this, begin: number, end: number) {
return self.clone(self.data.slice(begin, end - begin));
}
// @ts-ignore
protected _bindDataAccessors(data: Data<T>) {
// Implementation in src/vectors/index.ts due to circular dependency/packaging shenanigans
}
}
(BaseVector.prototype as any)[Symbol.isConcatSpreadable] = true;
| constructor | identifier_name |
base.ts | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import { Data } from '../data';
import { AbstractVector, Vector } from '../vector';
import { DataType } from '../type';
import { Chunked } from './chunked';
import { clampRange } from '../util/vector';
import { Vector as VType } from '../interfaces';
import { Clonable, Sliceable, Applicative } from '../vector';
export interface BaseVector<T extends DataType = any> extends Clonable<VType<T>>, Sliceable<VType<T>>, Applicative<T, Chunked<T>> {
slice(begin?: number, end?: number): VType<T>;
concat(...others: Vector<T>[]): Chunked<T>;
clone<R extends DataType = T>(data: Data<R>, children?: Vector<R>[]): VType<R>;
}
export abstract class BaseVector<T extends DataType = any> extends AbstractVector<T>
implements Clonable<VType<T>>, Sliceable<VType<T>>, Applicative<T, Chunked<T>> {
protected _children?: Vector[];
constructor(data: Data<T>, children?: Vector[]) {
super();
this._children = children;
this.numChildren = data.childData.length;
this._bindDataAccessors(this.data = data);
}
public readonly data: Data<T>;
public readonly numChildren: number;
public get type() { return this.data.type; }
public get typeId() { return this.data.typeId; }
public get length() { return this.data.length; }
public get offset() { return this.data.offset; }
public get stride() { return this.data.stride; }
public get nullCount() { return this.data.nullCount; } | public get VectorName() { return this.constructor.name; }
public get ArrayType(): T['ArrayType'] { return this.data.ArrayType; }
public get values() { return this.data.values; }
public get typeIds() { return this.data.typeIds; }
public get nullBitmap() { return this.data.nullBitmap; }
public get valueOffsets() { return this.data.valueOffsets; }
public get [Symbol.toStringTag]() { return `${this.VectorName}<${this.type[Symbol.toStringTag]}>`; }
public clone<R extends DataType = T>(data: Data<R>, children = this._children) {
return Vector.new<R>(data, children) as any;
}
public concat(...others: Vector<T>[]) {
return Chunked.concat<T>(this, ...others);
}
public slice(begin?: number, end?: number) {
// Adjust args similar to Array.prototype.slice. Normalize begin/end to
// clamp between 0 and length, and wrap around on negative indices, e.g.
// slice(-1, 5) or slice(5, -1)
return clampRange(this, begin, end, this._sliceInternal);
}
public isValid(index: number): boolean {
if (this.nullCount > 0) {
const idx = this.offset + index;
const val = this.nullBitmap[idx >> 3];
const mask = (val & (1 << (idx % 8)));
return mask !== 0;
}
return true;
}
public getChildAt<R extends DataType = any>(index: number): Vector<R> | null {
return index < 0 || index >= this.numChildren ? null : (
(this._children || (this._children = []))[index] ||
(this._children[index] = Vector.new<R>(this.data.childData[index] as Data<R>))
) as Vector<R>;
}
// @ts-ignore
public toJSON(): any { return [...this]; }
protected _sliceInternal(self: this, begin: number, end: number) {
return self.clone(self.data.slice(begin, end - begin));
}
// @ts-ignore
protected _bindDataAccessors(data: Data<T>) {
// Implementation in src/vectors/index.ts due to circular dependency/packaging shenanigans
}
}
(BaseVector.prototype as any)[Symbol.isConcatSpreadable] = true; | random_line_split |
|
base.ts | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import { Data } from '../data';
import { AbstractVector, Vector } from '../vector';
import { DataType } from '../type';
import { Chunked } from './chunked';
import { clampRange } from '../util/vector';
import { Vector as VType } from '../interfaces';
import { Clonable, Sliceable, Applicative } from '../vector';
export interface BaseVector<T extends DataType = any> extends Clonable<VType<T>>, Sliceable<VType<T>>, Applicative<T, Chunked<T>> {
slice(begin?: number, end?: number): VType<T>;
concat(...others: Vector<T>[]): Chunked<T>;
clone<R extends DataType = T>(data: Data<R>, children?: Vector<R>[]): VType<R>;
}
export abstract class BaseVector<T extends DataType = any> extends AbstractVector<T>
implements Clonable<VType<T>>, Sliceable<VType<T>>, Applicative<T, Chunked<T>> {
protected _children?: Vector[];
constructor(data: Data<T>, children?: Vector[]) {
super();
this._children = children;
this.numChildren = data.childData.length;
this._bindDataAccessors(this.data = data);
}
public readonly data: Data<T>;
public readonly numChildren: number;
public get type() { return this.data.type; }
public get typeId() { return this.data.typeId; }
public get length() { return this.data.length; }
public get offset() { return this.data.offset; }
public get stride() { return this.data.stride; }
public get nullCount() { return this.data.nullCount; }
public get VectorName() { return this.constructor.name; }
public get ArrayType(): T['ArrayType'] { return this.data.ArrayType; }
public get values() { return this.data.values; }
public get typeIds() { return this.data.typeIds; }
public get nullBitmap() { return this.data.nullBitmap; }
public get valueOffsets() { return this.data.valueOffsets; }
public get [Symbol.toStringTag]() { return `${this.VectorName}<${this.type[Symbol.toStringTag]}>`; }
public clone<R extends DataType = T>(data: Data<R>, children = this._children) {
return Vector.new<R>(data, children) as any;
}
public concat(...others: Vector<T>[]) {
return Chunked.concat<T>(this, ...others);
}
public slice(begin?: number, end?: number) {
// Adjust args similar to Array.prototype.slice. Normalize begin/end to
// clamp between 0 and length, and wrap around on negative indices, e.g.
// slice(-1, 5) or slice(5, -1)
return clampRange(this, begin, end, this._sliceInternal);
}
public isValid(index: number): boolean {
if (this.nullCount > 0) |
return true;
}
public getChildAt<R extends DataType = any>(index: number): Vector<R> | null {
return index < 0 || index >= this.numChildren ? null : (
(this._children || (this._children = []))[index] ||
(this._children[index] = Vector.new<R>(this.data.childData[index] as Data<R>))
) as Vector<R>;
}
// @ts-ignore
public toJSON(): any { return [...this]; }
protected _sliceInternal(self: this, begin: number, end: number) {
return self.clone(self.data.slice(begin, end - begin));
}
// @ts-ignore
protected _bindDataAccessors(data: Data<T>) {
// Implementation in src/vectors/index.ts due to circular dependency/packaging shenanigans
}
}
(BaseVector.prototype as any)[Symbol.isConcatSpreadable] = true;
| {
const idx = this.offset + index;
const val = this.nullBitmap[idx >> 3];
const mask = (val & (1 << (idx % 8)));
return mask !== 0;
} | conditional_block |
game.py | from gameinfo import *
from porkglobals import *
def genGameMap():
"""This is an "abstract function" to hold this docstring and information.
A GameMap function defines Places and connects all the Places it defines in
a graph, but simpler graph than CommandGraph. It simply uses Place.nextnodes.
A GameMap function returns the starting location."""
def testGameMap():
""" ***TEST CASES*** """
# testing item adj/name collision
testsword = Weapon("elvish sword", "A blade of Elvish make.", 2, weight=2)
testsword2 = Weapon("rusty elvish sword", "A discarded old blade of Elvish steel.", 2)
testsword3 = Weapon("sword elvish rusty", "A mix of adjectives to fuck with you.", 2)
startlocnext = {}
startloc = Place("Sword testing location.",
items=[testsword,testsword2,testsword3],
next=startlocnext)
return startloc
def goldenfieldMap():
# python objs are pointers, putting an object in two places on accident
# would make some weird behavior
|
if DEBUG:
genGameMap = testGameMap
# ghetto map choosing
genGameMap = goldenfieldMap | shittystartersword = Weapon("old, rusty sword", "A simple sword, obviously aged and covered in rust.", 2, weight=2)
startlocnext = {'e':"There is a wall there."}
startloc = Place("You are in a field. Swaying, golden grass surrounds you in all directions.",
items=[shittystartersword],
next=startlocnext)
field1next = {'s':startloc}
field1 = Place("You are in a field. Golden, swaying grass surrounds you in all directions.",
next=field1next)
startlocnext['n'] = field1
field2next = {'n':startloc}
field2 = Place("You are in a field with golden, swaying grass in all directions.",
next=field2next)
startlocnext['s'] = field2
# wait why the hell am i not just doing Place.next = {}
aSecretRoomNext = {'u':startloc}
aSecretRoom = Place(("You find yourself in a secret room. The walls glare down at you, but otherwise the room is quiet. There "
"is a painting on the wall in front of you, flanked by two statues of what appear to be kneeling warriors."),
next=aSecretRoomNext)
warriorStatue = Feature("warrior statue", ("A statue of a kneeling warrior. He faces down, with one hand on the hilt of his sheathed sword and "
"the other in a fist."))
painting = Feature("painting", "A painting of a bowl of fruit. A note attached to it says, do not to this.")
aSecretRoom.features = [warriorStatue, painting]
startlocnext['d'] = aSecretRoom
return startloc | identifier_body |
game.py | from gameinfo import *
from porkglobals import *
def genGameMap():
"""This is an "abstract function" to hold this docstring and information.
A GameMap function defines Places and connects all the Places it defines in
a graph, but simpler graph than CommandGraph. It simply uses Place.nextnodes.
A GameMap function returns the starting location."""
def testGameMap():
""" ***TEST CASES*** """
# testing item adj/name collision | testsword2 = Weapon("rusty elvish sword", "A discarded old blade of Elvish steel.", 2)
testsword3 = Weapon("sword elvish rusty", "A mix of adjectives to fuck with you.", 2)
startlocnext = {}
startloc = Place("Sword testing location.",
items=[testsword,testsword2,testsword3],
next=startlocnext)
return startloc
def goldenfieldMap():
# python objs are pointers, putting an object in two places on accident
# would make some weird behavior
shittystartersword = Weapon("old, rusty sword", "A simple sword, obviously aged and covered in rust.", 2, weight=2)
startlocnext = {'e':"There is a wall there."}
startloc = Place("You are in a field. Swaying, golden grass surrounds you in all directions.",
items=[shittystartersword],
next=startlocnext)
field1next = {'s':startloc}
field1 = Place("You are in a field. Golden, swaying grass surrounds you in all directions.",
next=field1next)
startlocnext['n'] = field1
field2next = {'n':startloc}
field2 = Place("You are in a field with golden, swaying grass in all directions.",
next=field2next)
startlocnext['s'] = field2
# wait why the hell am i not just doing Place.next = {}
aSecretRoomNext = {'u':startloc}
aSecretRoom = Place(("You find yourself in a secret room. The walls glare down at you, but otherwise the room is quiet. There "
"is a painting on the wall in front of you, flanked by two statues of what appear to be kneeling warriors."),
next=aSecretRoomNext)
warriorStatue = Feature("warrior statue", ("A statue of a kneeling warrior. He faces down, with one hand on the hilt of his sheathed sword and "
"the other in a fist."))
painting = Feature("painting", "A painting of a bowl of fruit. A note attached to it says, do not to this.")
aSecretRoom.features = [warriorStatue, painting]
startlocnext['d'] = aSecretRoom
return startloc
if DEBUG:
genGameMap = testGameMap
# ghetto map choosing
genGameMap = goldenfieldMap | testsword = Weapon("elvish sword", "A blade of Elvish make.", 2, weight=2) | random_line_split |
game.py | from gameinfo import *
from porkglobals import *
def genGameMap():
"""This is an "abstract function" to hold this docstring and information.
A GameMap function defines Places and connects all the Places it defines in
a graph, but simpler graph than CommandGraph. It simply uses Place.nextnodes.
A GameMap function returns the starting location."""
def testGameMap():
""" ***TEST CASES*** """
# testing item adj/name collision
testsword = Weapon("elvish sword", "A blade of Elvish make.", 2, weight=2)
testsword2 = Weapon("rusty elvish sword", "A discarded old blade of Elvish steel.", 2)
testsword3 = Weapon("sword elvish rusty", "A mix of adjectives to fuck with you.", 2)
startlocnext = {}
startloc = Place("Sword testing location.",
items=[testsword,testsword2,testsword3],
next=startlocnext)
return startloc
def goldenfieldMap():
# python objs are pointers, putting an object in two places on accident
# would make some weird behavior
shittystartersword = Weapon("old, rusty sword", "A simple sword, obviously aged and covered in rust.", 2, weight=2)
startlocnext = {'e':"There is a wall there."}
startloc = Place("You are in a field. Swaying, golden grass surrounds you in all directions.",
items=[shittystartersword],
next=startlocnext)
field1next = {'s':startloc}
field1 = Place("You are in a field. Golden, swaying grass surrounds you in all directions.",
next=field1next)
startlocnext['n'] = field1
field2next = {'n':startloc}
field2 = Place("You are in a field with golden, swaying grass in all directions.",
next=field2next)
startlocnext['s'] = field2
# wait why the hell am i not just doing Place.next = {}
aSecretRoomNext = {'u':startloc}
aSecretRoom = Place(("You find yourself in a secret room. The walls glare down at you, but otherwise the room is quiet. There "
"is a painting on the wall in front of you, flanked by two statues of what appear to be kneeling warriors."),
next=aSecretRoomNext)
warriorStatue = Feature("warrior statue", ("A statue of a kneeling warrior. He faces down, with one hand on the hilt of his sheathed sword and "
"the other in a fist."))
painting = Feature("painting", "A painting of a bowl of fruit. A note attached to it says, do not to this.")
aSecretRoom.features = [warriorStatue, painting]
startlocnext['d'] = aSecretRoom
return startloc
if DEBUG:
|
# ghetto map choosing
genGameMap = goldenfieldMap | genGameMap = testGameMap | conditional_block |
game.py | from gameinfo import *
from porkglobals import *
def | ():
"""This is an "abstract function" to hold this docstring and information.
A GameMap function defines Places and connects all the Places it defines in
a graph, but simpler graph than CommandGraph. It simply uses Place.nextnodes.
A GameMap function returns the starting location."""
def testGameMap():
""" ***TEST CASES*** """
# testing item adj/name collision
testsword = Weapon("elvish sword", "A blade of Elvish make.", 2, weight=2)
testsword2 = Weapon("rusty elvish sword", "A discarded old blade of Elvish steel.", 2)
testsword3 = Weapon("sword elvish rusty", "A mix of adjectives to fuck with you.", 2)
startlocnext = {}
startloc = Place("Sword testing location.",
items=[testsword,testsword2,testsword3],
next=startlocnext)
return startloc
def goldenfieldMap():
# python objs are pointers, putting an object in two places on accident
# would make some weird behavior
shittystartersword = Weapon("old, rusty sword", "A simple sword, obviously aged and covered in rust.", 2, weight=2)
startlocnext = {'e':"There is a wall there."}
startloc = Place("You are in a field. Swaying, golden grass surrounds you in all directions.",
items=[shittystartersword],
next=startlocnext)
field1next = {'s':startloc}
field1 = Place("You are in a field. Golden, swaying grass surrounds you in all directions.",
next=field1next)
startlocnext['n'] = field1
field2next = {'n':startloc}
field2 = Place("You are in a field with golden, swaying grass in all directions.",
next=field2next)
startlocnext['s'] = field2
# wait why the hell am i not just doing Place.next = {}
aSecretRoomNext = {'u':startloc}
aSecretRoom = Place(("You find yourself in a secret room. The walls glare down at you, but otherwise the room is quiet. There "
"is a painting on the wall in front of you, flanked by two statues of what appear to be kneeling warriors."),
next=aSecretRoomNext)
warriorStatue = Feature("warrior statue", ("A statue of a kneeling warrior. He faces down, with one hand on the hilt of his sheathed sword and "
"the other in a fist."))
painting = Feature("painting", "A painting of a bowl of fruit. A note attached to it says, do not to this.")
aSecretRoom.features = [warriorStatue, painting]
startlocnext['d'] = aSecretRoom
return startloc
if DEBUG:
genGameMap = testGameMap
# ghetto map choosing
genGameMap = goldenfieldMap | genGameMap | identifier_name |
unwind.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Implementation of Rust stack unwinding
//
// For background on exception handling and stack unwinding please see
// "Exception Handling in LLVM" (llvm.org/docs/ExceptionHandling.html) and
// documents linked from it.
// These are also good reads:
// http://theofilos.cs.columbia.edu/blog/2013/09/22/base_abi/
// http://monoinfinito.wordpress.com/series/exception-handling-in-c/
// http://www.airs.com/blog/index.php?s=exception+frames
//
// ~~~ A brief summary ~~~
// Exception handling happens in two phases: a search phase and a cleanup phase.
//
// In both phases the unwinder walks stack frames from top to bottom using
// information from the stack frame unwind sections of the current process's
// modules ("module" here refers to an OS module, i.e. an executable or a
// dynamic library).
//
// For each stack frame, it invokes the associated "personality routine", whose
// address is also stored in the unwind info section.
//
// In the search phase, the job of a personality routine is to examine exception
// object being thrown, and to decide whether it should be caught at that stack
// frame. Once the handler frame has been identified, cleanup phase begins.
//
// In the cleanup phase, personality routines invoke cleanup code associated
// with their stack frames (i.e. destructors). Once stack has been unwound down
// to the handler frame level, unwinding stops and the last personality routine
// transfers control to its' catch block.
//
// ~~~ Frame unwind info registration ~~~
// Each module has its' own frame unwind info section (usually ".eh_frame"), and
// unwinder needs to know about all of them in order for unwinding to be able to
// cross module boundaries.
//
// On some platforms, like Linux, this is achieved by dynamically enumerating
// currently loaded modules via the dl_iterate_phdr() API and finding all
// .eh_frame sections.
//
// Others, like Windows, require modules to actively register their unwind info
// sections by calling __register_frame_info() API at startup. In the latter
// case it is essential that there is only one copy of the unwinder runtime in
// the process. This is usually achieved by linking to the dynamic version of
// the unwind runtime.
//
// Currently Rust uses unwind runtime provided by libgcc.
use any::{Any, AnyRefExt};
use c_str::CString;
use cast;
use fmt;
use kinds::Send;
use mem;
use option::{Some, None, Option};
use prelude::drop;
use ptr::RawPtr;
use result::{Err, Ok};
use rt::backtrace;
use rt::local::Local;
use rt::task::Task;
use str::Str;
use task::TaskResult;
use intrinsics;
use uw = rt::libunwind;
pub struct Unwinder {
priv unwinding: bool,
priv cause: Option<~Any:Send>
}
impl Unwinder {
pub fn new() -> Unwinder {
Unwinder {
unwinding: false,
cause: None,
}
}
pub fn unwinding(&self) -> bool {
self.unwinding
}
pub fn try(&mut self, f: ||) {
use raw::Closure; |
unsafe {
let closure: Closure = cast::transmute(f);
let ep = rust_try(try_fn, closure.code as *c_void,
closure.env as *c_void);
if !ep.is_null() {
rtdebug!("caught {}", (*ep).exception_class);
uw::_Unwind_DeleteException(ep);
}
}
extern fn try_fn(code: *c_void, env: *c_void) {
unsafe {
let closure: || = cast::transmute(Closure {
code: code as *(),
env: env as *(),
});
closure();
}
}
extern {
// Rust's try-catch
// When f(...) returns normally, the return value is null.
// When f(...) throws, the return value is a pointer to the caught
// exception object.
fn rust_try(f: extern "C" fn(*c_void, *c_void),
code: *c_void,
data: *c_void) -> *uw::_Unwind_Exception;
}
}
pub fn begin_unwind(&mut self, cause: ~Any:Send) -> ! {
rtdebug!("begin_unwind()");
self.unwinding = true;
self.cause = Some(cause);
rust_fail();
// An uninlined, unmangled function upon which to slap yer breakpoints
#[inline(never)]
#[no_mangle]
fn rust_fail() -> ! {
unsafe {
let exception = ~uw::_Unwind_Exception {
exception_class: rust_exception_class(),
exception_cleanup: exception_cleanup,
private: [0, ..uw::unwinder_private_data_size],
};
let error = uw::_Unwind_RaiseException(cast::transmute(exception));
rtabort!("Could not unwind stack, error = {}", error as int)
}
extern "C" fn exception_cleanup(_unwind_code: uw::_Unwind_Reason_Code,
exception: *uw::_Unwind_Exception) {
rtdebug!("exception_cleanup()");
unsafe {
let _: ~uw::_Unwind_Exception = cast::transmute(exception);
}
}
}
}
pub fn result(&mut self) -> TaskResult {
if self.unwinding {
Err(self.cause.take().unwrap())
} else {
Ok(())
}
}
}
// Rust's exception class identifier. This is used by personality routines to
// determine whether the exception was thrown by their own runtime.
fn rust_exception_class() -> uw::_Unwind_Exception_Class {
// M O Z \0 R U S T -- vendor, language
0x4d4f5a_00_52555354
}
// We could implement our personality routine in pure Rust, however exception
// info decoding is tedious. More importantly, personality routines have to
// handle various platform quirks, which are not fun to maintain. For this
// reason, we attempt to reuse personality routine of the C language:
// __gcc_personality_v0.
//
// Since C does not support exception catching, __gcc_personality_v0 simply
// always returns _URC_CONTINUE_UNWIND in search phase, and always returns
// _URC_INSTALL_CONTEXT (i.e. "invoke cleanup code") in cleanup phase.
//
// This is pretty close to Rust's exception handling approach, except that Rust
// does have a single "catch-all" handler at the bottom of each task's stack.
// So we have two versions:
// - rust_eh_personality, used by all cleanup landing pads, which never catches,
// so the behavior of __gcc_personality_v0 is perfectly adequate there, and
// - rust_eh_personality_catch, used only by rust_try(), which always catches.
// This is achieved by overriding the return value in search phase to always
// say "catch!".
#[cfg(not(target_arch = "arm"), not(test))]
#[doc(hidden)]
#[allow(visible_private_types)]
pub mod eabi {
use uw = rt::libunwind;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // so we can reference it by name from middle/trans/base.rs
pub extern "C" fn rust_eh_personality(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(version, actions, exception_class, ue_header,
context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
unsafe {
__gcc_personality_v0(version, actions, exception_class, ue_header,
context)
}
}
}
}
// ARM EHABI uses a slightly different personality routine signature,
// but otherwise works the same.
#[cfg(target_arch = "arm", not(test))]
#[allow(visible_private_types)]
pub mod eabi {
use uw = rt::libunwind;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(state: uw::_Unwind_State,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // so we can reference it by name from middle/trans/base.rs
pub extern "C" fn rust_eh_personality(
state: uw::_Unwind_State,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(state, ue_header, context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
state: uw::_Unwind_State,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (state as c_int & uw::_US_ACTION_MASK as c_int)
== uw::_US_VIRTUAL_UNWIND_FRAME as c_int { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
unsafe {
__gcc_personality_v0(state, ue_header, context)
}
}
}
}
#[cold]
#[lang="fail_"]
#[cfg(not(test))]
pub fn fail_(expr: *u8, file: *u8, line: uint) -> ! {
begin_unwind_raw(expr, file, line);
}
#[cold]
#[lang="fail_bounds_check"]
#[cfg(not(test))]
pub fn fail_bounds_check(file: *u8, line: uint, index: uint, len: uint) -> ! {
use c_str::ToCStr;
let msg = format!("index out of bounds: the len is {} but the index is {}",
len as uint, index as uint);
msg.with_c_str(|buf| fail_(buf as *u8, file, line))
}
/// This is the entry point of unwinding for things like lang items and such.
/// The arguments are normally generated by the compiler, and need to
/// have static lifetimes.
#[inline(never)] #[cold] // this is the slow path, please never inline this
pub fn begin_unwind_raw(msg: *u8, file: *u8, line: uint) -> ! {
use libc::c_char;
#[inline]
fn static_char_ptr(p: *u8) -> &'static str {
let s = unsafe { CString::new(p as *c_char, false) };
match s.as_str() {
Some(s) => unsafe { cast::transmute::<&str, &'static str>(s) },
None => rtabort!("message wasn't utf8?")
}
}
let msg = static_char_ptr(msg);
let file = static_char_ptr(file);
begin_unwind(msg, file, line as uint)
}
/// The entry point for unwinding with a formatted message.
///
/// This is designed to reduce the amount of code required at the call
/// site as much as possible (so that `fail!()` has as low an impact
/// on (e.g.) the inlining of other functions as possible), by moving
/// the actual formatting into this shared place.
#[inline(never)] #[cold]
pub fn begin_unwind_fmt(msg: &fmt::Arguments, file: &'static str, line: uint) -> ! {
// We do two allocations here, unfortunately. But (a) they're
// required with the current scheme, and (b) we don't handle
// failure + OOM properly anyway (see comment in begin_unwind
// below).
begin_unwind_inner(~fmt::format(msg), file, line)
}
/// This is the entry point of unwinding for fail!() and assert!().
#[inline(never)] #[cold] // avoid code bloat at the call sites as much as possible
pub fn begin_unwind<M: Any + Send>(msg: M, file: &'static str, line: uint) -> ! {
// Note that this should be the only allocation performed in this code path.
// Currently this means that fail!() on OOM will invoke this code path,
// but then again we're not really ready for failing on OOM anyway. If
// we do start doing this, then we should propagate this allocation to
// be performed in the parent of this task instead of the task that's
// failing.
// see below for why we do the `Any` coercion here.
begin_unwind_inner(~msg, file, line)
}
/// The core of the unwinding.
///
/// This is non-generic to avoid instantiation bloat in other crates
/// (which makes compilation of small crates noticably slower). (Note:
/// we need the `Any` object anyway, we're not just creating it to
/// avoid being generic.)
///
/// Do this split took the LLVM IR line counts of `fn main() { fail!()
/// }` from ~1900/3700 (-O/no opts) to 180/590.
#[inline(never)] #[cold] // this is the slow path, please never inline this
fn begin_unwind_inner(msg: ~Any:Send, file: &'static str, line: uint) -> ! {
let mut task;
{
let msg_s = match msg.as_ref::<&'static str>() {
Some(s) => *s,
None => match msg.as_ref::<~str>() {
Some(s) => s.as_slice(),
None => "~Any",
}
};
// It is assumed that all reasonable rust code will have a local task at
// all times. This means that this `try_take` will succeed almost all of
// the time. There are border cases, however, when the runtime has
// *almost* set up the local task, but hasn't quite gotten there yet. In
// order to get some better diagnostics, we print on failure and
// immediately abort the whole process if there is no local task
// available.
let opt_task: Option<~Task> = Local::try_take();
task = match opt_task {
Some(t) => t,
None => {
rterrln!("failed at '{}', {}:{}", msg_s, file, line);
if backtrace::log_enabled() {
let mut err = ::rt::util::Stderr;
let _err = backtrace::write(&mut err);
} else {
rterrln!("run with `RUST_BACKTRACE=1` to see a backtrace");
}
unsafe { intrinsics::abort() }
}
};
// See comments in io::stdio::with_task_stdout as to why we have to be
// careful when using an arbitrary I/O handle from the task. We
// essentially need to dance to make sure when a task is in TLS when
// running user code.
let name = task.name.take();
{
let n = name.as_ref().map(|n| n.as_slice()).unwrap_or("<unnamed>");
match task.stderr.take() {
Some(mut stderr) => {
Local::put(task);
// FIXME: what to do when the task printing fails?
let _err = format_args!(|args| ::fmt::writeln(stderr, args),
"task '{}' failed at '{}', {}:{}",
n, msg_s, file, line);
if backtrace::log_enabled() {
let _err = backtrace::write(stderr);
}
task = Local::take();
match mem::replace(&mut task.stderr, Some(stderr)) {
Some(prev) => {
Local::put(task);
drop(prev);
task = Local::take();
}
None => {}
}
}
None => {
rterrln!("task '{}' failed at '{}', {}:{}", n, msg_s,
file, line);
if backtrace::log_enabled() {
let mut err = ::rt::util::Stderr;
let _err = backtrace::write(&mut err);
}
}
}
}
task.name = name;
if task.unwinder.unwinding {
// If a task fails while it's already unwinding then we
// have limited options. Currently our preference is to
// just abort. In the future we may consider resuming
// unwinding or otherwise exiting the task cleanly.
rterrln!("task failed during unwinding (double-failure - total drag!)")
rterrln!("rust must abort now. so sorry.");
// Don't print the backtrace twice (it would have already been
// printed if logging was enabled).
if !backtrace::log_enabled() {
let mut err = ::rt::util::Stderr;
let _err = backtrace::write(&mut err);
}
unsafe { intrinsics::abort() }
}
}
// The unwinder won't actually use the task at all, so we put the task back
// into TLS right before we invoke the unwinder, but this means we need an
// unsafe reference back to the unwinder once it's in TLS.
Local::put(task);
unsafe {
let task: *mut Task = Local::unsafe_borrow();
(*task).unwinder.begin_unwind(msg);
}
} | use libc::{c_void}; | random_line_split |
unwind.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Implementation of Rust stack unwinding
//
// For background on exception handling and stack unwinding please see
// "Exception Handling in LLVM" (llvm.org/docs/ExceptionHandling.html) and
// documents linked from it.
// These are also good reads:
// http://theofilos.cs.columbia.edu/blog/2013/09/22/base_abi/
// http://monoinfinito.wordpress.com/series/exception-handling-in-c/
// http://www.airs.com/blog/index.php?s=exception+frames
//
// ~~~ A brief summary ~~~
// Exception handling happens in two phases: a search phase and a cleanup phase.
//
// In both phases the unwinder walks stack frames from top to bottom using
// information from the stack frame unwind sections of the current process's
// modules ("module" here refers to an OS module, i.e. an executable or a
// dynamic library).
//
// For each stack frame, it invokes the associated "personality routine", whose
// address is also stored in the unwind info section.
//
// In the search phase, the job of a personality routine is to examine exception
// object being thrown, and to decide whether it should be caught at that stack
// frame. Once the handler frame has been identified, cleanup phase begins.
//
// In the cleanup phase, personality routines invoke cleanup code associated
// with their stack frames (i.e. destructors). Once stack has been unwound down
// to the handler frame level, unwinding stops and the last personality routine
// transfers control to its' catch block.
//
// ~~~ Frame unwind info registration ~~~
// Each module has its' own frame unwind info section (usually ".eh_frame"), and
// unwinder needs to know about all of them in order for unwinding to be able to
// cross module boundaries.
//
// On some platforms, like Linux, this is achieved by dynamically enumerating
// currently loaded modules via the dl_iterate_phdr() API and finding all
// .eh_frame sections.
//
// Others, like Windows, require modules to actively register their unwind info
// sections by calling __register_frame_info() API at startup. In the latter
// case it is essential that there is only one copy of the unwinder runtime in
// the process. This is usually achieved by linking to the dynamic version of
// the unwind runtime.
//
// Currently Rust uses unwind runtime provided by libgcc.
use any::{Any, AnyRefExt};
use c_str::CString;
use cast;
use fmt;
use kinds::Send;
use mem;
use option::{Some, None, Option};
use prelude::drop;
use ptr::RawPtr;
use result::{Err, Ok};
use rt::backtrace;
use rt::local::Local;
use rt::task::Task;
use str::Str;
use task::TaskResult;
use intrinsics;
use uw = rt::libunwind;
pub struct Unwinder {
priv unwinding: bool,
priv cause: Option<~Any:Send>
}
impl Unwinder {
pub fn new() -> Unwinder {
Unwinder {
unwinding: false,
cause: None,
}
}
pub fn unwinding(&self) -> bool {
self.unwinding
}
pub fn try(&mut self, f: ||) {
use raw::Closure;
use libc::{c_void};
unsafe {
let closure: Closure = cast::transmute(f);
let ep = rust_try(try_fn, closure.code as *c_void,
closure.env as *c_void);
if !ep.is_null() {
rtdebug!("caught {}", (*ep).exception_class);
uw::_Unwind_DeleteException(ep);
}
}
extern fn | (code: *c_void, env: *c_void) {
unsafe {
let closure: || = cast::transmute(Closure {
code: code as *(),
env: env as *(),
});
closure();
}
}
extern {
// Rust's try-catch
// When f(...) returns normally, the return value is null.
// When f(...) throws, the return value is a pointer to the caught
// exception object.
fn rust_try(f: extern "C" fn(*c_void, *c_void),
code: *c_void,
data: *c_void) -> *uw::_Unwind_Exception;
}
}
pub fn begin_unwind(&mut self, cause: ~Any:Send) -> ! {
rtdebug!("begin_unwind()");
self.unwinding = true;
self.cause = Some(cause);
rust_fail();
// An uninlined, unmangled function upon which to slap yer breakpoints
#[inline(never)]
#[no_mangle]
fn rust_fail() -> ! {
unsafe {
let exception = ~uw::_Unwind_Exception {
exception_class: rust_exception_class(),
exception_cleanup: exception_cleanup,
private: [0, ..uw::unwinder_private_data_size],
};
let error = uw::_Unwind_RaiseException(cast::transmute(exception));
rtabort!("Could not unwind stack, error = {}", error as int)
}
extern "C" fn exception_cleanup(_unwind_code: uw::_Unwind_Reason_Code,
exception: *uw::_Unwind_Exception) {
rtdebug!("exception_cleanup()");
unsafe {
let _: ~uw::_Unwind_Exception = cast::transmute(exception);
}
}
}
}
pub fn result(&mut self) -> TaskResult {
if self.unwinding {
Err(self.cause.take().unwrap())
} else {
Ok(())
}
}
}
// Rust's exception class identifier. This is used by personality routines to
// determine whether the exception was thrown by their own runtime.
fn rust_exception_class() -> uw::_Unwind_Exception_Class {
// M O Z \0 R U S T -- vendor, language
0x4d4f5a_00_52555354
}
// We could implement our personality routine in pure Rust, however exception
// info decoding is tedious. More importantly, personality routines have to
// handle various platform quirks, which are not fun to maintain. For this
// reason, we attempt to reuse personality routine of the C language:
// __gcc_personality_v0.
//
// Since C does not support exception catching, __gcc_personality_v0 simply
// always returns _URC_CONTINUE_UNWIND in search phase, and always returns
// _URC_INSTALL_CONTEXT (i.e. "invoke cleanup code") in cleanup phase.
//
// This is pretty close to Rust's exception handling approach, except that Rust
// does have a single "catch-all" handler at the bottom of each task's stack.
// So we have two versions:
// - rust_eh_personality, used by all cleanup landing pads, which never catches,
// so the behavior of __gcc_personality_v0 is perfectly adequate there, and
// - rust_eh_personality_catch, used only by rust_try(), which always catches.
// This is achieved by overriding the return value in search phase to always
// say "catch!".
#[cfg(not(target_arch = "arm"), not(test))]
#[doc(hidden)]
#[allow(visible_private_types)]
pub mod eabi {
use uw = rt::libunwind;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // so we can reference it by name from middle/trans/base.rs
pub extern "C" fn rust_eh_personality(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(version, actions, exception_class, ue_header,
context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
unsafe {
__gcc_personality_v0(version, actions, exception_class, ue_header,
context)
}
}
}
}
// ARM EHABI uses a slightly different personality routine signature,
// but otherwise works the same.
#[cfg(target_arch = "arm", not(test))]
#[allow(visible_private_types)]
pub mod eabi {
use uw = rt::libunwind;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(state: uw::_Unwind_State,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // so we can reference it by name from middle/trans/base.rs
pub extern "C" fn rust_eh_personality(
state: uw::_Unwind_State,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(state, ue_header, context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
state: uw::_Unwind_State,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (state as c_int & uw::_US_ACTION_MASK as c_int)
== uw::_US_VIRTUAL_UNWIND_FRAME as c_int { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
unsafe {
__gcc_personality_v0(state, ue_header, context)
}
}
}
}
#[cold]
#[lang="fail_"]
#[cfg(not(test))]
pub fn fail_(expr: *u8, file: *u8, line: uint) -> ! {
begin_unwind_raw(expr, file, line);
}
#[cold]
#[lang="fail_bounds_check"]
#[cfg(not(test))]
pub fn fail_bounds_check(file: *u8, line: uint, index: uint, len: uint) -> ! {
use c_str::ToCStr;
let msg = format!("index out of bounds: the len is {} but the index is {}",
len as uint, index as uint);
msg.with_c_str(|buf| fail_(buf as *u8, file, line))
}
/// This is the entry point of unwinding for things like lang items and such.
/// The arguments are normally generated by the compiler, and need to
/// have static lifetimes.
#[inline(never)] #[cold] // this is the slow path, please never inline this
pub fn begin_unwind_raw(msg: *u8, file: *u8, line: uint) -> ! {
use libc::c_char;
#[inline]
fn static_char_ptr(p: *u8) -> &'static str {
let s = unsafe { CString::new(p as *c_char, false) };
match s.as_str() {
Some(s) => unsafe { cast::transmute::<&str, &'static str>(s) },
None => rtabort!("message wasn't utf8?")
}
}
let msg = static_char_ptr(msg);
let file = static_char_ptr(file);
begin_unwind(msg, file, line as uint)
}
/// The entry point for unwinding with a formatted message.
///
/// This is designed to reduce the amount of code required at the call
/// site as much as possible (so that `fail!()` has as low an impact
/// on (e.g.) the inlining of other functions as possible), by moving
/// the actual formatting into this shared place.
#[inline(never)] #[cold]
pub fn begin_unwind_fmt(msg: &fmt::Arguments, file: &'static str, line: uint) -> ! {
// We do two allocations here, unfortunately. But (a) they're
// required with the current scheme, and (b) we don't handle
// failure + OOM properly anyway (see comment in begin_unwind
// below).
begin_unwind_inner(~fmt::format(msg), file, line)
}
/// This is the entry point of unwinding for fail!() and assert!().
#[inline(never)] #[cold] // avoid code bloat at the call sites as much as possible
pub fn begin_unwind<M: Any + Send>(msg: M, file: &'static str, line: uint) -> ! {
// Note that this should be the only allocation performed in this code path.
// Currently this means that fail!() on OOM will invoke this code path,
// but then again we're not really ready for failing on OOM anyway. If
// we do start doing this, then we should propagate this allocation to
// be performed in the parent of this task instead of the task that's
// failing.
// see below for why we do the `Any` coercion here.
begin_unwind_inner(~msg, file, line)
}
/// The core of the unwinding.
///
/// This is non-generic to avoid instantiation bloat in other crates
/// (which makes compilation of small crates noticably slower). (Note:
/// we need the `Any` object anyway, we're not just creating it to
/// avoid being generic.)
///
/// Do this split took the LLVM IR line counts of `fn main() { fail!()
/// }` from ~1900/3700 (-O/no opts) to 180/590.
#[inline(never)] #[cold] // this is the slow path, please never inline this
fn begin_unwind_inner(msg: ~Any:Send, file: &'static str, line: uint) -> ! {
let mut task;
{
let msg_s = match msg.as_ref::<&'static str>() {
Some(s) => *s,
None => match msg.as_ref::<~str>() {
Some(s) => s.as_slice(),
None => "~Any",
}
};
// It is assumed that all reasonable rust code will have a local task at
// all times. This means that this `try_take` will succeed almost all of
// the time. There are border cases, however, when the runtime has
// *almost* set up the local task, but hasn't quite gotten there yet. In
// order to get some better diagnostics, we print on failure and
// immediately abort the whole process if there is no local task
// available.
let opt_task: Option<~Task> = Local::try_take();
task = match opt_task {
Some(t) => t,
None => {
rterrln!("failed at '{}', {}:{}", msg_s, file, line);
if backtrace::log_enabled() {
let mut err = ::rt::util::Stderr;
let _err = backtrace::write(&mut err);
} else {
rterrln!("run with `RUST_BACKTRACE=1` to see a backtrace");
}
unsafe { intrinsics::abort() }
}
};
// See comments in io::stdio::with_task_stdout as to why we have to be
// careful when using an arbitrary I/O handle from the task. We
// essentially need to dance to make sure when a task is in TLS when
// running user code.
let name = task.name.take();
{
let n = name.as_ref().map(|n| n.as_slice()).unwrap_or("<unnamed>");
match task.stderr.take() {
Some(mut stderr) => {
Local::put(task);
// FIXME: what to do when the task printing fails?
let _err = format_args!(|args| ::fmt::writeln(stderr, args),
"task '{}' failed at '{}', {}:{}",
n, msg_s, file, line);
if backtrace::log_enabled() {
let _err = backtrace::write(stderr);
}
task = Local::take();
match mem::replace(&mut task.stderr, Some(stderr)) {
Some(prev) => {
Local::put(task);
drop(prev);
task = Local::take();
}
None => {}
}
}
None => {
rterrln!("task '{}' failed at '{}', {}:{}", n, msg_s,
file, line);
if backtrace::log_enabled() {
let mut err = ::rt::util::Stderr;
let _err = backtrace::write(&mut err);
}
}
}
}
task.name = name;
if task.unwinder.unwinding {
// If a task fails while it's already unwinding then we
// have limited options. Currently our preference is to
// just abort. In the future we may consider resuming
// unwinding or otherwise exiting the task cleanly.
rterrln!("task failed during unwinding (double-failure - total drag!)")
rterrln!("rust must abort now. so sorry.");
// Don't print the backtrace twice (it would have already been
// printed if logging was enabled).
if !backtrace::log_enabled() {
let mut err = ::rt::util::Stderr;
let _err = backtrace::write(&mut err);
}
unsafe { intrinsics::abort() }
}
}
// The unwinder won't actually use the task at all, so we put the task back
// into TLS right before we invoke the unwinder, but this means we need an
// unsafe reference back to the unwinder once it's in TLS.
Local::put(task);
unsafe {
let task: *mut Task = Local::unsafe_borrow();
(*task).unwinder.begin_unwind(msg);
}
}
| try_fn | identifier_name |
unwind.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Implementation of Rust stack unwinding
//
// For background on exception handling and stack unwinding please see
// "Exception Handling in LLVM" (llvm.org/docs/ExceptionHandling.html) and
// documents linked from it.
// These are also good reads:
// http://theofilos.cs.columbia.edu/blog/2013/09/22/base_abi/
// http://monoinfinito.wordpress.com/series/exception-handling-in-c/
// http://www.airs.com/blog/index.php?s=exception+frames
//
// ~~~ A brief summary ~~~
// Exception handling happens in two phases: a search phase and a cleanup phase.
//
// In both phases the unwinder walks stack frames from top to bottom using
// information from the stack frame unwind sections of the current process's
// modules ("module" here refers to an OS module, i.e. an executable or a
// dynamic library).
//
// For each stack frame, it invokes the associated "personality routine", whose
// address is also stored in the unwind info section.
//
// In the search phase, the job of a personality routine is to examine exception
// object being thrown, and to decide whether it should be caught at that stack
// frame. Once the handler frame has been identified, cleanup phase begins.
//
// In the cleanup phase, personality routines invoke cleanup code associated
// with their stack frames (i.e. destructors). Once stack has been unwound down
// to the handler frame level, unwinding stops and the last personality routine
// transfers control to its' catch block.
//
// ~~~ Frame unwind info registration ~~~
// Each module has its' own frame unwind info section (usually ".eh_frame"), and
// unwinder needs to know about all of them in order for unwinding to be able to
// cross module boundaries.
//
// On some platforms, like Linux, this is achieved by dynamically enumerating
// currently loaded modules via the dl_iterate_phdr() API and finding all
// .eh_frame sections.
//
// Others, like Windows, require modules to actively register their unwind info
// sections by calling __register_frame_info() API at startup. In the latter
// case it is essential that there is only one copy of the unwinder runtime in
// the process. This is usually achieved by linking to the dynamic version of
// the unwind runtime.
//
// Currently Rust uses unwind runtime provided by libgcc.
use any::{Any, AnyRefExt};
use c_str::CString;
use cast;
use fmt;
use kinds::Send;
use mem;
use option::{Some, None, Option};
use prelude::drop;
use ptr::RawPtr;
use result::{Err, Ok};
use rt::backtrace;
use rt::local::Local;
use rt::task::Task;
use str::Str;
use task::TaskResult;
use intrinsics;
use uw = rt::libunwind;
pub struct Unwinder {
priv unwinding: bool,
priv cause: Option<~Any:Send>
}
impl Unwinder {
pub fn new() -> Unwinder {
Unwinder {
unwinding: false,
cause: None,
}
}
pub fn unwinding(&self) -> bool {
self.unwinding
}
pub fn try(&mut self, f: ||) {
use raw::Closure;
use libc::{c_void};
unsafe {
let closure: Closure = cast::transmute(f);
let ep = rust_try(try_fn, closure.code as *c_void,
closure.env as *c_void);
if !ep.is_null() {
rtdebug!("caught {}", (*ep).exception_class);
uw::_Unwind_DeleteException(ep);
}
}
extern fn try_fn(code: *c_void, env: *c_void) {
unsafe {
let closure: || = cast::transmute(Closure {
code: code as *(),
env: env as *(),
});
closure();
}
}
extern {
// Rust's try-catch
// When f(...) returns normally, the return value is null.
// When f(...) throws, the return value is a pointer to the caught
// exception object.
fn rust_try(f: extern "C" fn(*c_void, *c_void),
code: *c_void,
data: *c_void) -> *uw::_Unwind_Exception;
}
}
pub fn begin_unwind(&mut self, cause: ~Any:Send) -> ! |
pub fn result(&mut self) -> TaskResult {
if self.unwinding {
Err(self.cause.take().unwrap())
} else {
Ok(())
}
}
}
// Rust's exception class identifier. This is used by personality routines to
// determine whether the exception was thrown by their own runtime.
fn rust_exception_class() -> uw::_Unwind_Exception_Class {
// M O Z \0 R U S T -- vendor, language
0x4d4f5a_00_52555354
}
// We could implement our personality routine in pure Rust, however exception
// info decoding is tedious. More importantly, personality routines have to
// handle various platform quirks, which are not fun to maintain. For this
// reason, we attempt to reuse personality routine of the C language:
// __gcc_personality_v0.
//
// Since C does not support exception catching, __gcc_personality_v0 simply
// always returns _URC_CONTINUE_UNWIND in search phase, and always returns
// _URC_INSTALL_CONTEXT (i.e. "invoke cleanup code") in cleanup phase.
//
// This is pretty close to Rust's exception handling approach, except that Rust
// does have a single "catch-all" handler at the bottom of each task's stack.
// So we have two versions:
// - rust_eh_personality, used by all cleanup landing pads, which never catches,
// so the behavior of __gcc_personality_v0 is perfectly adequate there, and
// - rust_eh_personality_catch, used only by rust_try(), which always catches.
// This is achieved by overriding the return value in search phase to always
// say "catch!".
#[cfg(not(target_arch = "arm"), not(test))]
#[doc(hidden)]
#[allow(visible_private_types)]
pub mod eabi {
use uw = rt::libunwind;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // so we can reference it by name from middle/trans/base.rs
pub extern "C" fn rust_eh_personality(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(version, actions, exception_class, ue_header,
context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
unsafe {
__gcc_personality_v0(version, actions, exception_class, ue_header,
context)
}
}
}
}
// ARM EHABI uses a slightly different personality routine signature,
// but otherwise works the same.
#[cfg(target_arch = "arm", not(test))]
#[allow(visible_private_types)]
pub mod eabi {
use uw = rt::libunwind;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(state: uw::_Unwind_State,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // so we can reference it by name from middle/trans/base.rs
pub extern "C" fn rust_eh_personality(
state: uw::_Unwind_State,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(state, ue_header, context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
state: uw::_Unwind_State,
ue_header: *uw::_Unwind_Exception,
context: *uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (state as c_int & uw::_US_ACTION_MASK as c_int)
== uw::_US_VIRTUAL_UNWIND_FRAME as c_int { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
unsafe {
__gcc_personality_v0(state, ue_header, context)
}
}
}
}
#[cold]
#[lang="fail_"]
#[cfg(not(test))]
pub fn fail_(expr: *u8, file: *u8, line: uint) -> ! {
begin_unwind_raw(expr, file, line);
}
#[cold]
#[lang="fail_bounds_check"]
#[cfg(not(test))]
pub fn fail_bounds_check(file: *u8, line: uint, index: uint, len: uint) -> ! {
use c_str::ToCStr;
let msg = format!("index out of bounds: the len is {} but the index is {}",
len as uint, index as uint);
msg.with_c_str(|buf| fail_(buf as *u8, file, line))
}
/// This is the entry point of unwinding for things like lang items and such.
/// The arguments are normally generated by the compiler, and need to
/// have static lifetimes.
#[inline(never)] #[cold] // this is the slow path, please never inline this
pub fn begin_unwind_raw(msg: *u8, file: *u8, line: uint) -> ! {
use libc::c_char;
#[inline]
fn static_char_ptr(p: *u8) -> &'static str {
let s = unsafe { CString::new(p as *c_char, false) };
match s.as_str() {
Some(s) => unsafe { cast::transmute::<&str, &'static str>(s) },
None => rtabort!("message wasn't utf8?")
}
}
let msg = static_char_ptr(msg);
let file = static_char_ptr(file);
begin_unwind(msg, file, line as uint)
}
/// The entry point for unwinding with a formatted message.
///
/// This is designed to reduce the amount of code required at the call
/// site as much as possible (so that `fail!()` has as low an impact
/// on (e.g.) the inlining of other functions as possible), by moving
/// the actual formatting into this shared place.
#[inline(never)] #[cold]
pub fn begin_unwind_fmt(msg: &fmt::Arguments, file: &'static str, line: uint) -> ! {
// We do two allocations here, unfortunately. But (a) they're
// required with the current scheme, and (b) we don't handle
// failure + OOM properly anyway (see comment in begin_unwind
// below).
begin_unwind_inner(~fmt::format(msg), file, line)
}
/// This is the entry point of unwinding for fail!() and assert!().
#[inline(never)] #[cold] // avoid code bloat at the call sites as much as possible
pub fn begin_unwind<M: Any + Send>(msg: M, file: &'static str, line: uint) -> ! {
// Note that this should be the only allocation performed in this code path.
// Currently this means that fail!() on OOM will invoke this code path,
// but then again we're not really ready for failing on OOM anyway. If
// we do start doing this, then we should propagate this allocation to
// be performed in the parent of this task instead of the task that's
// failing.
// see below for why we do the `Any` coercion here.
begin_unwind_inner(~msg, file, line)
}
/// The core of the unwinding.
///
/// This is non-generic to avoid instantiation bloat in other crates
/// (which makes compilation of small crates noticably slower). (Note:
/// we need the `Any` object anyway, we're not just creating it to
/// avoid being generic.)
///
/// Do this split took the LLVM IR line counts of `fn main() { fail!()
/// }` from ~1900/3700 (-O/no opts) to 180/590.
#[inline(never)] #[cold] // this is the slow path, please never inline this
fn begin_unwind_inner(msg: ~Any:Send, file: &'static str, line: uint) -> ! {
let mut task;
{
let msg_s = match msg.as_ref::<&'static str>() {
Some(s) => *s,
None => match msg.as_ref::<~str>() {
Some(s) => s.as_slice(),
None => "~Any",
}
};
// It is assumed that all reasonable rust code will have a local task at
// all times. This means that this `try_take` will succeed almost all of
// the time. There are border cases, however, when the runtime has
// *almost* set up the local task, but hasn't quite gotten there yet. In
// order to get some better diagnostics, we print on failure and
// immediately abort the whole process if there is no local task
// available.
let opt_task: Option<~Task> = Local::try_take();
task = match opt_task {
Some(t) => t,
None => {
rterrln!("failed at '{}', {}:{}", msg_s, file, line);
if backtrace::log_enabled() {
let mut err = ::rt::util::Stderr;
let _err = backtrace::write(&mut err);
} else {
rterrln!("run with `RUST_BACKTRACE=1` to see a backtrace");
}
unsafe { intrinsics::abort() }
}
};
// See comments in io::stdio::with_task_stdout as to why we have to be
// careful when using an arbitrary I/O handle from the task. We
// essentially need to dance to make sure when a task is in TLS when
// running user code.
let name = task.name.take();
{
let n = name.as_ref().map(|n| n.as_slice()).unwrap_or("<unnamed>");
match task.stderr.take() {
Some(mut stderr) => {
Local::put(task);
// FIXME: what to do when the task printing fails?
let _err = format_args!(|args| ::fmt::writeln(stderr, args),
"task '{}' failed at '{}', {}:{}",
n, msg_s, file, line);
if backtrace::log_enabled() {
let _err = backtrace::write(stderr);
}
task = Local::take();
match mem::replace(&mut task.stderr, Some(stderr)) {
Some(prev) => {
Local::put(task);
drop(prev);
task = Local::take();
}
None => {}
}
}
None => {
rterrln!("task '{}' failed at '{}', {}:{}", n, msg_s,
file, line);
if backtrace::log_enabled() {
let mut err = ::rt::util::Stderr;
let _err = backtrace::write(&mut err);
}
}
}
}
task.name = name;
if task.unwinder.unwinding {
// If a task fails while it's already unwinding then we
// have limited options. Currently our preference is to
// just abort. In the future we may consider resuming
// unwinding or otherwise exiting the task cleanly.
rterrln!("task failed during unwinding (double-failure - total drag!)")
rterrln!("rust must abort now. so sorry.");
// Don't print the backtrace twice (it would have already been
// printed if logging was enabled).
if !backtrace::log_enabled() {
let mut err = ::rt::util::Stderr;
let _err = backtrace::write(&mut err);
}
unsafe { intrinsics::abort() }
}
}
// The unwinder won't actually use the task at all, so we put the task back
// into TLS right before we invoke the unwinder, but this means we need an
// unsafe reference back to the unwinder once it's in TLS.
Local::put(task);
unsafe {
let task: *mut Task = Local::unsafe_borrow();
(*task).unwinder.begin_unwind(msg);
}
}
| {
rtdebug!("begin_unwind()");
self.unwinding = true;
self.cause = Some(cause);
rust_fail();
// An uninlined, unmangled function upon which to slap yer breakpoints
#[inline(never)]
#[no_mangle]
fn rust_fail() -> ! {
unsafe {
let exception = ~uw::_Unwind_Exception {
exception_class: rust_exception_class(),
exception_cleanup: exception_cleanup,
private: [0, ..uw::unwinder_private_data_size],
};
let error = uw::_Unwind_RaiseException(cast::transmute(exception));
rtabort!("Could not unwind stack, error = {}", error as int)
}
extern "C" fn exception_cleanup(_unwind_code: uw::_Unwind_Reason_Code,
exception: *uw::_Unwind_Exception) {
rtdebug!("exception_cleanup()");
unsafe {
let _: ~uw::_Unwind_Exception = cast::transmute(exception);
}
}
}
} | identifier_body |
html_parser.py | '''A convenient class for parsing HTML pages.'''
from __future__ import unicode_literals
from HTMLParser import HTMLParser
import logging
import re
from RSSvk.core import Error
# Module-level logger; capped at INFO so per-tag debug output stays silent
# unless a caller explicitly lowers the level.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)
class HTMLPageParser(HTMLParser):
'''A convenient class for parsing HTML pages.'''
tag_name_regex = '[a-zA-Z][-.a-zA-Z0-9:_]*'
'''A regular expression for tag name.'''
attribute_name_regex = tag_name_regex
'''A regular expression for attribute name.'''
tag_attrs_regex = re.sub(r'\s*', '', r'''
(?:\s+
''' + attribute_name_regex + r'''
(?:\s*=\s*
(?:
'[^']*'
|"[^"]*"
|[^'"/>\s]+
)
)?
)*
''')
'''A regular expression for tag attributes.'''
script_regex = re.compile('<script' + tag_attrs_regex + '>.*?</script>', re.DOTALL | re.IGNORECASE)
'''A regular expression for matching scripts.'''
__invalid_tag_attr_spacing_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
# Two attributes without a space between them
\s+ # whitespace before attribute name
''' + attribute_name_regex + r''' # attribute name
\s*=\s* # value indicator | (?:
'[^']*' # LITA-enclosed value
|"[^"]*" # LIT-enclosed value
)
)
([^\s>]) # Do not include / to make the preparation replacement for __invalid_tag_attr_regex
''', re.VERBOSE)
'''
A regular expression for matching a common error in specifying tag
attributes.
'''
__invalid_tag_attr_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
)
\s+(?:
# Invalid characters instead of an attribute
[^\sa-zA-Z/>]\S*
|
# Sole slash
/\s
|
# Invalid characters starting from slash instead of an attribute
/[^>\s]+
)
''', re.VERBOSE)
'''
A regular expression for matching HTML errors like:
<a class="app photo"/app2322149_58238998?from_id=2381857&loc=addneighbour onclick="return cur.needLoginBox()">
'''
__empty_tags = 'area|base|basefont|br|col|frame|hr|img|input|link|meta|param'
'''A list of all HTML empty tags.'''
__misopened_tag_regex = re.compile(r'<(' + __empty_tags + tag_attrs_regex + r')\s*>', re.IGNORECASE)
'''A regular expression for matching opened tags that should be closed.'''
__tag_stack = None
'''A stack of currently opened HTML tags.'''
__cur_data = None
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
def __init__(self):
HTMLParser.__init__(self)
def handle_charref(self, name):
'''Handles a character reference of the form &#ref;.'''
self.__accumulate_data('&#' + name + ';')
def handle_data(self, data):
'''Handles data.'''
self.__accumulate_data(data)
def handle_endtag(self, tag_name):
'''Handles end of a tag.'''
self.__handle_data_if_exists()
if self.__get_cur_tag()['name'] == tag_name:
self.__close_tag(self.__tag_stack.pop())
else:
for tag_id in xrange(len(self.__tag_stack) - 1, -1, -1):
if self.__tag_stack[tag_id]['name'] == tag_name:
for tag in reversed(self.__tag_stack[tag_id + 1:]):
self.__close_tag(tag, forced = True)
self.__tag_stack.pop()
self.__close_tag(self.__tag_stack.pop())
break
else:
LOG.debug('Dropping excess end tag "%s"...', tag_name)
def handle_entityref(self, name):
'''Handles a general entity reference of the form &name;.'''
self.__accumulate_data('&' + name + ';')
def handle_root_data(self, tag, data):
'''Handles data inside of the root of the document.'''
LOG.debug('%s', data)
def handle_root(self, tag, attrs, empty):
'''Handles a tag inside of the root of the document.'''
LOG.debug('<%s %s%s>', tag['name'], attrs, '/' if empty else '')
tag['new_tag_handler'] = self.handle_root
tag['data_handler'] = self.handle_root_data
tag['end_tag_handler'] = self.handle_root_end
def handle_root_end(self, tag):
'''Handles end of the root of the document.'''
LOG.debug('</%s>', tag['name'])
def handle_startendtag(self, tag, attrs):
'''Handles start of an XHTML-style empty tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, True)
def handle_starttag(self, tag, attrs):
'''Handles start of a tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, False)
def reset(self):
'''Resets the parser.'''
HTMLParser.reset(self)
self.__tag_stack = [{
# Add fake root tag
'name': None,
'new_tag_handler': self.handle_root,
'data_handler': self.handle_root_data,
'end_tag_handler': self.handle_root_end,
}]
def parse(self, html):
'''Parses the specified HTML page.'''
html = self.__fix_html(html)
self.reset()
try:
# Run the parser
self.feed(html)
self.close()
finally:
# Close all unclosed tags
for tag in self.__tag_stack[1:]:
self.__close_tag(tag, True)
def __accumulate_data(self, data):
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
if self.__cur_data is None:
self.__cur_data = data
else:
self.__cur_data += data
def __close_tag(self, tag, forced = False):
'''Forces closing of an unclosed tag.'''
if forced:
LOG.debug('Force closing of unclosed tag "%s".', tag['name'])
else:
LOG.debug('Tag %s closed.', tag)
if 'end_tag_handler' in tag:
tag['end_tag_handler'](tag)
LOG.debug('Current tag: %s.', self.__get_cur_tag())
def __fix_html(self, html):
'''Fixes various things that may confuse the Python's HTML parser.'''
html = self.script_regex.sub('', html)
loop_replacements = (
lambda html: self.__invalid_tag_attr_spacing_regex.subn(r'\1 \2', html),
lambda html: self.__invalid_tag_attr_regex.subn(r'\1 ', html),
)
for loop_replacement in loop_replacements:
for i in xrange(0, 1000):
html, changed = loop_replacement(html)
if not changed:
break
else:
raise Error('Too many errors in the HTML or infinite loop.')
html = self.__misopened_tag_regex.sub(r'<\1 />', html)
return html
def __get_cur_tag(self):
'''Returns currently opened tag.'''
return self.__tag_stack[-1]
def __handle_data_if_exists(self):
'''Handles accumulated data (if exists).'''
data = self.__cur_data
if data is None:
return
self.__cur_data = None
tag = self.__get_cur_tag()
handler = tag.get('data_handler')
if handler is not None:
LOG.debug('Data "%s" in "%s" with handler %s.',
data, tag['name'], handler.func_name)
handler(tag, data)
def __handle_start_tag(self, tag_name, attrs, empty):
'''Handles start of any tag.'''
tag = { 'name': tag_name }
handler = self.__get_cur_tag().get('new_tag_handler')
if handler is not None:
attrs = self.__parse_attrs(attrs)
LOG.debug('Start tag: %s %s with handler %s.',
tag, attrs, handler.func_name)
handler(tag, attrs, empty)
if not empty:
self.__tag_stack.append(tag)
def __parse_attrs(self, attrs_tuple):
'''Converts tag attributes from a tuple to a dictionary.'''
attrs = {}
for attr, value in attrs_tuple:
attrs[attr.lower()] = value
return attrs | random_line_split |
|
html_parser.py | '''A convenient class for parsing HTML pages.'''
from __future__ import unicode_literals
from HTMLParser import HTMLParser
import logging
import re
from RSSvk.core import Error
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)
class HTMLPageParser(HTMLParser):
'''A convenient class for parsing HTML pages.'''
tag_name_regex = '[a-zA-Z][-.a-zA-Z0-9:_]*'
'''A regular expression for tag name.'''
attribute_name_regex = tag_name_regex
'''A regular expression for attribute name.'''
tag_attrs_regex = re.sub(r'\s*', '', r'''
(?:\s+
''' + attribute_name_regex + r'''
(?:\s*=\s*
(?:
'[^']*'
|"[^"]*"
|[^'"/>\s]+
)
)?
)*
''')
'''A regular expression for tag attributes.'''
script_regex = re.compile('<script' + tag_attrs_regex + '>.*?</script>', re.DOTALL | re.IGNORECASE)
'''A regular expression for matching scripts.'''
__invalid_tag_attr_spacing_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
# Two attributes without a space between them
\s+ # whitespace before attribute name
''' + attribute_name_regex + r''' # attribute name
\s*=\s* # value indicator
(?:
'[^']*' # LITA-enclosed value
|"[^"]*" # LIT-enclosed value
)
)
([^\s>]) # Do not include / to make the preparation replacement for __invalid_tag_attr_regex
''', re.VERBOSE)
'''
A regular expression for matching a common error in specifying tag
attributes.
'''
__invalid_tag_attr_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
)
\s+(?:
# Invalid characters instead of an attribute
[^\sa-zA-Z/>]\S*
|
# Sole slash
/\s
|
# Invalid characters starting from slash instead of an attribute
/[^>\s]+
)
''', re.VERBOSE)
'''
A regular expression for matching HTML errors like:
<a class="app photo"/app2322149_58238998?from_id=2381857&loc=addneighbour onclick="return cur.needLoginBox()">
'''
__empty_tags = 'area|base|basefont|br|col|frame|hr|img|input|link|meta|param'
'''A list of all HTML empty tags.'''
__misopened_tag_regex = re.compile(r'<(' + __empty_tags + tag_attrs_regex + r')\s*>', re.IGNORECASE)
'''A regular expression for matching opened tags that should be closed.'''
__tag_stack = None
'''A stack of currently opened HTML tags.'''
__cur_data = None
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
def __init__(self):
HTMLParser.__init__(self)
def handle_charref(self, name):
'''Handles a character reference of the form &#ref;.'''
self.__accumulate_data('&#' + name + ';')
def handle_data(self, data):
'''Handles data.'''
self.__accumulate_data(data)
def handle_endtag(self, tag_name):
'''Handles end of a tag.'''
self.__handle_data_if_exists()
if self.__get_cur_tag()['name'] == tag_name:
self.__close_tag(self.__tag_stack.pop())
else:
for tag_id in xrange(len(self.__tag_stack) - 1, -1, -1):
if self.__tag_stack[tag_id]['name'] == tag_name:
for tag in reversed(self.__tag_stack[tag_id + 1:]):
self.__close_tag(tag, forced = True)
self.__tag_stack.pop()
self.__close_tag(self.__tag_stack.pop())
break
else:
LOG.debug('Dropping excess end tag "%s"...', tag_name)
def handle_entityref(self, name):
'''Handles a general entity reference of the form &name;.'''
self.__accumulate_data('&' + name + ';')
def handle_root_data(self, tag, data):
'''Handles data inside of the root of the document.'''
LOG.debug('%s', data)
def handle_root(self, tag, attrs, empty):
'''Handles a tag inside of the root of the document.'''
LOG.debug('<%s %s%s>', tag['name'], attrs, '/' if empty else '')
tag['new_tag_handler'] = self.handle_root
tag['data_handler'] = self.handle_root_data
tag['end_tag_handler'] = self.handle_root_end
def handle_root_end(self, tag):
'''Handles end of the root of the document.'''
LOG.debug('</%s>', tag['name'])
def handle_startendtag(self, tag, attrs):
'''Handles start of an XHTML-style empty tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, True)
def handle_starttag(self, tag, attrs):
'''Handles start of a tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, False)
def reset(self):
'''Resets the parser.'''
HTMLParser.reset(self)
self.__tag_stack = [{
# Add fake root tag
'name': None,
'new_tag_handler': self.handle_root,
'data_handler': self.handle_root_data,
'end_tag_handler': self.handle_root_end,
}]
def parse(self, html):
'''Parses the specified HTML page.'''
html = self.__fix_html(html)
self.reset()
try:
# Run the parser
self.feed(html)
self.close()
finally:
# Close all unclosed tags
for tag in self.__tag_stack[1:]:
self.__close_tag(tag, True)
def __accumulate_data(self, data):
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
if self.__cur_data is None:
self.__cur_data = data
else:
self.__cur_data += data
def __close_tag(self, tag, forced = False):
'''Forces closing of an unclosed tag.'''
if forced:
LOG.debug('Force closing of unclosed tag "%s".', tag['name'])
else:
LOG.debug('Tag %s closed.', tag)
if 'end_tag_handler' in tag:
tag['end_tag_handler'](tag)
LOG.debug('Current tag: %s.', self.__get_cur_tag())
def | (self, html):
'''Fixes various things that may confuse the Python's HTML parser.'''
html = self.script_regex.sub('', html)
loop_replacements = (
lambda html: self.__invalid_tag_attr_spacing_regex.subn(r'\1 \2', html),
lambda html: self.__invalid_tag_attr_regex.subn(r'\1 ', html),
)
for loop_replacement in loop_replacements:
for i in xrange(0, 1000):
html, changed = loop_replacement(html)
if not changed:
break
else:
raise Error('Too many errors in the HTML or infinite loop.')
html = self.__misopened_tag_regex.sub(r'<\1 />', html)
return html
def __get_cur_tag(self):
'''Returns currently opened tag.'''
return self.__tag_stack[-1]
def __handle_data_if_exists(self):
'''Handles accumulated data (if exists).'''
data = self.__cur_data
if data is None:
return
self.__cur_data = None
tag = self.__get_cur_tag()
handler = tag.get('data_handler')
if handler is not None:
LOG.debug('Data "%s" in "%s" with handler %s.',
data, tag['name'], handler.func_name)
handler(tag, data)
def __handle_start_tag(self, tag_name, attrs, empty):
'''Handles start of any tag.'''
tag = { 'name': tag_name }
handler = self.__get_cur_tag().get('new_tag_handler')
if handler is not None:
attrs = self.__parse_attrs(attrs)
LOG.debug('Start tag: %s %s with handler %s.',
tag, attrs, handler.func_name)
handler(tag, attrs, empty)
if not empty:
self.__tag_stack.append(tag)
def __parse_attrs(self, attrs_tuple):
'''Converts tag attributes from a tuple to a dictionary.'''
attrs = {}
for attr, value in attrs_tuple:
attrs[attr.lower()] = value
return attrs
| __fix_html | identifier_name |
html_parser.py | '''A convenient class for parsing HTML pages.'''
from __future__ import unicode_literals
from HTMLParser import HTMLParser
import logging
import re
from RSSvk.core import Error
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)
class HTMLPageParser(HTMLParser):
'''A convenient class for parsing HTML pages.'''
tag_name_regex = '[a-zA-Z][-.a-zA-Z0-9:_]*'
'''A regular expression for tag name.'''
attribute_name_regex = tag_name_regex
'''A regular expression for attribute name.'''
tag_attrs_regex = re.sub(r'\s*', '', r'''
(?:\s+
''' + attribute_name_regex + r'''
(?:\s*=\s*
(?:
'[^']*'
|"[^"]*"
|[^'"/>\s]+
)
)?
)*
''')
'''A regular expression for tag attributes.'''
script_regex = re.compile('<script' + tag_attrs_regex + '>.*?</script>', re.DOTALL | re.IGNORECASE)
'''A regular expression for matching scripts.'''
__invalid_tag_attr_spacing_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
# Two attributes without a space between them
\s+ # whitespace before attribute name
''' + attribute_name_regex + r''' # attribute name
\s*=\s* # value indicator
(?:
'[^']*' # LITA-enclosed value
|"[^"]*" # LIT-enclosed value
)
)
([^\s>]) # Do not include / to make the preparation replacement for __invalid_tag_attr_regex
''', re.VERBOSE)
'''
A regular expression for matching a common error in specifying tag
attributes.
'''
__invalid_tag_attr_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
)
\s+(?:
# Invalid characters instead of an attribute
[^\sa-zA-Z/>]\S*
|
# Sole slash
/\s
|
# Invalid characters starting from slash instead of an attribute
/[^>\s]+
)
''', re.VERBOSE)
'''
A regular expression for matching HTML errors like:
<a class="app photo"/app2322149_58238998?from_id=2381857&loc=addneighbour onclick="return cur.needLoginBox()">
'''
__empty_tags = 'area|base|basefont|br|col|frame|hr|img|input|link|meta|param'
'''A list of all HTML empty tags.'''
__misopened_tag_regex = re.compile(r'<(' + __empty_tags + tag_attrs_regex + r')\s*>', re.IGNORECASE)
'''A regular expression for matching opened tags that should be closed.'''
__tag_stack = None
'''A stack of currently opened HTML tags.'''
__cur_data = None
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
def __init__(self):
HTMLParser.__init__(self)
def handle_charref(self, name):
'''Handles a character reference of the form &#ref;.'''
self.__accumulate_data('&#' + name + ';')
def handle_data(self, data):
'''Handles data.'''
self.__accumulate_data(data)
def handle_endtag(self, tag_name):
'''Handles end of a tag.'''
self.__handle_data_if_exists()
if self.__get_cur_tag()['name'] == tag_name:
self.__close_tag(self.__tag_stack.pop())
else:
|
def handle_entityref(self, name):
'''Handles a general entity reference of the form &name;.'''
self.__accumulate_data('&' + name + ';')
def handle_root_data(self, tag, data):
'''Handles data inside of the root of the document.'''
LOG.debug('%s', data)
def handle_root(self, tag, attrs, empty):
'''Handles a tag inside of the root of the document.'''
LOG.debug('<%s %s%s>', tag['name'], attrs, '/' if empty else '')
tag['new_tag_handler'] = self.handle_root
tag['data_handler'] = self.handle_root_data
tag['end_tag_handler'] = self.handle_root_end
def handle_root_end(self, tag):
'''Handles end of the root of the document.'''
LOG.debug('</%s>', tag['name'])
def handle_startendtag(self, tag, attrs):
'''Handles start of an XHTML-style empty tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, True)
def handle_starttag(self, tag, attrs):
'''Handles start of a tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, False)
def reset(self):
'''Resets the parser.'''
HTMLParser.reset(self)
self.__tag_stack = [{
# Add fake root tag
'name': None,
'new_tag_handler': self.handle_root,
'data_handler': self.handle_root_data,
'end_tag_handler': self.handle_root_end,
}]
def parse(self, html):
'''Parses the specified HTML page.'''
html = self.__fix_html(html)
self.reset()
try:
# Run the parser
self.feed(html)
self.close()
finally:
# Close all unclosed tags
for tag in self.__tag_stack[1:]:
self.__close_tag(tag, True)
def __accumulate_data(self, data):
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
if self.__cur_data is None:
self.__cur_data = data
else:
self.__cur_data += data
def __close_tag(self, tag, forced = False):
'''Forces closing of an unclosed tag.'''
if forced:
LOG.debug('Force closing of unclosed tag "%s".', tag['name'])
else:
LOG.debug('Tag %s closed.', tag)
if 'end_tag_handler' in tag:
tag['end_tag_handler'](tag)
LOG.debug('Current tag: %s.', self.__get_cur_tag())
def __fix_html(self, html):
'''Fixes various things that may confuse the Python's HTML parser.'''
html = self.script_regex.sub('', html)
loop_replacements = (
lambda html: self.__invalid_tag_attr_spacing_regex.subn(r'\1 \2', html),
lambda html: self.__invalid_tag_attr_regex.subn(r'\1 ', html),
)
for loop_replacement in loop_replacements:
for i in xrange(0, 1000):
html, changed = loop_replacement(html)
if not changed:
break
else:
raise Error('Too many errors in the HTML or infinite loop.')
html = self.__misopened_tag_regex.sub(r'<\1 />', html)
return html
def __get_cur_tag(self):
'''Returns currently opened tag.'''
return self.__tag_stack[-1]
def __handle_data_if_exists(self):
'''Handles accumulated data (if exists).'''
data = self.__cur_data
if data is None:
return
self.__cur_data = None
tag = self.__get_cur_tag()
handler = tag.get('data_handler')
if handler is not None:
LOG.debug('Data "%s" in "%s" with handler %s.',
data, tag['name'], handler.func_name)
handler(tag, data)
def __handle_start_tag(self, tag_name, attrs, empty):
'''Handles start of any tag.'''
tag = { 'name': tag_name }
handler = self.__get_cur_tag().get('new_tag_handler')
if handler is not None:
attrs = self.__parse_attrs(attrs)
LOG.debug('Start tag: %s %s with handler %s.',
tag, attrs, handler.func_name)
handler(tag, attrs, empty)
if not empty:
self.__tag_stack.append(tag)
def __parse_attrs(self, attrs_tuple):
'''Converts tag attributes from a tuple to a dictionary.'''
attrs = {}
for attr, value in attrs_tuple:
attrs[attr.lower()] = value
return attrs
| for tag_id in xrange(len(self.__tag_stack) - 1, -1, -1):
if self.__tag_stack[tag_id]['name'] == tag_name:
for tag in reversed(self.__tag_stack[tag_id + 1:]):
self.__close_tag(tag, forced = True)
self.__tag_stack.pop()
self.__close_tag(self.__tag_stack.pop())
break
else:
LOG.debug('Dropping excess end tag "%s"...', tag_name) | conditional_block |
html_parser.py | '''A convenient class for parsing HTML pages.'''
from __future__ import unicode_literals
from HTMLParser import HTMLParser
import logging
import re
from RSSvk.core import Error
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)
class HTMLPageParser(HTMLParser):
'''A convenient class for parsing HTML pages.'''
tag_name_regex = '[a-zA-Z][-.a-zA-Z0-9:_]*'
'''A regular expression for tag name.'''
attribute_name_regex = tag_name_regex
'''A regular expression for attribute name.'''
tag_attrs_regex = re.sub(r'\s*', '', r'''
(?:\s+
''' + attribute_name_regex + r'''
(?:\s*=\s*
(?:
'[^']*'
|"[^"]*"
|[^'"/>\s]+
)
)?
)*
''')
'''A regular expression for tag attributes.'''
script_regex = re.compile('<script' + tag_attrs_regex + '>.*?</script>', re.DOTALL | re.IGNORECASE)
'''A regular expression for matching scripts.'''
__invalid_tag_attr_spacing_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
# Two attributes without a space between them
\s+ # whitespace before attribute name
''' + attribute_name_regex + r''' # attribute name
\s*=\s* # value indicator
(?:
'[^']*' # LITA-enclosed value
|"[^"]*" # LIT-enclosed value
)
)
([^\s>]) # Do not include / to make the preparation replacement for __invalid_tag_attr_regex
''', re.VERBOSE)
'''
A regular expression for matching a common error in specifying tag
attributes.
'''
__invalid_tag_attr_regex = re.compile(r'''
(
# Tag name
<''' + tag_name_regex + r'''
# Zero or several attributes
''' + tag_attrs_regex + r'''
)
\s+(?:
# Invalid characters instead of an attribute
[^\sa-zA-Z/>]\S*
|
# Sole slash
/\s
|
# Invalid characters starting from slash instead of an attribute
/[^>\s]+
)
''', re.VERBOSE)
'''
A regular expression for matching HTML errors like:
<a class="app photo"/app2322149_58238998?from_id=2381857&loc=addneighbour onclick="return cur.needLoginBox()">
'''
__empty_tags = 'area|base|basefont|br|col|frame|hr|img|input|link|meta|param'
'''A list of all HTML empty tags.'''
__misopened_tag_regex = re.compile(r'<(' + __empty_tags + tag_attrs_regex + r')\s*>', re.IGNORECASE)
'''A regular expression for matching opened tags that should be closed.'''
__tag_stack = None
'''A stack of currently opened HTML tags.'''
__cur_data = None
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
def __init__(self):
HTMLParser.__init__(self)
def handle_charref(self, name):
'''Handles a character reference of the form &#ref;.'''
self.__accumulate_data('&#' + name + ';')
def handle_data(self, data):
'''Handles data.'''
self.__accumulate_data(data)
def handle_endtag(self, tag_name):
'''Handles end of a tag.'''
self.__handle_data_if_exists()
if self.__get_cur_tag()['name'] == tag_name:
self.__close_tag(self.__tag_stack.pop())
else:
for tag_id in xrange(len(self.__tag_stack) - 1, -1, -1):
if self.__tag_stack[tag_id]['name'] == tag_name:
for tag in reversed(self.__tag_stack[tag_id + 1:]):
self.__close_tag(tag, forced = True)
self.__tag_stack.pop()
self.__close_tag(self.__tag_stack.pop())
break
else:
LOG.debug('Dropping excess end tag "%s"...', tag_name)
def handle_entityref(self, name):
'''Handles a general entity reference of the form &name;.'''
self.__accumulate_data('&' + name + ';')
def handle_root_data(self, tag, data):
'''Handles data inside of the root of the document.'''
LOG.debug('%s', data)
def handle_root(self, tag, attrs, empty):
'''Handles a tag inside of the root of the document.'''
LOG.debug('<%s %s%s>', tag['name'], attrs, '/' if empty else '')
tag['new_tag_handler'] = self.handle_root
tag['data_handler'] = self.handle_root_data
tag['end_tag_handler'] = self.handle_root_end
def handle_root_end(self, tag):
'''Handles end of the root of the document.'''
LOG.debug('</%s>', tag['name'])
def handle_startendtag(self, tag, attrs):
'''Handles start of an XHTML-style empty tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, True)
def handle_starttag(self, tag, attrs):
'''Handles start of a tag.'''
self.__handle_data_if_exists()
self.__handle_start_tag(tag, attrs, False)
def reset(self):
|
def parse(self, html):
'''Parses the specified HTML page.'''
html = self.__fix_html(html)
self.reset()
try:
# Run the parser
self.feed(html)
self.close()
finally:
# Close all unclosed tags
for tag in self.__tag_stack[1:]:
self.__close_tag(tag, True)
def __accumulate_data(self, data):
'''
Accumulates data between handle_charref(), handle_entityref() and
handle_data() calls.
'''
if self.__cur_data is None:
self.__cur_data = data
else:
self.__cur_data += data
def __close_tag(self, tag, forced = False):
'''Forces closing of an unclosed tag.'''
if forced:
LOG.debug('Force closing of unclosed tag "%s".', tag['name'])
else:
LOG.debug('Tag %s closed.', tag)
if 'end_tag_handler' in tag:
tag['end_tag_handler'](tag)
LOG.debug('Current tag: %s.', self.__get_cur_tag())
def __fix_html(self, html):
'''Fixes various things that may confuse the Python's HTML parser.'''
html = self.script_regex.sub('', html)
loop_replacements = (
lambda html: self.__invalid_tag_attr_spacing_regex.subn(r'\1 \2', html),
lambda html: self.__invalid_tag_attr_regex.subn(r'\1 ', html),
)
for loop_replacement in loop_replacements:
for i in xrange(0, 1000):
html, changed = loop_replacement(html)
if not changed:
break
else:
raise Error('Too many errors in the HTML or infinite loop.')
html = self.__misopened_tag_regex.sub(r'<\1 />', html)
return html
def __get_cur_tag(self):
'''Returns currently opened tag.'''
return self.__tag_stack[-1]
def __handle_data_if_exists(self):
'''Handles accumulated data (if exists).'''
data = self.__cur_data
if data is None:
return
self.__cur_data = None
tag = self.__get_cur_tag()
handler = tag.get('data_handler')
if handler is not None:
LOG.debug('Data "%s" in "%s" with handler %s.',
data, tag['name'], handler.func_name)
handler(tag, data)
def __handle_start_tag(self, tag_name, attrs, empty):
'''Handles start of any tag.'''
tag = { 'name': tag_name }
handler = self.__get_cur_tag().get('new_tag_handler')
if handler is not None:
attrs = self.__parse_attrs(attrs)
LOG.debug('Start tag: %s %s with handler %s.',
tag, attrs, handler.func_name)
handler(tag, attrs, empty)
if not empty:
self.__tag_stack.append(tag)
def __parse_attrs(self, attrs_tuple):
'''Converts tag attributes from a tuple to a dictionary.'''
attrs = {}
for attr, value in attrs_tuple:
attrs[attr.lower()] = value
return attrs
| '''Resets the parser.'''
HTMLParser.reset(self)
self.__tag_stack = [{
# Add fake root tag
'name': None,
'new_tag_handler': self.handle_root,
'data_handler': self.handle_root_data,
'end_tag_handler': self.handle_root_end,
}] | identifier_body |
results_fetcher_mock.py | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from collections import namedtuple
from blinkpy.common.net.results_fetcher import TestResultsFetcher
BuilderStep = namedtuple('BuilderStep', ['build', 'step_name'])
# TODO(qyearsley): To be consistent with other fake ("mock") classes, this
# could be changed so it's not a subclass of TestResultsFetcher.
class MockTestResultsFetcher(TestResultsFetcher):
def __init__(self):
super(MockTestResultsFetcher, self).__init__()
self._canned_results = {}
self._canned_retry_summary_json = {}
self._webdriver_results = {}
self.fetched_builds = []
self.fetched_webdriver_builds = []
self._layout_test_step_name = 'blink_web_tests (with patch)'
def set_results(self, build, results, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build)
step = BuilderStep(build=build, step_name=step_name)
self._canned_results[step] = results
def fetch_results(self, build, full=False, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build)
step = BuilderStep(build=build, step_name=step_name)
self.fetched_builds.append(step)
return self._canned_results.get(step)
def set_results_to_resultdb(self, build, results):
self._canned_results[build.build_id] = results
def fetch_results_from_resultdb(self, host, builds, predicate):
rv = []
for build in builds:
|
return rv
def set_webdriver_test_results(self, build, m, results):
self._webdriver_results[(build, m)] = results
def fetch_webdriver_test_results(self, build, m):
self.fetched_webdriver_builds.append((build, m))
return self._webdriver_results.get((build, m))
def set_retry_sumary_json(self, build, content):
self._canned_retry_summary_json[build] = content
def fetch_retry_summary_json(self, build):
return self._canned_retry_summary_json.get(build)
def set_layout_test_step_name(self, name):
self._layout_test_step_name = name
def get_layout_test_step_name(self, build):
return self._layout_test_step_name
| results = self._canned_results.get(build.build_id)
if results:
rv.extend(results) | conditional_block |
results_fetcher_mock.py | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from collections import namedtuple
from blinkpy.common.net.results_fetcher import TestResultsFetcher
BuilderStep = namedtuple('BuilderStep', ['build', 'step_name'])
# TODO(qyearsley): To be consistent with other fake ("mock") classes, this
# could be changed so it's not a subclass of TestResultsFetcher.
class MockTestResultsFetcher(TestResultsFetcher):
def __init__(self):
super(MockTestResultsFetcher, self).__init__()
self._canned_results = {}
self._canned_retry_summary_json = {}
self._webdriver_results = {}
self.fetched_builds = []
self.fetched_webdriver_builds = []
self._layout_test_step_name = 'blink_web_tests (with patch)'
def set_results(self, build, results, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build) | def fetch_results(self, build, full=False, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build)
step = BuilderStep(build=build, step_name=step_name)
self.fetched_builds.append(step)
return self._canned_results.get(step)
def set_results_to_resultdb(self, build, results):
self._canned_results[build.build_id] = results
def fetch_results_from_resultdb(self, host, builds, predicate):
rv = []
for build in builds:
results = self._canned_results.get(build.build_id)
if results:
rv.extend(results)
return rv
def set_webdriver_test_results(self, build, m, results):
self._webdriver_results[(build, m)] = results
def fetch_webdriver_test_results(self, build, m):
self.fetched_webdriver_builds.append((build, m))
return self._webdriver_results.get((build, m))
def set_retry_sumary_json(self, build, content):
self._canned_retry_summary_json[build] = content
def fetch_retry_summary_json(self, build):
return self._canned_retry_summary_json.get(build)
def set_layout_test_step_name(self, name):
self._layout_test_step_name = name
def get_layout_test_step_name(self, build):
return self._layout_test_step_name | step = BuilderStep(build=build, step_name=step_name)
self._canned_results[step] = results
| random_line_split |
results_fetcher_mock.py | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from collections import namedtuple
from blinkpy.common.net.results_fetcher import TestResultsFetcher
BuilderStep = namedtuple('BuilderStep', ['build', 'step_name'])
# TODO(qyearsley): To be consistent with other fake ("mock") classes, this
# could be changed so it's not a subclass of TestResultsFetcher.
class MockTestResultsFetcher(TestResultsFetcher):
def __init__(self):
super(MockTestResultsFetcher, self).__init__()
self._canned_results = {}
self._canned_retry_summary_json = {}
self._webdriver_results = {}
self.fetched_builds = []
self.fetched_webdriver_builds = []
self._layout_test_step_name = 'blink_web_tests (with patch)'
def set_results(self, build, results, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build)
step = BuilderStep(build=build, step_name=step_name)
self._canned_results[step] = results
def fetch_results(self, build, full=False, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build)
step = BuilderStep(build=build, step_name=step_name)
self.fetched_builds.append(step)
return self._canned_results.get(step)
def set_results_to_resultdb(self, build, results):
self._canned_results[build.build_id] = results
def fetch_results_from_resultdb(self, host, builds, predicate):
rv = []
for build in builds:
results = self._canned_results.get(build.build_id)
if results:
rv.extend(results)
return rv
def set_webdriver_test_results(self, build, m, results):
self._webdriver_results[(build, m)] = results
def fetch_webdriver_test_results(self, build, m):
self.fetched_webdriver_builds.append((build, m))
return self._webdriver_results.get((build, m))
def set_retry_sumary_json(self, build, content):
|
def fetch_retry_summary_json(self, build):
return self._canned_retry_summary_json.get(build)
def set_layout_test_step_name(self, name):
self._layout_test_step_name = name
def get_layout_test_step_name(self, build):
return self._layout_test_step_name
| self._canned_retry_summary_json[build] = content | identifier_body |
results_fetcher_mock.py | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from collections import namedtuple
from blinkpy.common.net.results_fetcher import TestResultsFetcher
BuilderStep = namedtuple('BuilderStep', ['build', 'step_name'])
# TODO(qyearsley): To be consistent with other fake ("mock") classes, this
# could be changed so it's not a subclass of TestResultsFetcher.
class MockTestResultsFetcher(TestResultsFetcher):
def __init__(self):
super(MockTestResultsFetcher, self).__init__()
self._canned_results = {}
self._canned_retry_summary_json = {}
self._webdriver_results = {}
self.fetched_builds = []
self.fetched_webdriver_builds = []
self._layout_test_step_name = 'blink_web_tests (with patch)'
def set_results(self, build, results, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build)
step = BuilderStep(build=build, step_name=step_name)
self._canned_results[step] = results
def | (self, build, full=False, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build)
step = BuilderStep(build=build, step_name=step_name)
self.fetched_builds.append(step)
return self._canned_results.get(step)
def set_results_to_resultdb(self, build, results):
self._canned_results[build.build_id] = results
def fetch_results_from_resultdb(self, host, builds, predicate):
rv = []
for build in builds:
results = self._canned_results.get(build.build_id)
if results:
rv.extend(results)
return rv
def set_webdriver_test_results(self, build, m, results):
self._webdriver_results[(build, m)] = results
def fetch_webdriver_test_results(self, build, m):
self.fetched_webdriver_builds.append((build, m))
return self._webdriver_results.get((build, m))
def set_retry_sumary_json(self, build, content):
self._canned_retry_summary_json[build] = content
def fetch_retry_summary_json(self, build):
return self._canned_retry_summary_json.get(build)
def set_layout_test_step_name(self, name):
self._layout_test_step_name = name
def get_layout_test_step_name(self, build):
return self._layout_test_step_name
| fetch_results | identifier_name |
addresses.rs | use postgres;
use ipm::PostgresReqExt;
use rustc_serialize::json::{Json, ToJson};
#[derive(ToJson)]
pub struct Address {
address_id: i32,
address: String,
postal_code: String,
city: String,
state: String,
country: String,
geospot: Json | pub fn get_by_contact_id(req: &PostgresReqExt, contact_id: i32) -> Vec<Address> {
let mut vec = Vec::new();
let conn = req.db_conn();
let stmt = conn.prepare(
"SELECT * FROM addresses a \
WHERE EXISTS(SELECT * \
FROM many_contacts_has_many_addresses b \
WHERE b.address_id_addresses=a.address_id \
AND b.contact_id_contacts=$1) "
).unwrap();
let rows = stmt.query(&[&contact_id]).unwrap();
for row in rows {
vec.push(Address {
address_id: row.get(0),
address: row.get(1),
postal_code: row.get(2),
city: row.get(3),
state: row.get(4),
country: row.get(5),
geospot: row.get(6)
});
}
vec
}
pub fn commit(&self, req: &PostgresReqExt) -> postgres::Result<u64> {
let conn = req.db_conn();
//TODO: trigger for INSERT or UPDATE to remove duplicates.
// if address_id is 0, then INSERT else UPDATE.
conn.execute(
"INSERT INTO addresses \
VALUES($1, $2, $3, $4, $5, $6, $7) ",
&[&self.address_id,
&self.address,
&self.postal_code,
&self.city,
&self.state,
&self.country,
&self.geospot
])
}
} | }
impl Address { | random_line_split |
addresses.rs |
use postgres;
use ipm::PostgresReqExt;
use rustc_serialize::json::{Json, ToJson};
#[derive(ToJson)]
pub struct Address {
address_id: i32,
address: String,
postal_code: String,
city: String,
state: String,
country: String,
geospot: Json
}
impl Address {
pub fn get_by_contact_id(req: &PostgresReqExt, contact_id: i32) -> Vec<Address> {
let mut vec = Vec::new();
let conn = req.db_conn();
let stmt = conn.prepare(
"SELECT * FROM addresses a \
WHERE EXISTS(SELECT * \
FROM many_contacts_has_many_addresses b \
WHERE b.address_id_addresses=a.address_id \
AND b.contact_id_contacts=$1) "
).unwrap();
let rows = stmt.query(&[&contact_id]).unwrap();
for row in rows {
vec.push(Address {
address_id: row.get(0),
address: row.get(1),
postal_code: row.get(2),
city: row.get(3),
state: row.get(4),
country: row.get(5),
geospot: row.get(6)
});
}
vec
}
pub fn | (&self, req: &PostgresReqExt) -> postgres::Result<u64> {
let conn = req.db_conn();
//TODO: trigger for INSERT or UPDATE to remove duplicates.
// if address_id is 0, then INSERT else UPDATE.
conn.execute(
"INSERT INTO addresses \
VALUES($1, $2, $3, $4, $5, $6, $7) ",
&[&self.address_id,
&self.address,
&self.postal_code,
&self.city,
&self.state,
&self.country,
&self.geospot
])
}
}
| commit | identifier_name |
fuse.rs | use core::pin::Pin;
use futures_core::stream::{FusedStream, Stream};
use futures_core::task::{Context, Poll};
#[cfg(feature = "sink")]
use futures_sink::Sink;
use pin_utils::{unsafe_pinned, unsafe_unpinned};
/// Stream for the [`fuse`](super::StreamExt::fuse) method.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Fuse<St> {
stream: St,
done: bool,
}
impl<St: Unpin> Unpin for Fuse<St> {}
impl<St> Fuse<St> {
unsafe_pinned!(stream: St);
unsafe_unpinned!(done: bool);
pub(super) fn new(stream: St) -> Fuse<St> {
Fuse { stream, done: false }
}
/// Returns whether the underlying stream has finished or not.
///
/// If this method returns `true`, then all future calls to poll are
/// guaranteed to return `None`. If this returns `false`, then the
/// underlying stream is still in use.
pub fn is_done(&self) -> bool {
self.done
}
/// Acquires a reference to the underlying stream that this combinator is
/// pulling from.
pub fn get_ref(&self) -> &St {
&self.stream
}
/// Acquires a mutable reference to the underlying stream that this
/// combinator is pulling from.
///
/// Note that care must be taken to avoid tampering with the state of the
/// stream which may otherwise confuse this combinator.
pub fn get_mut(&mut self) -> &mut St {
&mut self.stream
}
/// Acquires a pinned mutable reference to the underlying stream that this
/// combinator is pulling from.
///
/// Note that care must be taken to avoid tampering with the state of the
/// stream which may otherwise confuse this combinator.
pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut St> {
self.stream()
}
/// Consumes this combinator, returning the underlying stream.
///
/// Note that this may discard intermediate state of this combinator, so
/// care should be taken to avoid losing resources when this is called.
pub fn into_inner(self) -> St {
self.stream
}
}
impl<S: Stream> FusedStream for Fuse<S> {
fn is_terminated(&self) -> bool {
self.done
}
}
impl<S: Stream> Stream for Fuse<S> {
type Item = S::Item;
fn poll_next(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<S::Item>> {
if self.done {
return Poll::Ready(None);
}
let item = ready!(self.as_mut().stream().poll_next(cx));
if item.is_none() {
*self.as_mut().done() = true;
}
Poll::Ready(item)
}
fn | (&self) -> (usize, Option<usize>) {
if self.done {
(0, Some(0))
} else {
self.stream.size_hint()
}
}
}
// Forwarding impl of Sink from the underlying stream
#[cfg(feature = "sink")]
impl<S: Stream + Sink<Item>, Item> Sink<Item> for Fuse<S> {
type Error = S::Error;
delegate_sink!(stream, Item);
}
| size_hint | identifier_name |
fuse.rs | use core::pin::Pin;
use futures_core::stream::{FusedStream, Stream};
use futures_core::task::{Context, Poll};
#[cfg(feature = "sink")]
use futures_sink::Sink;
use pin_utils::{unsafe_pinned, unsafe_unpinned};
/// Stream for the [`fuse`](super::StreamExt::fuse) method.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Fuse<St> {
stream: St,
done: bool,
}
impl<St: Unpin> Unpin for Fuse<St> {}
impl<St> Fuse<St> {
unsafe_pinned!(stream: St);
unsafe_unpinned!(done: bool);
pub(super) fn new(stream: St) -> Fuse<St> {
Fuse { stream, done: false }
}
/// Returns whether the underlying stream has finished or not.
///
/// If this method returns `true`, then all future calls to poll are
/// guaranteed to return `None`. If this returns `false`, then the
/// underlying stream is still in use.
pub fn is_done(&self) -> bool {
self.done
}
/// Acquires a reference to the underlying stream that this combinator is
/// pulling from.
pub fn get_ref(&self) -> &St {
&self.stream
}
/// Acquires a mutable reference to the underlying stream that this
/// combinator is pulling from.
///
/// Note that care must be taken to avoid tampering with the state of the
/// stream which may otherwise confuse this combinator.
pub fn get_mut(&mut self) -> &mut St |
/// Acquires a pinned mutable reference to the underlying stream that this
/// combinator is pulling from.
///
/// Note that care must be taken to avoid tampering with the state of the
/// stream which may otherwise confuse this combinator.
pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut St> {
self.stream()
}
/// Consumes this combinator, returning the underlying stream.
///
/// Note that this may discard intermediate state of this combinator, so
/// care should be taken to avoid losing resources when this is called.
pub fn into_inner(self) -> St {
self.stream
}
}
impl<S: Stream> FusedStream for Fuse<S> {
fn is_terminated(&self) -> bool {
self.done
}
}
impl<S: Stream> Stream for Fuse<S> {
type Item = S::Item;
fn poll_next(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<S::Item>> {
if self.done {
return Poll::Ready(None);
}
let item = ready!(self.as_mut().stream().poll_next(cx));
if item.is_none() {
*self.as_mut().done() = true;
}
Poll::Ready(item)
}
fn size_hint(&self) -> (usize, Option<usize>) {
if self.done {
(0, Some(0))
} else {
self.stream.size_hint()
}
}
}
// Forwarding impl of Sink from the underlying stream
#[cfg(feature = "sink")]
impl<S: Stream + Sink<Item>, Item> Sink<Item> for Fuse<S> {
type Error = S::Error;
delegate_sink!(stream, Item);
}
| {
&mut self.stream
} | identifier_body |
fuse.rs | use core::pin::Pin;
use futures_core::stream::{FusedStream, Stream};
use futures_core::task::{Context, Poll};
#[cfg(feature = "sink")]
use futures_sink::Sink;
use pin_utils::{unsafe_pinned, unsafe_unpinned};
/// Stream for the [`fuse`](super::StreamExt::fuse) method.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Fuse<St> {
stream: St,
done: bool,
}
impl<St: Unpin> Unpin for Fuse<St> {}
impl<St> Fuse<St> {
unsafe_pinned!(stream: St);
unsafe_unpinned!(done: bool);
pub(super) fn new(stream: St) -> Fuse<St> {
Fuse { stream, done: false }
}
/// Returns whether the underlying stream has finished or not.
///
/// If this method returns `true`, then all future calls to poll are
/// guaranteed to return `None`. If this returns `false`, then the
/// underlying stream is still in use.
pub fn is_done(&self) -> bool {
self.done
}
/// Acquires a reference to the underlying stream that this combinator is
/// pulling from.
pub fn get_ref(&self) -> &St {
&self.stream
}
/// Acquires a mutable reference to the underlying stream that this
/// combinator is pulling from.
///
/// Note that care must be taken to avoid tampering with the state of the
/// stream which may otherwise confuse this combinator.
pub fn get_mut(&mut self) -> &mut St {
&mut self.stream
}
/// Acquires a pinned mutable reference to the underlying stream that this
/// combinator is pulling from.
///
/// Note that care must be taken to avoid tampering with the state of the
/// stream which may otherwise confuse this combinator.
pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut St> {
self.stream()
}
/// Consumes this combinator, returning the underlying stream.
///
/// Note that this may discard intermediate state of this combinator, so
/// care should be taken to avoid losing resources when this is called.
pub fn into_inner(self) -> St {
self.stream
}
}
impl<S: Stream> FusedStream for Fuse<S> {
fn is_terminated(&self) -> bool {
self.done
}
}
impl<S: Stream> Stream for Fuse<S> {
type Item = S::Item;
fn poll_next(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<S::Item>> {
if self.done {
return Poll::Ready(None);
}
let item = ready!(self.as_mut().stream().poll_next(cx));
if item.is_none() {
*self.as_mut().done() = true;
}
Poll::Ready(item)
}
fn size_hint(&self) -> (usize, Option<usize>) {
if self.done | else {
self.stream.size_hint()
}
}
}
// Forwarding impl of Sink from the underlying stream
#[cfg(feature = "sink")]
impl<S: Stream + Sink<Item>, Item> Sink<Item> for Fuse<S> {
type Error = S::Error;
delegate_sink!(stream, Item);
}
| {
(0, Some(0))
} | conditional_block |
fuse.rs | use core::pin::Pin;
use futures_core::stream::{FusedStream, Stream};
use futures_core::task::{Context, Poll};
#[cfg(feature = "sink")]
use futures_sink::Sink;
use pin_utils::{unsafe_pinned, unsafe_unpinned};
/// Stream for the [`fuse`](super::StreamExt::fuse) method.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Fuse<St> {
stream: St,
done: bool,
}
impl<St: Unpin> Unpin for Fuse<St> {} | impl<St> Fuse<St> {
unsafe_pinned!(stream: St);
unsafe_unpinned!(done: bool);
pub(super) fn new(stream: St) -> Fuse<St> {
Fuse { stream, done: false }
}
/// Returns whether the underlying stream has finished or not.
///
/// If this method returns `true`, then all future calls to poll are
/// guaranteed to return `None`. If this returns `false`, then the
/// underlying stream is still in use.
pub fn is_done(&self) -> bool {
self.done
}
/// Acquires a reference to the underlying stream that this combinator is
/// pulling from.
pub fn get_ref(&self) -> &St {
&self.stream
}
/// Acquires a mutable reference to the underlying stream that this
/// combinator is pulling from.
///
/// Note that care must be taken to avoid tampering with the state of the
/// stream which may otherwise confuse this combinator.
pub fn get_mut(&mut self) -> &mut St {
&mut self.stream
}
/// Acquires a pinned mutable reference to the underlying stream that this
/// combinator is pulling from.
///
/// Note that care must be taken to avoid tampering with the state of the
/// stream which may otherwise confuse this combinator.
pub fn get_pin_mut(self: Pin<&mut Self>) -> Pin<&mut St> {
self.stream()
}
/// Consumes this combinator, returning the underlying stream.
///
/// Note that this may discard intermediate state of this combinator, so
/// care should be taken to avoid losing resources when this is called.
pub fn into_inner(self) -> St {
self.stream
}
}
impl<S: Stream> FusedStream for Fuse<S> {
fn is_terminated(&self) -> bool {
self.done
}
}
impl<S: Stream> Stream for Fuse<S> {
type Item = S::Item;
fn poll_next(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<S::Item>> {
if self.done {
return Poll::Ready(None);
}
let item = ready!(self.as_mut().stream().poll_next(cx));
if item.is_none() {
*self.as_mut().done() = true;
}
Poll::Ready(item)
}
fn size_hint(&self) -> (usize, Option<usize>) {
if self.done {
(0, Some(0))
} else {
self.stream.size_hint()
}
}
}
// Forwarding impl of Sink from the underlying stream
#[cfg(feature = "sink")]
impl<S: Stream + Sink<Item>, Item> Sink<Item> for Fuse<S> {
type Error = S::Error;
delegate_sink!(stream, Item);
} | random_line_split |
|
location-indicator.ts | /**
* @module @bldr/media-manager/location-indicator
*/
// Node packages.
import path from 'path'
import fs from 'fs'
// Project packages.
import config from '@bldr/config'
import { untildify, findParentFile } from '@bldr/core-node'
/**
* Indicates in which folder structure a file is located.
*
* Merge the configurations entries of `config.mediaServer.basePath` and
* `config.mediaServer.archivePaths`. Store only the accessible ones.
*/
class LocationIndicator {
/**
* The base path of the main media folder.
*/
public main: string
/**
* Multiple base paths of media collections (the main base path and some
* archive base paths)
*/
public readonly basePaths: string[]
constructor () {
this.main = config.mediaServer.basePath
const basePaths = [
config.mediaServer.basePath,
...config.mediaServer.archivePaths
]
this.basePaths = []
for (let i = 0; i < basePaths.length; i++) {
basePaths[i] = path.resolve(untildify(basePaths[i]))
if (fs.existsSync(basePaths[i])) {
this.basePaths.push(basePaths[i])
}
}
}
/**
* Check if the `currentPath` is inside a archive folder structure and
* not in den main media folder.
*/
isInArchive (currentPath: string): boolean {
if (path.resolve(currentPath).includes(this.main)) {
return false
}
return true
}
/**
* Get the directory where a presentation file (Praesentation.baldr.yml) is
* located in (The first folder with a prefix like `10_`)
*
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/Material/Duke-Ellington.jpg` ->
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing`
*/
getPresParentDir (currentPath: string): string | undefined {
const parentFile = findParentFile(currentPath, 'Praesentation.baldr.yml')
if (parentFile != null) {
return path.dirname(parentFile)
}
}
/**
* Move a file path into a directory relative to the current
* presentation directory.
*
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/NB/Duke-Ellington.jpg` `BD` ->
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/BD/Duke-Ellington.jpg`
*
* @param currentPath - The current path.
* @param subDir - A relative path.
*/
moveIntoSubdir (currentPath: string, subDir: string): string {
const fileName = path.basename(currentPath)
const presPath = this.getPresParentDir(currentPath)
if (presPath == null) {
throw new Error('The parent presentation folder couldn’t be detected!')
}
return path.join(presPath, subDir, fileName)
}
/**
* A deactivaed directory is a directory which has no direct counter part in
* the main media folder, which is not mirrored. It is a real archived folder
* in the archive folder. Activated folders have a prefix like `10_`
*
* true:
*
* - `/archive/10/10_Jazz/30_Stile/10_New-Orleans-Dixieland/Material/Texte.tex`
* - `/archive/10/10_Jazz/History-of-Jazz/Inhalt.tex`
* - `/archive/12/20_Tradition/30_Volksmusik/Bartok/10_Tanzsuite/Gliederung.tex`
*
* false:
*
* `/archive/10/10_Jazz/20_Vorformen/10_Worksongs-Spirtuals/Arbeitsblatt.tex`
*/
isInDeactivatedDir (currentPath: string): boolean {
currentPath = path.dirname(currentPath)
const relPath = this.getRelPath(currentPath)
if (relPath == null) return true
const segments = relPath.split(path.sep)
for (const segment of segments) {
if (segment.match(/^\d\d/) == null) {
return true
}
}
return false
}
/**
* Get the path relative to one of the base paths and `currentPath`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getRelPath (currentPath: string): string | undefined {
currentPath = path.resolve(currentPath)
let relPath: string | undefined
for (const basePath of this.basePaths) {
if (currentPath.indexOf(basePath) === 0) {
relPath = currentPath.replace(basePath, '')
break
}
}
if (relPath !== undefined) {
return relPath.replace(new RegExp(`^${path.sep}`), '')
}
}
/**
* Get the path relative to one of the base paths and `currentPath`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getBasePath (currentPath: string): string | undefined {
currentPath = path.resolve(currentPath)
let basePath: string | undefined
for (const bPath of this.basePaths) {
if (currentPath.indexOf(bPath) === 0) {
basePath = bPath
break
}
}
if (basePath !== undefined) {
return basePath.replace(new RegExp(`${path.sep}$`), '')
}
}
/**
* The mirrored path of the current give file path, for example:
*
* This folder in the main media folder structure
*
* `/var/data/baldr/media/12/10_Interpreten/20_Auffuehrungspraxis/20_Instrumentenbau/TX`
*
* gets converted to
*
* `/mnt/xpsschulearchiv/12/10_Interpreten/20_Auffuehrungspraxis/20_Instrumentenbau`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getMirroredPath (currentPath: string): string | undefined {
const basePath = this.getBasePath(currentPath)
const relPath = this.getRelPath(currentPath)
let mirroredBasePath: string | undefined
for (const bPath of this.basePaths) {
if (basePath !== bPath) {
mirroredBasePath = bPath
break
}
}
if (mirroredBasePath !== undefined && relPath !== undefined) {
return path.join(mirroredBasePath, relPath)
}
}
}
export const locationIndicator = new LocationIndicator() | export default locationIndicator | random_line_split |
|
location-indicator.ts | /**
* @module @bldr/media-manager/location-indicator
*/
// Node packages.
import path from 'path'
import fs from 'fs'
// Project packages.
import config from '@bldr/config'
import { untildify, findParentFile } from '@bldr/core-node'
/**
* Indicates in which folder structure a file is located.
*
* Merge the configurations entries of `config.mediaServer.basePath` and
* `config.mediaServer.archivePaths`. Store only the accessible ones.
*/
class LocationIndicator {
/**
* The base path of the main media folder.
*/
public main: string
/**
* Multiple base paths of media collections (the main base path and some
* archive base paths)
*/
public readonly basePaths: string[]
constructor () {
this.main = config.mediaServer.basePath
const basePaths = [
config.mediaServer.basePath,
...config.mediaServer.archivePaths
]
this.basePaths = []
for (let i = 0; i < basePaths.length; i++) {
basePaths[i] = path.resolve(untildify(basePaths[i]))
if (fs.existsSync(basePaths[i])) {
this.basePaths.push(basePaths[i])
}
}
}
/**
* Check if the `currentPath` is inside a archive folder structure and
* not in den main media folder.
*/
isInArchive (currentPath: string): boolean {
if (path.resolve(currentPath).includes(this.main)) {
return false
}
return true
}
/**
* Get the directory where a presentation file (Praesentation.baldr.yml) is
* located in (The first folder with a prefix like `10_`)
*
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/Material/Duke-Ellington.jpg` ->
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing`
*/
getPresParentDir (currentPath: string): string | undefined {
const parentFile = findParentFile(currentPath, 'Praesentation.baldr.yml')
if (parentFile != null) {
return path.dirname(parentFile)
}
}
/**
* Move a file path into a directory relative to the current
* presentation directory.
*
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/NB/Duke-Ellington.jpg` `BD` ->
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/BD/Duke-Ellington.jpg`
*
* @param currentPath - The current path.
* @param subDir - A relative path.
*/
moveIntoSubdir (currentPath: string, subDir: string): string {
const fileName = path.basename(currentPath)
const presPath = this.getPresParentDir(currentPath)
if (presPath == null) {
throw new Error('The parent presentation folder couldn’t be detected!')
}
return path.join(presPath, subDir, fileName)
}
/**
* A deactivaed directory is a directory which has no direct counter part in
* the main media folder, which is not mirrored. It is a real archived folder
* in the archive folder. Activated folders have a prefix like `10_`
*
* true:
*
* - `/archive/10/10_Jazz/30_Stile/10_New-Orleans-Dixieland/Material/Texte.tex`
* - `/archive/10/10_Jazz/History-of-Jazz/Inhalt.tex`
* - `/archive/12/20_Tradition/30_Volksmusik/Bartok/10_Tanzsuite/Gliederung.tex`
*
* false:
*
* `/archive/10/10_Jazz/20_Vorformen/10_Worksongs-Spirtuals/Arbeitsblatt.tex`
*/
isInDeactivatedDir (currentPath: string): boolean {
currentPath = path.dirname(currentPath)
const relPath = this.getRelPath(currentPath)
if (relPath == null) return true
const segments = relPath.split(path.sep)
for (const segment of segments) {
if (segment.match(/^\d\d/) == null) {
return true
}
}
return false
}
/**
* Get the path relative to one of the base paths and `currentPath`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getRelPath (currentPath: string): string | undefined {
currentPath = path.resolve(currentPath)
let relPath: string | undefined
for (const basePath of this.basePaths) {
if (currentPath.indexOf(basePath) === 0) {
relPath = currentPath.replace(basePath, '')
break
}
}
if (relPath !== undefined) {
return relPath.replace(new RegExp(`^${path.sep}`), '')
}
}
/**
* Get the path relative to one of the base paths and `currentPath`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getBasePath (currentPath: string): string | undefined {
currentPath = path.resolve(currentPath)
let basePath: string | undefined
for (const bPath of this.basePaths) {
if (currentPath.indexOf(bPath) === 0) {
basePath = bPath
break
}
}
if (basePath !== undefined) {
return basePath.replace(new RegExp(`${path.sep}$`), '')
}
}
/**
* The mirrored path of the current give file path, for example:
*
* This folder in the main media folder structure
*
* `/var/data/baldr/media/12/10_Interpreten/20_Auffuehrungspraxis/20_Instrumentenbau/TX`
*
* gets converted to
*
* `/mnt/xpsschulearchiv/12/10_Interpreten/20_Auffuehrungspraxis/20_Instrumentenbau`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getMirroredPath (currentPath: string): string | undefined {
|
export const locationIndicator = new LocationIndicator()
export default locationIndicator
| const basePath = this.getBasePath(currentPath)
const relPath = this.getRelPath(currentPath)
let mirroredBasePath: string | undefined
for (const bPath of this.basePaths) {
if (basePath !== bPath) {
mirroredBasePath = bPath
break
}
}
if (mirroredBasePath !== undefined && relPath !== undefined) {
return path.join(mirroredBasePath, relPath)
}
}
} | identifier_body |
location-indicator.ts | /**
* @module @bldr/media-manager/location-indicator
*/
// Node packages.
import path from 'path'
import fs from 'fs'
// Project packages.
import config from '@bldr/config'
import { untildify, findParentFile } from '@bldr/core-node'
/**
* Indicates in which folder structure a file is located.
*
* Merge the configurations entries of `config.mediaServer.basePath` and
* `config.mediaServer.archivePaths`. Store only the accessible ones.
*/
class LocationIndicator {
/**
* The base path of the main media folder.
*/
public main: string
/**
* Multiple base paths of media collections (the main base path and some
* archive base paths)
*/
public readonly basePaths: string[]
constructor () {
this.main = config.mediaServer.basePath
const basePaths = [
config.mediaServer.basePath,
...config.mediaServer.archivePaths
]
this.basePaths = []
for (let i = 0; i < basePaths.length; i++) {
basePaths[i] = path.resolve(untildify(basePaths[i]))
if (fs.existsSync(basePaths[i])) {
this.basePaths.push(basePaths[i])
}
}
}
/**
* Check if the `currentPath` is inside a archive folder structure and
* not in den main media folder.
*/
isInArchive (currentPath: string): boolean {
if (path.resolve(currentPath).includes(this.main)) {
return false
}
return true
}
/**
* Get the directory where a presentation file (Praesentation.baldr.yml) is
* located in (The first folder with a prefix like `10_`)
*
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/Material/Duke-Ellington.jpg` ->
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing`
*/
getPresParentDir (currentPath: string): string | undefined {
const parentFile = findParentFile(currentPath, 'Praesentation.baldr.yml')
if (parentFile != null) |
}
/**
* Move a file path into a directory relative to the current
* presentation directory.
*
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/NB/Duke-Ellington.jpg` `BD` ->
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/BD/Duke-Ellington.jpg`
*
* @param currentPath - The current path.
* @param subDir - A relative path.
*/
moveIntoSubdir (currentPath: string, subDir: string): string {
const fileName = path.basename(currentPath)
const presPath = this.getPresParentDir(currentPath)
if (presPath == null) {
throw new Error('The parent presentation folder couldn’t be detected!')
}
return path.join(presPath, subDir, fileName)
}
/**
* A deactivaed directory is a directory which has no direct counter part in
* the main media folder, which is not mirrored. It is a real archived folder
* in the archive folder. Activated folders have a prefix like `10_`
*
* true:
*
* - `/archive/10/10_Jazz/30_Stile/10_New-Orleans-Dixieland/Material/Texte.tex`
* - `/archive/10/10_Jazz/History-of-Jazz/Inhalt.tex`
* - `/archive/12/20_Tradition/30_Volksmusik/Bartok/10_Tanzsuite/Gliederung.tex`
*
* false:
*
* `/archive/10/10_Jazz/20_Vorformen/10_Worksongs-Spirtuals/Arbeitsblatt.tex`
*/
isInDeactivatedDir (currentPath: string): boolean {
currentPath = path.dirname(currentPath)
const relPath = this.getRelPath(currentPath)
if (relPath == null) return true
const segments = relPath.split(path.sep)
for (const segment of segments) {
if (segment.match(/^\d\d/) == null) {
return true
}
}
return false
}
/**
* Get the path relative to one of the base paths and `currentPath`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getRelPath (currentPath: string): string | undefined {
currentPath = path.resolve(currentPath)
let relPath: string | undefined
for (const basePath of this.basePaths) {
if (currentPath.indexOf(basePath) === 0) {
relPath = currentPath.replace(basePath, '')
break
}
}
if (relPath !== undefined) {
return relPath.replace(new RegExp(`^${path.sep}`), '')
}
}
/**
* Get the path relative to one of the base paths and `currentPath`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getBasePath (currentPath: string): string | undefined {
currentPath = path.resolve(currentPath)
let basePath: string | undefined
for (const bPath of this.basePaths) {
if (currentPath.indexOf(bPath) === 0) {
basePath = bPath
break
}
}
if (basePath !== undefined) {
return basePath.replace(new RegExp(`${path.sep}$`), '')
}
}
/**
* The mirrored path of the current give file path, for example:
*
* This folder in the main media folder structure
*
* `/var/data/baldr/media/12/10_Interpreten/20_Auffuehrungspraxis/20_Instrumentenbau/TX`
*
* gets converted to
*
* `/mnt/xpsschulearchiv/12/10_Interpreten/20_Auffuehrungspraxis/20_Instrumentenbau`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getMirroredPath (currentPath: string): string | undefined {
const basePath = this.getBasePath(currentPath)
const relPath = this.getRelPath(currentPath)
let mirroredBasePath: string | undefined
for (const bPath of this.basePaths) {
if (basePath !== bPath) {
mirroredBasePath = bPath
break
}
}
if (mirroredBasePath !== undefined && relPath !== undefined) {
return path.join(mirroredBasePath, relPath)
}
}
}
export const locationIndicator = new LocationIndicator()
export default locationIndicator
| {
return path.dirname(parentFile)
} | conditional_block |
location-indicator.ts | /**
* @module @bldr/media-manager/location-indicator
*/
// Node packages.
import path from 'path'
import fs from 'fs'
// Project packages.
import config from '@bldr/config'
import { untildify, findParentFile } from '@bldr/core-node'
/**
* Indicates in which folder structure a file is located.
*
* Merge the configurations entries of `config.mediaServer.basePath` and
* `config.mediaServer.archivePaths`. Store only the accessible ones.
*/
class | {
/**
* The base path of the main media folder.
*/
public main: string
/**
* Multiple base paths of media collections (the main base path and some
* archive base paths)
*/
public readonly basePaths: string[]
constructor () {
this.main = config.mediaServer.basePath
const basePaths = [
config.mediaServer.basePath,
...config.mediaServer.archivePaths
]
this.basePaths = []
for (let i = 0; i < basePaths.length; i++) {
basePaths[i] = path.resolve(untildify(basePaths[i]))
if (fs.existsSync(basePaths[i])) {
this.basePaths.push(basePaths[i])
}
}
}
/**
* Check if the `currentPath` is inside a archive folder structure and
* not in den main media folder.
*/
isInArchive (currentPath: string): boolean {
if (path.resolve(currentPath).includes(this.main)) {
return false
}
return true
}
/**
* Get the directory where a presentation file (Praesentation.baldr.yml) is
* located in (The first folder with a prefix like `10_`)
*
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/Material/Duke-Ellington.jpg` ->
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing`
*/
getPresParentDir (currentPath: string): string | undefined {
const parentFile = findParentFile(currentPath, 'Praesentation.baldr.yml')
if (parentFile != null) {
return path.dirname(parentFile)
}
}
/**
* Move a file path into a directory relative to the current
* presentation directory.
*
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/NB/Duke-Ellington.jpg` `BD` ->
* `/baldr/media/10/10_Jazz/30_Stile/20_Swing/BD/Duke-Ellington.jpg`
*
* @param currentPath - The current path.
* @param subDir - A relative path.
*/
moveIntoSubdir (currentPath: string, subDir: string): string {
const fileName = path.basename(currentPath)
const presPath = this.getPresParentDir(currentPath)
if (presPath == null) {
throw new Error('The parent presentation folder couldn’t be detected!')
}
return path.join(presPath, subDir, fileName)
}
/**
* A deactivaed directory is a directory which has no direct counter part in
* the main media folder, which is not mirrored. It is a real archived folder
* in the archive folder. Activated folders have a prefix like `10_`
*
* true:
*
* - `/archive/10/10_Jazz/30_Stile/10_New-Orleans-Dixieland/Material/Texte.tex`
* - `/archive/10/10_Jazz/History-of-Jazz/Inhalt.tex`
* - `/archive/12/20_Tradition/30_Volksmusik/Bartok/10_Tanzsuite/Gliederung.tex`
*
* false:
*
* `/archive/10/10_Jazz/20_Vorformen/10_Worksongs-Spirtuals/Arbeitsblatt.tex`
*/
isInDeactivatedDir (currentPath: string): boolean {
currentPath = path.dirname(currentPath)
const relPath = this.getRelPath(currentPath)
if (relPath == null) return true
const segments = relPath.split(path.sep)
for (const segment of segments) {
if (segment.match(/^\d\d/) == null) {
return true
}
}
return false
}
/**
* Get the path relative to one of the base paths and `currentPath`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getRelPath (currentPath: string): string | undefined {
currentPath = path.resolve(currentPath)
let relPath: string | undefined
for (const basePath of this.basePaths) {
if (currentPath.indexOf(basePath) === 0) {
relPath = currentPath.replace(basePath, '')
break
}
}
if (relPath !== undefined) {
return relPath.replace(new RegExp(`^${path.sep}`), '')
}
}
/**
* Get the path relative to one of the base paths and `currentPath`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getBasePath (currentPath: string): string | undefined {
currentPath = path.resolve(currentPath)
let basePath: string | undefined
for (const bPath of this.basePaths) {
if (currentPath.indexOf(bPath) === 0) {
basePath = bPath
break
}
}
if (basePath !== undefined) {
return basePath.replace(new RegExp(`${path.sep}$`), '')
}
}
/**
* The mirrored path of the current give file path, for example:
*
* This folder in the main media folder structure
*
* `/var/data/baldr/media/12/10_Interpreten/20_Auffuehrungspraxis/20_Instrumentenbau/TX`
*
* gets converted to
*
* `/mnt/xpsschulearchiv/12/10_Interpreten/20_Auffuehrungspraxis/20_Instrumentenbau`.
*
* @param currentPath - The path of a file or a directory inside
* a media server folder structure or inside its archive folders.
*/
getMirroredPath (currentPath: string): string | undefined {
const basePath = this.getBasePath(currentPath)
const relPath = this.getRelPath(currentPath)
let mirroredBasePath: string | undefined
for (const bPath of this.basePaths) {
if (basePath !== bPath) {
mirroredBasePath = bPath
break
}
}
if (mirroredBasePath !== undefined && relPath !== undefined) {
return path.join(mirroredBasePath, relPath)
}
}
}
export const locationIndicator = new LocationIndicator()
export default locationIndicator
| LocationIndicator | identifier_name |
ShareButton.tsx | import * as React from 'react';
import { st, classes } from './ShareButton.st.css';
import { ButtonProps } from '../Button';
import { Omit } from '../../types';
import {
TextButton,
TextButtonProps,
TEXT_BUTTON_PRIORITY,
} from '../TextButton';
import { ReactComponent as Share } from '../../assets/icons/Share.svg';
interface ShareData {
url: string;
text?: string;
title?: string;
}
export interface ShareButtonProps
extends Omit<TextButtonProps, 'onClick' | 'children'> {
onClick(sharePromise: Promise<void> | undefined): void;
shareData: ShareData;
withIcon?: boolean;
text?: React.ReactChild;
}
declare global {
interface Navigator {
share(data: ShareData): Promise<void>;
}
}
/** Opens navigator.share where available */
export class ShareButton extends React.Component<ShareButtonProps> {
static displayName = 'ShareButton';
onButtonClick: ButtonProps['onClick'] = () => {
let sharePromise: Promise<void> | undefined;
const { shareData } = this.props;
if (navigator.share) |
this.props.onClick(sharePromise);
};
render() {
const { shareData, text, withIcon, className, ...rest } = this.props;
return (
<TextButton
className={st(
classes.root,
{ withIcon, withText: Boolean(text) },
className,
)}
priority={TEXT_BUTTON_PRIORITY.secondary}
prefixIcon={withIcon ? <Share className={classes.icon} /> : undefined}
{...rest}
onClick={this.onButtonClick}
data-hook={this.props['data-hook']}
>
<div className={classes.text}>{text}</div>
</TextButton>
);
}
}
| {
sharePromise = navigator.share(shareData);
} | conditional_block |
ShareButton.tsx | import * as React from 'react';
import { st, classes } from './ShareButton.st.css';
import { ButtonProps } from '../Button';
import { Omit } from '../../types';
import {
TextButton,
TextButtonProps,
TEXT_BUTTON_PRIORITY,
} from '../TextButton';
import { ReactComponent as Share } from '../../assets/icons/Share.svg';
interface ShareData {
url: string;
text?: string;
title?: string;
}
export interface ShareButtonProps
extends Omit<TextButtonProps, 'onClick' | 'children'> {
onClick(sharePromise: Promise<void> | undefined): void;
shareData: ShareData;
withIcon?: boolean;
text?: React.ReactChild;
}
declare global {
interface Navigator {
share(data: ShareData): Promise<void>;
}
}
/** Opens navigator.share where available */
export class ShareButton extends React.Component<ShareButtonProps> {
static displayName = 'ShareButton';
onButtonClick: ButtonProps['onClick'] = () => {
let sharePromise: Promise<void> | undefined;
const { shareData } = this.props;
if (navigator.share) {
sharePromise = navigator.share(shareData);
}
this.props.onClick(sharePromise);
};
render() |
}
| {
const { shareData, text, withIcon, className, ...rest } = this.props;
return (
<TextButton
className={st(
classes.root,
{ withIcon, withText: Boolean(text) },
className,
)}
priority={TEXT_BUTTON_PRIORITY.secondary}
prefixIcon={withIcon ? <Share className={classes.icon} /> : undefined}
{...rest}
onClick={this.onButtonClick}
data-hook={this.props['data-hook']}
>
<div className={classes.text}>{text}</div>
</TextButton>
);
} | identifier_body |
ShareButton.tsx | import * as React from 'react';
import { st, classes } from './ShareButton.st.css';
import { ButtonProps } from '../Button';
import { Omit } from '../../types';
import {
TextButton,
TextButtonProps,
TEXT_BUTTON_PRIORITY,
} from '../TextButton';
import { ReactComponent as Share } from '../../assets/icons/Share.svg';
interface ShareData {
url: string;
text?: string;
title?: string;
}
export interface ShareButtonProps
extends Omit<TextButtonProps, 'onClick' | 'children'> {
onClick(sharePromise: Promise<void> | undefined): void;
shareData: ShareData;
withIcon?: boolean;
text?: React.ReactChild;
}
declare global {
interface Navigator {
share(data: ShareData): Promise<void>;
}
}
/** Opens navigator.share where available */
export class ShareButton extends React.Component<ShareButtonProps> {
static displayName = 'ShareButton';
onButtonClick: ButtonProps['onClick'] = () => {
let sharePromise: Promise<void> | undefined;
const { shareData } = this.props;
if (navigator.share) {
sharePromise = navigator.share(shareData);
}
this.props.onClick(sharePromise);
};
| () {
const { shareData, text, withIcon, className, ...rest } = this.props;
return (
<TextButton
className={st(
classes.root,
{ withIcon, withText: Boolean(text) },
className,
)}
priority={TEXT_BUTTON_PRIORITY.secondary}
prefixIcon={withIcon ? <Share className={classes.icon} /> : undefined}
{...rest}
onClick={this.onButtonClick}
data-hook={this.props['data-hook']}
>
<div className={classes.text}>{text}</div>
</TextButton>
);
}
}
| render | identifier_name |
ShareButton.tsx | import * as React from 'react';
import { st, classes } from './ShareButton.st.css';
import { ButtonProps } from '../Button';
import { Omit } from '../../types';
import {
TextButton,
TextButtonProps,
TEXT_BUTTON_PRIORITY,
} from '../TextButton';
import { ReactComponent as Share } from '../../assets/icons/Share.svg';
interface ShareData {
url: string;
text?: string;
title?: string;
}
export interface ShareButtonProps
extends Omit<TextButtonProps, 'onClick' | 'children'> {
onClick(sharePromise: Promise<void> | undefined): void;
shareData: ShareData;
withIcon?: boolean;
text?: React.ReactChild;
}
declare global {
interface Navigator {
share(data: ShareData): Promise<void>;
}
}
/** Opens navigator.share where available */
export class ShareButton extends React.Component<ShareButtonProps> {
static displayName = 'ShareButton';
onButtonClick: ButtonProps['onClick'] = () => {
let sharePromise: Promise<void> | undefined;
const { shareData } = this.props;
if (navigator.share) {
sharePromise = navigator.share(shareData);
}
this.props.onClick(sharePromise);
};
render() {
const { shareData, text, withIcon, className, ...rest } = this.props;
return (
<TextButton
className={st(
classes.root, | priority={TEXT_BUTTON_PRIORITY.secondary}
prefixIcon={withIcon ? <Share className={classes.icon} /> : undefined}
{...rest}
onClick={this.onButtonClick}
data-hook={this.props['data-hook']}
>
<div className={classes.text}>{text}</div>
</TextButton>
);
}
} | { withIcon, withText: Boolean(text) },
className,
)} | random_line_split |
wisp.py | import RPi.GPIO as GPIO
import time
import subprocess
import datetime
import time
# Added to support voice command
import threading
import speech_recognition as sr
STATE_UNOCCUPIED = 0
STATE_OCCUPIED = 1
OCCUPIED_DISTANCE = 50
UNOCCUPIED_DURATION = 1200
def process_command(self, command):
if command.lower() == "snap camera one":
take_webcam_still()
if command.lower() == "roll camera one":
take_webcam_video()
if command.lower() == "snap camera two":
take_picam_still()
if command.lower() == "roll camera two":
take_picam_video()
Added to support voice command
class VoiceCommand(object):
def __init__(self, interval=1):
""" Constructor
:type interval: int
:param interval: Check interval, in seconds
"""
self.interval = interval
self.parent_function = None
thread = threading.Thread(target=self.init_speech_recognition, args=())
thread.daemon = True # Daemonize thread
thread.start()
def set_command(self, func):
self.parent_function = func
def init_speech_recognition(self):
# this is called from the background thread
def callback(recognizer, audio):
# received audio data, now we'll recognize it using Google Speech Recognition
try:
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
# instead of `r.recognize_google(audio)`
command = recognizer.recognize_google(audio)
#print("Google Speech Recognition thinks you said " + command)
self.parent_function(command)
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
r = sr.Recognizer()
m = sr.Microphone()
with m as source:
r.adjust_for_ambient_noise(source) # we only need to calibrate once, before we start listening
# start listening in the background (note that we don't have to do this inside a `with` statement)
stop_listening = r.listen_in_background(m, callback)
# `stop_listening` is now a function that, when called, stops background listening
# do some other computation for 5 seconds, then stop listening and keep doing other computations
#for _ in range(50): time.sleep(0.1) # we're still listening even though the main thread is doing other things
#stop_listening() # calling this function requests that the background listener stop listening
while True: time.sleep(0.025)
def check_proximity():
TRIG = 23
ECHO = 24
GPIO.setup(TRIG,GPIO.OUT)
GPIO.setup(ECHO,GPIO.IN)
GPIO.output(TRIG, False)
time.sleep(1)
GPIO.output(TRIG, True)
time.sleep(0.00001)
GPIO.output(TRIG, False)
while GPIO.input(ECHO)==0:
pulse_start = time.time()
while GPIO.input(ECHO)==1:
pulse_end = time.time()
pulse_duration = pulse_end - pulse_start
distance = pulse_duration * 17150
distance = round(distance, 2)
return distance
def take_webcam_still():
subprocess.call(["fswebcam", "-p", "YUYV", "-d", "/dev/video0", "-r", "1024x768", "--top-banner", '"Volunder Waycraft"', "image.jpg"])
def take_webcam_video():
subprocess.call(["fswebcam", "-p", "YUYV", "-d", "/dev/video0", "-r", "1024x768", "--top-banner", '"Volunder Waycraft"', "image.jpg"])
def | ():
subprocess.call(["raspistill", "-o", "image.jpg"])
def take_picam_video():
subprocess.call(["raspivid", "-o", "video.h264"])
def say(content):
subprocess.call(["mpg123", content])
#fswebcam -p YUYV -d /dev/video0 -r 1024x768 --top-banner --title "The Garage" --subtitle "tyler" --info "hello" image.jpg
def main(argv):
state = STATE_UNOCCUPIED
unoccupied_start = time.time()
GPIO.setmode(GPIO.BCM)
logger_level = logging.DEBUG
logger = logging.getLogger('shop_log')
logger.setLevel(logger_level)
# create logging file handler
file_handler = logging.FileHandler('shop.log', 'a')
file_handler.setLevel(logger_level)
# create logging console handler
console_handler = logging.StreamHandler()
console_handler.setLevel(logger_level)
#Set logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
file_handler.setFormatter(formatter)
console_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.addHandler(console_handler)
vc = self.VoiceCommand()
vc.set_command(self.process_command)
while 1:
prox_distance = check_proximity()
if prox_distance < OCCUPIED_DISTANCE:
if state == STATE_UNOCCUPIED:
say("greet.mp3")
state = STATE_OCCUPIED
logger.info("Workshop Occupied")
unoccupied_start = datetime.datetime.now()
if prox_distance > OCCUPIED_DISTANCE:
if (time.time() - unoccupied_start) > UNOCCUPIED_DURATION:
say("goodbye.mp3")
state = STATE_UNOCCUPIED
logger.info("Workshop Unoccupied")
GPIO.cleanup()
'''Voice Commands
# Information Commands
Datetime - Gives the Date and Time as audio
Status - Provides memory, disk, processor and network information
Project Report - Reports project name, number of images, videos and audio files in the current project
#Media-centric
Snap Webcam - Takes a picture with the webcam
Snap Picam - Takes a picture with the Picam
Roll Webcam - Captures a video with webcam
Cut Webcam - Stops video capture from webcam
Roll Picam - Captures a video with picam
Cut Picam - Stops video capture from picam
Record audio xx - Record xx seconds of audio
Turn on light X - Turns on the specified light
Turn off light X - Turns off the specified light
'''
| take_picam_still | identifier_name |
wisp.py | import RPi.GPIO as GPIO
import time
import subprocess
import datetime
import time
# Added to support voice command
import threading
import speech_recognition as sr
STATE_UNOCCUPIED = 0
STATE_OCCUPIED = 1
OCCUPIED_DISTANCE = 50
UNOCCUPIED_DURATION = 1200
def process_command(self, command):
if command.lower() == "snap camera one":
take_webcam_still()
if command.lower() == "roll camera one":
take_webcam_video()
if command.lower() == "snap camera two":
take_picam_still()
if command.lower() == "roll camera two":
take_picam_video()
Added to support voice command
class VoiceCommand(object):
def __init__(self, interval=1):
""" Constructor
:type interval: int
:param interval: Check interval, in seconds
"""
self.interval = interval
self.parent_function = None
thread = threading.Thread(target=self.init_speech_recognition, args=())
thread.daemon = True # Daemonize thread
thread.start()
def set_command(self, func):
self.parent_function = func
def init_speech_recognition(self):
# this is called from the background thread
|
def check_proximity():
TRIG = 23
ECHO = 24
GPIO.setup(TRIG,GPIO.OUT)
GPIO.setup(ECHO,GPIO.IN)
GPIO.output(TRIG, False)
time.sleep(1)
GPIO.output(TRIG, True)
time.sleep(0.00001)
GPIO.output(TRIG, False)
while GPIO.input(ECHO)==0:
pulse_start = time.time()
while GPIO.input(ECHO)==1:
pulse_end = time.time()
pulse_duration = pulse_end - pulse_start
distance = pulse_duration * 17150
distance = round(distance, 2)
return distance
def take_webcam_still():
subprocess.call(["fswebcam", "-p", "YUYV", "-d", "/dev/video0", "-r", "1024x768", "--top-banner", '"Volunder Waycraft"', "image.jpg"])
def take_webcam_video():
subprocess.call(["fswebcam", "-p", "YUYV", "-d", "/dev/video0", "-r", "1024x768", "--top-banner", '"Volunder Waycraft"', "image.jpg"])
def take_picam_still():
subprocess.call(["raspistill", "-o", "image.jpg"])
def take_picam_video():
subprocess.call(["raspivid", "-o", "video.h264"])
def say(content):
subprocess.call(["mpg123", content])
#fswebcam -p YUYV -d /dev/video0 -r 1024x768 --top-banner --title "The Garage" --subtitle "tyler" --info "hello" image.jpg
def main(argv):
state = STATE_UNOCCUPIED
unoccupied_start = time.time()
GPIO.setmode(GPIO.BCM)
logger_level = logging.DEBUG
logger = logging.getLogger('shop_log')
logger.setLevel(logger_level)
# create logging file handler
file_handler = logging.FileHandler('shop.log', 'a')
file_handler.setLevel(logger_level)
# create logging console handler
console_handler = logging.StreamHandler()
console_handler.setLevel(logger_level)
#Set logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
file_handler.setFormatter(formatter)
console_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.addHandler(console_handler)
vc = self.VoiceCommand()
vc.set_command(self.process_command)
while 1:
prox_distance = check_proximity()
if prox_distance < OCCUPIED_DISTANCE:
if state == STATE_UNOCCUPIED:
say("greet.mp3")
state = STATE_OCCUPIED
logger.info("Workshop Occupied")
unoccupied_start = datetime.datetime.now()
if prox_distance > OCCUPIED_DISTANCE:
if (time.time() - unoccupied_start) > UNOCCUPIED_DURATION:
say("goodbye.mp3")
state = STATE_UNOCCUPIED
logger.info("Workshop Unoccupied")
GPIO.cleanup()
'''Voice Commands
# Information Commands
Datetime - Gives the Date and Time as audio
Status - Provides memory, disk, processor and network information
Project Report - Reports project name, number of images, videos and audio files in the current project
#Media-centric
Snap Webcam - Takes a picture with the webcam
Snap Picam - Takes a picture with the Picam
Roll Webcam - Captures a video with webcam
Cut Webcam - Stops video capture from webcam
Roll Picam - Captures a video with picam
Cut Picam - Stops video capture from picam
Record audio xx - Record xx seconds of audio
Turn on light X - Turns on the specified light
Turn off light X - Turns off the specified light
'''
| def callback(recognizer, audio):
# received audio data, now we'll recognize it using Google Speech Recognition
try:
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
# instead of `r.recognize_google(audio)`
command = recognizer.recognize_google(audio)
#print("Google Speech Recognition thinks you said " + command)
self.parent_function(command)
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
r = sr.Recognizer()
m = sr.Microphone()
with m as source:
r.adjust_for_ambient_noise(source) # we only need to calibrate once, before we start listening
# start listening in the background (note that we don't have to do this inside a `with` statement)
stop_listening = r.listen_in_background(m, callback)
# `stop_listening` is now a function that, when called, stops background listening
# do some other computation for 5 seconds, then stop listening and keep doing other computations
#for _ in range(50): time.sleep(0.1) # we're still listening even though the main thread is doing other things
#stop_listening() # calling this function requests that the background listener stop listening
while True: time.sleep(0.025) | identifier_body |
wisp.py | import RPi.GPIO as GPIO
import time
import subprocess
import datetime
import time
# Added to support voice command
import threading
import speech_recognition as sr
STATE_UNOCCUPIED = 0
STATE_OCCUPIED = 1
OCCUPIED_DISTANCE = 50
UNOCCUPIED_DURATION = 1200
def process_command(self, command):
if command.lower() == "snap camera one":
take_webcam_still()
if command.lower() == "roll camera one":
take_webcam_video()
if command.lower() == "snap camera two":
take_picam_still()
if command.lower() == "roll camera two":
take_picam_video()
Added to support voice command
class VoiceCommand(object):
def __init__(self, interval=1):
""" Constructor
:type interval: int
:param interval: Check interval, in seconds
"""
self.interval = interval
self.parent_function = None
thread = threading.Thread(target=self.init_speech_recognition, args=())
thread.daemon = True # Daemonize thread
thread.start()
def set_command(self, func):
self.parent_function = func
def init_speech_recognition(self):
# this is called from the background thread
def callback(recognizer, audio):
# received audio data, now we'll recognize it using Google Speech Recognition
try:
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
# instead of `r.recognize_google(audio)`
command = recognizer.recognize_google(audio)
#print("Google Speech Recognition thinks you said " + command)
self.parent_function(command)
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
r = sr.Recognizer()
m = sr.Microphone()
with m as source:
r.adjust_for_ambient_noise(source) # we only need to calibrate once, before we start listening
# start listening in the background (note that we don't have to do this inside a `with` statement)
stop_listening = r.listen_in_background(m, callback)
# `stop_listening` is now a function that, when called, stops background listening
# do some other computation for 5 seconds, then stop listening and keep doing other computations
#for _ in range(50): time.sleep(0.1) # we're still listening even though the main thread is doing other things
#stop_listening() # calling this function requests that the background listener stop listening
while True: time.sleep(0.025)
def check_proximity():
TRIG = 23
ECHO = 24
GPIO.setup(TRIG,GPIO.OUT)
GPIO.setup(ECHO,GPIO.IN)
GPIO.output(TRIG, False)
time.sleep(1)
GPIO.output(TRIG, True)
time.sleep(0.00001)
GPIO.output(TRIG, False)
while GPIO.input(ECHO)==0:
pulse_start = time.time()
while GPIO.input(ECHO)==1:
pulse_end = time.time()
pulse_duration = pulse_end - pulse_start
distance = pulse_duration * 17150
distance = round(distance, 2)
return distance
def take_webcam_still():
subprocess.call(["fswebcam", "-p", "YUYV", "-d", "/dev/video0", "-r", "1024x768", "--top-banner", '"Volunder Waycraft"', "image.jpg"])
def take_webcam_video():
subprocess.call(["fswebcam", "-p", "YUYV", "-d", "/dev/video0", "-r", "1024x768", "--top-banner", '"Volunder Waycraft"', "image.jpg"])
def take_picam_still():
subprocess.call(["raspistill", "-o", "image.jpg"])
def take_picam_video():
subprocess.call(["raspivid", "-o", "video.h264"])
| subprocess.call(["mpg123", content])
#fswebcam -p YUYV -d /dev/video0 -r 1024x768 --top-banner --title "The Garage" --subtitle "tyler" --info "hello" image.jpg
def main(argv):
state = STATE_UNOCCUPIED
unoccupied_start = time.time()
GPIO.setmode(GPIO.BCM)
logger_level = logging.DEBUG
logger = logging.getLogger('shop_log')
logger.setLevel(logger_level)
# create logging file handler
file_handler = logging.FileHandler('shop.log', 'a')
file_handler.setLevel(logger_level)
# create logging console handler
console_handler = logging.StreamHandler()
console_handler.setLevel(logger_level)
#Set logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
file_handler.setFormatter(formatter)
console_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.addHandler(console_handler)
vc = self.VoiceCommand()
vc.set_command(self.process_command)
while 1:
prox_distance = check_proximity()
if prox_distance < OCCUPIED_DISTANCE:
if state == STATE_UNOCCUPIED:
say("greet.mp3")
state = STATE_OCCUPIED
logger.info("Workshop Occupied")
unoccupied_start = datetime.datetime.now()
if prox_distance > OCCUPIED_DISTANCE:
if (time.time() - unoccupied_start) > UNOCCUPIED_DURATION:
say("goodbye.mp3")
state = STATE_UNOCCUPIED
logger.info("Workshop Unoccupied")
GPIO.cleanup()
'''Voice Commands
# Information Commands
Datetime - Gives the Date and Time as audio
Status - Provides memory, disk, processor and network information
Project Report - Reports project name, number of images, videos and audio files in the current project
#Media-centric
Snap Webcam - Takes a picture with the webcam
Snap Picam - Takes a picture with the Picam
Roll Webcam - Captures a video with webcam
Cut Webcam - Stops video capture from webcam
Roll Picam - Captures a video with picam
Cut Picam - Stops video capture from picam
Record audio xx - Record xx seconds of audio
Turn on light X - Turns on the specified light
Turn off light X - Turns off the specified light
''' | def say(content): | random_line_split |
wisp.py | import RPi.GPIO as GPIO
import time
import subprocess
import datetime
import time
# Added to support voice command
import threading
import speech_recognition as sr
STATE_UNOCCUPIED = 0
STATE_OCCUPIED = 1
OCCUPIED_DISTANCE = 50
UNOCCUPIED_DURATION = 1200
def process_command(self, command):
if command.lower() == "snap camera one":
take_webcam_still()
if command.lower() == "roll camera one":
take_webcam_video()
if command.lower() == "snap camera two":
take_picam_still()
if command.lower() == "roll camera two":
take_picam_video()
Added to support voice command
class VoiceCommand(object):
def __init__(self, interval=1):
""" Constructor
:type interval: int
:param interval: Check interval, in seconds
"""
self.interval = interval
self.parent_function = None
thread = threading.Thread(target=self.init_speech_recognition, args=())
thread.daemon = True # Daemonize thread
thread.start()
def set_command(self, func):
self.parent_function = func
def init_speech_recognition(self):
# this is called from the background thread
def callback(recognizer, audio):
# received audio data, now we'll recognize it using Google Speech Recognition
try:
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
# instead of `r.recognize_google(audio)`
command = recognizer.recognize_google(audio)
#print("Google Speech Recognition thinks you said " + command)
self.parent_function(command)
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
r = sr.Recognizer()
m = sr.Microphone()
with m as source:
r.adjust_for_ambient_noise(source) # we only need to calibrate once, before we start listening
# start listening in the background (note that we don't have to do this inside a `with` statement)
stop_listening = r.listen_in_background(m, callback)
# `stop_listening` is now a function that, when called, stops background listening
# do some other computation for 5 seconds, then stop listening and keep doing other computations
#for _ in range(50): time.sleep(0.1) # we're still listening even though the main thread is doing other things
#stop_listening() # calling this function requests that the background listener stop listening
while True: time.sleep(0.025)
def check_proximity():
TRIG = 23
ECHO = 24
GPIO.setup(TRIG,GPIO.OUT)
GPIO.setup(ECHO,GPIO.IN)
GPIO.output(TRIG, False)
time.sleep(1)
GPIO.output(TRIG, True)
time.sleep(0.00001)
GPIO.output(TRIG, False)
while GPIO.input(ECHO)==0:
pulse_start = time.time()
while GPIO.input(ECHO)==1:
pulse_end = time.time()
pulse_duration = pulse_end - pulse_start
distance = pulse_duration * 17150
distance = round(distance, 2)
return distance
def take_webcam_still():
subprocess.call(["fswebcam", "-p", "YUYV", "-d", "/dev/video0", "-r", "1024x768", "--top-banner", '"Volunder Waycraft"', "image.jpg"])
def take_webcam_video():
subprocess.call(["fswebcam", "-p", "YUYV", "-d", "/dev/video0", "-r", "1024x768", "--top-banner", '"Volunder Waycraft"', "image.jpg"])
def take_picam_still():
subprocess.call(["raspistill", "-o", "image.jpg"])
def take_picam_video():
subprocess.call(["raspivid", "-o", "video.h264"])
def say(content):
subprocess.call(["mpg123", content])
#fswebcam -p YUYV -d /dev/video0 -r 1024x768 --top-banner --title "The Garage" --subtitle "tyler" --info "hello" image.jpg
def main(argv):
state = STATE_UNOCCUPIED
unoccupied_start = time.time()
GPIO.setmode(GPIO.BCM)
logger_level = logging.DEBUG
logger = logging.getLogger('shop_log')
logger.setLevel(logger_level)
# create logging file handler
file_handler = logging.FileHandler('shop.log', 'a')
file_handler.setLevel(logger_level)
# create logging console handler
console_handler = logging.StreamHandler()
console_handler.setLevel(logger_level)
#Set logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
file_handler.setFormatter(formatter)
console_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.addHandler(console_handler)
vc = self.VoiceCommand()
vc.set_command(self.process_command)
while 1:
prox_distance = check_proximity()
if prox_distance < OCCUPIED_DISTANCE:
|
if prox_distance > OCCUPIED_DISTANCE:
if (time.time() - unoccupied_start) > UNOCCUPIED_DURATION:
say("goodbye.mp3")
state = STATE_UNOCCUPIED
logger.info("Workshop Unoccupied")
GPIO.cleanup()
'''Voice Commands
# Information Commands
Datetime - Gives the Date and Time as audio
Status - Provides memory, disk, processor and network information
Project Report - Reports project name, number of images, videos and audio files in the current project
#Media-centric
Snap Webcam - Takes a picture with the webcam
Snap Picam - Takes a picture with the Picam
Roll Webcam - Captures a video with webcam
Cut Webcam - Stops video capture from webcam
Roll Picam - Captures a video with picam
Cut Picam - Stops video capture from picam
Record audio xx - Record xx seconds of audio
Turn on light X - Turns on the specified light
Turn off light X - Turns off the specified light
'''
| if state == STATE_UNOCCUPIED:
say("greet.mp3")
state = STATE_OCCUPIED
logger.info("Workshop Occupied")
unoccupied_start = datetime.datetime.now() | conditional_block |
PickListSubList.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.PickListSubList = void 0;
var _classnames = _interopRequireDefault(require("classnames"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _react = _interopRequireWildcard(require("react"));
var _ObjectUtils = _interopRequireDefault(require("../utils/ObjectUtils"));
var _PickListItem = require("./PickListItem");
var _DomHandler = _interopRequireDefault(require("../utils/DomHandler"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function _getRequireWildcardCache() { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || _typeof(obj) !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); }
function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(n); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && Symbol.iterator in Object(iter)) return Array.from(iter); }
function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); }
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
function _createSuper(Derived) { return function () { var Super = _getPrototypeOf(Derived), result; if (_isNativeReflectConstruct()) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }
function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
function _assertThisInitialized(self) |
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
var PickListSubList = /*#__PURE__*/function (_Component) {
_inherits(PickListSubList, _Component);
var _super = _createSuper(PickListSubList);
function PickListSubList() {
var _this;
_classCallCheck(this, PickListSubList);
_this = _super.call(this);
_this.onItemClick = _this.onItemClick.bind(_assertThisInitialized(_this));
_this.onItemKeyDown = _this.onItemKeyDown.bind(_assertThisInitialized(_this));
return _this;
}
_createClass(PickListSubList, [{
key: "onItemClick",
value: function onItemClick(event) {
var originalEvent = event.originalEvent;
var item = event.value;
var selection = _toConsumableArray(this.props.selection);
var index = _ObjectUtils.default.findIndexInList(item, selection);
var selected = index !== -1;
var metaSelection = this.props.metaKeySelection;
if (metaSelection) {
var metaKey = originalEvent.metaKey || originalEvent.ctrlKey;
if (selected && metaKey) {
selection.splice(index, 1);
} else {
if (!metaKey) {
selection.length = 0;
}
selection.push(item);
}
} else {
if (selected) selection.splice(index, 1);else selection.push(item);
}
if (this.props.onSelectionChange) {
this.props.onSelectionChange({
event: originalEvent,
value: selection
});
}
}
}, {
key: "onItemKeyDown",
value: function onItemKeyDown(event) {
var listItem = event.originalEvent.currentTarget;
switch (event.originalEvent.which) {
//down
case 40:
var nextItem = this.findNextItem(listItem);
if (nextItem) {
nextItem.focus();
}
event.originalEvent.preventDefault();
break;
//up
case 38:
var prevItem = this.findPrevItem(listItem);
if (prevItem) {
prevItem.focus();
}
event.originalEvent.preventDefault();
break;
//enter
case 13:
this.onItemClick(event);
event.originalEvent.preventDefault();
break;
default:
break;
}
}
}, {
key: "findNextItem",
value: function findNextItem(item) {
var nextItem = item.nextElementSibling;
if (nextItem) return !_DomHandler.default.hasClass(nextItem, 'p-picklist-item') ? this.findNextItem(nextItem) : nextItem;else return null;
}
}, {
key: "findPrevItem",
value: function findPrevItem(item) {
var prevItem = item.previousElementSibling;
if (prevItem) return !_DomHandler.default.hasClass(prevItem, 'p-picklist-item') ? this.findPrevItem(prevItem) : prevItem;else return null;
}
}, {
key: "isSelected",
value: function isSelected(item) {
return _ObjectUtils.default.findIndexInList(item, this.props.selection) !== -1;
}
}, {
key: "render",
value: function render() {
var _this2 = this;
var header = null;
var items = null;
var wrapperClassName = (0, _classnames.default)('p-picklist-listwrapper', this.props.className, {
'p-picklist-listwrapper-nocontrols': !this.props.showControls
});
var listClassName = (0, _classnames.default)('p-picklist-list', this.props.listClassName);
if (this.props.header) {
header = /*#__PURE__*/_react.default.createElement("div", {
className: "p-picklist-caption"
}, this.props.header);
}
if (this.props.list) {
items = this.props.list.map(function (item, i) {
return /*#__PURE__*/_react.default.createElement(_PickListItem.PickListItem, {
key: JSON.stringify(item),
value: item,
template: _this2.props.itemTemplate,
selected: _this2.isSelected(item),
onClick: _this2.onItemClick,
onKeyDown: _this2.onItemKeyDown,
tabIndex: _this2.props.tabIndex
});
});
}
return /*#__PURE__*/_react.default.createElement("div", {
className: wrapperClassName
}, header, /*#__PURE__*/_react.default.createElement("ul", {
className: listClassName,
style: this.props.style,
role: "listbox",
"aria-multiselectable": true
}, items));
}
}]);
return PickListSubList;
}(_react.Component);
exports.PickListSubList = PickListSubList;
_defineProperty(PickListSubList, "defaultProps", {
list: null,
selection: null,
header: null,
className: null,
listClassName: null,
style: null,
showControls: true,
metaKeySelection: true,
tabIndex: null,
itemTemplate: null,
onItemClick: null,
onSelectionChange: null
});
_defineProperty(PickListSubList, "propTypes", {
list: _propTypes.default.array,
selection: _propTypes.default.array,
header: _propTypes.default.string,
className: _propTypes.default.string,
listClassName: _propTypes.default.string,
style: _propTypes.default.object,
showControls: _propTypes.default.bool,
metaKeySelection: _propTypes.default.bool,
tabIndex: _propTypes.default.string,
itemTemplate: _propTypes.default.func,
onItemClick: _propTypes.default.func,
onSelectionChange: _propTypes.default.func
}); | { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } | identifier_body |
PickListSubList.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.PickListSubList = void 0;
var _classnames = _interopRequireDefault(require("classnames"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _react = _interopRequireWildcard(require("react"));
var _ObjectUtils = _interopRequireDefault(require("../utils/ObjectUtils"));
var _PickListItem = require("./PickListItem");
var _DomHandler = _interopRequireDefault(require("../utils/DomHandler"));
|
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); }
function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(n); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && Symbol.iterator in Object(iter)) return Array.from(iter); }
function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); }
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
function _createSuper(Derived) { return function () { var Super = _getPrototypeOf(Derived), result; if (_isNativeReflectConstruct()) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }
function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
var PickListSubList = /*#__PURE__*/function (_Component) {
_inherits(PickListSubList, _Component);
var _super = _createSuper(PickListSubList);
function PickListSubList() {
var _this;
_classCallCheck(this, PickListSubList);
_this = _super.call(this);
_this.onItemClick = _this.onItemClick.bind(_assertThisInitialized(_this));
_this.onItemKeyDown = _this.onItemKeyDown.bind(_assertThisInitialized(_this));
return _this;
}
_createClass(PickListSubList, [{
key: "onItemClick",
value: function onItemClick(event) {
var originalEvent = event.originalEvent;
var item = event.value;
var selection = _toConsumableArray(this.props.selection);
var index = _ObjectUtils.default.findIndexInList(item, selection);
var selected = index !== -1;
var metaSelection = this.props.metaKeySelection;
if (metaSelection) {
var metaKey = originalEvent.metaKey || originalEvent.ctrlKey;
if (selected && metaKey) {
selection.splice(index, 1);
} else {
if (!metaKey) {
selection.length = 0;
}
selection.push(item);
}
} else {
if (selected) selection.splice(index, 1);else selection.push(item);
}
if (this.props.onSelectionChange) {
this.props.onSelectionChange({
event: originalEvent,
value: selection
});
}
}
}, {
key: "onItemKeyDown",
value: function onItemKeyDown(event) {
var listItem = event.originalEvent.currentTarget;
switch (event.originalEvent.which) {
//down
case 40:
var nextItem = this.findNextItem(listItem);
if (nextItem) {
nextItem.focus();
}
event.originalEvent.preventDefault();
break;
//up
case 38:
var prevItem = this.findPrevItem(listItem);
if (prevItem) {
prevItem.focus();
}
event.originalEvent.preventDefault();
break;
//enter
case 13:
this.onItemClick(event);
event.originalEvent.preventDefault();
break;
default:
break;
}
}
}, {
key: "findNextItem",
value: function findNextItem(item) {
var nextItem = item.nextElementSibling;
if (nextItem) return !_DomHandler.default.hasClass(nextItem, 'p-picklist-item') ? this.findNextItem(nextItem) : nextItem;else return null;
}
}, {
key: "findPrevItem",
value: function findPrevItem(item) {
var prevItem = item.previousElementSibling;
if (prevItem) return !_DomHandler.default.hasClass(prevItem, 'p-picklist-item') ? this.findPrevItem(prevItem) : prevItem;else return null;
}
}, {
key: "isSelected",
value: function isSelected(item) {
return _ObjectUtils.default.findIndexInList(item, this.props.selection) !== -1;
}
}, {
key: "render",
value: function render() {
var _this2 = this;
var header = null;
var items = null;
var wrapperClassName = (0, _classnames.default)('p-picklist-listwrapper', this.props.className, {
'p-picklist-listwrapper-nocontrols': !this.props.showControls
});
var listClassName = (0, _classnames.default)('p-picklist-list', this.props.listClassName);
if (this.props.header) {
header = /*#__PURE__*/_react.default.createElement("div", {
className: "p-picklist-caption"
}, this.props.header);
}
if (this.props.list) {
items = this.props.list.map(function (item, i) {
return /*#__PURE__*/_react.default.createElement(_PickListItem.PickListItem, {
key: JSON.stringify(item),
value: item,
template: _this2.props.itemTemplate,
selected: _this2.isSelected(item),
onClick: _this2.onItemClick,
onKeyDown: _this2.onItemKeyDown,
tabIndex: _this2.props.tabIndex
});
});
}
return /*#__PURE__*/_react.default.createElement("div", {
className: wrapperClassName
}, header, /*#__PURE__*/_react.default.createElement("ul", {
className: listClassName,
style: this.props.style,
role: "listbox",
"aria-multiselectable": true
}, items));
}
}]);
return PickListSubList;
}(_react.Component);
exports.PickListSubList = PickListSubList;
_defineProperty(PickListSubList, "defaultProps", {
list: null,
selection: null,
header: null,
className: null,
listClassName: null,
style: null,
showControls: true,
metaKeySelection: true,
tabIndex: null,
itemTemplate: null,
onItemClick: null,
onSelectionChange: null
});
_defineProperty(PickListSubList, "propTypes", {
list: _propTypes.default.array,
selection: _propTypes.default.array,
header: _propTypes.default.string,
className: _propTypes.default.string,
listClassName: _propTypes.default.string,
style: _propTypes.default.object,
showControls: _propTypes.default.bool,
metaKeySelection: _propTypes.default.bool,
tabIndex: _propTypes.default.string,
itemTemplate: _propTypes.default.func,
onItemClick: _propTypes.default.func,
onSelectionChange: _propTypes.default.func
}); | function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function _getRequireWildcardCache() { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || _typeof(obj) !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | random_line_split |
PickListSubList.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.PickListSubList = void 0;
var _classnames = _interopRequireDefault(require("classnames"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _react = _interopRequireWildcard(require("react"));
var _ObjectUtils = _interopRequireDefault(require("../utils/ObjectUtils"));
var _PickListItem = require("./PickListItem");
var _DomHandler = _interopRequireDefault(require("../utils/DomHandler"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function _getRequireWildcardCache() { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || _typeof(obj) !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
function | (arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); }
function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(n); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && Symbol.iterator in Object(iter)) return Array.from(iter); }
function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); }
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
function _createSuper(Derived) { return function () { var Super = _getPrototypeOf(Derived), result; if (_isNativeReflectConstruct()) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }
function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
var PickListSubList = /*#__PURE__*/function (_Component) {
_inherits(PickListSubList, _Component);
var _super = _createSuper(PickListSubList);
function PickListSubList() {
var _this;
_classCallCheck(this, PickListSubList);
_this = _super.call(this);
_this.onItemClick = _this.onItemClick.bind(_assertThisInitialized(_this));
_this.onItemKeyDown = _this.onItemKeyDown.bind(_assertThisInitialized(_this));
return _this;
}
_createClass(PickListSubList, [{
key: "onItemClick",
value: function onItemClick(event) {
var originalEvent = event.originalEvent;
var item = event.value;
var selection = _toConsumableArray(this.props.selection);
var index = _ObjectUtils.default.findIndexInList(item, selection);
var selected = index !== -1;
var metaSelection = this.props.metaKeySelection;
if (metaSelection) {
var metaKey = originalEvent.metaKey || originalEvent.ctrlKey;
if (selected && metaKey) {
selection.splice(index, 1);
} else {
if (!metaKey) {
selection.length = 0;
}
selection.push(item);
}
} else {
if (selected) selection.splice(index, 1);else selection.push(item);
}
if (this.props.onSelectionChange) {
this.props.onSelectionChange({
event: originalEvent,
value: selection
});
}
}
}, {
key: "onItemKeyDown",
value: function onItemKeyDown(event) {
var listItem = event.originalEvent.currentTarget;
switch (event.originalEvent.which) {
//down
case 40:
var nextItem = this.findNextItem(listItem);
if (nextItem) {
nextItem.focus();
}
event.originalEvent.preventDefault();
break;
//up
case 38:
var prevItem = this.findPrevItem(listItem);
if (prevItem) {
prevItem.focus();
}
event.originalEvent.preventDefault();
break;
//enter
case 13:
this.onItemClick(event);
event.originalEvent.preventDefault();
break;
default:
break;
}
}
}, {
key: "findNextItem",
value: function findNextItem(item) {
var nextItem = item.nextElementSibling;
if (nextItem) return !_DomHandler.default.hasClass(nextItem, 'p-picklist-item') ? this.findNextItem(nextItem) : nextItem;else return null;
}
}, {
key: "findPrevItem",
value: function findPrevItem(item) {
var prevItem = item.previousElementSibling;
if (prevItem) return !_DomHandler.default.hasClass(prevItem, 'p-picklist-item') ? this.findPrevItem(prevItem) : prevItem;else return null;
}
}, {
key: "isSelected",
value: function isSelected(item) {
return _ObjectUtils.default.findIndexInList(item, this.props.selection) !== -1;
}
}, {
key: "render",
value: function render() {
var _this2 = this;
var header = null;
var items = null;
var wrapperClassName = (0, _classnames.default)('p-picklist-listwrapper', this.props.className, {
'p-picklist-listwrapper-nocontrols': !this.props.showControls
});
var listClassName = (0, _classnames.default)('p-picklist-list', this.props.listClassName);
if (this.props.header) {
header = /*#__PURE__*/_react.default.createElement("div", {
className: "p-picklist-caption"
}, this.props.header);
}
if (this.props.list) {
items = this.props.list.map(function (item, i) {
return /*#__PURE__*/_react.default.createElement(_PickListItem.PickListItem, {
key: JSON.stringify(item),
value: item,
template: _this2.props.itemTemplate,
selected: _this2.isSelected(item),
onClick: _this2.onItemClick,
onKeyDown: _this2.onItemKeyDown,
tabIndex: _this2.props.tabIndex
});
});
}
return /*#__PURE__*/_react.default.createElement("div", {
className: wrapperClassName
}, header, /*#__PURE__*/_react.default.createElement("ul", {
className: listClassName,
style: this.props.style,
role: "listbox",
"aria-multiselectable": true
}, items));
}
}]);
return PickListSubList;
}(_react.Component);
exports.PickListSubList = PickListSubList;
_defineProperty(PickListSubList, "defaultProps", {
list: null,
selection: null,
header: null,
className: null,
listClassName: null,
style: null,
showControls: true,
metaKeySelection: true,
tabIndex: null,
itemTemplate: null,
onItemClick: null,
onSelectionChange: null
});
_defineProperty(PickListSubList, "propTypes", {
list: _propTypes.default.array,
selection: _propTypes.default.array,
header: _propTypes.default.string,
className: _propTypes.default.string,
listClassName: _propTypes.default.string,
style: _propTypes.default.object,
showControls: _propTypes.default.bool,
metaKeySelection: _propTypes.default.bool,
tabIndex: _propTypes.default.string,
itemTemplate: _propTypes.default.func,
onItemClick: _propTypes.default.func,
onSelectionChange: _propTypes.default.func
}); | _toConsumableArray | identifier_name |
cholesky_op_test.py | """Tests for tensorflow.ops.tf.Cholesky."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
class CholeskyOpTest(tf.test.TestCase):
def _verifyCholesky(self, x):
|
def testBasic(self):
self._verifyCholesky(np.array([[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]))
def testBatch(self):
simple_array = np.array([[[1., 0.], [0., 5.]]]) # shape (1, 2, 2)
self._verifyCholesky(simple_array)
self._verifyCholesky(np.vstack((simple_array, simple_array)))
odd_sized_array = np.array([[[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]])
self._verifyCholesky(np.vstack((odd_sized_array, odd_sized_array)))
# Generate random positive-definite matrices.
matrices = np.random.rand(10, 5, 5)
for i in xrange(10):
matrices[i] = np.dot(matrices[i].T, matrices[i])
self._verifyCholesky(matrices)
def testNonSquareMatrix(self):
with self.assertRaises(ValueError):
tf.cholesky(np.array([[1., 2., 3.], [3., 4., 5.]]))
def testWrongDimensions(self):
tensor3 = tf.constant([1., 2.])
with self.assertRaises(ValueError):
tf.cholesky(tensor3)
def testNotInvertible(self):
# The input should be invertible.
with self.test_session():
with self.assertRaisesOpError("LLT decomposition was not successful. The "
"input might not be valid."):
# All rows of the matrix below add to zero
self._verifyCholesky(np.array([[1., -1., 0.], [-1., 1., -1.], [0., -1.,
1.]]))
def testEmpty(self):
self._verifyCholesky(np.empty([0, 2, 2]))
self._verifyCholesky(np.empty([2, 0, 0]))
if __name__ == "__main__":
tf.test.main()
| with self.test_session() as sess:
# Verify that LL^T == x.
if x.ndim == 2:
chol = tf.cholesky(x)
verification = tf.matmul(chol,
chol,
transpose_a=False,
transpose_b=True)
else:
chol = tf.batch_cholesky(x)
verification = tf.batch_matmul(chol, chol, adj_x=False, adj_y=True)
chol_np, verification_np = sess.run([chol, verification])
self.assertAllClose(x, verification_np)
self.assertShapeEqual(x, chol)
# Check that the cholesky is lower triangular, and has positive diagonal
# elements.
if chol_np.shape[-1] > 0:
chol_reshaped = np.reshape(chol_np, (-1, chol_np.shape[-2],
chol_np.shape[-1]))
for chol_matrix in chol_reshaped:
self.assertAllClose(chol_matrix, np.tril(chol_matrix))
self.assertTrue((np.diag(chol_matrix) > 0.0).all()) | identifier_body |
cholesky_op_test.py | """Tests for tensorflow.ops.tf.Cholesky."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
class CholeskyOpTest(tf.test.TestCase):
def _verifyCholesky(self, x):
with self.test_session() as sess:
# Verify that LL^T == x.
if x.ndim == 2:
chol = tf.cholesky(x)
verification = tf.matmul(chol,
chol,
transpose_a=False,
transpose_b=True)
else:
chol = tf.batch_cholesky(x)
verification = tf.batch_matmul(chol, chol, adj_x=False, adj_y=True)
chol_np, verification_np = sess.run([chol, verification])
self.assertAllClose(x, verification_np)
self.assertShapeEqual(x, chol)
# Check that the cholesky is lower triangular, and has positive diagonal
# elements.
if chol_np.shape[-1] > 0:
chol_reshaped = np.reshape(chol_np, (-1, chol_np.shape[-2],
chol_np.shape[-1]))
for chol_matrix in chol_reshaped:
self.assertAllClose(chol_matrix, np.tril(chol_matrix))
self.assertTrue((np.diag(chol_matrix) > 0.0).all())
def testBasic(self):
self._verifyCholesky(np.array([[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]))
def | (self):
simple_array = np.array([[[1., 0.], [0., 5.]]]) # shape (1, 2, 2)
self._verifyCholesky(simple_array)
self._verifyCholesky(np.vstack((simple_array, simple_array)))
odd_sized_array = np.array([[[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]])
self._verifyCholesky(np.vstack((odd_sized_array, odd_sized_array)))
# Generate random positive-definite matrices.
matrices = np.random.rand(10, 5, 5)
for i in xrange(10):
matrices[i] = np.dot(matrices[i].T, matrices[i])
self._verifyCholesky(matrices)
def testNonSquareMatrix(self):
with self.assertRaises(ValueError):
tf.cholesky(np.array([[1., 2., 3.], [3., 4., 5.]]))
def testWrongDimensions(self):
tensor3 = tf.constant([1., 2.])
with self.assertRaises(ValueError):
tf.cholesky(tensor3)
def testNotInvertible(self):
# The input should be invertible.
with self.test_session():
with self.assertRaisesOpError("LLT decomposition was not successful. The "
"input might not be valid."):
# All rows of the matrix below add to zero
self._verifyCholesky(np.array([[1., -1., 0.], [-1., 1., -1.], [0., -1.,
1.]]))
def testEmpty(self):
self._verifyCholesky(np.empty([0, 2, 2]))
self._verifyCholesky(np.empty([2, 0, 0]))
if __name__ == "__main__":
tf.test.main()
| testBatch | identifier_name |
cholesky_op_test.py | """Tests for tensorflow.ops.tf.Cholesky."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
class CholeskyOpTest(tf.test.TestCase):
def _verifyCholesky(self, x):
with self.test_session() as sess:
# Verify that LL^T == x.
if x.ndim == 2:
chol = tf.cholesky(x)
verification = tf.matmul(chol,
chol,
transpose_a=False,
transpose_b=True)
else:
chol = tf.batch_cholesky(x)
verification = tf.batch_matmul(chol, chol, adj_x=False, adj_y=True)
chol_np, verification_np = sess.run([chol, verification])
self.assertAllClose(x, verification_np)
self.assertShapeEqual(x, chol)
# Check that the cholesky is lower triangular, and has positive diagonal
# elements.
if chol_np.shape[-1] > 0:
chol_reshaped = np.reshape(chol_np, (-1, chol_np.shape[-2],
chol_np.shape[-1]))
for chol_matrix in chol_reshaped:
self.assertAllClose(chol_matrix, np.tril(chol_matrix))
self.assertTrue((np.diag(chol_matrix) > 0.0).all())
def testBasic(self):
self._verifyCholesky(np.array([[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]))
def testBatch(self):
simple_array = np.array([[[1., 0.], [0., 5.]]]) # shape (1, 2, 2)
self._verifyCholesky(simple_array)
self._verifyCholesky(np.vstack((simple_array, simple_array)))
odd_sized_array = np.array([[[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]])
self._verifyCholesky(np.vstack((odd_sized_array, odd_sized_array)))
# Generate random positive-definite matrices.
matrices = np.random.rand(10, 5, 5)
for i in xrange(10):
matrices[i] = np.dot(matrices[i].T, matrices[i])
self._verifyCholesky(matrices)
def testNonSquareMatrix(self):
with self.assertRaises(ValueError):
tf.cholesky(np.array([[1., 2., 3.], [3., 4., 5.]]))
def testWrongDimensions(self):
tensor3 = tf.constant([1., 2.])
with self.assertRaises(ValueError):
tf.cholesky(tensor3)
def testNotInvertible(self): | # The input should be invertible.
with self.test_session():
with self.assertRaisesOpError("LLT decomposition was not successful. The "
"input might not be valid."):
# All rows of the matrix below add to zero
self._verifyCholesky(np.array([[1., -1., 0.], [-1., 1., -1.], [0., -1.,
1.]]))
def testEmpty(self):
self._verifyCholesky(np.empty([0, 2, 2]))
self._verifyCholesky(np.empty([2, 0, 0]))
if __name__ == "__main__":
tf.test.main() | random_line_split |
|
cholesky_op_test.py | """Tests for tensorflow.ops.tf.Cholesky."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
class CholeskyOpTest(tf.test.TestCase):
def _verifyCholesky(self, x):
with self.test_session() as sess:
# Verify that LL^T == x.
if x.ndim == 2:
chol = tf.cholesky(x)
verification = tf.matmul(chol,
chol,
transpose_a=False,
transpose_b=True)
else:
chol = tf.batch_cholesky(x)
verification = tf.batch_matmul(chol, chol, adj_x=False, adj_y=True)
chol_np, verification_np = sess.run([chol, verification])
self.assertAllClose(x, verification_np)
self.assertShapeEqual(x, chol)
# Check that the cholesky is lower triangular, and has positive diagonal
# elements.
if chol_np.shape[-1] > 0:
|
def testBasic(self):
self._verifyCholesky(np.array([[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]))
def testBatch(self):
simple_array = np.array([[[1., 0.], [0., 5.]]]) # shape (1, 2, 2)
self._verifyCholesky(simple_array)
self._verifyCholesky(np.vstack((simple_array, simple_array)))
odd_sized_array = np.array([[[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]])
self._verifyCholesky(np.vstack((odd_sized_array, odd_sized_array)))
# Generate random positive-definite matrices.
matrices = np.random.rand(10, 5, 5)
for i in xrange(10):
matrices[i] = np.dot(matrices[i].T, matrices[i])
self._verifyCholesky(matrices)
def testNonSquareMatrix(self):
with self.assertRaises(ValueError):
tf.cholesky(np.array([[1., 2., 3.], [3., 4., 5.]]))
def testWrongDimensions(self):
tensor3 = tf.constant([1., 2.])
with self.assertRaises(ValueError):
tf.cholesky(tensor3)
def testNotInvertible(self):
# The input should be invertible.
with self.test_session():
with self.assertRaisesOpError("LLT decomposition was not successful. The "
"input might not be valid."):
# All rows of the matrix below add to zero
self._verifyCholesky(np.array([[1., -1., 0.], [-1., 1., -1.], [0., -1.,
1.]]))
def testEmpty(self):
self._verifyCholesky(np.empty([0, 2, 2]))
self._verifyCholesky(np.empty([2, 0, 0]))
if __name__ == "__main__":
tf.test.main()
| chol_reshaped = np.reshape(chol_np, (-1, chol_np.shape[-2],
chol_np.shape[-1]))
for chol_matrix in chol_reshaped:
self.assertAllClose(chol_matrix, np.tril(chol_matrix))
self.assertTrue((np.diag(chol_matrix) > 0.0).all()) | conditional_block |
util.js | /**
* utils
*
* @namespace mix.util
*
* @author Yang,junlong at 2016-07-28 20:27:54 build.
* @version $Id$
*/
var fs = require('fs');
var url = require('url');
var pth = require('path');
var util = require('util');
//var iconv = require('iconv-lite');
var crypto = require('crypto');
var PLATFORM = process.platform;
var ISWIN = PLATFORM.indexOf('win') === 0;
/**
* text file exts
*
* @type <Array>
*/
var TEXT_FILE_EXTS = [
'css', 'tpl', 'js', 'php',
'txt', 'json', 'xml', 'htm',
'text', 'xhtml', 'html', 'md',
'conf', 'po', 'config', 'tmpl',
'coffee', 'less', 'sass', 'jsp',
'scss', 'manifest', 'bak', 'asp',
'tmp', 'haml', 'jade', 'aspx',
'ashx', 'java', 'py', 'c', 'cpp',
'h', 'cshtml', 'asax', 'master',
'ascx', 'cs', 'ftl', 'vm', 'ejs',
'styl', 'jsx', 'handlebars'
];
/**
* image file exts
*
* @type <Array>
*/
var IMAGE_FILE_EXTS = [
'svg', 'tif', 'tiff', 'wbmp',
'png', 'bmp', 'fax', 'gif',
'ico', 'jfif', 'jpe', 'jpeg',
'jpg', 'woff', 'cur', 'webp',
'swf', 'ttf', 'eot', 'woff2'
];
/**
* mime types
*
* @type <Object>
*/
var MIME_TYPES = {
//text
'css': 'text/css',
'tpl': 'text/html',
'js': 'text/javascript',
'jsx': 'text/javascript',
'php': 'text/html',
'asp': 'text/html',
'jsp': 'text/jsp',
'txt': 'text/plain',
'json': 'application/json',
'xml': 'text/xml',
'htm': 'text/html',
'text': 'text/plain',
'md': 'text/plain',
'xhtml': 'text/html',
'html': 'text/html',
'conf': 'text/plain',
'po': 'text/plain',
'config': 'text/plain',
'coffee': 'text/javascript',
'less': 'text/css',
'sass': 'text/css',
'scss': 'text/css',
'styl': 'text/css',
'manifest': 'text/cache-manifest',
//image
'svg': 'image/svg+xml',
'tif': 'image/tiff',
'tiff': 'image/tiff',
'wbmp': 'image/vnd.wap.wbmp',
'webp': 'image/webp',
'png': 'image/png',
'bmp': 'image/bmp',
'fax': 'image/fax',
'gif': 'image/gif',
'ico': 'image/x-icon',
'jfif': 'image/jpeg',
'jpg': 'image/jpeg',
'jpe': 'image/jpeg',
'jpeg': 'image/jpeg',
'eot': 'application/vnd.ms-fontobject',
'woff': 'application/font-woff',
'woff2': 'application/font-woff',
'ttf': 'application/octet-stream',
'cur': 'application/octet-stream'
};
/**
* 通用唯一识别码 (Universally Unique Identifier)
*
* @return string
*/
exports.uuid = function () {
var _uuid = [],
_stra = "0123456789ABCDEF".split('');
for (var i = 0; i < 36; i++){
_uuid[i] = Math.floor(Math.random() * 16);
}
_uuid[14] = 4;
_uuid[19] = (_uuid[19] & 3) | 8;
for (i = 0; i < 36; i++) {
_uuid[i] = _stra[_uuid[i]];
}
_uuid[8] = _uuid[13] = _uuid[18] = _uuid[23] = '-';
return _uuid.join('');
};
/**
* md5 crypto
*
* @param <String> | <Binary> data
* @param <Number> len
* @return <String>
*/
exports.md5 = function(data, len) {
var md5sum = crypto.createHash('md5');
var encoding = typeof data === 'string' ? 'utf8' : 'binary';
md5sum.update(data, encoding);
len = len || mix.config.get('project.md5Length', 7);
return md5sum.digest('hex').substring(0, len);
};
/**
* base64 encode
*
* @param <Buffer | Array | String> data
* @return <String>
*/
exports.base64 = function(data) {
if(data instanceof Buffer){
//do nothing for quickly determining.
} else if(data instanceof Array){
data = new Buffer(data);
} else {
//convert to string.
data = new Buffer(String(data || ''));
}
return data.toString('base64');
};
exports.map = function(obj, callback, scope) {
if (obj.length === +obj.length) {
for (var i = 0, l = obj.length; i < l; i++) {
if (callback.call(scope, obj[i], i, obj) === false) {
return;
}
}
} else {
for (var key in obj) {
if (obj.hasOwnProperty(key)) {
if (callback.call(scope, obj[key], key, obj) === false) {
return;
}
}
}
}
};
/**
* camel format
*
* @usage
* aaa-bbb_ccc ddd -> AaaBbbCccDdd
*
* @param {String} string
* @return {String}
*/
exports.camelcase = (function(){
var list = {};
return function(string){
var result = '';
if(string){
string.split(/[-_ ]+/).forEach(function(ele){
result += ele[0].toUpperCase() + ele.substring(1);
});
} else {
result = string;
}
return list[string] || (list[string] = result);
}
})();
/**
* 数据类型判断
*
* @param <Mixed> source
* @param <String> type
* @return <Boolean>
*/
exports.is = function (source, type) {
type = exports.camelcase(type);
return toString.call(source) === '[object ' + type + ']';
};
// 简单的浅拷贝, 覆盖已经存在的属性
exports.extend = function (destination) {
if (!destination) {
return;
}
var args = Array.prototype.slice.call(arguments, 1);
for (i = 0, l = args.length; i < l; i++) {
var source = args[i];
for(var property in source){
var value = source[property];
if (value !== undefined){
destination[property] = value;
}
}
}
return destination;
};
/**
* print object to terminal
*
* @param <Object> object
* @param <String> prefix
* @return <Void>
*/
exports.print = function (object, prefix) {
prefix = prefix || '';
for(var key in object){
if(object.hasOwnProperty(key)){
if(typeof object[key] === 'object'){
arguments.callee(object[key], prefix + key + '.');
} else {
console.log(prefix + key + '=' + object[key]);
}
}
}
}
/**
* hostname & ip address
*
* @return <String>
*/
exports.hostname = function () {
var net = require('os').networkInterfaces();
for(var key in net){
if(net.hasOwnProperty(key)){
var details = net[key];
if(details && details.length){
for(var i = 0, len = details.length; i < len; i++){
var ip = String(details[i].address).trim();
if(ip && /^\d+(?:\.\d+){3}$/.test(ip) && ip !== '127.0.0.1'){
return ip;
}
}
}
}
}
return '127.0.0.1';
};
/**
* if text file
*
* @param <String> file
* @return <Boolean>
*/
exports.isTxt = function(file) {
return fileTypeReg('text').test(file || '');
};
/**
* if image file
*
* @param <String> file
* @return <Boolean>
*/
exports.isImg = function(file) {
return fileTypeReg('image').test(file || '');
};
/**
* if platform windows
*
* @return <Boolean>
*/
exports.isWin = function() {
return ISWIN;
}
/**
* if buffer utf8 charset
*
* @param <Buffer> bytes
* @return <Boolean>
*/
exports.isUtf8 = function(bytes) {
var i = 0;
while(i < bytes.length) {
if((// ASCII
0x00 <= bytes[i] && bytes[i] <= 0x7F
)) {
i += 1;
continue;
}
if((// non-overlong 2-byte
(0xC2 <= bytes[i] && bytes[i] <= 0xDF) &&
(0x80 <= bytes[i+1] && bytes[i+1] <= 0xBF)
)) {
i += 2;
continue;
}
if(
(// excluding overlongs
bytes[i] == 0xE0 &&
(0xA0 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF)
) || (// straight 3-byte
((0xE1 <= bytes[i] && bytes[i] <= 0xEC) ||
bytes[i] == 0xEE ||
bytes[i] == 0xEF) &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF)
) || (// excluding surrogates
bytes[i] == 0xED &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0x9F) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF)
)
) {
i += 3;
continue;
}
if(
(// planes 1-3
bytes[i] == 0xF0 &&
(0x90 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF) &&
(0x80 <= bytes[i + 3] && bytes[i + 3] <= 0xBF)
) || (// planes 4-15
(0xF1 <= bytes[i] && bytes[i] <= 0xF3) &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF) &&
(0x80 <= bytes[i + 3] && bytes[i + 3] <= 0xBF)
) || (// plane 16
bytes[i] == 0xF4 &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0x8F) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF) &&
(0x80 <= bytes[i + 3] && bytes[i + 3] <= 0xBF)
)
) {
i += 4;
continue;
}
return false;
}
return true;
};
/**
* if path is file
*
* @param <String> path
* @return <Boolean>
*/
exports.isFile = function(path) {
return exports.exists(path) && fs.statSync(path).isFile();
};
/**
* if path is dir
*
* @param <String> path
* @return {Boolean}
*/
exports.isDir = function(path) {
return exports.exists(path) && fs.statSync(path).isDirectory();
};
/**
* adapt buffer charset
*
* @param <Buffer> buffer
* @return <Buffer>
*/
exports.buffer = function(buffer) {
if(exports.isUtf8(buffer)) {
buffer = buffer.toString('utf8');
if (buffer.charCodeAt(0) === 0xFEFF) {
buffer = buffer.substring(1);
}
} else {
buffer = iconv.decode(buffer, 'gbk');
}
return buffer;
};
/**
* reads the entire contents of a file
*
* If the encoding option is specified then this function returns a string.
* Otherwise it returns a buffer.
*
* @param <String> file
* @param <Boolean> convert
* @return <String> | <Buffer>
*/
exports.read = function(file, convert) {
var content = false;
if(exports.exists(file)) {
content = fs.readFileSync(file);
if(convert || exports.isTxt(file)) {
content = exports.buffer(content);
}
}
return content;
};
/**
* read file to json
*
* @param <String> path
* @return <Object>
*/
exports.readJSON = (function(){
var cache = {};
return function(path) {
if(cache[path]) {
return cache[path];
}
var json = exports.read(path, true);
var result = {};
if(!json) {
return false;
}
try {
result = JSON.parse(json);
} catch(e){
mix.log.error('parse json file[' + path + '] fail, error [' + e.message + ']');
}
return (cache[path] = result);
}
})();
/**
* Makes directory
*
* @param <String> path
* @param <Integer> mode
* @return <undefined>
*/
exports.mkdir = function(path, mode) {
var exists = exports.exists;
if(exists(path)) {
return;
}
path.split('/').reduce(function(prev, next) {
if(prev && !exists(prev)) {
fs.mkdirSync(prev, mode);
}
return prev + '/' + next;
});
if(!exists(path)) {
fs.mkdirSync(path, mode);
}
};
/**
* write data to a file,
* replacing the file if it already exists.
* data can be a string or a buffer
*
* @param <String> | <Buffer> | <Integer> file
* @param <String> | <Buffer> data
* @param <Object> | <String> options
* @param <Boolean> append
* @return <undefined>
*/
exports.write = function(file, data, options, append) {
if(!exports.exists(file)){
exports.mkdir(exports.pathinfo(file).dirname);
}
if(append) {
fs.appendFileSync(file, data, options);
} else {
fs.writeFileSync(file, data, options);
}
};
/**
* Gets file modification time
*
* @param <String> path
* @return <Timestamp>
*/
exports.mtime = function(path) {
var time = 0;
if(exports.exists(path)){
time = fs.statSync(path).mtime;
}
return time;
};
/**
* Touch a file
*
* @param <String> path
* @param <timestamp> mtime
* @return <undefined>
*/
exports.touch = function(path, mtime) {
if(!exports.exists(path)){
exports.write(path, '');
}
if(mtime instanceof Date){
//do nothing for quickly determining.
} else if(typeof mtime === 'number') {
var time = new Date();
time.setTime(mtime);
mtime = time;
} else {
mix.log.error('invalid argument [mtime]');
}
fs.utimesSync(path, mtime, mtime);
};
exports.del = function(rPath, include, exclude){
var removedAll = true;
var path;
if(rPath && exports.exists(rPath)) {
var stat = fs.lstatSync(rPath);
var isFile = stat.isFile() || stat.isSymbolicLink();
if (stat.isSymbolicLink()) {
path = rPath;
} else {
path = exports.realpath(rPath);
}
if(/^(?:\w:)?\/$/.test(path)){
mix.log.error('unable to delete directory [' + rPath + '].');
}
if(stat.isDirectory()){
fs.readdirSync(path).forEach(function(name){
if(name != '.' && name != '..') {
removedAll = exports.del(path + '/' + name, include, exclude) && removedAll;
}
});
if(removedAll) {
fs.rmdirSync(path);
}
} else if(isFile && exports.filter(path, include, exclude)) {
fs.unlinkSync(path);
} else {
removedAll = false;
}
} else {
mix.log.error('unable to delete [' + rPath + ']: No such file or directory.');
}
return removedAll;
},
/**
* Node.js file system
*
* @type <Object>
*/
exports.fs = fs;
/**
* Test whether or not the given path exists by checking with the file system.
*
* @param <String> | <Buffer> path
* @return <Boolean> Returns true if the file exists, false otherwise.
*/
exports.exists = fs.existsSync;
/**
* Create path by arguments.
*
* @param <String> | <Array> path
* @return <String>
*/
exports.path = function(path) {
var type = typeof path;
if(arguments.length > 1) {
path = Array.prototype.join.call(arguments, '/');
} else if(type === 'string') {
// do nothing for quickly determining.
} else if(type === 'object') {
// array object
path = Array.prototype.join.call(path, '/');
} else if(type === 'undefined') {
path = '';
}
if(path){
path = pth.normalize(path.replace(/[\/\\]+/g, '/')).replace(/\\/g, '/');
if(path !== '/'){
path = path.replace(/\/$/, '');
}
}
return path;
};
/**
* Only paths that can be converted to UTF8 strings are supported.
*
* @param {String} path [description]
* @param {Object} options [description]
* @return <String>|<False> Returns the resolved path.
*/
exports.realpath = function(path, options){
if(path && exports.exists(path)){
path = fs.realpathSync(path, options);
if(ISWIN){
path = path.replace(/\\/g, '/');
}
if(path !== '/'){
path = path.replace(/\/$/, '');
}
return path;
} else {
return false;
}
};
/**
* Returns a parent directory's path
*
* @param <String> path
* @return <String>
*/
exports.dirname = function(path) {
return pth.resolve(path, '..');
};
/**
* Returns an object whose properties represent significant elements of the path
*
* @param <String> path
* @return <Object>
*
* The returned object will have the following properties:
* {
* origin: './test.js?ddd=11',
* rest: './test',
* hash: '',
* query: '?ddd=11',
* fullname: './test.js',
* dirname: '.',
* ext: '.js',
* filename: 'test',
* basename: 'test.js'
* }
*/
exports.pathinfo = function(path) {
/**
* parse path by url.parse
*
* {
* protocol: null,
* slashes: null,
* auth: null,
* host: null,
* port: null,
* hostname: null,
* hash: null,
* search: '?dfad=121',
* query: 'dfad=121',
* pathname: './test.js',
* path: './test.js?dfad=121',
* href: './test.js?dfad=121'
* }
* @type {[type]}
*/
var urls = url.parse(path);
/**
* parse path by path.parse
*
* {
* root: '',
* dir: '.',
* base: 'test.js?dfad=121',
* ext: '.js?dfad=121',
* name: 'test'
* }
* @type <Object>
*/
var pths = pth.parse(urls.pathname || path);
// tobe output var
var origin = urls.path;
var root = pths.root;
var hash = urls.hash; | var dirname = pths.dir || '.';
var filename = pths.name;
var basename = (pths.base || '').replace(query, '');
var rest = dirname + '/' + filename;
var ext = (pths.ext || '').replace(query, '');
return {
'origin': origin,
'dirname': dirname,
'fullname': fullname,
'filename': filename,
'basename': basename,
'query': query,
'rest': rest,
'hash': hash,
'root': root,
'ext': ext
};
};
/**
* Escape special regular charset
*
* @param <String> str
* @return <String>
*/
exports.escapeReg = function(str) {
return str.replace(/[\.\\\+\*\?\[\^\]\$\(\){}=!<>\|:\/]/g, '\\$&');
};
/**
* Escape shell cmd
*
* @param <String> str
* @return <String>
*/
exports.escapeShellCmd = function(str){
return str.replace(/ /g, '"$&"');
};
exports.escapeShellArg = function(cmd){
return '"' + cmd + '"';
};
exports.stringQuote = function(str, quotes){
var info = {
origin : str,
rest : str = str.trim(),
quote : ''
};
if(str){
quotes = quotes || '\'"';
var strLen = str.length - 1;
for(var i = 0, len = quotes.length; i < len; i++){
var c = quotes[i];
if(str[0] === c && str[strLen] === c){
info.quote = c;
info.rest = str.substring(1, strLen);
break;
}
}
}
return info;
};
/**
* Replace var
*
* @return <String>
* @example
* replaceVar('/${namespace}/path/to/*.js', true);
* -> '/common/path/to/*.js'
*/
exports.replaceVar = function(value, escape){
return value.replace(/\$\{([^\}]+)\}/g, function(all, $1){
var val = mix.config.get($1) || 'test';
if(typeof val === 'undefined'){
mix.log.error('undefined property [' + $1 + '].');
} else {
return escape ? exports.escapeReg(val) : val;
}
return all;
});
};
/**
* Replace Matches
*
* @param <String> value
* @param <Array> matches
* @return <String>
* @example
* replaceMatches('/$1/path/to.js', ['aa', 'bb']);
* -> '/bb/path/to.js'
*/
exports.replaceMatches = function(value, matches) {
return value.replace(/\$(\d+|&)/g, function(all, $1){
var key = $1 === '&' ? '0' : $1;
return matches.hasOwnProperty(key) ? (matches[key] || '') : all;
});
};
/**
* Replace roadmap.path Props
* Extend Props to File
*
* @param <Object> source
* @param <Array> matches
* @param <Object> scope
* @return <Object>
* @example
* replaceProps(road.path[0], ['aa', 'bb', 'cc'], this);
*/
exports.replaceProps = function(source, matches, scope) {
var type = typeof source;
if(type === 'object'){
if(exports.is(source, 'Array')){
scope = scope || [];
} else {
scope = scope || {};
}
exports.map(source, function(value, key){
scope[key] = exports.replaceProps(value, matches);
});
return scope;
} else if(type === 'string'){
return exports.replaceVar(exports.replaceMatches(source, matches));
} else if(type === 'function'){
return source(scope, matches);
} else {
return source;
}
};
// check lib version
exports.matchVersion = function(str) {
var version = false;
var reg = /\b\d+(\.\d+){2}/;
var match = str.match(reg);
if(match){
version = match[0];
}
return version;
};
/**
* Filter path by include&exclude regular
*
* @param <String> str [description]
* @param <Regular> | <Array> include [description]
* @param <Regular> | <Array> exclude [description]
* @return <Boolean>
*/
exports.filter = function(str, include, exclude){
    // compile a string pattern as a glob; pass a RegExp through unchanged
    function normalize(pattern){
        var type = toString.call(pattern);
        switch (type){
            case '[object String]':
                return exports.glob(pattern);
            case '[object RegExp]':
                return pattern;
            default:
                mix.log.error('invalid regexp [' + pattern + '].');
        }
    }
    // true when `str` matches at least one pattern of `patterns`
    function match(str, patterns){
        var matched = false;
        if (!exports.is(patterns, 'Array')){
            patterns = [patterns];
        }
        patterns.every(function(pattern){
            if (!pattern){
                return true;
            }
            matched = matched || str.search(normalize(pattern)) > -1;
            // `every` is used for its short-circuit: stop once matched
            return !matched;
        });
        return matched;
    }
    var isInclude, isExclude;
    // no include list means "include everything"
    if (include) {
        isInclude = match(str, include);
    }else{
        isInclude = true;
    }
    if (exclude) {
        isExclude = match(str, exclude);
    }
    return isInclude && !isExclude;
};
/**
* match files using the pattern
*
* @see npm node-glob
*
* @param <Regular> pattern
* @param <String> str
* @return <Boolean> | <Regular>
*/
exports.glob = function(pattern, str){
    var sep = exports.escapeReg('/');
    // translate the glob into an anchored, case-insensitive RegExp:
    //   '/**/' -> any directory run, '**' -> anything,
    //   '*'    -> anything except '/', '?' -> one non-'/' character
    pattern = new RegExp('^' + sep + '?' +
        exports.escapeReg(
            pattern
                .replace(/\\/g, '/')
                .replace(/^\//, '')
        )
        .replace(new RegExp(sep + '\\*\\*' + sep, 'g'), sep + '.*(?:' + sep + ')?')
        .replace(new RegExp(sep + '\\*\\*', 'g'), sep + '.*')
        .replace(/\\\*\\\*/g, '.*')
        .replace(/\\\*/g, '[^' + sep + ']*')
        .replace(/\\\?/g, '[^' + sep + ']') + '$',
        'i'
    );
    // with a string argument test it; otherwise hand back the compiled RegExp
    if(typeof str === 'string'){
        return pattern.test(str);
    } else {
        return pattern;
    }
};
/**
* find project source(files)
*
* @param <String> path
* @param <String | Function> include
* @param <String | Function> exclude
* @param <String> root
* @param <Function> callback
* @return <Object>
*/
exports.find = function(rPath, include, exclude, root) {
    var args = Array.prototype.slice.call(arguments, 0);
    // an optional per-file callback rides in as the trailing argument
    var last = args[args.length-1];
    var list = [];
    var path = exports.realpath(rPath);
    // filtering is applied to the path relative to `root` when given
    var filterPath = root ? path.substring(root.length) : path;
    include = exports.is(include, 'function') ? undefined : include;
    exclude = exports.is(exclude, 'function') ? undefined : exclude;
    if(path){
        var stat = fs.statSync(path);
        if(stat.isDirectory() && (include || exports.filter(filterPath, include, exclude))){
            fs.readdirSync(path).forEach(function(p){
                // skip dot (hidden) entries
                if(p[0] != '.') {
                    list = list.concat(exports.find(path + '/' + p, include, exclude, root, last));
                }
            });
        } else if(stat.isFile() && exports.filter(filterPath, include, exclude)) {
            list.push(path);
            if(exports.is(last, 'function')) {
                last(path);
            }
        }
    } else {
        mix.log.error('unable to find [' + rPath + ']: No such file or directory.');
    }
    // results are returned sorted for deterministic ordering
    return list.sort();
};
/**
* File pipe process core impl
*
* @param <String> type
* @param <Function> callback
* @param <Function> | <String> def
* @return <undefined>
*/
exports.pipe = function(type, callback, def) {
    var processors = mix.config.get('modules.' + type, def);
    if(processors){
        var typeOf = typeof processors;
        // accept 'a, b, c' strings and bare functions as processor lists
        if(typeOf === 'string'){
            processors = processors.trim().split(/\s*,\s*/);
        } else if(typeOf === 'function'){
            processors = [ processors ];
        }
        type = type.split('.')[0];
        processors.forEach(function(processor, index){
            var typeOf = typeof processor, key;
            if(typeOf === 'string'){
                // named processors are loaded as mix plugins
                key = type + '.' + processor;
                processor = mix.require(type, processor);
            } else {
                key = type + '.' + index;
            }
            if(typeof processor === 'function'){
                var settings = mix.config.get('settings.' + key, {});
                // NOTE(review): the guard checks `processor.defaultOptions`
                // but merges `processor.settings` — one of the two property
                // names looks stale; confirm against plugin authors' API
                if(processor.defaultOptions){
                    settings = exports.extend(processor.settings, settings);
                }
                callback(processor, settings, key);
            } else {
                mix.log.warning('invalid processor [modules.' + key + ']');
            }
        });
    }
};
/**
* Open path or url from terminal
*
* @param <String> path | url
* @param <Function> callback
* @return <undefined>
*/
exports.open = function(path, callback) {
    var child_process = require('child_process');
    var cmd = exports.escapeShellArg(path);
    // pick the platform launcher:
    // 'start' on Windows, xdg-open/gnome-open on Linux, 'open' on macOS
    if(ISWIN) {
        cmd = 'start "" ' + cmd;
    } else {
        if(process.env['XDG_SESSION_COOKIE']){
            cmd = 'xdg-open ' + cmd;
        } else if(process.env['GNOME_DESKTOP_SESSION_ID']){
            cmd = 'gnome-open ' + cmd;
        } else {
            cmd = 'open ' + cmd;
        }
    }
    // NOTE(review): escapeShellArg only wraps in quotes and does not escape
    // embedded '"' characters, so an untrusted path could break out of the
    // quoting in this exec call — confirm inputs are trusted
    child_process.exec(cmd, function(){
        typeof callback == 'function' && callback(path);
    });
};
/**
* file type regular
*
* @param <String> type
* @return <Regular>
*/
/**
 * Build a case-insensitive RegExp matching file extensions of a category.
 * (Restores the closing line of this function, which was corrupted by
 * extraction junk in this copy of the file.)
 *
 * @param <String> type  'text' or 'image'
 * @return <Regular>     e.g. /\.(?:css|tpl|js|...)$/i
 */
function fileTypeReg(type) {
    var map = [], ext = mix.config.get('project.fileType.' + type);
    if(type === 'text'){
        map = TEXT_FILE_EXTS;
    } else if(type === 'image'){
        map = IMAGE_FILE_EXTS;
    } else {
        mix.log.error('invalid file type [' + type + ']');
    }
    // merge user-configured extensions ('a, b' string or array)
    if(ext && ext.length){
        if(typeof ext === 'string'){
            ext = ext.split(/\s*,\s*/);
        }
        map = map.concat(ext);
    }
    map = map.join('|');
    return new RegExp('\\.(?:' + map + ')$', 'i');
}
var fullname = origin.replace(query, ''); | random_line_split |
util.js | /**
* utils
*
* @namespace mix.util
*
* @author Yang,junlong at 2016-07-28 20:27:54 build.
* @version $Id$
*/
var fs = require('fs');
var url = require('url');
var pth = require('path');
var util = require('util');
//var iconv = require('iconv-lite');
var crypto = require('crypto');
var PLATFORM = process.platform;
var ISWIN = PLATFORM.indexOf('win') === 0;
/**
* text file exts
*
* @type <Array>
*/
var TEXT_FILE_EXTS = [
'css', 'tpl', 'js', 'php',
'txt', 'json', 'xml', 'htm',
'text', 'xhtml', 'html', 'md',
'conf', 'po', 'config', 'tmpl',
'coffee', 'less', 'sass', 'jsp',
'scss', 'manifest', 'bak', 'asp',
'tmp', 'haml', 'jade', 'aspx',
'ashx', 'java', 'py', 'c', 'cpp',
'h', 'cshtml', 'asax', 'master',
'ascx', 'cs', 'ftl', 'vm', 'ejs',
'styl', 'jsx', 'handlebars'
];
/**
* image file exts
*
* @type <Array>
*/
var IMAGE_FILE_EXTS = [
'svg', 'tif', 'tiff', 'wbmp',
'png', 'bmp', 'fax', 'gif',
'ico', 'jfif', 'jpe', 'jpeg',
'jpg', 'woff', 'cur', 'webp',
'swf', 'ttf', 'eot', 'woff2'
];
/**
* mime types
*
* @type <Object>
*/
var MIME_TYPES = {
//text
'css': 'text/css',
'tpl': 'text/html',
'js': 'text/javascript',
'jsx': 'text/javascript',
'php': 'text/html',
'asp': 'text/html',
'jsp': 'text/jsp',
'txt': 'text/plain',
'json': 'application/json',
'xml': 'text/xml',
'htm': 'text/html',
'text': 'text/plain',
'md': 'text/plain',
'xhtml': 'text/html',
'html': 'text/html',
'conf': 'text/plain',
'po': 'text/plain',
'config': 'text/plain',
'coffee': 'text/javascript',
'less': 'text/css',
'sass': 'text/css',
'scss': 'text/css',
'styl': 'text/css',
'manifest': 'text/cache-manifest',
//image
'svg': 'image/svg+xml',
'tif': 'image/tiff',
'tiff': 'image/tiff',
'wbmp': 'image/vnd.wap.wbmp',
'webp': 'image/webp',
'png': 'image/png',
'bmp': 'image/bmp',
'fax': 'image/fax',
'gif': 'image/gif',
'ico': 'image/x-icon',
'jfif': 'image/jpeg',
'jpg': 'image/jpeg',
'jpe': 'image/jpeg',
'jpeg': 'image/jpeg',
'eot': 'application/vnd.ms-fontobject',
'woff': 'application/font-woff',
'woff2': 'application/font-woff',
'ttf': 'application/octet-stream',
'cur': 'application/octet-stream'
};
/**
* 通用唯一识别码 (Universally Unique Identifier)
*
* @return string
*/
exports.uuid = function () {
var _uuid = [],
_stra = "0123456789ABCDEF".split('');
for (var i = 0; i < 36; i++){
_uuid[i] = Math.floor(Math.random() * 16);
}
_uuid[14] = 4;
_uuid[19] = (_uuid[19] & 3) | 8;
for (i = 0; i < 36; i++) {
_uuid[i] = _stra[_uuid[i]];
}
_uuid[8] = _uuid[13] = _uuid[18] = _uuid[23] = '-';
return _uuid.join('');
};
/**
* md5 crypto
*
* @param <String> | <Binary> data
* @param <Number> len
* @return <String>
*/
exports.md5 = function(data, len) {
var md5sum = crypto.createHash('md5');
var encoding = typeof data === 'string' ? 'utf8' : 'binary';
md5sum.update(data, encoding);
len = len || mix.config.get('project.md5Length', 7);
return md5sum.digest('hex').substring(0, len);
};
/**
* base64 encode
*
* @param <Buffer | Array | String> data
* @return <String>
*/
exports.base64 = function(data) {
if(data instanceof Buffer){
//do nothing for quickly determining.
} else if(data instanceof Array){
data = new Buffer(data);
} else {
//convert to string.
data = new Buffer(String(data || ''));
}
return data.toString('base64');
};
exports.map = function(obj, callback, scope) {
if (obj.length === +obj.length) {
for (var i = 0, l = obj.length; i < l; i++) {
if (callback.call(scope, obj[i], i, obj) === false) {
return;
}
}
} else {
for (var key in obj) {
if (obj.hasOwnProperty(key)) {
if (callback.call(scope, obj[key], key, obj) === false) {
return;
}
}
}
}
};
/**
* camel format
*
* @usage
* aaa-bbb_ccc ddd -> AaaBbbCccDdd
*
* @param {String} string
* @return {String}
*/
exports.camelcase = (function(){
var list = {};
return function(string){
var result = '';
if(string){
string.split(/[-_ ]+/).forEach(function(ele){
result += ele[0].toUpperCase() + ele.substring(1);
});
} else {
result = string;
}
return list[string] || (list[string] = result);
}
})();
/**
* 数据类型判断
*
* @param <Mixed> source
* @param <String> type
* @return <Boolean>
*/
// Type test via Object.prototype.toString, e.g. is(x, 'array') / is(x, 'Array');
// the type name is camel-cased first so lowercase aliases work
exports.is = function (source, type) {
    type = exports.camelcase(type);
    // unqualified `toString` resolves to Object.prototype.toString
    // inherited through the global object
    return toString.call(source) === '[object ' + type + ']';
};
// 简单的浅拷贝, 覆盖已经存在的属性
exports.extend = function (destination) {
if (!destination) {
return;
}
var args = Array.prototype.slice.call(arguments, 1);
for (i = 0, l = args.length; i < l; i++) {
var source = args[i];
for(var property in source){
var value = source[property];
if (value !== undefined){
destination[property] = value;
}
}
}
return destination;
};
/**
* print object to terminal
*
* @param <Object> object
* @param <String> prefix
* @return <Void>
*/
exports.print = function (object, prefix) {
prefix = prefix || '';
for(var key in object){
if(object.hasOwnProperty(key)){
if(typeof object[key] === 'object'){
arguments.callee(object[key], prefix + key + '.');
} else {
console.log(prefix + key + '=' + object[key]);
}
}
}
}
/**
* hostname & ip address
*
* @return <String>
*/
exports.hostname = function () {
var net = require('os').networkInterfaces();
for(var key in net){
if(net.hasOwnProperty(key)){
var details = net[key];
if(details && details.length){
for(var i = 0, len = details.length; i < len; i++){
var ip = String(details[i].address).trim();
if(ip && /^\d+(?:\.\d+){3}$/.test(ip) && ip !== '127.0.0.1'){
return ip;
}
}
}
}
}
return '127.0.0.1';
};
/**
* if text file
*
* @param <String> file
* @return <Boolean>
*/
exports.isTxt = function(file) {
return fileTypeReg('text').test(file || '');
};
/**
* if image file
*
* @param <String> file
* @return <Boolean>
*/
exports.isImg = function(file) {
return fileTypeReg('image').test(file || '');
};
/**
* if platform windows
*
* @return <Boolean>
*/
exports.isWin = function() {
return ISWIN;
}
/**
* if buffer utf8 charset
*
* @param <Buffer> bytes
* @return <Boolean>
*/
exports.isUtf8 = function(bytes) {
var i = 0;
while(i < bytes.length) {
if((// ASCII
0x00 <= bytes[i] && bytes[i] <= 0x7F
)) {
i += 1;
continue;
}
if((// non-overlong 2-byte
(0xC2 <= bytes[i] && bytes[i] <= 0xDF) &&
(0x80 <= bytes[i+1] && bytes[i+1] <= 0xBF)
)) {
i += 2;
continue;
}
if(
(// excluding overlongs
bytes[i] == 0xE0 &&
(0xA0 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF)
) || (// straight 3-byte
((0xE1 <= bytes[i] && bytes[i] <= 0xEC) ||
bytes[i] == 0xEE ||
bytes[i] == 0xEF) &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF)
) || (// excluding surrogates
bytes[i] == 0xED &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0x9F) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF)
)
) {
i += 3;
continue;
}
if(
(// planes 1-3
bytes[i] == 0xF0 &&
(0x90 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF) &&
(0x80 <= bytes[i + 3] && bytes[i + 3] <= 0xBF)
) || (// planes 4-15
(0xF1 <= bytes[i] && bytes[i] <= 0xF3) &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF) &&
(0x80 <= bytes[i + 3] && bytes[i + 3] <= 0xBF)
) || (// plane 16
bytes[i] == 0xF4 &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0x8F) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF) &&
(0x80 <= bytes[i + 3] && bytes[i + 3] <= 0xBF)
)
) {
i += 4;
continue;
}
return false;
}
return true;
};
/**
* if path is file
*
* @param <String> path
* @return <Boolean>
*/
exports.isFile = function(path) {
return exports.exists(path) && fs.statSync(path).isFile();
};
/**
* if path is dir
*
* @param <String> path
* @return {Boolean}
*/
exports.isDir = function(path) {
return exports.exists(path) && fs.statSync(path).isDirectory();
};
/**
* adapt buffer charset
*
* @param <Buffer> buffer
* @return <Buffer>
*/
exports.buffer = function(buffer) {
if(exports.isUtf8(buffer)) {
buffer = buffer.toString('utf8');
if (buffer.charCodeAt(0) === 0xFEFF) {
buffer = buffer.substring(1);
}
} else {
buffer = iconv.decode(buffer, 'gbk');
}
return buffer;
};
/**
* reads the entire contents of a file
*
* If the encoding option is specified then this function returns a string.
* Otherwise it returns a buffer.
*
* @param <String> file
* @param <Boolean> convert
* @return <String> | <Buffer>
*/
exports.read = function(file, convert) {
var content = false;
if(exports.exists(file)) {
content = fs.readFileSync(file);
if(convert || exports.isTxt(file)) {
content = exports.buffer(content);
}
}
return content;
};
/**
* read file to json
*
* @param <String> path
* @return <Object>
*/
exports.readJSON = (function(){
var cache = {};
return function(path) {
if(cache[path]) {
return cache[path];
}
var json = exports.read(path, true);
var result = {};
if(!json) {
return false;
}
try {
result = JSON.parse(json);
} catch(e){
mix.log.error('parse json file[' + path + '] fail, error [' + e.message + ']');
}
return (cache[path] = result);
}
})();
/**
* Makes directory
*
* @param <String> path
* @param <Integer> mode
* @return <undefined>
*/
exports.mkdir = function(path, mode) {
var exists = exports.exists;
if(exists(path)) {
return;
}
path.split('/').reduce(function(prev, next) {
if(prev && !exists(prev)) {
fs.mkdirSync(prev, mode);
}
return prev + '/' + next;
});
if(!exists(path)) {
fs.mkdirSync(path, mode);
}
};
/**
* write data to a file,
* replacing the file if it already exists.
* data can be a string or a buffer
*
* @param <String> | <Buffer> | <Integer> file
* @param <String> | <Buffer> data
* @param <Object> | <String> options
* @param <Boolean> append
* @return <undefined>
*/
exports.write = function(file, data, options, append) {
if(!exports.exists(file)){
exports.mkdir(exports.pathinfo(file).dirname);
}
if(append) {
fs.appendFileSync(file, data, options);
} else {
fs.writeFileSync(file, data, options);
}
};
/**
* Gets file modification time
*
* @param <String> path
* @return <Timestamp>
*/
exports.mtime = function(path) {
var time = 0;
if(exports.exists(path)){
time = fs.statSync(path).mtime;
}
return time;
};
/**
* Touch a file
*
* @param <String> path
* @param <timestamp> mtime
* @return <undefined>
*/
exports.touch = function(path, mtime) {
if(!exports.exists(path)){
exports.write(path, '');
}
if(mtime instanceof Date){
//do nothing for quickly determining.
} else if(typeof mtime === 'number') {
var time = new Date();
time.setTime(mtime);
mtime = time;
} else {
mix.log.error('invalid argument [mtime]');
}
fs.utimesSync(path, mtime, mtime);
};
exports.del = function(rPath, include, exclude){
var removedAll = true;
var path;
if(rPath && exports.exists(rPath)) {
var stat = fs.lstatSync(rPath);
var isFile = stat.isFile() || stat.isSymbolicLink();
if (stat.isSymbolicLink()) {
path = rPath;
} else {
path = exports.realpath(rPath);
}
if(/^(?:\w:)?\/$/.test(path)){
mix.log.error('unable to delete directory [' + rPath + '].');
}
if(stat.isDirectory()){
fs.readdirSync(path).forEach(function(name){
if(name != '.' && name != '..') {
removedAll = exports.del(path + '/' + name, include, exclude) && removedAll;
}
});
if(removedAll) {
fs.rmdirSync(path);
}
} else if(isFile && exports.filter(path, include, exclude)) {
fs.unlinkSync(path);
} else {
removedAll = false;
}
} else {
mix.log.error('unable to delete [' + rPath + ']: No such file or directory.');
}
return removedAll;
},
/**
* Node.js file system
*
* @type <Object>
*/
exports.fs = fs;
/**
* Test whether or not the given path exists by checking with the file system.
*
* @param <String> | <Buffer> path
* @return <Boolean> Returns true if the file exists, false otherwise.
*/
exports.exists = fs.existsSync;
/**
* Create path by arguments.
*
* @param <String> | <Array> path
* @return <String>
*/
exports.path = function(path) {
var type = typeof path;
if(arguments.length > 1) {
path = Array.prototype.join.call(arguments, '/');
} else if(type === 'string') {
// do nothing for quickly determining.
} else if(type === 'object') {
// array object
path = Array.prototype.join.call(path, '/');
} else if(type === 'undefined') {
path = '';
}
if(path){
path = pth.normalize(path.replace(/[\/\\]+/g, '/')).replace(/\\/g, '/');
if(path !== '/'){
path = path.replace(/\/$/, '');
}
}
return path;
};
/**
* Only paths that can be converted to UTF8 strings are supported.
*
* @param {String} path [description]
* @param {Object} options [description]
* @return <String>|<False> Returns the resolved path.
*/
exports.realpath = function(path, options){
if(path && exports.exists(path)){
path = fs.realpathSync(path, options);
if(ISWIN){
path = path.replace(/\\/g, '/');
}
if(path !== '/'){
path = path.replace(/\/$/, '');
}
return path;
} else {
return false;
}
};
/**
* Returns a parent directory's path
*
* @param <String> path
* @return <String>
*/
exports.dirname = function(path) {
return pth.resolve(path, '..');
};
/**
* Returns an object whose properties represent significant elements of the path
*
* @param <String> path
* @return <Object>
*
* The returned object will have the following properties:
* {
* origin: './test.js?ddd=11',
* rest: './test',
* hash: '',
* query: '?ddd=11',
* fullname: './test.js',
* dirname: '.',
* ext: '.js',
* filename: 'test',
* basename: 'test.js'
* }
*/
exports.pathinfo = function(path) {
/**
* parse path by url.parse
*
* {
* protocol: null,
* slashes: null,
* auth: null,
* host: null,
* port: null,
* hostname: null,
* hash: null,
* search: '?dfad=121',
* query: 'dfad=121',
* pathname: './test.js',
* path: './test.js?dfad=121',
* href: './test.js?dfad=121'
* }
* @type {[type]}
*/
var urls = url.parse(path);
/**
* parse path by path.parse
*
* {
* root: '',
* dir: '.',
* base: 'test.js?dfad=121',
* ext: '.js?dfad=121',
* name: 'test'
* }
* @type <Object>
*/
var pths = pth.parse(urls.pathname || path);
// tobe output var
var origin = urls.path;
var root = pths.root;
var hash = urls.hash;
var query = urls.search;
var fullname = origin.replace(query, '');
var dirname = pths.dir || '.';
var filename = pths.name;
var basename = (pths.base || '').replace(query, '');
var rest = dirname + '/' + filename;
var ext = (pths.ext || '').replace(query, '');
return {
'origin': origin,
'dirname': dirname,
'fullname': fullname,
'filename': filename,
'basename': basename,
'query': query,
'rest': rest,
'hash': hash,
'root': root,
'ext': ext
};
};
/**
* Escape special regular charset
*
* @param <String> str
* @return <String>
*/
exports.escapeReg = function(str) {
return str.replace(/[\.\\\+\*\?\[\^\]\$\(\){}=!<>\|:\/]/g, '\\$&');
};
/**
* Escape shell cmd
*
* @param <String> str
* @return <String>
*/
exports.escapeShellCmd = function(str){
return str.replace(/ /g, '"$&"');
};
exports.escapeShellArg = function(cmd){
return '"' + cmd + '"';
};
exports.stringQuote = function(str, quotes){
var info = {
origin : str,
rest : str = str.trim(),
quote : ''
};
if(str){
quotes = quotes || '\'"';
var strLen = str.length - 1;
for(var i = 0, len = quotes.length; i < len; i++){
var c = quotes[i];
if(str[0] === c && str[strLen] === c){
info.quote = c;
info.rest = str.substring(1, strLen);
break;
}
}
}
return info;
};
/**
* Replace var
*
* @return <String>
* @example
* replaceVar('/${namespace}/path/to/*.js', true);
* -> '/common/path/to/*.js'
*/
exports.replaceVar = function(value, escape){
return value.replace(/\$\{([^\}]+)\}/g, function(all, $1){
var val = mix.config.get($1) || 'test';
if(typeof val === 'undefined'){
mix.log.error('undefined property [' + $1 + '].');
} else {
return escape ? exports.escapeReg(val) : val;
}
return all;
});
};
/**
* Replace Matches
*
* @param <String> value
* @param <Array> matches
* @return <String>
* @example
* replaceMatches('/$1/path/to.js', ['aa', 'bb']);
* -> '/bb/path/to.js'
*/
exports.replaceMatches = function(value, matches) {
return value.replace(/\$(\d+|&)/g, function(all, $1){
var key = $1 === '&' ? '0' : $1;
return matches.hasOwnProperty(key) ? (matches[key] || '') : all;
});
};
/**
* Replace roadmap.path Props
* Extend Props to File
*
* @param <Object> source
* @param <Array> matches
* @param <Object> scope
* @return <Object>
* @example
* replaceProps(road.path[0], ['aa', 'bb', 'cc'], this);
*/
exports.replaceProps = function(source, matches, scope) {
var type = typeof source;
if(type === 'object'){
if(exports.is(source, 'Array')){
scope = scope || [];
} else {
scope = scope || {};
}
exports.map(source, function(value, key){
scope[key] = exports.replaceProps(value, matches);
});
return scope;
} else if(type === 'string'){
return exports.replaceVar(exports.replaceMatches(source, matches));
} else if(type === 'function'){
return source(scope, matches);
} else {
return source;
}
};
// check lib version
// Extract the first x.y.z version number found in `str`;
// returns false when no three-part version is present.
exports.matchVersion = function(str) {
    var version = false;
    var reg = /\b\d+(\.\d+){2}/;
    var match = str.match(reg);
    if(match){
        version = match[0];
    }
    return version;
};
/**
* Filter path by include&exclude regular
*
* @param <String> str [description]
* @param <Regular> | <Array> include [description]
* @param <Regular> | <Array> exclude [description]
* @return <Boolean>
*/
exports.filter = function(str, include, exclude){
function normalize(pattern){
var type = toString.call(pattern);
swi | = false;
if (!exports.is(patterns, 'Array')){
patterns = [patterns];
}
patterns.every(function(pattern){
if (!pattern){
return true;
}
matched = matched || str.search(normalize(pattern)) > -1;
return !matched;
});
return matched;
}
var isInclude, isExclude;
if (include) {
isInclude = match(str, include);
}else{
isInclude = true;
}
if (exclude) {
isExclude = match(str, exclude);
}
return isInclude && !isExclude;
};
/**
* match files using the pattern
*
* @see npm node-glob
*
* @param <Regular> pattern
* @param <String> str
* @return <Boolean> | <Regular>
*/
exports.glob = function(pattern, str){
var sep = exports.escapeReg('/');
pattern = new RegExp('^' + sep + '?' +
exports.escapeReg(
pattern
.replace(/\\/g, '/')
.replace(/^\//, '')
)
.replace(new RegExp(sep + '\\*\\*' + sep, 'g'), sep + '.*(?:' + sep + ')?')
.replace(new RegExp(sep + '\\*\\*', 'g'), sep + '.*')
.replace(/\\\*\\\*/g, '.*')
.replace(/\\\*/g, '[^' + sep + ']*')
.replace(/\\\?/g, '[^' + sep + ']') + '$',
'i'
);
if(typeof str === 'string'){
return pattern.test(str);
} else {
return pattern;
}
};
/**
* find project source(files)
*
* @param <String> path
* @param <String | Function> include
* @param <String | Function> exclude
* @param <String> root
* @param <Function> callback
* @return <Object>
*/
exports.find = function(rPath, include, exclude, root) {
var args = Array.prototype.slice.call(arguments, 0);
var last = args[args.length-1];
var list = [];
var path = exports.realpath(rPath);
var filterPath = root ? path.substring(root.length) : path;
include = exports.is(include, 'function') ? undefined : include;
exclude = exports.is(exclude, 'function') ? undefined : exclude;
if(path){
var stat = fs.statSync(path);
if(stat.isDirectory() && (include || exports.filter(filterPath, include, exclude))){
fs.readdirSync(path).forEach(function(p){
if(p[0] != '.') {
list = list.concat(exports.find(path + '/' + p, include, exclude, root, last));
}
});
} else if(stat.isFile() && exports.filter(filterPath, include, exclude)) {
list.push(path);
if(exports.is(last, 'function')) {
last(path);
}
}
} else {
mix.log.error('unable to find [' + rPath + ']: No such file or directory.');
}
return list.sort();
};
/**
* File pipe process core impl
*
* @param <String> type
* @param <Function> callback
* @param <Function> | <String> def
* @return <undefined>
*/
exports.pipe = function(type, callback, def) {
var processors = mix.config.get('modules.' + type, def);
if(processors){
var typeOf = typeof processors;
if(typeOf === 'string'){
processors = processors.trim().split(/\s*,\s*/);
} else if(typeOf === 'function'){
processors = [ processors ];
}
type = type.split('.')[0];
processors.forEach(function(processor, index){
var typeOf = typeof processor, key;
if(typeOf === 'string'){
key = type + '.' + processor;
processor = mix.require(type, processor);
} else {
key = type + '.' + index;
}
if(typeof processor === 'function'){
var settings = mix.config.get('settings.' + key, {});
if(processor.defaultOptions){
settings = exports.extend(processor.settings, settings);
}
callback(processor, settings, key);
} else {
mix.log.warning('invalid processor [modules.' + key + ']');
}
});
}
};
/**
* Open path or url from terminal
*
* @param <String> path | url
* @param <Function> callback
* @return <undefined>
*/
exports.open = function(path, callback) {
var child_process = require('child_process');
var cmd = exports.escapeShellArg(path);
if(ISWIN) {
cmd = 'start "" ' + cmd;
} else {
if(process.env['XDG_SESSION_COOKIE']){
cmd = 'xdg-open ' + cmd;
} else if(process.env['GNOME_DESKTOP_SESSION_ID']){
cmd = 'gnome-open ' + cmd;
} else {
cmd = 'open ' + cmd;
}
}
child_process.exec(cmd, function(){
typeof callback == 'function' && callback(path);
});
};
/**
* file type regular
*
* @param <String> type
* @return <Regular>
*/
function fileTypeReg(type) {
var map = [], ext = mix.config.get('project.fileType.' + type);
if(type === 'text'){
map = TEXT_FILE_EXTS;
} else if(type === 'image'){
map = IMAGE_FILE_EXTS;
} else {
mix.log.error('invalid file type [' + type + ']');
}
// custom file type
if(ext && ext.length){
if(typeof ext === 'string'){
ext = ext.split(/\s*,\s*/);
}
map = map.concat(ext);
}
map = map.join('|');
return new RegExp('\\.(?:' + map + ')$', 'i');
}
| tch (type){
case '[object String]':
return exports.glob(pattern);
case '[object RegExp]':
return pattern;
default:
mix.log.error('invalid regexp [' + pattern + '].');
}
}
function match(str, patterns){
var matched | identifier_body |
util.js | /**
* utils
*
* @namespace mix.util
*
* @author Yang,junlong at 2016-07-28 20:27:54 build.
* @version $Id$
*/
var fs = require('fs');
var url = require('url');
var pth = require('path');
var util = require('util');
//var iconv = require('iconv-lite');
var crypto = require('crypto');
var PLATFORM = process.platform;
var ISWIN = PLATFORM.indexOf('win') === 0;
/**
* text file exts
*
* @type <Array>
*/
var TEXT_FILE_EXTS = [
'css', 'tpl', 'js', 'php',
'txt', 'json', 'xml', 'htm',
'text', 'xhtml', 'html', 'md',
'conf', 'po', 'config', 'tmpl',
'coffee', 'less', 'sass', 'jsp',
'scss', 'manifest', 'bak', 'asp',
'tmp', 'haml', 'jade', 'aspx',
'ashx', 'java', 'py', 'c', 'cpp',
'h', 'cshtml', 'asax', 'master',
'ascx', 'cs', 'ftl', 'vm', 'ejs',
'styl', 'jsx', 'handlebars'
];
/**
* image file exts
*
* @type <Array>
*/
var IMAGE_FILE_EXTS = [
'svg', 'tif', 'tiff', 'wbmp',
'png', 'bmp', 'fax', 'gif',
'ico', 'jfif', 'jpe', 'jpeg',
'jpg', 'woff', 'cur', 'webp',
'swf', 'ttf', 'eot', 'woff2'
];
/**
* mime types
*
* @type <Object>
*/
var MIME_TYPES = {
//text
'css': 'text/css',
'tpl': 'text/html',
'js': 'text/javascript',
'jsx': 'text/javascript',
'php': 'text/html',
'asp': 'text/html',
'jsp': 'text/jsp',
'txt': 'text/plain',
'json': 'application/json',
'xml': 'text/xml',
'htm': 'text/html',
'text': 'text/plain',
'md': 'text/plain',
'xhtml': 'text/html',
'html': 'text/html',
'conf': 'text/plain',
'po': 'text/plain',
'config': 'text/plain',
'coffee': 'text/javascript',
'less': 'text/css',
'sass': 'text/css',
'scss': 'text/css',
'styl': 'text/css',
'manifest': 'text/cache-manifest',
//image
'svg': 'image/svg+xml',
'tif': 'image/tiff',
'tiff': 'image/tiff',
'wbmp': 'image/vnd.wap.wbmp',
'webp': 'image/webp',
'png': 'image/png',
'bmp': 'image/bmp',
'fax': 'image/fax',
'gif': 'image/gif',
'ico': 'image/x-icon',
'jfif': 'image/jpeg',
'jpg': 'image/jpeg',
'jpe': 'image/jpeg',
'jpeg': 'image/jpeg',
'eot': 'application/vnd.ms-fontobject',
'woff': 'application/font-woff',
'woff2': 'application/font-woff',
'ttf': 'application/octet-stream',
'cur': 'application/octet-stream'
};
/**
* 通用唯一识别码 (Universally Unique Identifier)
*
* @return string
*/
exports.uuid = function () {
var _uuid = [],
_stra = "0123456789ABCDEF".split('');
for (var i = 0; i < 36; i++){
_uuid[i] = Math.floor(Math.random() * 16);
}
_uuid[14] = 4;
_uuid[19] = (_uuid[19] & 3) | 8;
for (i = 0; i < 36; i++) {
_uuid[i] = _stra[_uuid[i]];
}
_uuid[8] = _uuid[13] = _uuid[18] = _uuid[23] = '-';
return _uuid.join('');
};
/**
* md5 crypto
*
* @param <String> | <Binary> data
* @param <Number> len
* @return <String>
*/
exports.md5 = function(data, len) {
var md5sum = crypto.createHash('md5');
var encoding = typeof data === 'string' ? 'utf8' : 'binary';
md5sum.update(data, encoding);
len = len || mix.config.get('project.md5Length', 7);
return md5sum.digest('hex').substring(0, len);
};
/**
* base64 encode
*
* @param <Buffer | Array | String> data
* @return <String>
*/
exports.base64 = function(data) {
if(data instanceof Buffer){
//do nothing for quickly determining.
} else if(data instanceof Array){
data = new Buffer(data);
} else {
//convert to string.
data = new Buffer(String(data || ''));
}
return data.toString('base64');
};
exports.map = function(obj, callback, scope) {
if (obj.length === +obj.length) {
for (var i = 0, l = obj.length; i < l; i++) {
if (callback.call(scope, obj[i], i, obj) === false) {
return;
}
}
} else {
for (var key in obj) {
if (obj.hasOwnProperty(key)) {
if (callback.call(scope, obj[key], key, obj) === false) {
return;
}
}
}
}
};
/**
* camel format
*
* @usage
* aaa-bbb_ccc ddd -> AaaBbbCccDdd
*
* @param {String} string
* @return {String}
*/
exports.camelcase = (function(){
var list = {};
return function(string){
var result = '';
if(string){
string.split(/[-_ ]+/).forEach(function(ele){
result += ele[0].toUpperCase() + ele.substring(1);
});
} else {
result = string;
}
return list[string] || (list[string] = result);
}
})();
/**
* 数据类型判断
*
* @param <Mixed> source
* @param <String> type
* @return <Boolean>
*/
exports.is = function (source, type) {
type = exports.camelcase(type);
return toString.call(source) === '[object ' + type + ']';
};
// 简单的浅拷贝, 覆盖已经存在的属性
exports.extend = function (destination) {
if (!destination) {
return;
}
var args = Array.prototype.slice.call(arguments, 1);
for (i = 0, l = args.length; i < l; i++) {
var source = args[i];
for(var property in source){
var value = source[property];
if (value !== undefined){
destination[property] = value;
}
}
}
return destination;
};
/**
* print object to terminal
*
* @param <Object> object
* @param <String> prefix
* @return <Void>
*/
exports.print = function (object, prefix) {
prefix = prefix || '';
for(var key in object){
if(object.hasOwnProperty(key)){
if(typeof object[key] === 'object'){
arguments.callee(object[key], prefix + key + '.');
} else {
console.log(prefix + key + '=' + object[key]);
}
}
}
}
/**
* hostname & ip address
*
* @return <String>
*/
exports.hostname = function () {
var net = require('os').networkInterfaces();
for(var key in net){
if(net.hasOwnProperty(key)){
var details = net[key];
if(details && details.length){
for(var i = 0, len = details.length; i < len; i++){
var ip = String(details[i].address).trim();
if(ip && /^\d+(?:\.\d+){3}$/.test(ip) && ip !== '127.0.0.1'){
return ip;
}
}
}
}
}
return '127.0.0.1';
};
/**
* if text file
*
* @param <String> file
* @return <Boolean>
*/
exports.isTxt = function(file) {
return fileTypeReg('text').test(file || '');
};
/**
* if image file
*
* @param <String> file
* @return <Boolean>
*/
exports.isImg = function(file) {
return fileTypeReg('image').test(file || '');
};
/**
* if platform windows
*
* @return <Boolean>
*/
exports.isWin = function() {
return ISWIN;
}
/**
* if buffer utf8 charset
*
* @param <Buffer> bytes
* @return <Boolean>
*/
exports.isUtf8 = function(bytes) {
var i = 0;
while(i < bytes.length) {
if((// ASCII
0x00 <= bytes[i] && bytes[i] <= 0x7F
)) {
i += 1;
continue;
}
if((// non-overlong 2-byte
(0xC2 <= bytes[i] && bytes[i] <= 0xDF) &&
(0x80 <= bytes[i+1] && bytes[i+1] <= 0xBF)
)) {
i += 2;
continue;
}
if(
(// excluding overlongs
bytes[i] == 0xE0 &&
(0xA0 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF)
) || (// straight 3-byte
((0xE1 <= bytes[i] && bytes[i] <= 0xEC) ||
bytes[i] == 0xEE ||
bytes[i] == 0xEF) &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF)
) || (// excluding surrogates
bytes[i] == 0xED &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0x9F) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF)
)
) {
i += 3;
continue;
}
if(
(// planes 1-3
bytes[i] == 0xF0 &&
(0x90 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF) &&
(0x80 <= bytes[i + 3] && bytes[i + 3] <= 0xBF)
) || (// planes 4-15
(0xF1 <= bytes[i] && bytes[i] <= 0xF3) &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0xBF) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF) &&
(0x80 <= bytes[i + 3] && bytes[i + 3] <= 0xBF)
) || (// plane 16
bytes[i] == 0xF4 &&
(0x80 <= bytes[i + 1] && bytes[i + 1] <= 0x8F) &&
(0x80 <= bytes[i + 2] && bytes[i + 2] <= 0xBF) &&
(0x80 <= bytes[i + 3] && bytes[i + 3] <= 0xBF)
)
) {
i += 4;
continue;
}
return false;
}
return true;
};
/**
 * Test whether a path exists and is a regular file.
 *
 * @param <String> path
 * @return <Boolean> (follows symlinks — statSync, not lstatSync)
 */
exports.isFile = function(path) {
  return exports.exists(path) && fs.statSync(path).isFile();
};
/**
 * Test whether a path exists and is a directory.
 *
 * @param <String> path
 * @return <Boolean> (follows symlinks — statSync, not lstatSync)
 */
exports.isDir = function(path) {
  return exports.exists(path) && fs.statSync(path).isDirectory();
};
/**
 * Decode a raw buffer to text: UTF-8 when it validates as such (with any
 * leading BOM stripped), otherwise fall back to GBK via iconv.
 *
 * NOTE: despite the name and the <Buffer> return tag, this returns a
 * decoded *string* on both branches.
 *
 * @param <Buffer> buffer
 * @return <String>
 */
exports.buffer = function(buffer) {
  if(exports.isUtf8(buffer)) {
    buffer = buffer.toString('utf8');
    // Drop a UTF-8 byte-order mark decoded as U+FEFF.
    if (buffer.charCodeAt(0) === 0xFEFF) {
      buffer = buffer.substring(1);
    }
  } else {
    // Non-UTF-8 content is assumed to be GBK (third-party iconv module).
    buffer = iconv.decode(buffer, 'gbk');
  }
  return buffer;
};
/**
 * Read a file's entire contents.
 *
 * Text files (by extension, or when `convert` is set) are decoded to a
 * string via exports.buffer(); other files come back as a raw Buffer.
 * A missing file yields `false` rather than throwing.
 *
 * @param <String> file
 * @param <Boolean> convert  force text decoding regardless of extension
 * @return <String> | <Buffer> | <Boolean> false when the file does not exist
 */
exports.read = function(file, convert) {
  var content = false;
  if(exports.exists(file)) {
    content = fs.readFileSync(file);
    if(convert || exports.isTxt(file)) {
      content = exports.buffer(content);
    }
  }
  return content;
};
/**
 * Read and parse a JSON file, memoising the result per path.
 *
 * Returns `false` (uncached) when the file is missing/empty. On a parse
 * error it logs through mix.log and caches/returns an empty object, so a
 * broken file is not re-parsed on later calls.
 *
 * @param <String> path
 * @return <Object> | <Boolean>
 */
exports.readJSON = (function(){
  var cache = {};  // path -> parsed object, shared across calls
  return function(path) {
    if(cache[path]) {
      return cache[path];
    }
    var json = exports.read(path, true);
    var result = {};
    if(!json) {
      return false;
    }
    try {
      result = JSON.parse(json);
    } catch(e){
      mix.log.error('parse json file[' + path + '] fail, error [' + e.message + ']');
    }
    return (cache[path] = result);
  }
})();
/**
 * Make a directory, creating missing parent directories as needed
 * (mkdir -p). No-op when the path already exists.
 *
 * @param <String> path   uses '/' separators
 * @param <Integer> mode  optional permission bits passed to fs.mkdirSync
 * @return <undefined>
 */
exports.mkdir = function(path, mode) {
  var exists = exports.exists;
  if(exists(path)) {
    return;
  }
  // reduce() without an initial value: the first segment seeds `prev`, and
  // each step creates the accumulated prefix before appending the next
  // segment — i.e. every ancestor of `path` gets created in order.
  path.split('/').reduce(function(prev, next) {
    if(prev && !exists(prev)) {
      fs.mkdirSync(prev, mode);
    }
    return prev + '/' + next;
  });
  // The loop above stops one level short; create the leaf itself.
  if(!exists(path)) {
    fs.mkdirSync(path, mode);
  }
};
/**
 * Write (or append) data to a file, creating its parent directory first
 * when the file does not yet exist. Data may be a string or a Buffer.
 *
 * @param <String> | <Buffer> | <Integer> file  target path (or fd)
 * @param <String> | <Buffer> data
 * @param <Object> | <String> options  passed through to fs
 * @param <Boolean> append  true -> appendFileSync, else overwrite
 * @return <undefined>
 */
exports.write = function(file, data, options, append) {
  if(!exports.exists(file)){
    // Ensure the containing directory exists before the first write.
    exports.mkdir(exports.pathinfo(file).dirname);
  }
  if(append) {
    fs.appendFileSync(file, data, options);
  } else {
    fs.writeFileSync(file, data, options);
  }
};
/**
 * Get a file's modification time.
 *
 * NOTE: returns a Date object (fs.Stats#mtime) when the path exists, and
 * the number 0 when it does not — callers should treat 0 as "missing".
 *
 * @param <String> path
 * @return <Date> | <Integer>
 */
exports.mtime = function(path) {
  var time = 0;
  if(exports.exists(path)){
    time = fs.statSync(path).mtime;
  }
  return time;
};
/**
 * Touch a file: create it empty when missing, then set its access and
 * modification times to `mtime`.
 *
 * @param <String> path
 * @param <Date> | <timestamp> mtime  Date, or epoch milliseconds
 * @return <undefined>
 */
exports.touch = function(path, mtime) {
  if(!exports.exists(path)){
    exports.write(path, '');
  }
  if(mtime instanceof Date){
    //do nothing for quickly determining.
  } else if(typeof mtime === 'number') {
    // Convert an epoch-millisecond timestamp into a Date.
    var time = new Date();
    time.setTime(mtime);
    mtime = time;
  } else {
    // NOTE(review): when mtime is omitted or of any other type, this only
    // logs an error and still falls through to utimesSync with the invalid
    // value — presumably that throws; confirm whether a default of
    // `new Date()` was intended here.
    mix.log.error('invalid argument [mtime]');
  }
  fs.utimesSync(path, mtime, mtime);
};
/**
 * Recursively delete a path. Directories are removed only when every entry
 * beneath them was deleted; files are deleted only when they pass the
 * include/exclude filter (see exports.filter).
 *
 * @param <String> rPath  path to delete
 * @param <Regular> | <Array> include
 * @param <Regular> | <Array> exclude
 * @return <Boolean> true when everything under rPath was removed
 */
exports.del = function(rPath, include, exclude){
  var removedAll = true;
  var path;
  if(rPath && exports.exists(rPath)) {
    // lstat so symlinks are handled as links, not their targets.
    var stat = fs.lstatSync(rPath);
    var isFile = stat.isFile() || stat.isSymbolicLink();
    if (stat.isSymbolicLink()) {
      // Delete the link itself; resolving it would delete the target.
      path = rPath;
    } else {
      path = exports.realpath(rPath);
    }
    // Refuse to operate on a filesystem root ('/' or 'C:/').
    if(/^(?:\w:)?\/$/.test(path)){
      mix.log.error('unable to delete directory [' + rPath + '].');
    }
    if(stat.isDirectory()){
      fs.readdirSync(path).forEach(function(name){
        if(name != '.' && name != '..') {
          // Recurse first so && keeps removedAll accurate either way.
          removedAll = exports.del(path + '/' + name, include, exclude) && removedAll;
        }
      });
      // Only remove the directory once it is actually empty.
      if(removedAll) {
        fs.rmdirSync(path);
      }
    } else if(isFile && exports.filter(path, include, exclude)) {
      fs.unlinkSync(path);
    } else {
      // Filtered-out file: left in place, so the parent dir must survive.
      removedAll = false;
    }
  } else {
    mix.log.error('unable to delete [' + rPath + ']: No such file or directory.');
  }
  return removedAll;
},
// NOTE(review): the trailing comma above chains this function expression and
// the exports.fs assignment below into one comma-operator statement; it works
// but was presumably meant to be a semicolon.
/**
 * Node.js file system module, re-exported for callers of this util module.
 *
 * @type <Object>
 */
exports.fs = fs;
/**
 * Test whether the given path exists on the file system.
 *
 * @param <String> | <Buffer> path
 * @return <Boolean> true if the path exists, false otherwise.
 */
exports.exists = fs.existsSync;
/**
 * Build a normalised, '/'-separated path from the arguments.
 *
 * Accepts a single string, an array of segments, or multiple string
 * arguments (joined with '/'). Backslashes and repeated separators are
 * collapsed, and a trailing slash is dropped (except for '/').
 *
 * @param <String> | <Array> path
 * @return <String> ('' when called with no usable argument)
 */
exports.path = function(path) {
  var type = typeof path;
  if(arguments.length > 1) {
    // Variadic form: path('a', 'b', 'c') -> 'a/b/c'.
    path = Array.prototype.join.call(arguments, '/');
  } else if(type === 'string') {
    // do nothing for quickly determining.
  } else if(type === 'object') {
    // array object
    path = Array.prototype.join.call(path, '/');
  } else if(type === 'undefined') {
    path = '';
  }
  if(path){
    // Unify separators, normalise '..'/'.', then re-unify (pth.normalize
    // reintroduces backslashes on Windows).
    path = pth.normalize(path.replace(/[\/\\]+/g, '/')).replace(/\\/g, '/');
    if(path !== '/'){
      path = path.replace(/\/$/, '');
    }
  }
  return path;
};
/**
 * Resolve a path to its canonical absolute form.
 *
 * Only paths that can be converted to UTF8 strings are supported.
 * The result uses '/' separators on Windows and has no trailing slash
 * (except for the root itself).
 *
 * @param {String} path
 * @param {Object} options  passed through to fs.realpathSync
 * @return <String>|<False> the resolved path, or false when it does not exist
 */
exports.realpath = function(path, options){
  if(path && exports.exists(path)){
    path = fs.realpathSync(path, options);
    if(ISWIN){
      // Normalise Windows backslashes to the module's '/' convention.
      path = path.replace(/\\/g, '/');
    }
    if(path !== '/'){
      path = path.replace(/\/$/, '');
    }
    return path;
  } else {
    return false;
  }
};
/**
 * Return the parent directory of a path.
 *
 * NOTE: pth.resolve() is used, so the result is always an *absolute* path,
 * resolved against the current working directory for relative inputs.
 *
 * @param <String> path
 * @return <String>
 */
exports.dirname = function(path) {
  return pth.resolve(path, '..');
};
/**
* Returns an object whose properties represent significant elements of the path
*
* @param <String> path
* @return <Object>
*
* The returned object will have the following properties:
* {
* origin: './test.js?ddd=11',
* rest: './test',
* hash: '',
* query: '?ddd=11',
* fullname: './test.js',
* dirname: '.',
* ext: '.js',
* filename: 'test',
* basename: 'test.js'
* }
*/
exports.pathinfo = function(path) {
/**
* parse path by url.parse
*
* {
* protocol: null,
* slashes: null,
* auth: null,
* host: null,
* port: null,
* hostname: null,
* hash: null,
* search: '?dfad=121',
* query: 'dfad=121',
* pathname: './test.js',
* path: './test.js?dfad=121',
* href: './test.js?dfad=121'
* }
* @type {[type]}
*/
var urls = url.parse(path);
/**
* parse path by path.parse
*
* {
* root: '',
* dir: '.',
* base: 'test.js?dfad=121',
* ext: '.js?dfad=121',
* name: 'test'
* }
* @type <Object>
*/
var pths = pth.parse(urls.pathname || path);
// tobe output var
var origin = urls.path;
var root = pths.root;
var hash = urls.hash;
var query = urls.search;
var fullname = origin.replace(query, '');
var dirname = pths.dir || '.';
var filename = pths.name;
var basename = (pths.base || '').replace(query, '');
var rest = dirname + '/' + filename;
var ext = (pths.ext || '').replace(query, '');
return {
'origin': origin,
'dirname': dirname,
'fullname': fullname,
'filename': filename,
'basename': basename,
'query': query,
'rest': rest,
'hash': hash,
'root': root,
'ext': ext
};
};
/**
* Escape special regular charset
*
* @param <String> str
* @return <String>
*/
exports.escapeReg = function(str) {
return str.replace(/[\.\\\+\*\?\[\^\]\$\(\){}=!<>\|:\/]/g, '\\$&');
};
/**
* Escape shell cmd
*
* @param <String> str
* @return <String>
*/
exports.escapeShellCmd = function(str){
return str.replace(/ /g, '"$&"');
};
exports.escapeShellArg = function(cmd){
return '"' + cmd + '"';
};
exports.stringQuote = function(str, quotes){
var info = {
origin : str,
rest : str = str.trim(),
quote : ''
};
if(str){
quotes = quotes || '\'"';
var strLen = str.length - 1;
for(var i = 0, len = quotes.length; i < len; i++){
var c = quotes[i];
if(str[0] === c && str[strLen] === c){
info.quote = c;
info.rest = str.substring(1, strLen);
break;
}
}
}
return info;
};
/**
 * Replace ${name} placeholders with values from mix.config.
 *
 * @param <String> value   template containing ${...} placeholders
 * @param <Boolean> escape  regex-escape each substituted value
 * @return <String>
 * @example
 *   replaceVar('/${namespace}/path/to/*.js', true);
 *   -> '/common/path/to/*.js'
 */
exports.replaceVar = function(value, escape){
  return value.replace(/\$\{([^\}]+)\}/g, function(all, $1){
    // NOTE(review): the `|| 'test'` fallback makes the undefined check
    // below unreachable — it looks like leftover debug code; confirm
    // whether missing config keys should really default to 'test'.
    var val = mix.config.get($1) || 'test';
    if(typeof val === 'undefined'){
      mix.log.error('undefined property [' + $1 + '].');
    } else {
      return escape ? exports.escapeReg(val) : val;
    }
    // Unresolved placeholder: keep the original ${...} text.
    return all;
  });
};
/**
* Replace Matches
*
* @param <String> value
* @param <Array> matches
* @return <String>
* @example
* replaceMatches('/$1/path/to.js', ['aa', 'bb']);
* -> '/bb/path/to.js'
*/
exports.replaceMatches = function(value, matches) {
return value.replace(/\$(\d+|&)/g, function(all, $1){
var key = $1 === '&' ? '0' : $1;
return matches.hasOwnProperty(key) ? (matches[key] || '') : all;
});
};
/**
 * Recursively expand roadmap.path props: walks objects/arrays, expanding
 * $n back-references and ${var} config placeholders in every string leaf;
 * function leaves are invoked with (scope, matches) and their result used.
 *
 * @param <Object> source   value to expand (object/array/string/function/other)
 * @param <Array> matches   regex captures for replaceMatches
 * @param <Object> scope    optional accumulator object/array
 * @return <Object>
 * @example
 *   replaceProps(road.path[0], ['aa', 'bb', 'cc'], this);
 */
exports.replaceProps = function(source, matches, scope) {
  var type = typeof source;
  if(type === 'object'){
    // Mirror the input container type so arrays stay arrays.
    if(exports.is(source, 'Array')){
      scope = scope || [];
    } else {
      scope = scope || {};
    }
    // exports.map is defined elsewhere in this module — presumably an
    // each-style iterator over (value, key); confirm above this chunk.
    exports.map(source, function(value, key){
      scope[key] = exports.replaceProps(value, matches);
    });
    return scope;
  } else if(type === 'string'){
    // Back-references first, then ${var} config expansion.
    return exports.replaceVar(exports.replaceMatches(source, matches));
  } else if(type === 'function'){
    return source(scope, matches);
  } else {
    // Numbers, booleans, null, undefined pass through unchanged.
    return source;
  }
};
// check lib version
exports.matchVersion = function(str) {
var version = false;
var reg = /\b\d+(\.\d+){2}/;
var match = str.match(reg);
if(match){
version = match[0];
}
return version;
};
/**
 * Decide whether a path passes an include/exclude filter. Patterns may be
 * RegExps, glob strings (converted via exports.glob), or arrays of either;
 * within a list, matching ANY pattern counts as a match.
 *
 * No include -> everything is included; no exclude -> nothing is excluded.
 *
 * @param <String> str
 * @param <Regular> | <Array> include
 * @param <Regular> | <Array> exclude
 * @return <Boolean>
 */
exports.filter = function(str, include, exclude){
  // Coerce one pattern to a RegExp (glob strings are compiled).
  function normalize(pattern){
    var type = toString.call(pattern);
    switch (type){
      case '[object String]':
        return exports.glob(pattern);
      case '[object RegExp]':
        return pattern;
      default:
        mix.log.error('invalid regexp [' + pattern + '].');
    }
  }
  // true when str matches at least one pattern (falsy patterns skipped).
  function match(str, patterns){
    var matched = false;
    | (!exports.is(patterns, 'Array')){
      patterns = [patterns];
    }
    patterns.every(function(pattern){
      if (!pattern){
        return true;
      }
      matched = matched || str.search(normalize(pattern)) > -1;
      // every() short-circuits as soon as a match is found.
      return !matched;
    });
    return matched;
  }
  var isInclude, isExclude;
  if (include) {
    isInclude = match(str, include);
  }else{
    isInclude = true;
  }
  if (exclude) {
    isExclude = match(str, exclude);
  }
  // isExclude stays undefined when no exclude was given; !undefined === true.
  return isInclude && !isExclude;
};
/**
 * Compile a glob pattern to a RegExp (roughly node-glob semantics):
 * '**' crosses directory separators, '*' matches within one path segment,
 * '?' matches a single non-separator character. Matching is
 * case-insensitive and anchored to the whole string.
 *
 * @see npm node-glob
 *
 * @param <String> pattern  glob pattern
 * @param <String> str      optional string to test immediately
 * @return <Boolean> | <Regular> test result when str is given, else the RegExp
 */
exports.glob = function(pattern, str){
  var sep = exports.escapeReg('/');
  // Escape everything first, then re-introduce the wildcard constructs.
  pattern = new RegExp('^' + sep + '?' +
    exports.escapeReg(
        pattern
          .replace(/\\/g, '/')
          .replace(/^\//, '')
      )
      .replace(new RegExp(sep + '\\*\\*' + sep, 'g'), sep + '.*(?:' + sep + ')?')
      .replace(new RegExp(sep + '\\*\\*', 'g'), sep + '.*')
      .replace(/\\\*\\\*/g, '.*')
      .replace(/\\\*/g, '[^' + sep + ']*')
      .replace(/\\\?/g, '[^' + sep + ']') + '$',
    'i'
  );
  if(typeof str === 'string'){
    return pattern.test(str);
  } else {
    return pattern;
  }
};
/**
 * Recursively collect project source files under a path, filtered by
 * include/exclude patterns (relative to `root` when given). Hidden entries
 * (dot-prefixed) are skipped. If the last argument is a function it is
 * invoked once per matching file.
 *
 * @param <String> rPath   start path
 * @param <String | Function> include
 * @param <String | Function> exclude
 * @param <String> root    prefix stripped before filtering
 * @param <Function> callback  optional per-file callback (as last argument)
 * @return <Array> sorted list of absolute file paths
 */
exports.find = function(rPath, include, exclude, root) {
  var args = Array.prototype.slice.call(arguments, 0);
  var last = args[args.length-1];  // possibly a per-file callback
  var list = [];
  var path = exports.realpath(rPath);
  var filterPath = root ? path.substring(root.length) : path;
  // Function-valued include/exclude are treated as "no pattern".
  include = exports.is(include, 'function') ? undefined : include;
  exclude = exports.is(exclude, 'function') ? undefined : exclude;
  if(path){
    var stat = fs.statSync(path);
    // NOTE(review): `(include || ...)` means directories are ALWAYS
    // descended into when an include pattern is given (only files are
    // filtered) — presumably intentional so nested matches are found.
    if(stat.isDirectory() && (include || exports.filter(filterPath, include, exclude))){
      fs.readdirSync(path).forEach(function(p){
        if(p[0] != '.') {
          list = list.concat(exports.find(path + '/' + p, include, exclude, root, last));
        }
      });
    } else if(stat.isFile() && exports.filter(filterPath, include, exclude)) {
      list.push(path);
      if(exports.is(last, 'function')) {
        last(path);
      }
    }
  } else {
    mix.log.error('unable to find [' + rPath + ']: No such file or directory.');
  }
  return list.sort();
};
/**
 * Run every processor configured under 'modules.<type>' through `callback`.
 *
 * Processors may be configured as a comma-separated string of module names,
 * a single function, or an array of either; string entries are loaded via
 * mix.require. Each processor's settings come from 'settings.<type>.<name>'.
 *
 * @param <String> type      config key suffix, e.g. 'parser.less'
 * @param <Function> callback  invoked as callback(processor, settings, key)
 * @param <Function> | <String> def  default for the config lookup
 * @return <undefined>
 */
exports.pipe = function(type, callback, def) {
  var processors = mix.config.get('modules.' + type, def);
  if(processors){
    var typeOf = typeof processors;
    if(typeOf === 'string'){
      processors = processors.trim().split(/\s*,\s*/);
    } else if(typeOf === 'function'){
      processors = [ processors ];
    }
    // Only the first segment names the processor family ('parser.less' -> 'parser').
    type = type.split('.')[0];
    processors.forEach(function(processor, index){
      var typeOf = typeof processor, key;
      if(typeOf === 'string'){
        key = type + '.' + processor;
        processor = mix.require(type, processor);
      } else {
        // Anonymous function processors are keyed by their position.
        key = type + '.' + index;
      }
      if(typeof processor === 'function'){
        var settings = mix.config.get('settings.' + key, {});
        // NOTE(review): the guard checks processor.defaultOptions but merges
        // processor.settings — one of the two property names is presumably a
        // typo; confirm against the processor module contract.
        if(processor.defaultOptions){
          settings = exports.extend(processor.settings, settings);
        }
        callback(processor, settings, key);
      } else {
        mix.log.warning('invalid processor [modules.' + key + ']');
      }
    });
  }
};
/**
 * Open a path or URL with the platform's default handler:
 * Windows `start`, Linux `xdg-open`/`gnome-open` (chosen by desktop-session
 * environment variables), otherwise `open` (macOS).
 *
 * @param <String> path  path or url
 * @param <Function> callback  invoked with `path` after the command returns
 * @return <undefined>
 */
exports.open = function(path, callback) {
  var child_process = require('child_process');
  var cmd = exports.escapeShellArg(path);
  if(ISWIN) {
    // The empty "" is start's window-title argument; without it a quoted
    // path would be consumed as the title.
    cmd = 'start "" ' + cmd;
  } else {
    if(process.env['XDG_SESSION_COOKIE']){
      cmd = 'xdg-open ' + cmd;
    } else if(process.env['GNOME_DESKTOP_SESSION_ID']){
      cmd = 'gnome-open ' + cmd;
    } else {
      cmd = 'open ' + cmd;
    }
  }
  child_process.exec(cmd, function(){
    typeof callback == 'function' && callback(path);
  });
};
/**
 * Build a case-insensitive RegExp that matches file extensions of a type
 * class ('text' or 'image'), e.g. /\.(?:js|css|...)$/i. Extra extensions
 * may be added through the 'project.fileType.<type>' config key (array or
 * comma-separated string).
 *
 * @param <String> type  'text' or 'image'
 * @return <Regular>
 */
function fileTypeReg(type) {
  var map = [], ext = mix.config.get('project.fileType.' + type);
  // TEXT_FILE_EXTS / IMAGE_FILE_EXTS are module-level lists defined above
  // this chunk.
  if(type === 'text'){
    map = TEXT_FILE_EXTS;
  } else if(type === 'image'){
    map = IMAGE_FILE_EXTS;
  } else {
    mix.log.error('invalid file type [' + type + ']');
  }
  // custom file type
  if(ext && ext.length){
    if(typeof ext === 'string'){
      ext = ext.split(/\s*,\s*/);
    }
    map = map.concat(ext);
  }
  map = map.join('|');
  return new RegExp('\\.(?:' + map + ')$', 'i');
}
| if | identifier_name |
user-experience-list.component.ts | import { Component, OnInit } from '@angular/core';
import { ExperienceService } from '../../services/experience.service';
import { Router } from '@angular/router';
import { Experience } from '../../models/experience-form-data';
import { AuthService } from '../../../login/services/auth.service';
@Component({
selector: 'app-user-experience-list',
templateUrl: './user-experience-list.component.html',
styleUrls: ['./user-experience-list.component.scss']
})
export class | implements OnInit {
experienceList: Experience[];
constructor(
private authService: AuthService,
private experienceService: ExperienceService,
private router: Router) { }
ngOnInit() {
this.authService.af.auth.onAuthStateChanged(
currentUser => {
if (currentUser) {
this.experienceService.getExperiencesByUserId(currentUser.uid)
.subscribe(experienceList => {
let returnExperiences: Experience[] = [];
for (let experience of experienceList) {
returnExperiences.push(new Experience(experience));
}
this.experienceList = returnExperiences;
});
}
}
);
}
}
| UserExperienceListComponent | identifier_name |
user-experience-list.component.ts | import { Component, OnInit } from '@angular/core'; |
@Component({
selector: 'app-user-experience-list',
templateUrl: './user-experience-list.component.html',
styleUrls: ['./user-experience-list.component.scss']
})
export class UserExperienceListComponent implements OnInit {
experienceList: Experience[];
constructor(
private authService: AuthService,
private experienceService: ExperienceService,
private router: Router) { }
ngOnInit() {
this.authService.af.auth.onAuthStateChanged(
currentUser => {
if (currentUser) {
this.experienceService.getExperiencesByUserId(currentUser.uid)
.subscribe(experienceList => {
let returnExperiences: Experience[] = [];
for (let experience of experienceList) {
returnExperiences.push(new Experience(experience));
}
this.experienceList = returnExperiences;
});
}
}
);
}
} | import { ExperienceService } from '../../services/experience.service';
import { Router } from '@angular/router';
import { Experience } from '../../models/experience-form-data';
import { AuthService } from '../../../login/services/auth.service'; | random_line_split |
user-experience-list.component.ts | import { Component, OnInit } from '@angular/core';
import { ExperienceService } from '../../services/experience.service';
import { Router } from '@angular/router';
import { Experience } from '../../models/experience-form-data';
import { AuthService } from '../../../login/services/auth.service';
@Component({
selector: 'app-user-experience-list',
templateUrl: './user-experience-list.component.html',
styleUrls: ['./user-experience-list.component.scss']
})
export class UserExperienceListComponent implements OnInit {
experienceList: Experience[];
constructor(
private authService: AuthService,
private experienceService: ExperienceService,
private router: Router) { }
ngOnInit() {
this.authService.af.auth.onAuthStateChanged(
currentUser => {
if (currentUser) |
}
);
}
}
| {
this.experienceService.getExperiencesByUserId(currentUser.uid)
.subscribe(experienceList => {
let returnExperiences: Experience[] = [];
for (let experience of experienceList) {
returnExperiences.push(new Experience(experience));
}
this.experienceList = returnExperiences;
});
} | conditional_block |
user-experience-list.component.ts | import { Component, OnInit } from '@angular/core';
import { ExperienceService } from '../../services/experience.service';
import { Router } from '@angular/router';
import { Experience } from '../../models/experience-form-data';
import { AuthService } from '../../../login/services/auth.service';
@Component({
selector: 'app-user-experience-list',
templateUrl: './user-experience-list.component.html',
styleUrls: ['./user-experience-list.component.scss']
})
export class UserExperienceListComponent implements OnInit {
experienceList: Experience[];
constructor(
private authService: AuthService,
private experienceService: ExperienceService,
private router: Router) { }
ngOnInit() |
}
| {
this.authService.af.auth.onAuthStateChanged(
currentUser => {
if (currentUser) {
this.experienceService.getExperiencesByUserId(currentUser.uid)
.subscribe(experienceList => {
let returnExperiences: Experience[] = [];
for (let experience of experienceList) {
returnExperiences.push(new Experience(experience));
}
this.experienceList = returnExperiences;
});
}
}
);
} | identifier_body |
SerialPortChoice.py | # -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2020 EventGhost Project <http://www.eventghost.net/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT | # with EventGhost. If not, see <http://www.gnu.org/licenses/>.
import wx
# Local imports
import eg
class SerialPortChoice(wx.Choice):
"""
A wx.Choice control that shows all available serial ports on the system.
"""
def __init__(
self,
parent,
id=-1,
pos=wx.DefaultPosition,
size=wx.DefaultSize,
style=0,
validator=wx.DefaultValidator,
name=wx.ChoiceNameStr,
value=None
):
"""
:Parameters:
`value` : int
The initial port to select (0 = COM1:). The first available
port will be selected if the given port does not exist or
no value is given.
"""
ports = eg.SerialThread.GetAllPorts()
self.ports = ports
choices = [("COM%d" % (portnum + 1)) for portnum in ports]
wx.Choice.__init__(
self, parent, id, pos, size, choices, style, validator, name
)
try:
portPos = ports.index(value)
except ValueError:
portPos = 0
self.SetSelection(portPos)
def GetValue(self):
"""
Return the currently selected serial port.
:rtype: int
:returns: The serial port as an integer (0 = COM1:)
"""
try:
port = self.ports[self.GetSelection()]
except:
port = 0
return port | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along | random_line_split |
SerialPortChoice.py | # -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2020 EventGhost Project <http://www.eventghost.net/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
import wx
# Local imports
import eg
class SerialPortChoice(wx.Choice):
"""
A wx.Choice control that shows all available serial ports on the system.
"""
def __init__(
self,
parent,
id=-1,
pos=wx.DefaultPosition,
size=wx.DefaultSize,
style=0,
validator=wx.DefaultValidator,
name=wx.ChoiceNameStr,
value=None
):
"""
:Parameters:
`value` : int
The initial port to select (0 = COM1:). The first available
port will be selected if the given port does not exist or
no value is given.
"""
ports = eg.SerialThread.GetAllPorts()
self.ports = ports
choices = [("COM%d" % (portnum + 1)) for portnum in ports]
wx.Choice.__init__(
self, parent, id, pos, size, choices, style, validator, name
)
try:
portPos = ports.index(value)
except ValueError:
portPos = 0
self.SetSelection(portPos)
def G | self):
"""
Return the currently selected serial port.
:rtype: int
:returns: The serial port as an integer (0 = COM1:)
"""
try:
port = self.ports[self.GetSelection()]
except:
port = 0
return port
| etValue( | identifier_name |
SerialPortChoice.py | # -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2020 EventGhost Project <http://www.eventghost.net/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
import wx
# Local imports
import eg
class SerialPortChoice(wx.Choice):
" | ""
A wx.Choice control that shows all available serial ports on the system.
"""
def __init__(
self,
parent,
id=-1,
pos=wx.DefaultPosition,
size=wx.DefaultSize,
style=0,
validator=wx.DefaultValidator,
name=wx.ChoiceNameStr,
value=None
):
"""
:Parameters:
`value` : int
The initial port to select (0 = COM1:). The first available
port will be selected if the given port does not exist or
no value is given.
"""
ports = eg.SerialThread.GetAllPorts()
self.ports = ports
choices = [("COM%d" % (portnum + 1)) for portnum in ports]
wx.Choice.__init__(
self, parent, id, pos, size, choices, style, validator, name
)
try:
portPos = ports.index(value)
except ValueError:
portPos = 0
self.SetSelection(portPos)
def GetValue(self):
"""
Return the currently selected serial port.
:rtype: int
:returns: The serial port as an integer (0 = COM1:)
"""
try:
port = self.ports[self.GetSelection()]
except:
port = 0
return port
| identifier_body |
|
dashlet-interactions.component.ts | import { Component, OnInit, OnDestroy } from '@angular/core';
import { MatDialog } from '@angular/material';
import { Observable } from 'rxjs/Observable';
import { Store } from '@ngrx/store';
import { takeWhile } from 'rxjs/operators';
import { AppState, getMyInteractions, Interaction, CompleteInteractionAction, AuthService } from '@memberhivex/core';
@Component({
selector: 'mh-dashlet-interactions',
templateUrl: './dashlet-interactions.component.html',
styleUrls: ['./dashlet-interactions.component.scss']
})
export class DashletInteractionsComponent implements OnInit, OnDestroy {
private _alive: boolean = true;
myId: string = '';
myInteractions$: Observable<Interaction[]>;
myOutstanding: Interaction[];
myCompleted: Interaction[];
constructor(private _store: Store<AppState>, private _auth: AuthService, private _dialog: MatDialog) {
this.myId = this._auth.personId;
}
ngOnInit(): void {
this.myInteractions$ = this._store.select(getMyInteractions);
this.myInteractions$.pipe(takeWhile(() => this._alive)).subscribe((data: Interaction[]) => {
this.myOutstanding = data.filter(
(i: Interaction) => !i.actions[this.myId].doneOn && !i.actions[this.myId].completedOn
);
this.myCompleted = data.filter(
(i: Interaction) => !i.actions[this.myId].doneOn && i.actions[this.myId].completedOn
);
});
}
|
complete(id: number, checked: boolean): void {
// console.log(id, checked);
this._store.dispatch(new CompleteInteractionAction({ id: id, complete: checked }));
}
delete(interaction: Interaction): void {
// this._interactionService.remove(interaction.id);
}
ngOnDestroy(): void {
this._alive = false;
}
} | settingsDlg(): void {
// open settings dialog
} | random_line_split |
dashlet-interactions.component.ts | import { Component, OnInit, OnDestroy } from '@angular/core';
import { MatDialog } from '@angular/material';
import { Observable } from 'rxjs/Observable';
import { Store } from '@ngrx/store';
import { takeWhile } from 'rxjs/operators';
import { AppState, getMyInteractions, Interaction, CompleteInteractionAction, AuthService } from '@memberhivex/core';
@Component({
selector: 'mh-dashlet-interactions',
templateUrl: './dashlet-interactions.component.html',
styleUrls: ['./dashlet-interactions.component.scss']
})
export class DashletInteractionsComponent implements OnInit, OnDestroy {
private _alive: boolean = true;
myId: string = '';
myInteractions$: Observable<Interaction[]>;
myOutstanding: Interaction[];
myCompleted: Interaction[];
constructor(private _store: Store<AppState>, private _auth: AuthService, private _dialog: MatDialog) {
this.myId = this._auth.personId;
}
ngOnInit(): void {
this.myInteractions$ = this._store.select(getMyInteractions);
this.myInteractions$.pipe(takeWhile(() => this._alive)).subscribe((data: Interaction[]) => {
this.myOutstanding = data.filter(
(i: Interaction) => !i.actions[this.myId].doneOn && !i.actions[this.myId].completedOn
);
this.myCompleted = data.filter(
(i: Interaction) => !i.actions[this.myId].doneOn && i.actions[this.myId].completedOn
);
});
}
| (): void {
// open settings dialog
}
complete(id: number, checked: boolean): void {
// console.log(id, checked);
this._store.dispatch(new CompleteInteractionAction({ id: id, complete: checked }));
}
delete(interaction: Interaction): void {
// this._interactionService.remove(interaction.id);
}
ngOnDestroy(): void {
this._alive = false;
}
}
| settingsDlg | identifier_name |
dashlet-interactions.component.ts | import { Component, OnInit, OnDestroy } from '@angular/core';
import { MatDialog } from '@angular/material';
import { Observable } from 'rxjs/Observable';
import { Store } from '@ngrx/store';
import { takeWhile } from 'rxjs/operators';
import { AppState, getMyInteractions, Interaction, CompleteInteractionAction, AuthService } from '@memberhivex/core';
@Component({
selector: 'mh-dashlet-interactions',
templateUrl: './dashlet-interactions.component.html',
styleUrls: ['./dashlet-interactions.component.scss']
})
export class DashletInteractionsComponent implements OnInit, OnDestroy {
private _alive: boolean = true;
myId: string = '';
myInteractions$: Observable<Interaction[]>;
myOutstanding: Interaction[];
myCompleted: Interaction[];
constructor(private _store: Store<AppState>, private _auth: AuthService, private _dialog: MatDialog) {
this.myId = this._auth.personId;
}
ngOnInit(): void {
this.myInteractions$ = this._store.select(getMyInteractions);
this.myInteractions$.pipe(takeWhile(() => this._alive)).subscribe((data: Interaction[]) => {
this.myOutstanding = data.filter(
(i: Interaction) => !i.actions[this.myId].doneOn && !i.actions[this.myId].completedOn
);
this.myCompleted = data.filter(
(i: Interaction) => !i.actions[this.myId].doneOn && i.actions[this.myId].completedOn
);
});
}
settingsDlg(): void |
complete(id: number, checked: boolean): void {
// console.log(id, checked);
this._store.dispatch(new CompleteInteractionAction({ id: id, complete: checked }));
}
delete(interaction: Interaction): void {
// this._interactionService.remove(interaction.id);
}
ngOnDestroy(): void {
this._alive = false;
}
}
| {
// open settings dialog
} | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.