file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
test_rng.rs | extern crate cryptopals;
extern crate rand;
extern crate time;
use cryptopals::crypto::rng::{MT, untemper};
use rand::{Rng, SeedableRng, thread_rng};
use time::{get_time};
#[test]
fn test_rng_deterministic() |
#[test]
fn test_seed_recovery_from_time() {
let mut time = get_time().sec;
time += thread_rng().gen_range(40, 1000);
let mut m: MT = SeedableRng::from_seed(time as u32);
let output = m.gen::<u32>();
for seed in get_time().sec + 2000 .. 0 {
let mut checker: MT = SeedableRng::from_seed(seed as u32);
if checker.gen::<u32>() == output {
assert_eq!(seed, time);
break;
}
}
}
#[test]
fn test_untemper() {
let mut m: MT = SeedableRng::from_seed(314159);
for i in 0 .. 624 {
let output = m.gen::<u32>();
assert_eq!(untemper(output), m.state[i]);
}
}
#[test]
fn test_rng_clone_from_output() {
let mut m: MT = SeedableRng::from_seed(314159);
let mut state = [0; 624];
for i in 0 .. 624 {
state[i] = untemper(m.gen::<u32>());
}
let mut cloned = MT { state: state, index: 624 };
for _ in 0 .. 1024 {
assert_eq!(cloned.gen::<u32>(), m.gen::<u32>());
}
}
| {
let mut m1: MT = SeedableRng::from_seed(314159);
let mut m2: MT = SeedableRng::from_seed(314159);
for _ in 0 .. 1024 {
assert_eq!(m1.gen::<u32>(), m2.gen::<u32>());
}
} | identifier_body |
test_rng.rs | extern crate cryptopals;
extern crate rand;
extern crate time;
use cryptopals::crypto::rng::{MT, untemper};
use rand::{Rng, SeedableRng, thread_rng};
use time::{get_time};
#[test]
fn test_rng_deterministic() {
let mut m1: MT = SeedableRng::from_seed(314159);
let mut m2: MT = SeedableRng::from_seed(314159);
for _ in 0 .. 1024 {
assert_eq!(m1.gen::<u32>(), m2.gen::<u32>());
}
}
#[test]
fn test_seed_recovery_from_time() {
let mut time = get_time().sec;
time += thread_rng().gen_range(40, 1000);
let mut m: MT = SeedableRng::from_seed(time as u32);
let output = m.gen::<u32>();
for seed in get_time().sec + 2000 .. 0 {
let mut checker: MT = SeedableRng::from_seed(seed as u32);
if checker.gen::<u32>() == output |
}
}
#[test]
fn test_untemper() {
let mut m: MT = SeedableRng::from_seed(314159);
for i in 0 .. 624 {
let output = m.gen::<u32>();
assert_eq!(untemper(output), m.state[i]);
}
}
#[test]
fn test_rng_clone_from_output() {
let mut m: MT = SeedableRng::from_seed(314159);
let mut state = [0; 624];
for i in 0 .. 624 {
state[i] = untemper(m.gen::<u32>());
}
let mut cloned = MT { state: state, index: 624 };
for _ in 0 .. 1024 {
assert_eq!(cloned.gen::<u32>(), m.gen::<u32>());
}
}
| {
assert_eq!(seed, time);
break;
} | conditional_block |
test_rng.rs | extern crate cryptopals;
extern crate rand;
extern crate time;
use cryptopals::crypto::rng::{MT, untemper};
use rand::{Rng, SeedableRng, thread_rng};
use time::{get_time};
#[test]
fn test_rng_deterministic() {
let mut m1: MT = SeedableRng::from_seed(314159);
let mut m2: MT = SeedableRng::from_seed(314159);
for _ in 0 .. 1024 {
assert_eq!(m1.gen::<u32>(), m2.gen::<u32>());
}
}
#[test]
fn | () {
let mut time = get_time().sec;
time += thread_rng().gen_range(40, 1000);
let mut m: MT = SeedableRng::from_seed(time as u32);
let output = m.gen::<u32>();
for seed in get_time().sec + 2000 .. 0 {
let mut checker: MT = SeedableRng::from_seed(seed as u32);
if checker.gen::<u32>() == output {
assert_eq!(seed, time);
break;
}
}
}
#[test]
fn test_untemper() {
let mut m: MT = SeedableRng::from_seed(314159);
for i in 0 .. 624 {
let output = m.gen::<u32>();
assert_eq!(untemper(output), m.state[i]);
}
}
#[test]
fn test_rng_clone_from_output() {
let mut m: MT = SeedableRng::from_seed(314159);
let mut state = [0; 624];
for i in 0 .. 624 {
state[i] = untemper(m.gen::<u32>());
}
let mut cloned = MT { state: state, index: 624 };
for _ in 0 .. 1024 {
assert_eq!(cloned.gen::<u32>(), m.gen::<u32>());
}
}
| test_seed_recovery_from_time | identifier_name |
configparser.py | #!/usr/bin/env python
# encoding: utf-8
#
# AuthorDetector
# Copyright (C) 2013 Larroque Stephen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from auxlib import *
from collections import OrderedDict
json = import_module('ujson')
if json is None:
json = import_module('json')
if json is None:
raise RuntimeError('Unable to find a json implementation')
## ConfigParser
#
# Configuration parser, will parse and load in memory the configuration and commandline switches
class ConfigParser(object):
# Configuration file path
configfile = 'config.json'
# Configuration parameters tree (will be referenced by almost all other objects across the whole application)
config = []
## Constructor
def __init__(self, *args, **kwargs):
|
## Initialize the ConfigParser object by checking that the configuration file exists
# @param configfile Path to the configuration file (must exists or else the application will crash!)
def init(self, configfile=None, *args, **kwargs):
if configfile:
try:
with open(configfile): pass # check that the file exists
self.configfile = configfile
except IOError, e:
print "Can't open the specified configuration file %s, error: %s" % (configfile, str(e))
return
## Load a configuration file into the local dict
# @param pargs Recognized (processed) commandline arguments (this will overwrite parameters from the configuration file in case of conflicts)
# @param extras Unrecognized (unprocessed) commandline arguments (will also overwrite parameters from the configuration file)
# @param comments If set to true, Javascript-like comments will be filtered from the configuration file
def load(self, pargs=None, extras=None, comments=True, *args, **kwargs):
# Loading the configuration file
with file(self.configfile) as f:
# If there are comments in the config, filter them before converting the json to a Python object
if comments:
self.config = json.loads(self._removecomments(f.read()))
# Else we can directly load the json
else:
self.config = json.loads(f.read())
# Overwriting with recognized commandline switches
if pargs:
for key, value in pargs.iteritems():
# only add the argument in config if the argument has a value (not False nor None) and this key is not already defined in the config (so an argument can only overwrite config if defined)
if not (self.config.has_key(key) and not value):
self.config[key] = value
# Overwriting with extras commandline switches
if extras:
i = 0
while i < len(extras):
key = extras[i]
# Check if the argument accepts a value
if '--' in key and i+1 < len(extras) and not '--' in extras[i+1]: # if the argument begins with --, and there is an argument after this one, and the next argument is in fact a value (does not begin with --), we store it with the value
self.config[key.lstrip('-')] = extras[i+1]
i += 1 # skip the next argument (which we used as a value)
# Else this argument has no value, we just set it to True
else:
self.config[key.lstrip('-')] = True
i += 1
## Reload the configuration file
def reload(self, *args, **kwargs):
self.load(comments=True, *args, **kwargs)
## Save the current configuration (with commandline arguments processed) into a file
# @param file Path to where the configuration file should be saved
def save(self, file, *args, **kwargs):
with open(file, 'wb') as f: # open in binary mode to avoid line returns translation (else the reading will be flawed!). We have to do it both at saving and at reading.
f.write( json.dumps(self.config, sort_keys=True, indent=4) ) # write the config as a json serialized string, but beautified to be more human readable
return True
# Get a value from the config dict (this is a proxy method)
def get(self, *args, **kwargs):
if isinstance(self.config, (dict, OrderedDict)):
return self.config.get(*args, **kwargs)
else:
# Safe list getter, with exception handling and default value supplied
try:
return self.config[args[0]]
except IndexError:
if len(args > 1):
return args[1]
else: # by default if no default value was specified, we return None (just like for dictionaries)
return None
# Set a value in the config dict (this is a proxy method)
def set(self, *args, **kwargs):
return self.config.update(*args, **kwargs)
# Set a value in the config dict (this is a proxy method)
def update(self, *args, **kwargs):
return self.config.update(*args, **kwargs)
## Filter efficiently Javascript-like inline and multiline comments from a JSON file
# Author: WizKid https://gist.github.com/WizKid/1170297
# @param s string to filter
# @return string filtered string without comments
def _removecomments(self, s):
inCommentSingle = False
inCommentMulti = False
inString = False
t = []
l = len(s)
i = 0
fromIndex = 0
while i < l:
c = s[i]
if not inCommentMulti and not inCommentSingle:
if c == '"':
slashes = 0
for j in xrange(i - 1, 0, -1):
if s[j] != '\\':
break
slashes += 1
if slashes % 2 == 0:
inString = not inString
elif not inString:
if c == '#':
inCommentSingle = True
t.append(s[fromIndex:i])
elif c == '/' and i + 1 < l:
cn = s[i + 1]
if cn == '/':
inCommentSingle = True
t.append(s[fromIndex:i])
i += 1
elif cn == '*':
inCommentMulti = True
t.append(s[fromIndex:i])
i += 1
elif inCommentSingle and (c == '\n' or c == '\r'):
inCommentSingle = False
fromIndex = i
elif inCommentMulti and c == '*' and i + 1 < l and s[i + 1] == '/':
inCommentMulti = False
i += 1
fromIndex = i + 1
i += 1
if not inCommentSingle and not inCommentMulti:
t.append(s[fromIndex:len(s)])
return "".join(t)
| return object.__init__(self, *args, **kwargs) | identifier_body |
configparser.py | #!/usr/bin/env python
# encoding: utf-8
#
# AuthorDetector
# Copyright (C) 2013 Larroque Stephen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from auxlib import *
from collections import OrderedDict
json = import_module('ujson')
if json is None:
json = import_module('json')
if json is None:
raise RuntimeError('Unable to find a json implementation')
## ConfigParser
#
# Configuration parser, will parse and load in memory the configuration and commandline switches
class ConfigParser(object):
# Configuration file path
configfile = 'config.json'
# Configuration parameters tree (will be referenced by almost all other objects across the whole application)
config = []
## Constructor
def __init__(self, *args, **kwargs):
return object.__init__(self, *args, **kwargs)
## Initialize the ConfigParser object by checking that the configuration file exists
# @param configfile Path to the configuration file (must exists or else the application will crash!)
def init(self, configfile=None, *args, **kwargs):
if configfile:
try:
with open(configfile): pass # check that the file exists
self.configfile = configfile
except IOError, e:
print "Can't open the specified configuration file %s, error: %s" % (configfile, str(e))
return
## Load a configuration file into the local dict
# @param pargs Recognized (processed) commandline arguments (this will overwrite parameters from the configuration file in case of conflicts)
# @param extras Unrecognized (unprocessed) commandline arguments (will also overwrite parameters from the configuration file)
# @param comments If set to true, Javascript-like comments will be filtered from the configuration file
def load(self, pargs=None, extras=None, comments=True, *args, **kwargs):
# Loading the configuration file
with file(self.configfile) as f:
# If there are comments in the config, filter them before converting the json to a Python object
if comments:
self.config = json.loads(self._removecomments(f.read()))
# Else we can directly load the json
else:
self.config = json.loads(f.read())
# Overwriting with recognized commandline switches
if pargs:
for key, value in pargs.iteritems():
# only add the argument in config if the argument has a value (not False nor None) and this key is not already defined in the config (so an argument can only overwrite config if defined)
if not (self.config.has_key(key) and not value):
self.config[key] = value
# Overwriting with extras commandline switches
if extras:
i = 0
while i < len(extras):
key = extras[i]
# Check if the argument accepts a value
if '--' in key and i+1 < len(extras) and not '--' in extras[i+1]: # if the argument begins with --, and there is an argument after this one, and the next argument is in fact a value (does not begin with --), we store it with the value
self.config[key.lstrip('-')] = extras[i+1]
i += 1 # skip the next argument (which we used as a value)
# Else this argument has no value, we just set it to True
else:
self.config[key.lstrip('-')] = True
i += 1
## Reload the configuration file
def reload(self, *args, **kwargs):
self.load(comments=True, *args, **kwargs)
## Save the current configuration (with commandline arguments processed) into a file
# @param file Path to where the configuration file should be saved
def save(self, file, *args, **kwargs):
with open(file, 'wb') as f: # open in binary mode to avoid line returns translation (else the reading will be flawed!). We have to do it both at saving and at reading.
f.write( json.dumps(self.config, sort_keys=True, indent=4) ) # write the config as a json serialized string, but beautified to be more human readable
return True
# Get a value from the config dict (this is a proxy method)
def get(self, *args, **kwargs):
if isinstance(self.config, (dict, OrderedDict)):
return self.config.get(*args, **kwargs)
else:
# Safe list getter, with exception handling and default value supplied
try:
return self.config[args[0]]
except IndexError:
if len(args > 1):
return args[1]
else: # by default if no default value was specified, we return None (just like for dictionaries)
return None
# Set a value in the config dict (this is a proxy method)
def set(self, *args, **kwargs):
return self.config.update(*args, **kwargs)
# Set a value in the config dict (this is a proxy method)
def update(self, *args, **kwargs):
return self.config.update(*args, **kwargs)
## Filter efficiently Javascript-like inline and multiline comments from a JSON file
# Author: WizKid https://gist.github.com/WizKid/1170297
# @param s string to filter
# @return string filtered string without comments
def _removecomments(self, s):
inCommentSingle = False
inCommentMulti = False
inString = False
t = []
l = len(s)
i = 0
fromIndex = 0
while i < l:
c = s[i]
if not inCommentMulti and not inCommentSingle:
if c == '"':
slashes = 0
for j in xrange(i - 1, 0, -1):
if s[j] != '\\':
break
slashes += 1
if slashes % 2 == 0:
inString = not inString
elif not inString:
if c == '#':
inCommentSingle = True
t.append(s[fromIndex:i])
elif c == '/' and i + 1 < l:
cn = s[i + 1]
if cn == '/':
|
elif cn == '*':
inCommentMulti = True
t.append(s[fromIndex:i])
i += 1
elif inCommentSingle and (c == '\n' or c == '\r'):
inCommentSingle = False
fromIndex = i
elif inCommentMulti and c == '*' and i + 1 < l and s[i + 1] == '/':
inCommentMulti = False
i += 1
fromIndex = i + 1
i += 1
if not inCommentSingle and not inCommentMulti:
t.append(s[fromIndex:len(s)])
return "".join(t)
| inCommentSingle = True
t.append(s[fromIndex:i])
i += 1 | conditional_block |
configparser.py | #!/usr/bin/env python
# encoding: utf-8
#
# AuthorDetector
# Copyright (C) 2013 Larroque Stephen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from auxlib import *
from collections import OrderedDict
json = import_module('ujson')
if json is None:
json = import_module('json')
if json is None:
raise RuntimeError('Unable to find a json implementation')
## ConfigParser
#
# Configuration parser, will parse and load in memory the configuration and commandline switches
class ConfigParser(object):
# Configuration file path
configfile = 'config.json'
# Configuration parameters tree (will be referenced by almost all other objects across the whole application)
config = []
## Constructor
def __init__(self, *args, **kwargs):
return object.__init__(self, *args, **kwargs)
## Initialize the ConfigParser object by checking that the configuration file exists
# @param configfile Path to the configuration file (must exists or else the application will crash!)
def init(self, configfile=None, *args, **kwargs):
if configfile:
try:
with open(configfile): pass # check that the file exists
self.configfile = configfile
except IOError, e:
print "Can't open the specified configuration file %s, error: %s" % (configfile, str(e))
return
## Load a configuration file into the local dict
# @param pargs Recognized (processed) commandline arguments (this will overwrite parameters from the configuration file in case of conflicts)
# @param extras Unrecognized (unprocessed) commandline arguments (will also overwrite parameters from the configuration file)
# @param comments If set to true, Javascript-like comments will be filtered from the configuration file
def load(self, pargs=None, extras=None, comments=True, *args, **kwargs):
# Loading the configuration file
with file(self.configfile) as f:
# If there are comments in the config, filter them before converting the json to a Python object
if comments:
self.config = json.loads(self._removecomments(f.read()))
# Else we can directly load the json
else:
self.config = json.loads(f.read())
# Overwriting with recognized commandline switches
if pargs:
for key, value in pargs.iteritems():
# only add the argument in config if the argument has a value (not False nor None) and this key is not already defined in the config (so an argument can only overwrite config if defined)
if not (self.config.has_key(key) and not value):
self.config[key] = value
# Overwriting with extras commandline switches
if extras:
i = 0
while i < len(extras):
key = extras[i]
# Check if the argument accepts a value
if '--' in key and i+1 < len(extras) and not '--' in extras[i+1]: # if the argument begins with --, and there is an argument after this one, and the next argument is in fact a value (does not begin with --), we store it with the value
self.config[key.lstrip('-')] = extras[i+1]
i += 1 # skip the next argument (which we used as a value)
# Else this argument has no value, we just set it to True
else:
self.config[key.lstrip('-')] = True
i += 1
## Reload the configuration file
def reload(self, *args, **kwargs):
self.load(comments=True, *args, **kwargs)
## Save the current configuration (with commandline arguments processed) into a file
# @param file Path to where the configuration file should be saved
def save(self, file, *args, **kwargs):
with open(file, 'wb') as f: # open in binary mode to avoid line returns translation (else the reading will be flawed!). We have to do it both at saving and at reading.
f.write( json.dumps(self.config, sort_keys=True, indent=4) ) # write the config as a json serialized string, but beautified to be more human readable
return True
# Get a value from the config dict (this is a proxy method)
def get(self, *args, **kwargs):
if isinstance(self.config, (dict, OrderedDict)):
return self.config.get(*args, **kwargs)
else:
# Safe list getter, with exception handling and default value supplied
try:
return self.config[args[0]]
except IndexError:
if len(args > 1):
return args[1]
else: # by default if no default value was specified, we return None (just like for dictionaries)
return None
# Set a value in the config dict (this is a proxy method)
def set(self, *args, **kwargs): | # Set a value in the config dict (this is a proxy method)
def update(self, *args, **kwargs):
return self.config.update(*args, **kwargs)
## Filter efficiently Javascript-like inline and multiline comments from a JSON file
# Author: WizKid https://gist.github.com/WizKid/1170297
# @param s string to filter
# @return string filtered string without comments
def _removecomments(self, s):
inCommentSingle = False
inCommentMulti = False
inString = False
t = []
l = len(s)
i = 0
fromIndex = 0
while i < l:
c = s[i]
if not inCommentMulti and not inCommentSingle:
if c == '"':
slashes = 0
for j in xrange(i - 1, 0, -1):
if s[j] != '\\':
break
slashes += 1
if slashes % 2 == 0:
inString = not inString
elif not inString:
if c == '#':
inCommentSingle = True
t.append(s[fromIndex:i])
elif c == '/' and i + 1 < l:
cn = s[i + 1]
if cn == '/':
inCommentSingle = True
t.append(s[fromIndex:i])
i += 1
elif cn == '*':
inCommentMulti = True
t.append(s[fromIndex:i])
i += 1
elif inCommentSingle and (c == '\n' or c == '\r'):
inCommentSingle = False
fromIndex = i
elif inCommentMulti and c == '*' and i + 1 < l and s[i + 1] == '/':
inCommentMulti = False
i += 1
fromIndex = i + 1
i += 1
if not inCommentSingle and not inCommentMulti:
t.append(s[fromIndex:len(s)])
return "".join(t) | return self.config.update(*args, **kwargs)
| random_line_split |
configparser.py | #!/usr/bin/env python
# encoding: utf-8
#
# AuthorDetector
# Copyright (C) 2013 Larroque Stephen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from auxlib import *
from collections import OrderedDict
json = import_module('ujson')
if json is None:
json = import_module('json')
if json is None:
raise RuntimeError('Unable to find a json implementation')
## ConfigParser
#
# Configuration parser, will parse and load in memory the configuration and commandline switches
class ConfigParser(object):
# Configuration file path
configfile = 'config.json'
# Configuration parameters tree (will be referenced by almost all other objects across the whole application)
config = []
## Constructor
def | (self, *args, **kwargs):
return object.__init__(self, *args, **kwargs)
## Initialize the ConfigParser object by checking that the configuration file exists
# @param configfile Path to the configuration file (must exists or else the application will crash!)
def init(self, configfile=None, *args, **kwargs):
if configfile:
try:
with open(configfile): pass # check that the file exists
self.configfile = configfile
except IOError, e:
print "Can't open the specified configuration file %s, error: %s" % (configfile, str(e))
return
## Load a configuration file into the local dict
# @param pargs Recognized (processed) commandline arguments (this will overwrite parameters from the configuration file in case of conflicts)
# @param extras Unrecognized (unprocessed) commandline arguments (will also overwrite parameters from the configuration file)
# @param comments If set to true, Javascript-like comments will be filtered from the configuration file
def load(self, pargs=None, extras=None, comments=True, *args, **kwargs):
# Loading the configuration file
with file(self.configfile) as f:
# If there are comments in the config, filter them before converting the json to a Python object
if comments:
self.config = json.loads(self._removecomments(f.read()))
# Else we can directly load the json
else:
self.config = json.loads(f.read())
# Overwriting with recognized commandline switches
if pargs:
for key, value in pargs.iteritems():
# only add the argument in config if the argument has a value (not False nor None) and this key is not already defined in the config (so an argument can only overwrite config if defined)
if not (self.config.has_key(key) and not value):
self.config[key] = value
# Overwriting with extras commandline switches
if extras:
i = 0
while i < len(extras):
key = extras[i]
# Check if the argument accepts a value
if '--' in key and i+1 < len(extras) and not '--' in extras[i+1]: # if the argument begins with --, and there is an argument after this one, and the next argument is in fact a value (does not begin with --), we store it with the value
self.config[key.lstrip('-')] = extras[i+1]
i += 1 # skip the next argument (which we used as a value)
# Else this argument has no value, we just set it to True
else:
self.config[key.lstrip('-')] = True
i += 1
## Reload the configuration file
def reload(self, *args, **kwargs):
self.load(comments=True, *args, **kwargs)
## Save the current configuration (with commandline arguments processed) into a file
# @param file Path to where the configuration file should be saved
def save(self, file, *args, **kwargs):
with open(file, 'wb') as f: # open in binary mode to avoid line returns translation (else the reading will be flawed!). We have to do it both at saving and at reading.
f.write( json.dumps(self.config, sort_keys=True, indent=4) ) # write the config as a json serialized string, but beautified to be more human readable
return True
# Get a value from the config dict (this is a proxy method)
def get(self, *args, **kwargs):
if isinstance(self.config, (dict, OrderedDict)):
return self.config.get(*args, **kwargs)
else:
# Safe list getter, with exception handling and default value supplied
try:
return self.config[args[0]]
except IndexError:
if len(args > 1):
return args[1]
else: # by default if no default value was specified, we return None (just like for dictionaries)
return None
# Set a value in the config dict (this is a proxy method)
def set(self, *args, **kwargs):
return self.config.update(*args, **kwargs)
# Set a value in the config dict (this is a proxy method)
def update(self, *args, **kwargs):
return self.config.update(*args, **kwargs)
## Filter efficiently Javascript-like inline and multiline comments from a JSON file
# Author: WizKid https://gist.github.com/WizKid/1170297
# @param s string to filter
# @return string filtered string without comments
def _removecomments(self, s):
inCommentSingle = False
inCommentMulti = False
inString = False
t = []
l = len(s)
i = 0
fromIndex = 0
while i < l:
c = s[i]
if not inCommentMulti and not inCommentSingle:
if c == '"':
slashes = 0
for j in xrange(i - 1, 0, -1):
if s[j] != '\\':
break
slashes += 1
if slashes % 2 == 0:
inString = not inString
elif not inString:
if c == '#':
inCommentSingle = True
t.append(s[fromIndex:i])
elif c == '/' and i + 1 < l:
cn = s[i + 1]
if cn == '/':
inCommentSingle = True
t.append(s[fromIndex:i])
i += 1
elif cn == '*':
inCommentMulti = True
t.append(s[fromIndex:i])
i += 1
elif inCommentSingle and (c == '\n' or c == '\r'):
inCommentSingle = False
fromIndex = i
elif inCommentMulti and c == '*' and i + 1 < l and s[i + 1] == '/':
inCommentMulti = False
i += 1
fromIndex = i + 1
i += 1
if not inCommentSingle and not inCommentMulti:
t.append(s[fromIndex:len(s)])
return "".join(t)
| __init__ | identifier_name |
angularFactoryService.js | angular.module('MODULE.SUBMODULE').factory(
'FACTORY_SERVICE',
function FACTORY_SERVICEProvider(
$log,
otherService
) {
'use strict';
var constructor;
constructor = function constructor (spec) {
var initialize,
instanceVar1,
instanceVar2,
private1,
private2,
public1,
public2;
// -----
// PRIVATE
// ----------
initialize = function initialize(spec) {
// Constructor-like function.
instanceVar1 = 42;
instanceVar2 = 'foo';
};
private1 = function private1(spec) {
// body...
};
private2 = function private2(spec) {
// body...
};
// -----
// PUBLIC
// ----------
public1 = function public1(spec) {
// body...
};
public2 = function public2(spec) {
// body...
};
destroy = function destroy () {
// body...
}
// -----
// EXECUTE | // ----------
initialize(spec);
return {
public1: public1,
public2: public2,
destroy: destroy
};
}
return constructor
}
);
// Usage
angular.module('MODULE.SUBMODULE').contoller(
'CONTROLLER',
function SERVICEProvider(
FACTORY_SERVICE
) {
if (currentObject) {
currentObject.destroy();
currentObject = undefined;
}
currentObject = FACTORY_SERVICE(spec);
}
); | random_line_split |
|
angularFactoryService.js | angular.module('MODULE.SUBMODULE').factory(
'FACTORY_SERVICE',
function FACTORY_SERVICEProvider(
$log,
otherService
) {
'use strict';
var constructor;
constructor = function constructor (spec) {
var initialize,
instanceVar1,
instanceVar2,
private1,
private2,
public1,
public2;
// -----
// PRIVATE
// ----------
initialize = function initialize(spec) {
// Constructor-like function.
instanceVar1 = 42;
instanceVar2 = 'foo';
};
private1 = function private1(spec) {
// body...
};
private2 = function private2(spec) {
// body...
};
// -----
// PUBLIC
// ----------
public1 = function public1(spec) {
// body...
};
public2 = function public2(spec) {
// body...
};
destroy = function destroy () {
// body...
}
// -----
// EXECUTE
// ----------
initialize(spec);
return {
public1: public1,
public2: public2,
destroy: destroy
};
}
return constructor
}
);
// Usage
angular.module('MODULE.SUBMODULE').contoller(
'CONTROLLER',
function SERVICEProvider(
FACTORY_SERVICE
) {
if (currentObject) |
currentObject = FACTORY_SERVICE(spec);
}
);
| {
currentObject.destroy();
currentObject = undefined;
} | conditional_block |
manager.rs | // Copyright 2015-2018 Deyan Ginev. See the LICENSE
// file at the top-level directory of this distribution.
//
// Licensed under the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>.
// This file may not be copied, modified, or distributed
// except according to those terms.
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;
use crate::backend::DEFAULT_DB_ADDRESS;
use crate::dispatcher::finalize::Finalize;
use crate::dispatcher::sink::Sink;
use crate::dispatcher::ventilator::Ventilator;
use crate::helpers::{TaskProgress, TaskReport};
use crate::models::Service;
use zmq::Error;
/// Manager struct responsible for dispatching and receiving tasks
pub struct TaskManager {
/// port for requesting/dispatching jobs
pub source_port: usize,
/// port for responding/receiving results
pub result_port: usize,
/// the size of the dispatch queue
/// (also the batch size for Task store queue requests)
pub queue_size: usize,
/// size of an individual message chunk sent via zeromq
/// (keep this small to avoid large RAM use, increase to reduce network bandwidth)
pub message_size: usize,
/// address for the Task store postgres endpoint
pub backend_address: String,
}
impl Default for TaskManager {
fn default() -> TaskManager {
TaskManager {
source_port: 51695,
result_port: 51696,
queue_size: 100,
message_size: 100_000,
backend_address: DEFAULT_DB_ADDRESS.to_string(),
}
}
}
impl TaskManager {
/// Starts a new manager, spinning of dispatch/sink servers, listening on the specified ports
pub fn start(&self, job_limit: Option<usize>) -> Result<(), Error> {
// We'll use some local memoization shared between source and sink:
let services: HashMap<String, Option<Service>> = HashMap::new();
let progress_queue: HashMap<i64, TaskProgress> = HashMap::new();
let done_queue: Vec<TaskReport> = Vec::new();
let services_arc = Arc::new(Mutex::new(services));
let progress_queue_arc = Arc::new(Mutex::new(progress_queue));
let done_queue_arc = Arc::new(Mutex::new(done_queue));
// First prepare the source ventilator
let source_port = self.source_port;
let source_queue_size = self.queue_size;
let source_message_size = self.message_size;
let source_backend_address = self.backend_address.clone();
let vent_services_arc = services_arc.clone();
let vent_progress_queue_arc = progress_queue_arc.clone();
let vent_done_queue_arc = done_queue_arc.clone();
let vent_thread = thread::spawn(move || {
Ventilator {
port: source_port,
queue_size: source_queue_size,
message_size: source_message_size,
backend_address: source_backend_address.clone(),
}
.start(
&vent_services_arc,
&vent_progress_queue_arc,
&vent_done_queue_arc,
job_limit,
)
.unwrap_or_else(|e| panic!("Failed in ventilator thread: {:?}", e));
});
// Next prepare the finalize thread which will persist finished jobs to the DB
let finalize_backend_address = self.backend_address.clone();
let finalize_done_queue_arc = done_queue_arc.clone();
let finalize_thread = thread::spawn(move || {
Finalize {
backend_address: finalize_backend_address,
job_limit,
}
.start(&finalize_done_queue_arc)
.unwrap_or_else(|e| panic!("Failed in finalize thread: {:?}", e));
});
// Now prepare the results sink
let result_port = self.result_port;
let result_queue_size = self.queue_size;
let result_message_size = self.message_size;
let result_backend_address = self.backend_address.clone();
let sink_services_arc = services_arc;
let sink_progress_queue_arc = progress_queue_arc;
let sink_done_queue_arc = done_queue_arc;
let sink_thread = thread::spawn(move || {
Sink {
port: result_port,
queue_size: result_queue_size,
message_size: result_message_size,
backend_address: result_backend_address.clone(),
}
.start(
&sink_services_arc,
&sink_progress_queue_arc,
&sink_done_queue_arc,
job_limit,
)
.unwrap_or_else(|e| panic!("Failed in sink thread: {:?}", e));
});
if vent_thread.join().is_err() | else if sink_thread.join().is_err() {
println!("Sink thread died unexpectedly!");
Err(zmq::Error::ETERM)
} else if finalize_thread.join().is_err() {
println!("DB thread died unexpectedly!");
Err(zmq::Error::ETERM)
} else {
println!("Manager successfully terminated!");
Ok(())
}
}
}
| {
println!("Ventilator thread died unexpectedly!");
Err(zmq::Error::ETERM)
} | conditional_block |
manager.rs | // Copyright 2015-2018 Deyan Ginev. See the LICENSE
// file at the top-level directory of this distribution.
//
// Licensed under the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>.
// This file may not be copied, modified, or distributed
// except according to those terms.
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;
use crate::backend::DEFAULT_DB_ADDRESS;
use crate::dispatcher::finalize::Finalize;
use crate::dispatcher::sink::Sink;
use crate::dispatcher::ventilator::Ventilator;
use crate::helpers::{TaskProgress, TaskReport};
use crate::models::Service;
use zmq::Error;
/// Manager struct responsible for dispatching and receiving tasks
pub struct TaskManager {
/// port for requesting/dispatching jobs
pub source_port: usize,
/// port for responding/receiving results
pub result_port: usize,
/// the size of the dispatch queue
/// (also the batch size for Task store queue requests)
pub queue_size: usize,
/// size of an individual message chunk sent via zeromq
/// (keep this small to avoid large RAM use, increase to reduce network bandwidth)
pub message_size: usize,
/// address for the Task store postgres endpoint
pub backend_address: String,
}
impl Default for TaskManager {
fn default() -> TaskManager {
TaskManager {
source_port: 51695,
result_port: 51696,
queue_size: 100,
message_size: 100_000,
backend_address: DEFAULT_DB_ADDRESS.to_string(),
}
}
}
impl TaskManager {
/// Starts a new manager, spinning of dispatch/sink servers, listening on the specified ports
pub fn start(&self, job_limit: Option<usize>) -> Result<(), Error> |
}
| {
// We'll use some local memoization shared between source and sink:
let services: HashMap<String, Option<Service>> = HashMap::new();
let progress_queue: HashMap<i64, TaskProgress> = HashMap::new();
let done_queue: Vec<TaskReport> = Vec::new();
let services_arc = Arc::new(Mutex::new(services));
let progress_queue_arc = Arc::new(Mutex::new(progress_queue));
let done_queue_arc = Arc::new(Mutex::new(done_queue));
// First prepare the source ventilator
let source_port = self.source_port;
let source_queue_size = self.queue_size;
let source_message_size = self.message_size;
let source_backend_address = self.backend_address.clone();
let vent_services_arc = services_arc.clone();
let vent_progress_queue_arc = progress_queue_arc.clone();
let vent_done_queue_arc = done_queue_arc.clone();
let vent_thread = thread::spawn(move || {
Ventilator {
port: source_port,
queue_size: source_queue_size,
message_size: source_message_size,
backend_address: source_backend_address.clone(),
}
.start(
&vent_services_arc,
&vent_progress_queue_arc,
&vent_done_queue_arc,
job_limit,
)
.unwrap_or_else(|e| panic!("Failed in ventilator thread: {:?}", e));
});
// Next prepare the finalize thread which will persist finished jobs to the DB
let finalize_backend_address = self.backend_address.clone();
let finalize_done_queue_arc = done_queue_arc.clone();
let finalize_thread = thread::spawn(move || {
Finalize {
backend_address: finalize_backend_address,
job_limit,
}
.start(&finalize_done_queue_arc)
.unwrap_or_else(|e| panic!("Failed in finalize thread: {:?}", e));
});
// Now prepare the results sink
let result_port = self.result_port;
let result_queue_size = self.queue_size;
let result_message_size = self.message_size;
let result_backend_address = self.backend_address.clone();
let sink_services_arc = services_arc;
let sink_progress_queue_arc = progress_queue_arc;
let sink_done_queue_arc = done_queue_arc;
let sink_thread = thread::spawn(move || {
Sink {
port: result_port,
queue_size: result_queue_size,
message_size: result_message_size,
backend_address: result_backend_address.clone(),
}
.start(
&sink_services_arc,
&sink_progress_queue_arc,
&sink_done_queue_arc,
job_limit,
)
.unwrap_or_else(|e| panic!("Failed in sink thread: {:?}", e));
});
if vent_thread.join().is_err() {
println!("Ventilator thread died unexpectedly!");
Err(zmq::Error::ETERM)
} else if sink_thread.join().is_err() {
println!("Sink thread died unexpectedly!");
Err(zmq::Error::ETERM)
} else if finalize_thread.join().is_err() {
println!("DB thread died unexpectedly!");
Err(zmq::Error::ETERM)
} else {
println!("Manager successfully terminated!");
Ok(())
}
} | identifier_body |
manager.rs | // Copyright 2015-2018 Deyan Ginev. See the LICENSE
// file at the top-level directory of this distribution.
//
// Licensed under the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>.
// This file may not be copied, modified, or distributed
// except according to those terms.
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;
use crate::backend::DEFAULT_DB_ADDRESS;
use crate::dispatcher::finalize::Finalize;
use crate::dispatcher::sink::Sink;
use crate::dispatcher::ventilator::Ventilator;
use crate::helpers::{TaskProgress, TaskReport};
use crate::models::Service;
use zmq::Error;
/// Manager struct responsible for dispatching and receiving tasks
pub struct | {
/// port for requesting/dispatching jobs
pub source_port: usize,
/// port for responding/receiving results
pub result_port: usize,
/// the size of the dispatch queue
/// (also the batch size for Task store queue requests)
pub queue_size: usize,
/// size of an individual message chunk sent via zeromq
/// (keep this small to avoid large RAM use, increase to reduce network bandwidth)
pub message_size: usize,
/// address for the Task store postgres endpoint
pub backend_address: String,
}
impl Default for TaskManager {
fn default() -> TaskManager {
TaskManager {
source_port: 51695,
result_port: 51696,
queue_size: 100,
message_size: 100_000,
backend_address: DEFAULT_DB_ADDRESS.to_string(),
}
}
}
impl TaskManager {
/// Starts a new manager, spinning of dispatch/sink servers, listening on the specified ports
pub fn start(&self, job_limit: Option<usize>) -> Result<(), Error> {
// We'll use some local memoization shared between source and sink:
let services: HashMap<String, Option<Service>> = HashMap::new();
let progress_queue: HashMap<i64, TaskProgress> = HashMap::new();
let done_queue: Vec<TaskReport> = Vec::new();
let services_arc = Arc::new(Mutex::new(services));
let progress_queue_arc = Arc::new(Mutex::new(progress_queue));
let done_queue_arc = Arc::new(Mutex::new(done_queue));
// First prepare the source ventilator
let source_port = self.source_port;
let source_queue_size = self.queue_size;
let source_message_size = self.message_size;
let source_backend_address = self.backend_address.clone();
let vent_services_arc = services_arc.clone();
let vent_progress_queue_arc = progress_queue_arc.clone();
let vent_done_queue_arc = done_queue_arc.clone();
let vent_thread = thread::spawn(move || {
Ventilator {
port: source_port,
queue_size: source_queue_size,
message_size: source_message_size,
backend_address: source_backend_address.clone(),
}
.start(
&vent_services_arc,
&vent_progress_queue_arc,
&vent_done_queue_arc,
job_limit,
)
.unwrap_or_else(|e| panic!("Failed in ventilator thread: {:?}", e));
});
// Next prepare the finalize thread which will persist finished jobs to the DB
let finalize_backend_address = self.backend_address.clone();
let finalize_done_queue_arc = done_queue_arc.clone();
let finalize_thread = thread::spawn(move || {
Finalize {
backend_address: finalize_backend_address,
job_limit,
}
.start(&finalize_done_queue_arc)
.unwrap_or_else(|e| panic!("Failed in finalize thread: {:?}", e));
});
// Now prepare the results sink
let result_port = self.result_port;
let result_queue_size = self.queue_size;
let result_message_size = self.message_size;
let result_backend_address = self.backend_address.clone();
let sink_services_arc = services_arc;
let sink_progress_queue_arc = progress_queue_arc;
let sink_done_queue_arc = done_queue_arc;
let sink_thread = thread::spawn(move || {
Sink {
port: result_port,
queue_size: result_queue_size,
message_size: result_message_size,
backend_address: result_backend_address.clone(),
}
.start(
&sink_services_arc,
&sink_progress_queue_arc,
&sink_done_queue_arc,
job_limit,
)
.unwrap_or_else(|e| panic!("Failed in sink thread: {:?}", e));
});
if vent_thread.join().is_err() {
println!("Ventilator thread died unexpectedly!");
Err(zmq::Error::ETERM)
} else if sink_thread.join().is_err() {
println!("Sink thread died unexpectedly!");
Err(zmq::Error::ETERM)
} else if finalize_thread.join().is_err() {
println!("DB thread died unexpectedly!");
Err(zmq::Error::ETERM)
} else {
println!("Manager successfully terminated!");
Ok(())
}
}
}
| TaskManager | identifier_name |
manager.rs | // Copyright 2015-2018 Deyan Ginev. See the LICENSE
// file at the top-level directory of this distribution.
//
// Licensed under the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>.
// This file may not be copied, modified, or distributed
// except according to those terms.
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;
use crate::backend::DEFAULT_DB_ADDRESS;
use crate::dispatcher::finalize::Finalize;
use crate::dispatcher::sink::Sink;
use crate::dispatcher::ventilator::Ventilator;
use crate::helpers::{TaskProgress, TaskReport};
use crate::models::Service;
use zmq::Error;
/// Manager struct responsible for dispatching and receiving tasks
pub struct TaskManager {
/// port for requesting/dispatching jobs
pub source_port: usize,
/// port for responding/receiving results
pub result_port: usize,
/// the size of the dispatch queue
/// (also the batch size for Task store queue requests)
pub queue_size: usize,
/// size of an individual message chunk sent via zeromq
/// (keep this small to avoid large RAM use, increase to reduce network bandwidth)
pub message_size: usize,
/// address for the Task store postgres endpoint
pub backend_address: String,
}
impl Default for TaskManager {
fn default() -> TaskManager {
TaskManager {
source_port: 51695,
result_port: 51696,
queue_size: 100,
message_size: 100_000,
backend_address: DEFAULT_DB_ADDRESS.to_string(),
}
}
}
impl TaskManager {
/// Starts a new manager, spinning of dispatch/sink servers, listening on the specified ports
pub fn start(&self, job_limit: Option<usize>) -> Result<(), Error> {
// We'll use some local memoization shared between source and sink:
let services: HashMap<String, Option<Service>> = HashMap::new();
let progress_queue: HashMap<i64, TaskProgress> = HashMap::new();
let done_queue: Vec<TaskReport> = Vec::new();
let services_arc = Arc::new(Mutex::new(services));
let progress_queue_arc = Arc::new(Mutex::new(progress_queue));
let done_queue_arc = Arc::new(Mutex::new(done_queue));
// First prepare the source ventilator
let source_port = self.source_port;
let source_queue_size = self.queue_size;
let source_message_size = self.message_size;
let source_backend_address = self.backend_address.clone();
let vent_services_arc = services_arc.clone();
let vent_progress_queue_arc = progress_queue_arc.clone();
let vent_done_queue_arc = done_queue_arc.clone();
let vent_thread = thread::spawn(move || {
Ventilator {
port: source_port,
queue_size: source_queue_size,
message_size: source_message_size,
backend_address: source_backend_address.clone(),
}
.start(
&vent_services_arc,
&vent_progress_queue_arc,
&vent_done_queue_arc,
job_limit,
)
.unwrap_or_else(|e| panic!("Failed in ventilator thread: {:?}", e));
});
// Next prepare the finalize thread which will persist finished jobs to the DB
let finalize_backend_address = self.backend_address.clone();
let finalize_done_queue_arc = done_queue_arc.clone();
let finalize_thread = thread::spawn(move || {
Finalize {
backend_address: finalize_backend_address,
job_limit,
}
.start(&finalize_done_queue_arc)
.unwrap_or_else(|e| panic!("Failed in finalize thread: {:?}", e));
});
// Now prepare the results sink
let result_port = self.result_port;
let result_queue_size = self.queue_size;
let result_message_size = self.message_size;
let result_backend_address = self.backend_address.clone();
let sink_services_arc = services_arc;
let sink_progress_queue_arc = progress_queue_arc;
let sink_done_queue_arc = done_queue_arc;
let sink_thread = thread::spawn(move || {
Sink {
port: result_port,
queue_size: result_queue_size,
message_size: result_message_size,
backend_address: result_backend_address.clone(),
}
.start(
&sink_services_arc,
&sink_progress_queue_arc,
&sink_done_queue_arc,
job_limit,
)
.unwrap_or_else(|e| panic!("Failed in sink thread: {:?}", e));
});
if vent_thread.join().is_err() {
println!("Ventilator thread died unexpectedly!");
Err(zmq::Error::ETERM)
} else if sink_thread.join().is_err() {
println!("Sink thread died unexpectedly!");
Err(zmq::Error::ETERM)
} else if finalize_thread.join().is_err() {
println!("DB thread died unexpectedly!");
Err(zmq::Error::ETERM)
} else {
println!("Manager successfully terminated!"); | Ok(())
}
}
} | random_line_split |
|
file.js | /**
* @file
* Provides JavaScript additions to the managed file field type.
*
* This file provides progress bar support (if available), popup windows for
* file previews, and disabling of other file fields during Ajax uploads (which
* prevents separate file fields from accidentally uploading files).
*/
(function ($, Drupal) {
"use strict";
/**
* Attach behaviors to managed file element upload fields.
*/
Drupal.behaviors.fileValidateAutoAttach = {
attach: function (context, settings) {
var $context = $(context);
var elements;
function initFileValidation(selector) |
if (settings.file && settings.file.elements) {
elements = settings.file.elements;
Object.keys(elements).forEach(initFileValidation);
}
},
detach: function (context, settings, trigger) {
var $context = $(context);
var elements;
function removeFileValidation(selector) {
$context.find(selector)
.removeOnce('fileValidate')
.off('change.fileValidate', Drupal.file.validateExtension);
}
if (trigger === 'unload' && settings.file && settings.file.elements) {
elements = settings.file.elements;
Object.keys(elements).forEach(removeFileValidation);
}
}
};
/**
* Attach behaviors to managed file element upload fields.
*/
Drupal.behaviors.fileAutoUpload = {
attach: function (context) {
$(context).find('input[type="file"]').once('auto-file-upload').on('change.autoFileUpload', Drupal.file.triggerUploadButton);
},
detach: function (context, setting, trigger) {
if (trigger === 'unload') {
$(context).find('input[type="file"]').removeOnce('auto-file-upload').off('.autoFileUpload');
}
}
};
/**
* Attach behaviors to the file upload and remove buttons.
*/
Drupal.behaviors.fileButtons = {
attach: function (context) {
var $context = $(context);
$context.find('.form-submit').on('mousedown', Drupal.file.disableFields);
$context.find('.form-managed-file .form-submit').on('mousedown', Drupal.file.progressBar);
},
detach: function (context) {
var $context = $(context);
$context.find('.form-submit').off('mousedown', Drupal.file.disableFields);
$context.find('.form-managed-file .form-submit').off('mousedown', Drupal.file.progressBar);
}
};
/**
* Attach behaviors to links within managed file elements.
*/
Drupal.behaviors.filePreviewLinks = {
attach: function (context) {
$(context).find('div.form-managed-file .file a, .file-widget .file a').on('click', Drupal.file.openInNewWindow);
},
detach: function (context) {
$(context).find('div.form-managed-file .file a, .file-widget .file a').off('click', Drupal.file.openInNewWindow);
}
};
/**
* File upload utility functions.
*/
Drupal.file = Drupal.file || {
/**
* Client-side file input validation of file extensions.
*/
validateExtension: function (event) {
event.preventDefault();
// Remove any previous errors.
$('.file-upload-js-error').remove();
// Add client side validation for the input[type=file].
var extensionPattern = event.data.extensions.replace(/,\s*/g, '|');
if (extensionPattern.length > 1 && this.value.length > 0) {
var acceptableMatch = new RegExp('\\.(' + extensionPattern + ')$', 'gi');
if (!acceptableMatch.test(this.value)) {
var error = Drupal.t("The selected file %filename cannot be uploaded. Only files with the following extensions are allowed: %extensions.", {
// According to the specifications of HTML5, a file upload control
// should not reveal the real local path to the file that a user
// has selected. Some web browsers implement this restriction by
// replacing the local path with "C:\fakepath\", which can cause
// confusion by leaving the user thinking perhaps Drupal could not
// find the file because it messed up the file path. To avoid this
// confusion, therefore, we strip out the bogus fakepath string.
'%filename': this.value.replace('C:\\fakepath\\', ''),
'%extensions': extensionPattern.replace(/\|/g, ', ')
});
$(this).closest('div.form-managed-file').prepend('<div class="messages messages--error file-upload-js-error" aria-live="polite">' + error + '</div>');
this.value = '';
// Cancel all other change event handlers.
event.stopImmediatePropagation();
}
}
},
/**
* Trigger the upload_button mouse event to auto-upload as a managed file.
*/
triggerUploadButton: function (event) {
$(event.target).closest('.form-managed-file').find('.form-submit').trigger('mousedown');
},
/**
* Prevent file uploads when using buttons not intended to upload.
*/
disableFields: function (event) {
var $clickedButton = $(this);
// Only disable upload fields for Ajax buttons.
if (!$clickedButton.hasClass('ajax-processed')) {
return;
}
// Check if we're working with an "Upload" button.
var $enabledFields = [];
if ($clickedButton.closest('div.form-managed-file').length > 0) {
$enabledFields = $clickedButton.closest('div.form-managed-file').find('input.form-file');
}
// Temporarily disable upload fields other than the one we're currently
// working with. Filter out fields that are already disabled so that they
// do not get enabled when we re-enable these fields at the end of behavior
// processing. Re-enable in a setTimeout set to a relatively short amount
// of time (1 second). All the other mousedown handlers (like Drupal's Ajax
// behaviors) are excuted before any timeout functions are called, so we
// don't have to worry about the fields being re-enabled too soon.
// @todo If the previous sentence is true, why not set the timeout to 0?
var $fieldsToTemporarilyDisable = $('div.form-managed-file input.form-file').not($enabledFields).not(':disabled');
$fieldsToTemporarilyDisable.prop('disabled', true);
setTimeout(function () {
$fieldsToTemporarilyDisable.prop('disabled', false);
}, 1000);
},
/**
* Add progress bar support if possible.
*/
progressBar: function (event) {
var $clickedButton = $(this);
var $progressId = $clickedButton.closest('div.form-managed-file').find('input.file-progress');
if ($progressId.length) {
var originalName = $progressId.attr('name');
// Replace the name with the required identifier.
$progressId.attr('name', originalName.match(/APC_UPLOAD_PROGRESS|UPLOAD_IDENTIFIER/)[0]);
// Restore the original name after the upload begins.
setTimeout(function () {
$progressId.attr('name', originalName);
}, 1000);
}
// Show the progress bar if the upload takes longer than half a second.
setTimeout(function () {
$clickedButton.closest('div.form-managed-file').find('div.ajax-progress-bar').slideDown();
}, 500);
},
/**
* Open links to files within forms in a new window.
*/
openInNewWindow: function (event) {
event.preventDefault();
$(this).attr('target', '_blank');
window.open(this.href, 'filePreview', 'toolbar=0,scrollbars=1,location=1,statusbar=1,menubar=0,resizable=1,width=500,height=550');
}
};
})(jQuery, Drupal);
| {
$context.find(selector)
.once('fileValidate')
.on('change.fileValidate', { extensions: elements[selector] }, Drupal.file.validateExtension);
} | identifier_body |
file.js | /**
* @file
* Provides JavaScript additions to the managed file field type.
*
* This file provides progress bar support (if available), popup windows for
* file previews, and disabling of other file fields during Ajax uploads (which
* prevents separate file fields from accidentally uploading files).
*/
(function ($, Drupal) {
"use strict";
/**
* Attach behaviors to managed file element upload fields.
*/
Drupal.behaviors.fileValidateAutoAttach = {
attach: function (context, settings) {
var $context = $(context);
var elements;
function initFileValidation(selector) {
$context.find(selector)
.once('fileValidate')
.on('change.fileValidate', { extensions: elements[selector] }, Drupal.file.validateExtension);
}
if (settings.file && settings.file.elements) {
elements = settings.file.elements;
Object.keys(elements).forEach(initFileValidation);
}
},
detach: function (context, settings, trigger) {
var $context = $(context);
var elements;
function removeFileValidation(selector) {
$context.find(selector)
.removeOnce('fileValidate')
.off('change.fileValidate', Drupal.file.validateExtension);
}
if (trigger === 'unload' && settings.file && settings.file.elements) {
elements = settings.file.elements;
Object.keys(elements).forEach(removeFileValidation);
}
}
};
/**
* Attach behaviors to managed file element upload fields.
*/
Drupal.behaviors.fileAutoUpload = {
attach: function (context) {
$(context).find('input[type="file"]').once('auto-file-upload').on('change.autoFileUpload', Drupal.file.triggerUploadButton);
},
detach: function (context, setting, trigger) {
if (trigger === 'unload') {
$(context).find('input[type="file"]').removeOnce('auto-file-upload').off('.autoFileUpload');
}
}
};
/**
* Attach behaviors to the file upload and remove buttons.
*/
Drupal.behaviors.fileButtons = {
attach: function (context) {
var $context = $(context);
$context.find('.form-submit').on('mousedown', Drupal.file.disableFields);
$context.find('.form-managed-file .form-submit').on('mousedown', Drupal.file.progressBar);
},
detach: function (context) {
var $context = $(context);
$context.find('.form-submit').off('mousedown', Drupal.file.disableFields);
$context.find('.form-managed-file .form-submit').off('mousedown', Drupal.file.progressBar);
}
};
/**
* Attach behaviors to links within managed file elements.
*/
Drupal.behaviors.filePreviewLinks = {
attach: function (context) {
$(context).find('div.form-managed-file .file a, .file-widget .file a').on('click', Drupal.file.openInNewWindow);
},
detach: function (context) {
$(context).find('div.form-managed-file .file a, .file-widget .file a').off('click', Drupal.file.openInNewWindow);
}
};
/**
* File upload utility functions.
*/
Drupal.file = Drupal.file || {
/**
* Client-side file input validation of file extensions.
*/
validateExtension: function (event) {
event.preventDefault();
// Remove any previous errors.
$('.file-upload-js-error').remove();
// Add client side validation for the input[type=file].
var extensionPattern = event.data.extensions.replace(/,\s*/g, '|');
if (extensionPattern.length > 1 && this.value.length > 0) {
var acceptableMatch = new RegExp('\\.(' + extensionPattern + ')$', 'gi');
if (!acceptableMatch.test(this.value)) {
var error = Drupal.t("The selected file %filename cannot be uploaded. Only files with the following extensions are allowed: %extensions.", {
// According to the specifications of HTML5, a file upload control
// should not reveal the real local path to the file that a user
// has selected. Some web browsers implement this restriction by
// replacing the local path with "C:\fakepath\", which can cause
// confusion by leaving the user thinking perhaps Drupal could not
// find the file because it messed up the file path. To avoid this
// confusion, therefore, we strip out the bogus fakepath string.
'%filename': this.value.replace('C:\\fakepath\\', ''),
'%extensions': extensionPattern.replace(/\|/g, ', ')
});
$(this).closest('div.form-managed-file').prepend('<div class="messages messages--error file-upload-js-error" aria-live="polite">' + error + '</div>');
this.value = '';
// Cancel all other change event handlers.
event.stopImmediatePropagation();
}
}
},
/**
* Trigger the upload_button mouse event to auto-upload as a managed file.
*/
triggerUploadButton: function (event) {
$(event.target).closest('.form-managed-file').find('.form-submit').trigger('mousedown');
},
/**
* Prevent file uploads when using buttons not intended to upload.
*/
disableFields: function (event) {
var $clickedButton = $(this);
// Only disable upload fields for Ajax buttons.
if (!$clickedButton.hasClass('ajax-processed')) {
return;
}
// Check if we're working with an "Upload" button.
var $enabledFields = [];
if ($clickedButton.closest('div.form-managed-file').length > 0) {
$enabledFields = $clickedButton.closest('div.form-managed-file').find('input.form-file');
}
// Temporarily disable upload fields other than the one we're currently
// working with. Filter out fields that are already disabled so that they
// do not get enabled when we re-enable these fields at the end of behavior
// processing. Re-enable in a setTimeout set to a relatively short amount
// of time (1 second). All the other mousedown handlers (like Drupal's Ajax
// behaviors) are excuted before any timeout functions are called, so we
// don't have to worry about the fields being re-enabled too soon. | // @todo If the previous sentence is true, why not set the timeout to 0?
var $fieldsToTemporarilyDisable = $('div.form-managed-file input.form-file').not($enabledFields).not(':disabled');
$fieldsToTemporarilyDisable.prop('disabled', true);
setTimeout(function () {
$fieldsToTemporarilyDisable.prop('disabled', false);
}, 1000);
},
/**
* Add progress bar support if possible.
*/
progressBar: function (event) {
var $clickedButton = $(this);
var $progressId = $clickedButton.closest('div.form-managed-file').find('input.file-progress');
if ($progressId.length) {
var originalName = $progressId.attr('name');
// Replace the name with the required identifier.
$progressId.attr('name', originalName.match(/APC_UPLOAD_PROGRESS|UPLOAD_IDENTIFIER/)[0]);
// Restore the original name after the upload begins.
setTimeout(function () {
$progressId.attr('name', originalName);
}, 1000);
}
// Show the progress bar if the upload takes longer than half a second.
setTimeout(function () {
$clickedButton.closest('div.form-managed-file').find('div.ajax-progress-bar').slideDown();
}, 500);
},
/**
* Open links to files within forms in a new window.
*/
openInNewWindow: function (event) {
event.preventDefault();
$(this).attr('target', '_blank');
window.open(this.href, 'filePreview', 'toolbar=0,scrollbars=1,location=1,statusbar=1,menubar=0,resizable=1,width=500,height=550');
}
};
})(jQuery, Drupal); | random_line_split |
|
file.js | /**
* @file
* Provides JavaScript additions to the managed file field type.
*
* This file provides progress bar support (if available), popup windows for
* file previews, and disabling of other file fields during Ajax uploads (which
* prevents separate file fields from accidentally uploading files).
*/
(function ($, Drupal) {
"use strict";
/**
* Attach behaviors to managed file element upload fields.
*/
Drupal.behaviors.fileValidateAutoAttach = {
attach: function (context, settings) {
var $context = $(context);
var elements;
function initFileValidation(selector) {
$context.find(selector)
.once('fileValidate')
.on('change.fileValidate', { extensions: elements[selector] }, Drupal.file.validateExtension);
}
if (settings.file && settings.file.elements) {
elements = settings.file.elements;
Object.keys(elements).forEach(initFileValidation);
}
},
detach: function (context, settings, trigger) {
var $context = $(context);
var elements;
function | (selector) {
$context.find(selector)
.removeOnce('fileValidate')
.off('change.fileValidate', Drupal.file.validateExtension);
}
if (trigger === 'unload' && settings.file && settings.file.elements) {
elements = settings.file.elements;
Object.keys(elements).forEach(removeFileValidation);
}
}
};
/**
* Attach behaviors to managed file element upload fields.
*/
Drupal.behaviors.fileAutoUpload = {
attach: function (context) {
$(context).find('input[type="file"]').once('auto-file-upload').on('change.autoFileUpload', Drupal.file.triggerUploadButton);
},
detach: function (context, setting, trigger) {
if (trigger === 'unload') {
$(context).find('input[type="file"]').removeOnce('auto-file-upload').off('.autoFileUpload');
}
}
};
/**
* Attach behaviors to the file upload and remove buttons.
*/
Drupal.behaviors.fileButtons = {
attach: function (context) {
var $context = $(context);
$context.find('.form-submit').on('mousedown', Drupal.file.disableFields);
$context.find('.form-managed-file .form-submit').on('mousedown', Drupal.file.progressBar);
},
detach: function (context) {
var $context = $(context);
$context.find('.form-submit').off('mousedown', Drupal.file.disableFields);
$context.find('.form-managed-file .form-submit').off('mousedown', Drupal.file.progressBar);
}
};
/**
* Attach behaviors to links within managed file elements.
*/
Drupal.behaviors.filePreviewLinks = {
attach: function (context) {
$(context).find('div.form-managed-file .file a, .file-widget .file a').on('click', Drupal.file.openInNewWindow);
},
detach: function (context) {
$(context).find('div.form-managed-file .file a, .file-widget .file a').off('click', Drupal.file.openInNewWindow);
}
};
/**
* File upload utility functions.
*/
Drupal.file = Drupal.file || {
/**
* Client-side file input validation of file extensions.
*/
validateExtension: function (event) {
event.preventDefault();
// Remove any previous errors.
$('.file-upload-js-error').remove();
// Add client side validation for the input[type=file].
var extensionPattern = event.data.extensions.replace(/,\s*/g, '|');
if (extensionPattern.length > 1 && this.value.length > 0) {
var acceptableMatch = new RegExp('\\.(' + extensionPattern + ')$', 'gi');
if (!acceptableMatch.test(this.value)) {
var error = Drupal.t("The selected file %filename cannot be uploaded. Only files with the following extensions are allowed: %extensions.", {
// According to the specifications of HTML5, a file upload control
// should not reveal the real local path to the file that a user
// has selected. Some web browsers implement this restriction by
// replacing the local path with "C:\fakepath\", which can cause
// confusion by leaving the user thinking perhaps Drupal could not
// find the file because it messed up the file path. To avoid this
// confusion, therefore, we strip out the bogus fakepath string.
'%filename': this.value.replace('C:\\fakepath\\', ''),
'%extensions': extensionPattern.replace(/\|/g, ', ')
});
$(this).closest('div.form-managed-file').prepend('<div class="messages messages--error file-upload-js-error" aria-live="polite">' + error + '</div>');
this.value = '';
// Cancel all other change event handlers.
event.stopImmediatePropagation();
}
}
},
/**
* Trigger the upload_button mouse event to auto-upload as a managed file.
*/
triggerUploadButton: function (event) {
$(event.target).closest('.form-managed-file').find('.form-submit').trigger('mousedown');
},
/**
* Prevent file uploads when using buttons not intended to upload.
*/
disableFields: function (event) {
var $clickedButton = $(this);
// Only disable upload fields for Ajax buttons.
if (!$clickedButton.hasClass('ajax-processed')) {
return;
}
// Check if we're working with an "Upload" button.
var $enabledFields = [];
if ($clickedButton.closest('div.form-managed-file').length > 0) {
$enabledFields = $clickedButton.closest('div.form-managed-file').find('input.form-file');
}
// Temporarily disable upload fields other than the one we're currently
// working with. Filter out fields that are already disabled so that they
// do not get enabled when we re-enable these fields at the end of behavior
// processing. Re-enable in a setTimeout set to a relatively short amount
// of time (1 second). All the other mousedown handlers (like Drupal's Ajax
// behaviors) are excuted before any timeout functions are called, so we
// don't have to worry about the fields being re-enabled too soon.
// @todo If the previous sentence is true, why not set the timeout to 0?
var $fieldsToTemporarilyDisable = $('div.form-managed-file input.form-file').not($enabledFields).not(':disabled');
$fieldsToTemporarilyDisable.prop('disabled', true);
setTimeout(function () {
$fieldsToTemporarilyDisable.prop('disabled', false);
}, 1000);
},
/**
* Add progress bar support if possible.
*/
progressBar: function (event) {
var $clickedButton = $(this);
var $progressId = $clickedButton.closest('div.form-managed-file').find('input.file-progress');
if ($progressId.length) {
var originalName = $progressId.attr('name');
// Replace the name with the required identifier.
$progressId.attr('name', originalName.match(/APC_UPLOAD_PROGRESS|UPLOAD_IDENTIFIER/)[0]);
// Restore the original name after the upload begins.
setTimeout(function () {
$progressId.attr('name', originalName);
}, 1000);
}
// Show the progress bar if the upload takes longer than half a second.
setTimeout(function () {
$clickedButton.closest('div.form-managed-file').find('div.ajax-progress-bar').slideDown();
}, 500);
},
/**
* Open links to files within forms in a new window.
*/
openInNewWindow: function (event) {
event.preventDefault();
$(this).attr('target', '_blank');
window.open(this.href, 'filePreview', 'toolbar=0,scrollbars=1,location=1,statusbar=1,menubar=0,resizable=1,width=500,height=550');
}
};
})(jQuery, Drupal);
| removeFileValidation | identifier_name |
file.js | /**
* @file
* Provides JavaScript additions to the managed file field type.
*
* This file provides progress bar support (if available), popup windows for
* file previews, and disabling of other file fields during Ajax uploads (which
* prevents separate file fields from accidentally uploading files).
*/
(function ($, Drupal) {
"use strict";
/**
* Attach behaviors to managed file element upload fields.
*/
Drupal.behaviors.fileValidateAutoAttach = {
attach: function (context, settings) {
var $context = $(context);
var elements;
function initFileValidation(selector) {
$context.find(selector)
.once('fileValidate')
.on('change.fileValidate', { extensions: elements[selector] }, Drupal.file.validateExtension);
}
if (settings.file && settings.file.elements) {
elements = settings.file.elements;
Object.keys(elements).forEach(initFileValidation);
}
},
detach: function (context, settings, trigger) {
var $context = $(context);
var elements;
function removeFileValidation(selector) {
$context.find(selector)
.removeOnce('fileValidate')
.off('change.fileValidate', Drupal.file.validateExtension);
}
if (trigger === 'unload' && settings.file && settings.file.elements) {
elements = settings.file.elements;
Object.keys(elements).forEach(removeFileValidation);
}
}
};
/**
* Attach behaviors to managed file element upload fields.
*/
Drupal.behaviors.fileAutoUpload = {
attach: function (context) {
$(context).find('input[type="file"]').once('auto-file-upload').on('change.autoFileUpload', Drupal.file.triggerUploadButton);
},
detach: function (context, setting, trigger) {
if (trigger === 'unload') {
$(context).find('input[type="file"]').removeOnce('auto-file-upload').off('.autoFileUpload');
}
}
};
/**
* Attach behaviors to the file upload and remove buttons.
*/
Drupal.behaviors.fileButtons = {
attach: function (context) {
var $context = $(context);
$context.find('.form-submit').on('mousedown', Drupal.file.disableFields);
$context.find('.form-managed-file .form-submit').on('mousedown', Drupal.file.progressBar);
},
detach: function (context) {
var $context = $(context);
$context.find('.form-submit').off('mousedown', Drupal.file.disableFields);
$context.find('.form-managed-file .form-submit').off('mousedown', Drupal.file.progressBar);
}
};
/**
* Attach behaviors to links within managed file elements.
*/
Drupal.behaviors.filePreviewLinks = {
attach: function (context) {
$(context).find('div.form-managed-file .file a, .file-widget .file a').on('click', Drupal.file.openInNewWindow);
},
detach: function (context) {
$(context).find('div.form-managed-file .file a, .file-widget .file a').off('click', Drupal.file.openInNewWindow);
}
};
/**
* File upload utility functions.
*/
Drupal.file = Drupal.file || {
/**
* Client-side file input validation of file extensions.
*/
validateExtension: function (event) {
event.preventDefault();
// Remove any previous errors.
$('.file-upload-js-error').remove();
// Add client side validation for the input[type=file].
var extensionPattern = event.data.extensions.replace(/,\s*/g, '|');
if (extensionPattern.length > 1 && this.value.length > 0) |
},
/**
* Trigger the upload_button mouse event to auto-upload as a managed file.
*/
triggerUploadButton: function (event) {
$(event.target).closest('.form-managed-file').find('.form-submit').trigger('mousedown');
},
/**
* Prevent file uploads when using buttons not intended to upload.
*/
disableFields: function (event) {
var $clickedButton = $(this);
// Only disable upload fields for Ajax buttons.
if (!$clickedButton.hasClass('ajax-processed')) {
return;
}
// Check if we're working with an "Upload" button.
var $enabledFields = [];
if ($clickedButton.closest('div.form-managed-file').length > 0) {
$enabledFields = $clickedButton.closest('div.form-managed-file').find('input.form-file');
}
// Temporarily disable upload fields other than the one we're currently
// working with. Filter out fields that are already disabled so that they
// do not get enabled when we re-enable these fields at the end of behavior
// processing. Re-enable in a setTimeout set to a relatively short amount
// of time (1 second). All the other mousedown handlers (like Drupal's Ajax
// behaviors) are excuted before any timeout functions are called, so we
// don't have to worry about the fields being re-enabled too soon.
// @todo If the previous sentence is true, why not set the timeout to 0?
var $fieldsToTemporarilyDisable = $('div.form-managed-file input.form-file').not($enabledFields).not(':disabled');
$fieldsToTemporarilyDisable.prop('disabled', true);
setTimeout(function () {
$fieldsToTemporarilyDisable.prop('disabled', false);
}, 1000);
},
/**
* Add progress bar support if possible.
*/
progressBar: function (event) {
var $clickedButton = $(this);
var $progressId = $clickedButton.closest('div.form-managed-file').find('input.file-progress');
if ($progressId.length) {
var originalName = $progressId.attr('name');
// Replace the name with the required identifier.
$progressId.attr('name', originalName.match(/APC_UPLOAD_PROGRESS|UPLOAD_IDENTIFIER/)[0]);
// Restore the original name after the upload begins.
setTimeout(function () {
$progressId.attr('name', originalName);
}, 1000);
}
// Show the progress bar if the upload takes longer than half a second.
setTimeout(function () {
$clickedButton.closest('div.form-managed-file').find('div.ajax-progress-bar').slideDown();
}, 500);
},
/**
* Open links to files within forms in a new window.
*/
openInNewWindow: function (event) {
event.preventDefault();
$(this).attr('target', '_blank');
window.open(this.href, 'filePreview', 'toolbar=0,scrollbars=1,location=1,statusbar=1,menubar=0,resizable=1,width=500,height=550');
}
};
})(jQuery, Drupal);
| {
var acceptableMatch = new RegExp('\\.(' + extensionPattern + ')$', 'gi');
if (!acceptableMatch.test(this.value)) {
var error = Drupal.t("The selected file %filename cannot be uploaded. Only files with the following extensions are allowed: %extensions.", {
// According to the specifications of HTML5, a file upload control
// should not reveal the real local path to the file that a user
// has selected. Some web browsers implement this restriction by
// replacing the local path with "C:\fakepath\", which can cause
// confusion by leaving the user thinking perhaps Drupal could not
// find the file because it messed up the file path. To avoid this
// confusion, therefore, we strip out the bogus fakepath string.
'%filename': this.value.replace('C:\\fakepath\\', ''),
'%extensions': extensionPattern.replace(/\|/g, ', ')
});
$(this).closest('div.form-managed-file').prepend('<div class="messages messages--error file-upload-js-error" aria-live="polite">' + error + '</div>');
this.value = '';
// Cancel all other change event handlers.
event.stopImmediatePropagation();
}
} | conditional_block |
index.d.ts | // Type definitions for redlock 3.0
// Project: https://github.com/mike-marcacci/node-redlock
// Definitions by: Ilya Mochalov <https://github.com/chrootsu>
// BendingBender <https://github.com/BendingBender>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 3.2
import * as Promise from 'bluebird';
import { EventEmitter } from 'events';
export = Redlock;
declare namespace Redlock {
type Callback<T> = (err: any, value?: T) => void;
class Lock {
redlock: Redlock;
resource: string;
value: string | null;
expiration: number;
constructor(redlock: Redlock, resource: string, value: string | null, expiration: number);
unlock(callback?: Callback<void>): Promise<void>;
extend(ttl: number, callback?: Callback<Lock>): Promise<Lock>;
}
interface Options {
driftFactor?: number | undefined;
retryCount?: number | undefined;
retryDelay?: number | undefined;
retryJitter?: number | undefined;
lockScript?(origLockScript: string): string;
unlockScript?(origUnlockScript: string): string;
extendScript?(origExtendScript: string): string;
}
class LockError extends Error {
readonly name: 'LockError';
constructor(message?: string);
}
type EvalArg = string | number;
interface CompatibleRedisClient {
eval(args: EvalArg[], callback?: (err: Error | null, res: any) => void): any;
}
}
declare class Redlock extends EventEmitter {
LockError: typeof Redlock.LockError;
Lock: typeof Redlock.Lock;
driftFactor: number;
retryCount: number; | retryDelay: number;
retryJitter: number;
servers: Redlock.CompatibleRedisClient[];
constructor(clients: Redlock.CompatibleRedisClient[], options?: Redlock.Options);
acquire(resource: string | string[], ttl: number, callback?: Redlock.Callback<Redlock.Lock>): Promise<Redlock.Lock>;
lock(resource: string | string[], ttl: number, callback?: Redlock.Callback<Redlock.Lock>): Promise<Redlock.Lock>;
disposer(resource: string, ttl: number, errorHandler?: Redlock.Callback<void>): Promise.Disposer<Redlock.Lock>;
release(lock: Redlock.Lock, callback?: Redlock.Callback<void>): Promise<void>;
unlock(lock: Redlock.Lock, callback?: Redlock.Callback<void>): Promise<void>;
extend(lock: Redlock.Lock, ttl: number, callback?: Redlock.Callback<Redlock.Lock>): Promise<Redlock.Lock>;
addListener(event: 'clientError', listener: (err: any) => void): this;
on(event: 'clientError', listener: (err: any) => void): this;
once(event: 'clientError', listener: (err: any) => void): this;
removeListener(event: 'clientError', listener: (err: any) => void): this;
} | random_line_split |
|
index.d.ts | // Type definitions for redlock 3.0
// Project: https://github.com/mike-marcacci/node-redlock
// Definitions by: Ilya Mochalov <https://github.com/chrootsu>
// BendingBender <https://github.com/BendingBender>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 3.2
import * as Promise from 'bluebird';
import { EventEmitter } from 'events';
export = Redlock;
declare namespace Redlock {
type Callback<T> = (err: any, value?: T) => void;
class | {
redlock: Redlock;
resource: string;
value: string | null;
expiration: number;
constructor(redlock: Redlock, resource: string, value: string | null, expiration: number);
unlock(callback?: Callback<void>): Promise<void>;
extend(ttl: number, callback?: Callback<Lock>): Promise<Lock>;
}
interface Options {
driftFactor?: number | undefined;
retryCount?: number | undefined;
retryDelay?: number | undefined;
retryJitter?: number | undefined;
lockScript?(origLockScript: string): string;
unlockScript?(origUnlockScript: string): string;
extendScript?(origExtendScript: string): string;
}
class LockError extends Error {
readonly name: 'LockError';
constructor(message?: string);
}
type EvalArg = string | number;
interface CompatibleRedisClient {
eval(args: EvalArg[], callback?: (err: Error | null, res: any) => void): any;
}
}
declare class Redlock extends EventEmitter {
LockError: typeof Redlock.LockError;
Lock: typeof Redlock.Lock;
driftFactor: number;
retryCount: number;
retryDelay: number;
retryJitter: number;
servers: Redlock.CompatibleRedisClient[];
constructor(clients: Redlock.CompatibleRedisClient[], options?: Redlock.Options);
acquire(resource: string | string[], ttl: number, callback?: Redlock.Callback<Redlock.Lock>): Promise<Redlock.Lock>;
lock(resource: string | string[], ttl: number, callback?: Redlock.Callback<Redlock.Lock>): Promise<Redlock.Lock>;
disposer(resource: string, ttl: number, errorHandler?: Redlock.Callback<void>): Promise.Disposer<Redlock.Lock>;
release(lock: Redlock.Lock, callback?: Redlock.Callback<void>): Promise<void>;
unlock(lock: Redlock.Lock, callback?: Redlock.Callback<void>): Promise<void>;
extend(lock: Redlock.Lock, ttl: number, callback?: Redlock.Callback<Redlock.Lock>): Promise<Redlock.Lock>;
addListener(event: 'clientError', listener: (err: any) => void): this;
on(event: 'clientError', listener: (err: any) => void): this;
once(event: 'clientError', listener: (err: any) => void): this;
removeListener(event: 'clientError', listener: (err: any) => void): this;
}
| Lock | identifier_name |
progressbar.py | """ progressbar2 related utils"""
from codekit.codetools import warn
from public import public
from time import sleep
import progressbar
import functools
@public
def setup_logging(verbosity=0):
"""Configure progressbar sys.stderr wrapper which is required to play nice
with logging and not have strange formatting artifacts.
"""
progressbar.streams.wrap_stderr()
@public
def countdown_timer(seconds=10):
"""Show a simple countdown progress bar
Parameters
----------
seconds
Period of time the progress bar takes to reach zero.
"""
tick = 0.1 # seconds
n_ticks = int(seconds / tick)
widgets = ['Pause for panic: ', progressbar.ETA(), ' ', progressbar.Bar()]
pbar = progressbar.ProgressBar(
widgets=widgets, max_value=n_ticks
).start()
for i in range(n_ticks):
|
pbar.finish()
@public
def wait_for_user_panic(**kwargs):
"""Display a scary message and count down progresss bar so an interative
user a chance to panic and kill the program.
Parameters
----------
kwargs
Passed verbatim to countdown_timer()
"""
warn('Now is the time to panic and Ctrl-C')
countdown_timer(**kwargs)
@public
@functools.lru_cache()
def wait_for_user_panic_once(**kwargs):
"""Same functionality as wait_for_user_panic() but will only display a
countdown once, reguardless of how many times it is called.
Parameters
----------
kwargs
Passed verbatim to wait_for_user_panic()
"""
wait_for_user_panic(**kwargs)
@public
def eta_bar(msg, max_value):
"""Display an adaptive ETA / countdown bar with a message.
Parameters
----------
msg: str
Message to prefix countdown bar line with
max_value: max_value
The max number of progress bar steps/updates
"""
widgets = [
"{msg}:".format(msg=msg),
progressbar.Bar(), ' ', progressbar.AdaptiveETA(),
]
return progressbar.ProgressBar(widgets=widgets, max_value=max_value)
| pbar.update(i)
sleep(tick) | conditional_block |
progressbar.py | """ progressbar2 related utils"""
from codekit.codetools import warn
from public import public
from time import sleep
import progressbar
import functools
@public
def setup_logging(verbosity=0):
"""Configure progressbar sys.stderr wrapper which is required to play nice
with logging and not have strange formatting artifacts.
"""
progressbar.streams.wrap_stderr()
@public
def countdown_timer(seconds=10):
"""Show a simple countdown progress bar
Parameters
----------
seconds
Period of time the progress bar takes to reach zero.
"""
tick = 0.1 # seconds
n_ticks = int(seconds / tick)
widgets = ['Pause for panic: ', progressbar.ETA(), ' ', progressbar.Bar()]
pbar = progressbar.ProgressBar(
widgets=widgets, max_value=n_ticks
).start()
| pbar.update(i)
sleep(tick)
pbar.finish()
@public
def wait_for_user_panic(**kwargs):
"""Display a scary message and count down progresss bar so an interative
user a chance to panic and kill the program.
Parameters
----------
kwargs
Passed verbatim to countdown_timer()
"""
warn('Now is the time to panic and Ctrl-C')
countdown_timer(**kwargs)
@public
@functools.lru_cache()
def wait_for_user_panic_once(**kwargs):
"""Same functionality as wait_for_user_panic() but will only display a
countdown once, reguardless of how many times it is called.
Parameters
----------
kwargs
Passed verbatim to wait_for_user_panic()
"""
wait_for_user_panic(**kwargs)
@public
def eta_bar(msg, max_value):
"""Display an adaptive ETA / countdown bar with a message.
Parameters
----------
msg: str
Message to prefix countdown bar line with
max_value: max_value
The max number of progress bar steps/updates
"""
widgets = [
"{msg}:".format(msg=msg),
progressbar.Bar(), ' ', progressbar.AdaptiveETA(),
]
return progressbar.ProgressBar(widgets=widgets, max_value=max_value) | for i in range(n_ticks): | random_line_split |
progressbar.py | """ progressbar2 related utils"""
from codekit.codetools import warn
from public import public
from time import sleep
import progressbar
import functools
@public
def setup_logging(verbosity=0):
"""Configure progressbar sys.stderr wrapper which is required to play nice
with logging and not have strange formatting artifacts.
"""
progressbar.streams.wrap_stderr()
@public
def countdown_timer(seconds=10):
"""Show a simple countdown progress bar
Parameters
----------
seconds
Period of time the progress bar takes to reach zero.
"""
tick = 0.1 # seconds
n_ticks = int(seconds / tick)
widgets = ['Pause for panic: ', progressbar.ETA(), ' ', progressbar.Bar()]
pbar = progressbar.ProgressBar(
widgets=widgets, max_value=n_ticks
).start()
for i in range(n_ticks):
pbar.update(i)
sleep(tick)
pbar.finish()
@public
def wait_for_user_panic(**kwargs):
"""Display a scary message and count down progresss bar so an interative
user a chance to panic and kill the program.
Parameters
----------
kwargs
Passed verbatim to countdown_timer()
"""
warn('Now is the time to panic and Ctrl-C')
countdown_timer(**kwargs)
@public
@functools.lru_cache()
def wait_for_user_panic_once(**kwargs):
"""Same functionality as wait_for_user_panic() but will only display a
countdown once, reguardless of how many times it is called.
Parameters
----------
kwargs
Passed verbatim to wait_for_user_panic()
"""
wait_for_user_panic(**kwargs)
@public
def eta_bar(msg, max_value):
| """Display an adaptive ETA / countdown bar with a message.
Parameters
----------
msg: str
Message to prefix countdown bar line with
max_value: max_value
The max number of progress bar steps/updates
"""
widgets = [
"{msg}:".format(msg=msg),
progressbar.Bar(), ' ', progressbar.AdaptiveETA(),
]
return progressbar.ProgressBar(widgets=widgets, max_value=max_value) | identifier_body |
|
progressbar.py | """ progressbar2 related utils"""
from codekit.codetools import warn
from public import public
from time import sleep
import progressbar
import functools
@public
def setup_logging(verbosity=0):
"""Configure progressbar sys.stderr wrapper which is required to play nice
with logging and not have strange formatting artifacts.
"""
progressbar.streams.wrap_stderr()
@public
def countdown_timer(seconds=10):
"""Show a simple countdown progress bar
Parameters
----------
seconds
Period of time the progress bar takes to reach zero.
"""
tick = 0.1 # seconds
n_ticks = int(seconds / tick)
widgets = ['Pause for panic: ', progressbar.ETA(), ' ', progressbar.Bar()]
pbar = progressbar.ProgressBar(
widgets=widgets, max_value=n_ticks
).start()
for i in range(n_ticks):
pbar.update(i)
sleep(tick)
pbar.finish()
@public
def wait_for_user_panic(**kwargs):
"""Display a scary message and count down progresss bar so an interative
user a chance to panic and kill the program.
Parameters
----------
kwargs
Passed verbatim to countdown_timer()
"""
warn('Now is the time to panic and Ctrl-C')
countdown_timer(**kwargs)
@public
@functools.lru_cache()
def wait_for_user_panic_once(**kwargs):
"""Same functionality as wait_for_user_panic() but will only display a
countdown once, reguardless of how many times it is called.
Parameters
----------
kwargs
Passed verbatim to wait_for_user_panic()
"""
wait_for_user_panic(**kwargs)
@public
def | (msg, max_value):
"""Display an adaptive ETA / countdown bar with a message.
Parameters
----------
msg: str
Message to prefix countdown bar line with
max_value: max_value
The max number of progress bar steps/updates
"""
widgets = [
"{msg}:".format(msg=msg),
progressbar.Bar(), ' ', progressbar.AdaptiveETA(),
]
return progressbar.ProgressBar(widgets=widgets, max_value=max_value)
| eta_bar | identifier_name |
messageController.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import 'vs/css!./messageController';
import * as nls from 'vs/nls';
import { TimeoutTimer } from 'vs/base/common/async';
import { KeyCode } from 'vs/base/common/keyCodes';
import { IDisposable, dispose, Disposable } from 'vs/base/common/lifecycle';
import { alert } from 'vs/base/browser/ui/aria/aria';
import { Range } from 'vs/editor/common/core/range';
import * as editorCommon from 'vs/editor/common/editorCommon';
import { registerEditorContribution, EditorCommand, registerEditorCommand } from 'vs/editor/browser/editorExtensions';
import { ICodeEditor, IContentWidget, IContentWidgetPosition, ContentWidgetPositionPreference } from 'vs/editor/browser/editorBrowser';
import { IContextKeyService, RawContextKey, IContextKey } from 'vs/platform/contextkey/common/contextkey';
import { IPosition } from 'vs/editor/common/core/position';
import { registerThemingParticipant, HIGH_CONTRAST } from 'vs/platform/theme/common/themeService';
import { inputValidationInfoBorder, inputValidationInfoBackground, inputValidationInfoForeground } from 'vs/platform/theme/common/colorRegistry';
import { KeybindingWeight } from 'vs/platform/keybinding/common/keybindingsRegistry';
export class MessageController extends Disposable implements editorCommon.IEditorContribution {
private static readonly _id = 'editor.contrib.messageController';
static MESSAGE_VISIBLE = new RawContextKey<boolean>('messageVisible', false);
static get(editor: ICodeEditor): MessageController {
return editor.getContribution<MessageController>(MessageController._id);
}
getId(): string {
return MessageController._id;
}
private _editor: ICodeEditor;
private _visible: IContextKey<boolean>;
private _messageWidget: MessageWidget;
private _messageListeners: IDisposable[] = [];
constructor(
editor: ICodeEditor,
@IContextKeyService contextKeyService: IContextKeyService
) {
super();
this._editor = editor;
this._visible = MessageController.MESSAGE_VISIBLE.bindTo(contextKeyService);
this._register(this._editor.onDidAttemptReadOnlyEdit(() => this._onDidAttemptReadOnlyEdit()));
}
dispose(): void {
super.dispose();
this._visible.reset();
}
isVisible() {
return this._visible.get();
}
showMessage(message: string, position: IPosition): void {
alert(message);
this._visible.set(true);
dispose(this._messageWidget);
this._messageListeners = dispose(this._messageListeners);
this._messageWidget = new MessageWidget(this._editor, position, message);
// close on blur, cursor, model change, dispose
this._messageListeners.push(this._editor.onDidBlurEditorText(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidChangeCursorPosition(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidDispose(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidChangeModel(() => this.closeMessage()));
// close after 3s
this._messageListeners.push(new TimeoutTimer(() => this.closeMessage(), 3000));
// close on mouse move
let bounds: Range;
this._messageListeners.push(this._editor.onMouseMove(e => {
// outside the text area
if (!e.target.position) {
return;
}
if (!bounds) {
// define bounding box around position and first mouse occurance
bounds = new Range(position.lineNumber - 3, 1, e.target.position.lineNumber + 3, 1);
} else if (!bounds.containsPosition(e.target.position)) {
// check if position is still in bounds
this.closeMessage();
}
}));
}
closeMessage(): void {
this._visible.reset();
this._messageListeners = dispose(this._messageListeners);
this._messageListeners.push(MessageWidget.fadeOut(this._messageWidget));
}
private | (): void {
if (this._editor.hasModel()) {
this.showMessage(nls.localize('editor.readonly', "Cannot edit in read-only editor"), this._editor.getPosition());
}
}
}
const MessageCommand = EditorCommand.bindToContribution<MessageController>(MessageController.get);
registerEditorCommand(new MessageCommand({
id: 'leaveEditorMessage',
precondition: MessageController.MESSAGE_VISIBLE,
handler: c => c.closeMessage(),
kbOpts: {
weight: KeybindingWeight.EditorContrib + 30,
primary: KeyCode.Escape
}
}));
class MessageWidget implements IContentWidget {
// Editor.IContentWidget.allowEditorOverflow
readonly allowEditorOverflow = true;
readonly suppressMouseDown = false;
private _editor: ICodeEditor;
private _position: IPosition;
private _domNode: HTMLDivElement;
static fadeOut(messageWidget: MessageWidget): IDisposable {
let handle: any;
const dispose = () => {
messageWidget.dispose();
clearTimeout(handle);
messageWidget.getDomNode().removeEventListener('animationend', dispose);
};
handle = setTimeout(dispose, 110);
messageWidget.getDomNode().addEventListener('animationend', dispose);
messageWidget.getDomNode().classList.add('fadeOut');
return { dispose };
}
constructor(editor: ICodeEditor, { lineNumber, column }: IPosition, text: string) {
this._editor = editor;
this._editor.revealLinesInCenterIfOutsideViewport(lineNumber, lineNumber, editorCommon.ScrollType.Smooth);
this._position = { lineNumber, column: column - 1 };
this._domNode = document.createElement('div');
this._domNode.classList.add('monaco-editor-overlaymessage');
const message = document.createElement('div');
message.classList.add('message');
message.textContent = text;
this._domNode.appendChild(message);
const anchor = document.createElement('div');
anchor.classList.add('anchor');
this._domNode.appendChild(anchor);
this._editor.addContentWidget(this);
this._domNode.classList.add('fadeIn');
}
dispose() {
this._editor.removeContentWidget(this);
}
getId(): string {
return 'messageoverlay';
}
getDomNode(): HTMLElement {
return this._domNode;
}
getPosition(): IContentWidgetPosition {
return { position: this._position, preference: [ContentWidgetPositionPreference.ABOVE] };
}
}
registerEditorContribution(MessageController);
registerThemingParticipant((theme, collector) => {
const border = theme.getColor(inputValidationInfoBorder);
if (border) {
let borderWidth = theme.type === HIGH_CONTRAST ? 2 : 1;
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .anchor { border-top-color: ${border}; }`);
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { border: ${borderWidth}px solid ${border}; }`);
}
const background = theme.getColor(inputValidationInfoBackground);
if (background) {
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { background-color: ${background}; }`);
}
const foreground = theme.getColor(inputValidationInfoForeground);
if (foreground) {
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { color: ${foreground}; }`);
}
});
| _onDidAttemptReadOnlyEdit | identifier_name |
messageController.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import 'vs/css!./messageController';
import * as nls from 'vs/nls';
import { TimeoutTimer } from 'vs/base/common/async';
import { KeyCode } from 'vs/base/common/keyCodes';
import { IDisposable, dispose, Disposable } from 'vs/base/common/lifecycle';
import { alert } from 'vs/base/browser/ui/aria/aria';
import { Range } from 'vs/editor/common/core/range';
import * as editorCommon from 'vs/editor/common/editorCommon';
import { registerEditorContribution, EditorCommand, registerEditorCommand } from 'vs/editor/browser/editorExtensions';
import { ICodeEditor, IContentWidget, IContentWidgetPosition, ContentWidgetPositionPreference } from 'vs/editor/browser/editorBrowser';
import { IContextKeyService, RawContextKey, IContextKey } from 'vs/platform/contextkey/common/contextkey';
import { IPosition } from 'vs/editor/common/core/position';
import { registerThemingParticipant, HIGH_CONTRAST } from 'vs/platform/theme/common/themeService';
import { inputValidationInfoBorder, inputValidationInfoBackground, inputValidationInfoForeground } from 'vs/platform/theme/common/colorRegistry';
import { KeybindingWeight } from 'vs/platform/keybinding/common/keybindingsRegistry';
export class MessageController extends Disposable implements editorCommon.IEditorContribution {
private static readonly _id = 'editor.contrib.messageController';
static MESSAGE_VISIBLE = new RawContextKey<boolean>('messageVisible', false);
static get(editor: ICodeEditor): MessageController {
return editor.getContribution<MessageController>(MessageController._id);
}
getId(): string {
return MessageController._id;
}
private _editor: ICodeEditor;
private _visible: IContextKey<boolean>;
private _messageWidget: MessageWidget;
private _messageListeners: IDisposable[] = [];
constructor(
editor: ICodeEditor,
@IContextKeyService contextKeyService: IContextKeyService
) {
super();
this._editor = editor;
this._visible = MessageController.MESSAGE_VISIBLE.bindTo(contextKeyService);
this._register(this._editor.onDidAttemptReadOnlyEdit(() => this._onDidAttemptReadOnlyEdit()));
}
dispose(): void {
super.dispose();
this._visible.reset();
}
isVisible() {
return this._visible.get();
}
showMessage(message: string, position: IPosition): void {
alert(message);
this._visible.set(true);
dispose(this._messageWidget);
this._messageListeners = dispose(this._messageListeners);
this._messageWidget = new MessageWidget(this._editor, position, message);
// close on blur, cursor, model change, dispose
this._messageListeners.push(this._editor.onDidBlurEditorText(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidChangeCursorPosition(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidDispose(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidChangeModel(() => this.closeMessage()));
// close after 3s
this._messageListeners.push(new TimeoutTimer(() => this.closeMessage(), 3000));
// close on mouse move
let bounds: Range;
this._messageListeners.push(this._editor.onMouseMove(e => {
// outside the text area
if (!e.target.position) {
return;
}
if (!bounds) {
// define bounding box around position and first mouse occurance
bounds = new Range(position.lineNumber - 3, 1, e.target.position.lineNumber + 3, 1);
} else if (!bounds.containsPosition(e.target.position)) {
// check if position is still in bounds
this.closeMessage();
}
}));
}
closeMessage(): void {
this._visible.reset();
this._messageListeners = dispose(this._messageListeners);
this._messageListeners.push(MessageWidget.fadeOut(this._messageWidget));
}
private _onDidAttemptReadOnlyEdit(): void {
if (this._editor.hasModel()) {
this.showMessage(nls.localize('editor.readonly', "Cannot edit in read-only editor"), this._editor.getPosition());
}
}
}
const MessageCommand = EditorCommand.bindToContribution<MessageController>(MessageController.get);
registerEditorCommand(new MessageCommand({
id: 'leaveEditorMessage',
precondition: MessageController.MESSAGE_VISIBLE,
handler: c => c.closeMessage(),
kbOpts: {
weight: KeybindingWeight.EditorContrib + 30,
primary: KeyCode.Escape
}
}));
class MessageWidget implements IContentWidget {
// Editor.IContentWidget.allowEditorOverflow
readonly allowEditorOverflow = true;
readonly suppressMouseDown = false;
private _editor: ICodeEditor;
private _position: IPosition;
private _domNode: HTMLDivElement;
static fadeOut(messageWidget: MessageWidget): IDisposable {
let handle: any;
const dispose = () => {
messageWidget.dispose();
clearTimeout(handle);
messageWidget.getDomNode().removeEventListener('animationend', dispose);
};
handle = setTimeout(dispose, 110);
messageWidget.getDomNode().addEventListener('animationend', dispose);
messageWidget.getDomNode().classList.add('fadeOut');
return { dispose };
}
constructor(editor: ICodeEditor, { lineNumber, column }: IPosition, text: string) {
this._editor = editor;
this._editor.revealLinesInCenterIfOutsideViewport(lineNumber, lineNumber, editorCommon.ScrollType.Smooth);
this._position = { lineNumber, column: column - 1 };
this._domNode = document.createElement('div');
this._domNode.classList.add('monaco-editor-overlaymessage');
const message = document.createElement('div');
message.classList.add('message');
message.textContent = text;
this._domNode.appendChild(message);
const anchor = document.createElement('div');
anchor.classList.add('anchor');
this._domNode.appendChild(anchor);
this._editor.addContentWidget(this);
this._domNode.classList.add('fadeIn');
}
dispose() {
this._editor.removeContentWidget(this);
}
getId(): string |
getDomNode(): HTMLElement {
return this._domNode;
}
getPosition(): IContentWidgetPosition {
return { position: this._position, preference: [ContentWidgetPositionPreference.ABOVE] };
}
}
registerEditorContribution(MessageController);
registerThemingParticipant((theme, collector) => {
const border = theme.getColor(inputValidationInfoBorder);
if (border) {
let borderWidth = theme.type === HIGH_CONTRAST ? 2 : 1;
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .anchor { border-top-color: ${border}; }`);
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { border: ${borderWidth}px solid ${border}; }`);
}
const background = theme.getColor(inputValidationInfoBackground);
if (background) {
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { background-color: ${background}; }`);
}
const foreground = theme.getColor(inputValidationInfoForeground);
if (foreground) {
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { color: ${foreground}; }`);
}
});
| {
return 'messageoverlay';
} | identifier_body |
messageController.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import 'vs/css!./messageController';
import * as nls from 'vs/nls';
import { TimeoutTimer } from 'vs/base/common/async';
import { KeyCode } from 'vs/base/common/keyCodes';
import { IDisposable, dispose, Disposable } from 'vs/base/common/lifecycle';
import { alert } from 'vs/base/browser/ui/aria/aria';
import { Range } from 'vs/editor/common/core/range';
import * as editorCommon from 'vs/editor/common/editorCommon';
import { registerEditorContribution, EditorCommand, registerEditorCommand } from 'vs/editor/browser/editorExtensions';
import { ICodeEditor, IContentWidget, IContentWidgetPosition, ContentWidgetPositionPreference } from 'vs/editor/browser/editorBrowser';
import { IContextKeyService, RawContextKey, IContextKey } from 'vs/platform/contextkey/common/contextkey';
import { IPosition } from 'vs/editor/common/core/position';
import { registerThemingParticipant, HIGH_CONTRAST } from 'vs/platform/theme/common/themeService';
import { inputValidationInfoBorder, inputValidationInfoBackground, inputValidationInfoForeground } from 'vs/platform/theme/common/colorRegistry';
import { KeybindingWeight } from 'vs/platform/keybinding/common/keybindingsRegistry';
export class MessageController extends Disposable implements editorCommon.IEditorContribution {
private static readonly _id = 'editor.contrib.messageController';
static MESSAGE_VISIBLE = new RawContextKey<boolean>('messageVisible', false);
static get(editor: ICodeEditor): MessageController {
return editor.getContribution<MessageController>(MessageController._id);
}
getId(): string {
return MessageController._id;
}
private _editor: ICodeEditor;
private _visible: IContextKey<boolean>;
private _messageWidget: MessageWidget;
private _messageListeners: IDisposable[] = [];
constructor(
editor: ICodeEditor,
@IContextKeyService contextKeyService: IContextKeyService
) {
super();
this._editor = editor;
this._visible = MessageController.MESSAGE_VISIBLE.bindTo(contextKeyService);
this._register(this._editor.onDidAttemptReadOnlyEdit(() => this._onDidAttemptReadOnlyEdit()));
}
dispose(): void {
super.dispose();
this._visible.reset();
}
isVisible() {
return this._visible.get();
}
showMessage(message: string, position: IPosition): void {
alert(message);
this._visible.set(true);
dispose(this._messageWidget);
this._messageListeners = dispose(this._messageListeners);
this._messageWidget = new MessageWidget(this._editor, position, message);
// close on blur, cursor, model change, dispose
this._messageListeners.push(this._editor.onDidBlurEditorText(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidChangeCursorPosition(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidDispose(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidChangeModel(() => this.closeMessage()));
// close after 3s
this._messageListeners.push(new TimeoutTimer(() => this.closeMessage(), 3000));
// close on mouse move
let bounds: Range;
this._messageListeners.push(this._editor.onMouseMove(e => {
// outside the text area
if (!e.target.position) {
return;
}
if (!bounds) {
// define bounding box around position and first mouse occurance
bounds = new Range(position.lineNumber - 3, 1, e.target.position.lineNumber + 3, 1);
} else if (!bounds.containsPosition(e.target.position)) {
// check if position is still in bounds
this.closeMessage();
}
}));
}
closeMessage(): void {
this._visible.reset();
this._messageListeners = dispose(this._messageListeners);
this._messageListeners.push(MessageWidget.fadeOut(this._messageWidget));
}
private _onDidAttemptReadOnlyEdit(): void {
if (this._editor.hasModel()) |
}
}
const MessageCommand = EditorCommand.bindToContribution<MessageController>(MessageController.get);
registerEditorCommand(new MessageCommand({
id: 'leaveEditorMessage',
precondition: MessageController.MESSAGE_VISIBLE,
handler: c => c.closeMessage(),
kbOpts: {
weight: KeybindingWeight.EditorContrib + 30,
primary: KeyCode.Escape
}
}));
class MessageWidget implements IContentWidget {
// Editor.IContentWidget.allowEditorOverflow
readonly allowEditorOverflow = true;
readonly suppressMouseDown = false;
private _editor: ICodeEditor;
private _position: IPosition;
private _domNode: HTMLDivElement;
static fadeOut(messageWidget: MessageWidget): IDisposable {
let handle: any;
const dispose = () => {
messageWidget.dispose();
clearTimeout(handle);
messageWidget.getDomNode().removeEventListener('animationend', dispose);
};
handle = setTimeout(dispose, 110);
messageWidget.getDomNode().addEventListener('animationend', dispose);
messageWidget.getDomNode().classList.add('fadeOut');
return { dispose };
}
constructor(editor: ICodeEditor, { lineNumber, column }: IPosition, text: string) {
this._editor = editor;
this._editor.revealLinesInCenterIfOutsideViewport(lineNumber, lineNumber, editorCommon.ScrollType.Smooth);
this._position = { lineNumber, column: column - 1 };
this._domNode = document.createElement('div');
this._domNode.classList.add('monaco-editor-overlaymessage');
const message = document.createElement('div');
message.classList.add('message');
message.textContent = text;
this._domNode.appendChild(message);
const anchor = document.createElement('div');
anchor.classList.add('anchor');
this._domNode.appendChild(anchor);
this._editor.addContentWidget(this);
this._domNode.classList.add('fadeIn');
}
dispose() {
this._editor.removeContentWidget(this);
}
getId(): string {
return 'messageoverlay';
}
getDomNode(): HTMLElement {
return this._domNode;
}
getPosition(): IContentWidgetPosition {
return { position: this._position, preference: [ContentWidgetPositionPreference.ABOVE] };
}
}
registerEditorContribution(MessageController);
registerThemingParticipant((theme, collector) => {
const border = theme.getColor(inputValidationInfoBorder);
if (border) {
let borderWidth = theme.type === HIGH_CONTRAST ? 2 : 1;
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .anchor { border-top-color: ${border}; }`);
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { border: ${borderWidth}px solid ${border}; }`);
}
const background = theme.getColor(inputValidationInfoBackground);
if (background) {
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { background-color: ${background}; }`);
}
const foreground = theme.getColor(inputValidationInfoForeground);
if (foreground) {
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { color: ${foreground}; }`);
}
});
| {
this.showMessage(nls.localize('editor.readonly', "Cannot edit in read-only editor"), this._editor.getPosition());
} | conditional_block |
messageController.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import 'vs/css!./messageController';
import * as nls from 'vs/nls';
import { TimeoutTimer } from 'vs/base/common/async';
import { KeyCode } from 'vs/base/common/keyCodes';
import { IDisposable, dispose, Disposable } from 'vs/base/common/lifecycle';
import { alert } from 'vs/base/browser/ui/aria/aria';
import { Range } from 'vs/editor/common/core/range';
import * as editorCommon from 'vs/editor/common/editorCommon';
import { registerEditorContribution, EditorCommand, registerEditorCommand } from 'vs/editor/browser/editorExtensions';
import { ICodeEditor, IContentWidget, IContentWidgetPosition, ContentWidgetPositionPreference } from 'vs/editor/browser/editorBrowser';
import { IContextKeyService, RawContextKey, IContextKey } from 'vs/platform/contextkey/common/contextkey';
import { IPosition } from 'vs/editor/common/core/position';
import { registerThemingParticipant, HIGH_CONTRAST } from 'vs/platform/theme/common/themeService';
import { inputValidationInfoBorder, inputValidationInfoBackground, inputValidationInfoForeground } from 'vs/platform/theme/common/colorRegistry';
import { KeybindingWeight } from 'vs/platform/keybinding/common/keybindingsRegistry';
export class MessageController extends Disposable implements editorCommon.IEditorContribution {
private static readonly _id = 'editor.contrib.messageController';
static MESSAGE_VISIBLE = new RawContextKey<boolean>('messageVisible', false);
static get(editor: ICodeEditor): MessageController {
return editor.getContribution<MessageController>(MessageController._id);
}
getId(): string {
return MessageController._id;
}
private _editor: ICodeEditor;
private _visible: IContextKey<boolean>;
private _messageWidget: MessageWidget;
private _messageListeners: IDisposable[] = [];
constructor(
editor: ICodeEditor,
@IContextKeyService contextKeyService: IContextKeyService
) {
super();
this._editor = editor;
this._visible = MessageController.MESSAGE_VISIBLE.bindTo(contextKeyService);
this._register(this._editor.onDidAttemptReadOnlyEdit(() => this._onDidAttemptReadOnlyEdit()));
}
dispose(): void {
super.dispose();
this._visible.reset();
}
isVisible() {
return this._visible.get();
}
showMessage(message: string, position: IPosition): void {
alert(message);
this._visible.set(true);
dispose(this._messageWidget);
this._messageListeners = dispose(this._messageListeners);
this._messageWidget = new MessageWidget(this._editor, position, message);
// close on blur, cursor, model change, dispose
this._messageListeners.push(this._editor.onDidBlurEditorText(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidChangeCursorPosition(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidDispose(() => this.closeMessage()));
this._messageListeners.push(this._editor.onDidChangeModel(() => this.closeMessage()));
// close after 3s
this._messageListeners.push(new TimeoutTimer(() => this.closeMessage(), 3000));
// close on mouse move
let bounds: Range;
this._messageListeners.push(this._editor.onMouseMove(e => {
// outside the text area
if (!e.target.position) {
return;
}
if (!bounds) {
// define bounding box around position and first mouse occurance
bounds = new Range(position.lineNumber - 3, 1, e.target.position.lineNumber + 3, 1);
} else if (!bounds.containsPosition(e.target.position)) {
// check if position is still in bounds
this.closeMessage();
}
}));
}
closeMessage(): void {
this._visible.reset();
this._messageListeners = dispose(this._messageListeners);
this._messageListeners.push(MessageWidget.fadeOut(this._messageWidget));
}
private _onDidAttemptReadOnlyEdit(): void {
if (this._editor.hasModel()) {
this.showMessage(nls.localize('editor.readonly', "Cannot edit in read-only editor"), this._editor.getPosition());
}
}
}
const MessageCommand = EditorCommand.bindToContribution<MessageController>(MessageController.get);
registerEditorCommand(new MessageCommand({
id: 'leaveEditorMessage',
precondition: MessageController.MESSAGE_VISIBLE,
handler: c => c.closeMessage(),
kbOpts: {
weight: KeybindingWeight.EditorContrib + 30,
primary: KeyCode.Escape
}
}));
class MessageWidget implements IContentWidget {
// Editor.IContentWidget.allowEditorOverflow
readonly allowEditorOverflow = true;
readonly suppressMouseDown = false;
private _editor: ICodeEditor;
private _position: IPosition;
private _domNode: HTMLDivElement;
static fadeOut(messageWidget: MessageWidget): IDisposable {
let handle: any;
const dispose = () => {
messageWidget.dispose();
clearTimeout(handle);
messageWidget.getDomNode().removeEventListener('animationend', dispose);
};
handle = setTimeout(dispose, 110);
messageWidget.getDomNode().addEventListener('animationend', dispose);
messageWidget.getDomNode().classList.add('fadeOut');
return { dispose };
}
constructor(editor: ICodeEditor, { lineNumber, column }: IPosition, text: string) {
this._editor = editor;
this._editor.revealLinesInCenterIfOutsideViewport(lineNumber, lineNumber, editorCommon.ScrollType.Smooth);
this._position = { lineNumber, column: column - 1 };
this._domNode = document.createElement('div');
this._domNode.classList.add('monaco-editor-overlaymessage');
const message = document.createElement('div');
message.classList.add('message');
message.textContent = text;
this._domNode.appendChild(message);
const anchor = document.createElement('div');
anchor.classList.add('anchor');
this._domNode.appendChild(anchor);
| }
dispose() {
this._editor.removeContentWidget(this);
}
getId(): string {
return 'messageoverlay';
}
getDomNode(): HTMLElement {
return this._domNode;
}
getPosition(): IContentWidgetPosition {
return { position: this._position, preference: [ContentWidgetPositionPreference.ABOVE] };
}
}
registerEditorContribution(MessageController);
registerThemingParticipant((theme, collector) => {
const border = theme.getColor(inputValidationInfoBorder);
if (border) {
let borderWidth = theme.type === HIGH_CONTRAST ? 2 : 1;
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .anchor { border-top-color: ${border}; }`);
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { border: ${borderWidth}px solid ${border}; }`);
}
const background = theme.getColor(inputValidationInfoBackground);
if (background) {
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { background-color: ${background}; }`);
}
const foreground = theme.getColor(inputValidationInfoForeground);
if (foreground) {
collector.addRule(`.monaco-editor .monaco-editor-overlaymessage .message { color: ${foreground}; }`);
}
}); | this._editor.addContentWidget(this);
this._domNode.classList.add('fadeIn'); | random_line_split |
__init__.py | from holoviews.element import (
ElementConversion, Points as HvPoints, Polygons as HvPolygons,
Path as HvPath
)
from .geo import (_Element, Feature, Tiles, is_geographic, # noqa (API import)
WMTS, Points, Image, Text, LineContours, RGB,
FilledContours, Path, Polygons, Shape, Dataset,
Contours, TriMesh, Graph, Nodes, EdgePaths, QuadMesh,
VectorField, Labels, HexTiles, Rectangles, Segments)
class GeoConversion(ElementConversion):
"""
GeoConversion is a very simple container object which can
be given an existing Dataset and provides methods to convert
the Dataset into most other Element types. If the requested
key dimensions correspond to geographical coordinates the
conversion interface will automatically use a geographical
Element type while all other plot will use regular HoloViews
Elements.
"""
def __init__(self, cube):
self._element = cube
def __call__(self, *args, **kwargs):
|
def linecontours(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(LineContours, kdims, vdims, mdims, **kwargs)
def filledcontours(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(FilledContours, kdims, vdims, mdims, **kwargs)
def image(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(Image, kdims, vdims, mdims, **kwargs)
def points(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Points if is_geographic(self._element, kdims) else HvPoints
return self(el_type, kdims, vdims, mdims, **kwargs)
def polygons(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Polygons if is_geographic(self._element, kdims) else HvPolygons
return self(el_type, kdims, vdims, mdims, **kwargs)
def path(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Path if is_geographic(self._element, kdims) else HvPath
return self(el_type, kdims, vdims, mdims, **kwargs)
Dataset._conversion_interface = GeoConversion
| group_type = args[0]
if 'crs' not in kwargs and issubclass(group_type, _Element):
kwargs['crs'] = self._element.crs
is_gpd = self._element.interface.datatype == 'geodataframe'
if is_gpd:
kdims = args[1] if len(args) > 1 else kwargs.get('kdims', None)
if len(args) > 1:
args = (Dataset, [])+args[2:]
else:
args = (Dataset,)
kwargs['kdims'] = []
converted = super(GeoConversion, self).__call__(*args, **kwargs)
if is_gpd:
if kdims is None: kdims = group_type.kdims
converted = converted.map(lambda x: x.clone(kdims=kdims, new_type=group_type), Dataset)
return converted | identifier_body |
__init__.py | from holoviews.element import (
ElementConversion, Points as HvPoints, Polygons as HvPolygons,
Path as HvPath
)
from .geo import (_Element, Feature, Tiles, is_geographic, # noqa (API import)
WMTS, Points, Image, Text, LineContours, RGB,
FilledContours, Path, Polygons, Shape, Dataset,
Contours, TriMesh, Graph, Nodes, EdgePaths, QuadMesh,
VectorField, Labels, HexTiles, Rectangles, Segments)
class GeoConversion(ElementConversion):
"""
GeoConversion is a very simple container object which can
be given an existing Dataset and provides methods to convert
the Dataset into most other Element types. If the requested
key dimensions correspond to geographical coordinates the
conversion interface will automatically use a geographical
Element type while all other plot will use regular HoloViews
Elements.
"""
def __init__(self, cube):
self._element = cube
def __call__(self, *args, **kwargs):
group_type = args[0]
if 'crs' not in kwargs and issubclass(group_type, _Element):
kwargs['crs'] = self._element.crs
is_gpd = self._element.interface.datatype == 'geodataframe'
if is_gpd:
kdims = args[1] if len(args) > 1 else kwargs.get('kdims', None)
if len(args) > 1:
args = (Dataset, [])+args[2:]
else:
args = (Dataset,)
kwargs['kdims'] = []
converted = super(GeoConversion, self).__call__(*args, **kwargs)
if is_gpd:
if kdims is None: kdims = group_type.kdims
converted = converted.map(lambda x: x.clone(kdims=kdims, new_type=group_type), Dataset)
return converted
def linecontours(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(LineContours, kdims, vdims, mdims, **kwargs)
def filledcontours(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(FilledContours, kdims, vdims, mdims, **kwargs)
def image(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(Image, kdims, vdims, mdims, **kwargs)
def points(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Points if is_geographic(self._element, kdims) else HvPoints
return self(el_type, kdims, vdims, mdims, **kwargs)
def polygons(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: |
el_type = Polygons if is_geographic(self._element, kdims) else HvPolygons
return self(el_type, kdims, vdims, mdims, **kwargs)
def path(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Path if is_geographic(self._element, kdims) else HvPath
return self(el_type, kdims, vdims, mdims, **kwargs)
Dataset._conversion_interface = GeoConversion
| kdims = self._element.kdims | conditional_block |
__init__.py | from holoviews.element import (
ElementConversion, Points as HvPoints, Polygons as HvPolygons,
Path as HvPath
)
from .geo import (_Element, Feature, Tiles, is_geographic, # noqa (API import)
WMTS, Points, Image, Text, LineContours, RGB,
FilledContours, Path, Polygons, Shape, Dataset,
Contours, TriMesh, Graph, Nodes, EdgePaths, QuadMesh,
VectorField, Labels, HexTiles, Rectangles, Segments)
class GeoConversion(ElementConversion):
"""
GeoConversion is a very simple container object which can
be given an existing Dataset and provides methods to convert
the Dataset into most other Element types. If the requested
key dimensions correspond to geographical coordinates the
conversion interface will automatically use a geographical
Element type while all other plot will use regular HoloViews
Elements.
"""
def __init__(self, cube):
self._element = cube
def __call__(self, *args, **kwargs):
group_type = args[0]
if 'crs' not in kwargs and issubclass(group_type, _Element):
kwargs['crs'] = self._element.crs
is_gpd = self._element.interface.datatype == 'geodataframe'
if is_gpd:
kdims = args[1] if len(args) > 1 else kwargs.get('kdims', None)
if len(args) > 1:
args = (Dataset, [])+args[2:]
else:
args = (Dataset,)
kwargs['kdims'] = []
converted = super(GeoConversion, self).__call__(*args, **kwargs)
if is_gpd:
if kdims is None: kdims = group_type.kdims
converted = converted.map(lambda x: x.clone(kdims=kdims, new_type=group_type), Dataset) | return self(LineContours, kdims, vdims, mdims, **kwargs)
def filledcontours(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(FilledContours, kdims, vdims, mdims, **kwargs)
def image(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(Image, kdims, vdims, mdims, **kwargs)
def points(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Points if is_geographic(self._element, kdims) else HvPoints
return self(el_type, kdims, vdims, mdims, **kwargs)
def polygons(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Polygons if is_geographic(self._element, kdims) else HvPolygons
return self(el_type, kdims, vdims, mdims, **kwargs)
def path(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Path if is_geographic(self._element, kdims) else HvPath
return self(el_type, kdims, vdims, mdims, **kwargs)
Dataset._conversion_interface = GeoConversion | return converted
def linecontours(self, kdims=None, vdims=None, mdims=None, **kwargs): | random_line_split |
__init__.py | from holoviews.element import (
ElementConversion, Points as HvPoints, Polygons as HvPolygons,
Path as HvPath
)
from .geo import (_Element, Feature, Tiles, is_geographic, # noqa (API import)
WMTS, Points, Image, Text, LineContours, RGB,
FilledContours, Path, Polygons, Shape, Dataset,
Contours, TriMesh, Graph, Nodes, EdgePaths, QuadMesh,
VectorField, Labels, HexTiles, Rectangles, Segments)
class GeoConversion(ElementConversion):
"""
GeoConversion is a very simple container object which can
be given an existing Dataset and provides methods to convert
the Dataset into most other Element types. If the requested
key dimensions correspond to geographical coordinates the
conversion interface will automatically use a geographical
Element type while all other plot will use regular HoloViews
Elements.
"""
def __init__(self, cube):
self._element = cube
def | (self, *args, **kwargs):
group_type = args[0]
if 'crs' not in kwargs and issubclass(group_type, _Element):
kwargs['crs'] = self._element.crs
is_gpd = self._element.interface.datatype == 'geodataframe'
if is_gpd:
kdims = args[1] if len(args) > 1 else kwargs.get('kdims', None)
if len(args) > 1:
args = (Dataset, [])+args[2:]
else:
args = (Dataset,)
kwargs['kdims'] = []
converted = super(GeoConversion, self).__call__(*args, **kwargs)
if is_gpd:
if kdims is None: kdims = group_type.kdims
converted = converted.map(lambda x: x.clone(kdims=kdims, new_type=group_type), Dataset)
return converted
def linecontours(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(LineContours, kdims, vdims, mdims, **kwargs)
def filledcontours(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(FilledContours, kdims, vdims, mdims, **kwargs)
def image(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(Image, kdims, vdims, mdims, **kwargs)
def points(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Points if is_geographic(self._element, kdims) else HvPoints
return self(el_type, kdims, vdims, mdims, **kwargs)
def polygons(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Polygons if is_geographic(self._element, kdims) else HvPolygons
return self(el_type, kdims, vdims, mdims, **kwargs)
def path(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Path if is_geographic(self._element, kdims) else HvPath
return self(el_type, kdims, vdims, mdims, **kwargs)
Dataset._conversion_interface = GeoConversion
| __call__ | identifier_name |
kalloc.rs | // The MIT License (MIT)
//
// Copyright (c) 2015 Kashyap
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use super::spinlock::{Spinlock, DUMMY_LOCK, init_lock};
use super::mmu::{Address, PG_SIZE, pg_roundup};
use super::memlayout::{v2p,PHYSTOP};
use super::uart::uart_put_str;
use super::console::panic;
use super::rlibc::memset;
struct KmemT {
lock: Spinlock,
use_lock: u32 , //TODO is u32 the right type?
// TODO struct run *freelist;
}
static mut kmem : KmemT = KmemT{ lock: DUMMY_LOCK, use_lock: 0} ;
static mut end : Address = 0;
pub fn kinit1(vstart: Address, vend: Address) {
unsafe {
init_lock(& mut kmem.lock, "kmem");
kmem.use_lock = 0;
}
free_range(vstart, vend);
}
fn free_range(vstart: Address, vend: Address) {
let mut address = pg_roundup(vstart);
// Keep it around for future debugging
//unsafe {
// asm!("mov $0 , %rax" : /* no outputs */ : "r"(vend) : "eax");
// asm!("mov $0 , %rbx" : /* no outputs */ : "r"(address) : "eax");
//}
unsafe {
end = vstart;
}
loop {
kfree(address);
address = address + PG_SIZE;
if address > vend |
}
}
fn kfree(v : Address) {
//struct run *r;
if ((v % PG_SIZE) > 0) || (v2p(v) >= PHYSTOP) {
panic("kfree");
}
unsafe {
if v < end {
panic("kfree");
}
}
unsafe {
// Fill with junk to catch dangling refs.
memset(v as * mut u8, 1, PG_SIZE as usize);
}
//
// if(kmem.use_lock)
// acquire(&kmem.lock);
// r = (struct run*)v;
// r->next = kmem.freelist;
// kmem.freelist = r;
// if(kmem.use_lock)
// release(&kmem.lock);
//
//
//
// */
}
| {
break;
} | conditional_block |
kalloc.rs | // The MIT License (MIT)
//
// Copyright (c) 2015 Kashyap
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use super::spinlock::{Spinlock, DUMMY_LOCK, init_lock};
use super::mmu::{Address, PG_SIZE, pg_roundup};
use super::memlayout::{v2p,PHYSTOP};
use super::uart::uart_put_str;
use super::console::panic;
use super::rlibc::memset;
struct KmemT {
lock: Spinlock,
use_lock: u32 , //TODO is u32 the right type?
// TODO struct run *freelist;
}
static mut kmem : KmemT = KmemT{ lock: DUMMY_LOCK, use_lock: 0} ;
static mut end : Address = 0;
pub fn kinit1(vstart: Address, vend: Address) {
unsafe {
init_lock(& mut kmem.lock, "kmem");
kmem.use_lock = 0;
}
free_range(vstart, vend);
}
fn free_range(vstart: Address, vend: Address) |
fn kfree(v : Address) {
//struct run *r;
if ((v % PG_SIZE) > 0) || (v2p(v) >= PHYSTOP) {
panic("kfree");
}
unsafe {
if v < end {
panic("kfree");
}
}
unsafe {
// Fill with junk to catch dangling refs.
memset(v as * mut u8, 1, PG_SIZE as usize);
}
//
// if(kmem.use_lock)
// acquire(&kmem.lock);
// r = (struct run*)v;
// r->next = kmem.freelist;
// kmem.freelist = r;
// if(kmem.use_lock)
// release(&kmem.lock);
//
//
//
// */
}
| {
let mut address = pg_roundup(vstart);
// Keep it around for future debugging
//unsafe {
// asm!("mov $0 , %rax" : /* no outputs */ : "r"(vend) : "eax");
// asm!("mov $0 , %rbx" : /* no outputs */ : "r"(address) : "eax");
//}
unsafe {
end = vstart;
}
loop {
kfree(address);
address = address + PG_SIZE;
if address > vend {
break;
}
}
} | identifier_body |
kalloc.rs | // The MIT License (MIT)
//
// Copyright (c) 2015 Kashyap
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use super::spinlock::{Spinlock, DUMMY_LOCK, init_lock};
use super::mmu::{Address, PG_SIZE, pg_roundup};
use super::memlayout::{v2p,PHYSTOP};
use super::uart::uart_put_str;
use super::console::panic;
use super::rlibc::memset;
struct KmemT {
lock: Spinlock,
use_lock: u32 , //TODO is u32 the right type?
// TODO struct run *freelist;
} |
static mut kmem : KmemT = KmemT{ lock: DUMMY_LOCK, use_lock: 0} ;
static mut end : Address = 0;
pub fn kinit1(vstart: Address, vend: Address) {
unsafe {
init_lock(& mut kmem.lock, "kmem");
kmem.use_lock = 0;
}
free_range(vstart, vend);
}
fn free_range(vstart: Address, vend: Address) {
let mut address = pg_roundup(vstart);
// Keep it around for future debugging
//unsafe {
// asm!("mov $0 , %rax" : /* no outputs */ : "r"(vend) : "eax");
// asm!("mov $0 , %rbx" : /* no outputs */ : "r"(address) : "eax");
//}
unsafe {
end = vstart;
}
loop {
kfree(address);
address = address + PG_SIZE;
if address > vend {
break;
}
}
}
fn kfree(v : Address) {
//struct run *r;
if ((v % PG_SIZE) > 0) || (v2p(v) >= PHYSTOP) {
panic("kfree");
}
unsafe {
if v < end {
panic("kfree");
}
}
unsafe {
// Fill with junk to catch dangling refs.
memset(v as * mut u8, 1, PG_SIZE as usize);
}
//
// if(kmem.use_lock)
// acquire(&kmem.lock);
// r = (struct run*)v;
// r->next = kmem.freelist;
// kmem.freelist = r;
// if(kmem.use_lock)
// release(&kmem.lock);
//
//
//
// */
} | random_line_split |
|
kalloc.rs | // The MIT License (MIT)
//
// Copyright (c) 2015 Kashyap
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use super::spinlock::{Spinlock, DUMMY_LOCK, init_lock};
use super::mmu::{Address, PG_SIZE, pg_roundup};
use super::memlayout::{v2p,PHYSTOP};
use super::uart::uart_put_str;
use super::console::panic;
use super::rlibc::memset;
struct KmemT {
lock: Spinlock,
use_lock: u32 , //TODO is u32 the right type?
// TODO struct run *freelist;
}
static mut kmem : KmemT = KmemT{ lock: DUMMY_LOCK, use_lock: 0} ;
static mut end : Address = 0;
pub fn | (vstart: Address, vend: Address) {
unsafe {
init_lock(& mut kmem.lock, "kmem");
kmem.use_lock = 0;
}
free_range(vstart, vend);
}
fn free_range(vstart: Address, vend: Address) {
let mut address = pg_roundup(vstart);
// Keep it around for future debugging
//unsafe {
// asm!("mov $0 , %rax" : /* no outputs */ : "r"(vend) : "eax");
// asm!("mov $0 , %rbx" : /* no outputs */ : "r"(address) : "eax");
//}
unsafe {
end = vstart;
}
loop {
kfree(address);
address = address + PG_SIZE;
if address > vend {
break;
}
}
}
fn kfree(v : Address) {
//struct run *r;
if ((v % PG_SIZE) > 0) || (v2p(v) >= PHYSTOP) {
panic("kfree");
}
unsafe {
if v < end {
panic("kfree");
}
}
unsafe {
// Fill with junk to catch dangling refs.
memset(v as * mut u8, 1, PG_SIZE as usize);
}
//
// if(kmem.use_lock)
// acquire(&kmem.lock);
// r = (struct run*)v;
// r->next = kmem.freelist;
// kmem.freelist = r;
// if(kmem.use_lock)
// release(&kmem.lock);
//
//
//
// */
}
| kinit1 | identifier_name |
boiler_dispatch.rs | //! Experimental boiler library for dispatching messages to handlers
use boiler::{Message, EMsg};
pub trait MessageHandler {
fn invoke(&mut self, message: Message);
}
impl<F: FnMut(Message)> MessageHandler for F {
fn invoke(&mut self, message: Message) {
self(message);
}
}
pub struct MessageDispatcher<'a> {
handlers: Vec<Option<Box<MessageHandler + 'a>>>,
fallback: Option<Box<FnMut(Message)>>
}
impl<'a> MessageDispatcher<'a> {
pub fn new() -> Self {
// Fill the vector with Nones, we can't use vec! for this
let mut handlers = Vec::with_capacity(10000);
for _ in 0..8000 {
handlers.push(None);
}
MessageDispatcher {
handlers: handlers,
fallback: None
}
}
pub fn register<H: MessageHandler + 'a>(&mut self, msg: EMsg, handler: H) {
self.handlers[msg as usize] = Some(Box::new(handler));
}
pub fn register_fallback(&mut self, handler: Box<FnMut(Message)>) {
self.fallback = Some(handler);
}
pub fn handle(&mut self, message: Message) {
if let &mut Some(ref mut handler) = &mut self.handlers[message.header.emsg() as usize] |
// We were unable to find anything, call the fallback
self.invoke_fallback(message);
}
fn invoke_fallback(&mut self, message: Message) {
if let &mut Some(ref mut handler) = &mut self.fallback {
handler(message);
}
}
}
| {
handler.invoke(message);
return;
} | conditional_block |
boiler_dispatch.rs | //! Experimental boiler library for dispatching messages to handlers
use boiler::{Message, EMsg};
pub trait MessageHandler {
fn invoke(&mut self, message: Message);
}
impl<F: FnMut(Message)> MessageHandler for F {
fn invoke(&mut self, message: Message) {
self(message);
}
}
pub struct MessageDispatcher<'a> {
handlers: Vec<Option<Box<MessageHandler + 'a>>>,
fallback: Option<Box<FnMut(Message)>>
}
impl<'a> MessageDispatcher<'a> {
pub fn new() -> Self {
// Fill the vector with Nones, we can't use vec! for this
let mut handlers = Vec::with_capacity(10000);
for _ in 0..8000 {
handlers.push(None);
}
MessageDispatcher {
handlers: handlers,
fallback: None
}
}
pub fn register<H: MessageHandler + 'a>(&mut self, msg: EMsg, handler: H) {
self.handlers[msg as usize] = Some(Box::new(handler));
}
pub fn register_fallback(&mut self, handler: Box<FnMut(Message)>) {
self.fallback = Some(handler);
}
pub fn handle(&mut self, message: Message) {
if let &mut Some(ref mut handler) = &mut self.handlers[message.header.emsg() as usize] {
handler.invoke(message);
return; | }
fn invoke_fallback(&mut self, message: Message) {
if let &mut Some(ref mut handler) = &mut self.fallback {
handler(message);
}
}
} | }
// We were unable to find anything, call the fallback
self.invoke_fallback(message); | random_line_split |
boiler_dispatch.rs | //! Experimental boiler library for dispatching messages to handlers
use boiler::{Message, EMsg};
pub trait MessageHandler {
fn invoke(&mut self, message: Message);
}
impl<F: FnMut(Message)> MessageHandler for F {
fn | (&mut self, message: Message) {
self(message);
}
}
pub struct MessageDispatcher<'a> {
handlers: Vec<Option<Box<MessageHandler + 'a>>>,
fallback: Option<Box<FnMut(Message)>>
}
impl<'a> MessageDispatcher<'a> {
pub fn new() -> Self {
// Fill the vector with Nones, we can't use vec! for this
let mut handlers = Vec::with_capacity(10000);
for _ in 0..8000 {
handlers.push(None);
}
MessageDispatcher {
handlers: handlers,
fallback: None
}
}
pub fn register<H: MessageHandler + 'a>(&mut self, msg: EMsg, handler: H) {
self.handlers[msg as usize] = Some(Box::new(handler));
}
pub fn register_fallback(&mut self, handler: Box<FnMut(Message)>) {
self.fallback = Some(handler);
}
pub fn handle(&mut self, message: Message) {
if let &mut Some(ref mut handler) = &mut self.handlers[message.header.emsg() as usize] {
handler.invoke(message);
return;
}
// We were unable to find anything, call the fallback
self.invoke_fallback(message);
}
fn invoke_fallback(&mut self, message: Message) {
if let &mut Some(ref mut handler) = &mut self.fallback {
handler(message);
}
}
}
| invoke | identifier_name |
boiler_dispatch.rs | //! Experimental boiler library for dispatching messages to handlers
use boiler::{Message, EMsg};
pub trait MessageHandler {
fn invoke(&mut self, message: Message);
}
impl<F: FnMut(Message)> MessageHandler for F {
fn invoke(&mut self, message: Message) {
self(message);
}
}
pub struct MessageDispatcher<'a> {
handlers: Vec<Option<Box<MessageHandler + 'a>>>,
fallback: Option<Box<FnMut(Message)>>
}
impl<'a> MessageDispatcher<'a> {
pub fn new() -> Self {
// Fill the vector with Nones, we can't use vec! for this
let mut handlers = Vec::with_capacity(10000);
for _ in 0..8000 {
handlers.push(None);
}
MessageDispatcher {
handlers: handlers,
fallback: None
}
}
pub fn register<H: MessageHandler + 'a>(&mut self, msg: EMsg, handler: H) {
self.handlers[msg as usize] = Some(Box::new(handler));
}
pub fn register_fallback(&mut self, handler: Box<FnMut(Message)>) |
pub fn handle(&mut self, message: Message) {
if let &mut Some(ref mut handler) = &mut self.handlers[message.header.emsg() as usize] {
handler.invoke(message);
return;
}
// We were unable to find anything, call the fallback
self.invoke_fallback(message);
}
fn invoke_fallback(&mut self, message: Message) {
if let &mut Some(ref mut handler) = &mut self.fallback {
handler(message);
}
}
}
| {
self.fallback = Some(handler);
} | identifier_body |
response.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* Copyright (c) 2016 Jacob Peddicord <[email protected]>
*/
use serde_json::{self, Error};
use submission::Submission;
#[derive(Debug)]
pub struct SubmissionResult {
pub status: SubmissionStatus,
pub meta: SubmissionMeta,
}
impl SubmissionResult {
pub fn new(status: SubmissionStatus) -> SubmissionResult {
SubmissionResult {
status,
meta: SubmissionMeta::None,
}
}
pub fn with_meta(status: SubmissionStatus, meta: SubmissionMeta) -> SubmissionResult {
SubmissionResult { status, meta }
}
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum SubmissionStatus {
Successful,
FailedTests,
BadCompile,
Crashed,
Timeout,
InternalError,
}
#[derive(Debug, Serialize)]
#[serde(untagged)]
pub enum SubmissionMeta {
None,
GeneralFailure { stderr: String },
TestFailures { pass: u8, fail: u8, diff: String },
InternalError(String),
}
#[derive(Debug, Serialize)]
pub struct Response {
id: u32,
user: u32,
problem: String,
result: SubmissionStatus,
meta: SubmissionMeta,
}
impl Response {
pub fn new(sub: &Submission, result: SubmissionResult) -> Response {
let meta = match result.meta {
SubmissionMeta::GeneralFailure { stderr } => {
let mut trunc = stderr.clone();
trunc.truncate(8000);
SubmissionMeta::GeneralFailure { stderr: trunc }
}
_ => result.meta,
};
Response {
id: sub.get_id(),
user: sub.get_user(),
problem: sub.get_problem_name(),
result: result.status,
meta,
}
}
pub fn new_error(msg: String) -> Response {
Response {
id: 0,
user: 0,
problem: String::new(),
result: SubmissionStatus::InternalError,
meta: SubmissionMeta::InternalError(msg),
}
}
pub fn encode(&self) -> Result<String, Error> {
serde_json::to_string(&self)
}
pub fn get_status(&self) -> &SubmissionStatus {
&self.result | } | } | random_line_split |
response.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* Copyright (c) 2016 Jacob Peddicord <[email protected]>
*/
use serde_json::{self, Error};
use submission::Submission;
#[derive(Debug)]
pub struct SubmissionResult {
pub status: SubmissionStatus,
pub meta: SubmissionMeta,
}
impl SubmissionResult {
pub fn new(status: SubmissionStatus) -> SubmissionResult {
SubmissionResult {
status,
meta: SubmissionMeta::None,
}
}
pub fn with_meta(status: SubmissionStatus, meta: SubmissionMeta) -> SubmissionResult {
SubmissionResult { status, meta }
}
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum SubmissionStatus {
Successful,
FailedTests,
BadCompile,
Crashed,
Timeout,
InternalError,
}
#[derive(Debug, Serialize)]
#[serde(untagged)]
pub enum | {
None,
GeneralFailure { stderr: String },
TestFailures { pass: u8, fail: u8, diff: String },
InternalError(String),
}
#[derive(Debug, Serialize)]
pub struct Response {
id: u32,
user: u32,
problem: String,
result: SubmissionStatus,
meta: SubmissionMeta,
}
impl Response {
pub fn new(sub: &Submission, result: SubmissionResult) -> Response {
let meta = match result.meta {
SubmissionMeta::GeneralFailure { stderr } => {
let mut trunc = stderr.clone();
trunc.truncate(8000);
SubmissionMeta::GeneralFailure { stderr: trunc }
}
_ => result.meta,
};
Response {
id: sub.get_id(),
user: sub.get_user(),
problem: sub.get_problem_name(),
result: result.status,
meta,
}
}
pub fn new_error(msg: String) -> Response {
Response {
id: 0,
user: 0,
problem: String::new(),
result: SubmissionStatus::InternalError,
meta: SubmissionMeta::InternalError(msg),
}
}
pub fn encode(&self) -> Result<String, Error> {
serde_json::to_string(&self)
}
pub fn get_status(&self) -> &SubmissionStatus {
&self.result
}
}
| SubmissionMeta | identifier_name |
Gruntfile.js | /*
* grunt-static-i18next
*
*
* Copyright (c) 2014 Stas Yermakov
* Licensed under the MIT license.
*/
'use strict';
module.exports = function (grunt) {
// load all npm grunt tasks
require('load-grunt-tasks')(grunt);
// Project configuration.
grunt.initConfig({
yeoman: {
// configurable paths
tasks: 'tasks',
src: 'src',
dist: 'dist',
test_app: 'test/fixtures/app'
},
watch: {
gruntfile: {
files: ['Gruntfile.js']
},
typescript: {
files: ["<%= yeoman.src %>/**/*.ts"],
tasks: ["typescript", "test"] |
jshint: {
all: [
'Gruntfile.js',
'tasks/*.js',
'<%= nodeunit.tests %>'
],
options: {
jshintrc: '.jshintrc',
reporter: require('jshint-stylish')
}
},
// Unit tests.
nodeunit: {
tests: ['test/*_test.js']
},
// Compile TypeScript source codes
typescript: {
dist: {
src: ['<%= yeoman.src %>/**/*.ts'],
dest: '<%= yeoman.tasks %>',
options: {
expand: true,
target: 'es5', //or es3
rootDir: '<%= yeoman.src %>/',
sourceMap: false,
declaration: false,
module: 'commonjs'
}
}
},
// Empties folders to start fresh
clean: {
dist: {
files: [
{
dot: true,
src: [
'.tmp',
'<%= yeoman.dist %>/*',
'!<%= yeoman.dist %>/.git*'
]
}
]
},
server: '.tmp'
},
// Run some tasks in parallel to speed up the build process
concurrent: {
server: [
'typescript'
],
test: [
'typescript'
],
dist: [
'typescript'
]
}
});
// Actually load this plugin's task(s).
grunt.loadTasks('tasks');
grunt.registerTask('serve', function (target) {
grunt.task.run([
'clean:server',
'concurrent:server',
'watch'
]);
});
// Whenever the "test" task is run, first clean the ".tmp" dir, then run this
// plugin's task(s), then test the result.
grunt.registerTask('test', ['clean', 'nodeunit']);
// By default, lint and run all tests.
grunt.registerTask('default', ['jshint', 'test']);
}; | }
}, | random_line_split |
hbase_master.py | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from hbase import hbase
from hbase_service import hbase_service
from hbase_decommission import hbase_decommission
from resource_management.libraries.functions.check_process_status import check_process_status
class HbaseMaster(Script):
def install(self, env):
self.install_packages(env)
def configure(self, env, action = None):
import params
env.set_params(params)
hbase('master', action)
def start(self, env):
import params
env.set_params(params)
self.configure(env, action = 'start') # for security
hbase_service( 'master',
action = 'start'
)
def stop(self, env):
import params
env.set_params(params)
hbase_service( 'master',
action = 'stop'
)
def status(self, env):
import status_params
env.set_params(status_params)
pid_file = format("{pid_dir}/hbase-{hbase_user}-master.pid")
check_process_status(pid_file)
def decommission(self, env):
import params
env.set_params(params)
hbase_decommission(env)
if __name__ == "__main__":
| HbaseMaster().execute() | conditional_block |
|
hbase_master.py | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from hbase import hbase
from hbase_service import hbase_service
from hbase_decommission import hbase_decommission
from resource_management.libraries.functions.check_process_status import check_process_status
class HbaseMaster(Script):
def install(self, env):
self.install_packages(env)
def | (self, env, action = None):
import params
env.set_params(params)
hbase('master', action)
def start(self, env):
import params
env.set_params(params)
self.configure(env, action = 'start') # for security
hbase_service( 'master',
action = 'start'
)
def stop(self, env):
import params
env.set_params(params)
hbase_service( 'master',
action = 'stop'
)
def status(self, env):
import status_params
env.set_params(status_params)
pid_file = format("{pid_dir}/hbase-{hbase_user}-master.pid")
check_process_status(pid_file)
def decommission(self, env):
import params
env.set_params(params)
hbase_decommission(env)
if __name__ == "__main__":
HbaseMaster().execute()
| configure | identifier_name |
hbase_master.py | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from hbase import hbase
from hbase_service import hbase_service
from hbase_decommission import hbase_decommission
from resource_management.libraries.functions.check_process_status import check_process_status
class HbaseMaster(Script):
def install(self, env):
self.install_packages(env)
def configure(self, env, action = None):
import params
env.set_params(params) | env.set_params(params)
self.configure(env, action = 'start') # for security
hbase_service( 'master',
action = 'start'
)
def stop(self, env):
import params
env.set_params(params)
hbase_service( 'master',
action = 'stop'
)
def status(self, env):
import status_params
env.set_params(status_params)
pid_file = format("{pid_dir}/hbase-{hbase_user}-master.pid")
check_process_status(pid_file)
def decommission(self, env):
import params
env.set_params(params)
hbase_decommission(env)
if __name__ == "__main__":
HbaseMaster().execute() |
hbase('master', action)
def start(self, env):
import params | random_line_split |
hbase_master.py | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from hbase import hbase
from hbase_service import hbase_service
from hbase_decommission import hbase_decommission
from resource_management.libraries.functions.check_process_status import check_process_status
class HbaseMaster(Script):
def install(self, env):
self.install_packages(env)
def configure(self, env, action = None):
import params
env.set_params(params)
hbase('master', action)
def start(self, env):
|
def stop(self, env):
import params
env.set_params(params)
hbase_service( 'master',
action = 'stop'
)
def status(self, env):
import status_params
env.set_params(status_params)
pid_file = format("{pid_dir}/hbase-{hbase_user}-master.pid")
check_process_status(pid_file)
def decommission(self, env):
import params
env.set_params(params)
hbase_decommission(env)
if __name__ == "__main__":
HbaseMaster().execute()
| import params
env.set_params(params)
self.configure(env, action = 'start') # for security
hbase_service( 'master',
action = 'start'
) | identifier_body |
project-suggested-contributions.js | /**
* window.c.ProjectSuggestedContributions component
* A Project-show page helper to show suggested amounts of contributions
*
* Example of use:
* view: () => {
* ...
* m.component(c.ProjectSuggestedContributions, {project: project})
* ...
* }
*/
import m from 'mithril';
import _ from 'underscore';
const projectSuggestedContributions = {
| (ctrl, args) {
const project = args.project();
const suggestionUrl = amount => `/projects/${project.project_id}/contributions/new?amount=${amount}`,
suggestedValues = [10, 25, 50, 100];
return m('#suggestions', _.map(suggestedValues, amount => project ? m(`a[href="${suggestionUrl(amount)}"].card-reward.card-big.card-secondary.u-marginbottom-20`, [
m('.fontsize-larger', `R$ ${amount}`)
]) : ''));
}
};
export default projectSuggestedContributions;
| view | identifier_name |
project-suggested-contributions.js | /**
* window.c.ProjectSuggestedContributions component
* A Project-show page helper to show suggested amounts of contributions
*
* Example of use:
* view: () => {
* ...
* m.component(c.ProjectSuggestedContributions, {project: project})
* ...
* }
*/
import m from 'mithril';
import _ from 'underscore';
const projectSuggestedContributions = {
view(ctrl, args) {
const project = args.project();
const suggestionUrl = amount => `/projects/${project.project_id}/contributions/new?amount=${amount}`,
suggestedValues = [10, 25, 50, 100];
return m('#suggestions', _.map(suggestedValues, amount => project ? m(`a[href="${suggestionUrl(amount)}"].card-reward.card-big.card-secondary.u-marginbottom-20`, [
m('.fontsize-larger', `R$ ${amount}`)
]) : ''));
}
};
| export default projectSuggestedContributions; | random_line_split |
|
project-suggested-contributions.js | /**
* window.c.ProjectSuggestedContributions component
* A Project-show page helper to show suggested amounts of contributions
*
* Example of use:
* view: () => {
* ...
* m.component(c.ProjectSuggestedContributions, {project: project})
* ...
* }
*/
import m from 'mithril';
import _ from 'underscore';
const projectSuggestedContributions = {
view(ctrl, args) | };
export default projectSuggestedContributions;
| {
const project = args.project();
const suggestionUrl = amount => `/projects/${project.project_id}/contributions/new?amount=${amount}`,
suggestedValues = [10, 25, 50, 100];
return m('#suggestions', _.map(suggestedValues, amount => project ? m(`a[href="${suggestionUrl(amount)}"].card-reward.card-big.card-secondary.u-marginbottom-20`, [
m('.fontsize-larger', `R$ ${amount}`)
]) : ''));
}
| identifier_body |
root_moves_list.rs |
use std::slice;
use std::ops::{Deref,DerefMut,Index,IndexMut};
use std::iter::{Iterator,IntoIterator,FusedIterator,TrustedLen,ExactSizeIterator};
use std::ptr;
use std::mem;
use std::sync::atomic::{Ordering,AtomicUsize};
use pleco::{MoveList, BitMove};
use super::{RootMove, MAX_MOVES};
pub struct RootMoveList {
len: AtomicUsize,
moves: [RootMove; MAX_MOVES],
}
impl Clone for RootMoveList {
fn clone(&self) -> Self {
RootMoveList {
len: AtomicUsize::new(self.len.load(Ordering::SeqCst)),
moves: self.moves
}
}
}
unsafe impl Send for RootMoveList {}
unsafe impl Sync for RootMoveList {}
impl RootMoveList {
/// Creates an empty `RootMoveList`.
#[inline]
pub fn new() -> Self {
unsafe {
RootMoveList {
len: AtomicUsize::new(0),
moves: [mem::uninitialized(); MAX_MOVES],
}
}
}
/// Returns the length of the list.
#[inline(always)]
pub fn len(&self) -> usize {
self.len.load(Ordering::SeqCst)
}
/// Replaces the current `RootMoveList` with another `RootMoveList`.
pub fn clone_from_other(&mut self, other: &RootMoveList) {
self.len.store(other.len(), Ordering::SeqCst);
unsafe {
let self_moves: *mut [RootMove; MAX_MOVES] = self.moves.as_mut_ptr() as *mut [RootMove; MAX_MOVES];
let other_moves: *const [RootMove; MAX_MOVES] = other.moves.as_ptr() as *const [RootMove; MAX_MOVES];
ptr::copy_nonoverlapping(other_moves, self_moves, 1);
}
}
/// Replaces the current `RootMoveList` with the moves inside a `MoveList`.
pub fn replace(&mut self, moves: &MoveList) {
self.len.store(moves.len(), Ordering::SeqCst);
for (i, mov) in moves.iter().enumerate() {
self[i] = RootMove::new(*mov);
}
}
/// Applies `RootMove::rollback()` to each `RootMove` inside.
#[inline]
pub fn rollback(&mut self) {
self.iter_mut()
.for_each(|b| b.prev_score = b.score);
}
/// Returns the first `RootMove` in the list.
///
/// # Safety
///
/// May return a nonsense `RootMove` if the list hasn't been initalized since the start.
#[inline]
pub fn first(&mut self) -> &mut RootMove {
unsafe {
self.get_unchecked_mut(0)
}
}
/// Converts to a `MoveList`.
pub fn to_list(&self) -> MoveList {
let vec = self.iter().map(|m| m.bit_move).collect::<Vec<BitMove>>();
MoveList::from(vec)
}
/// Returns the previous best score.
#[inline]
pub fn prev_best_score(&self) -> i32 {
unsafe {
self.get_unchecked(0).prev_score
}
}
#[inline]
pub fn insert_score_depth(&mut self, index: usize, score: i32, depth: i16) {
unsafe {
let rm: &mut RootMove = self.get_unchecked_mut(index);
rm.score = score;
rm.depth_reached = depth;
}
}
#[inline]
pub fn insert_score(&mut self, index: usize, score: i32) {
unsafe {
let rm: &mut RootMove = self.get_unchecked_mut(index);
rm.score = score;
}
}
pub fn find(&mut self, mov: BitMove) -> Option<&mut RootMove> {
self.iter_mut()
.find(|m| m.bit_move == mov)
}
}
impl Deref for RootMoveList {
type Target = [RootMove];
#[inline]
fn deref(&self) -> &[RootMove] {
unsafe {
let p = self.moves.as_ptr();
slice::from_raw_parts(p, self.len())
}
}
}
impl DerefMut for RootMoveList {
#[inline]
fn deref_mut(&mut self) -> &mut [RootMove] {
unsafe {
let p = self.moves.as_mut_ptr();
slice::from_raw_parts_mut(p, self.len())
}
}
}
impl Index<usize> for RootMoveList {
type Output = RootMove;
#[inline]
fn index(&self, index: usize) -> &RootMove {
&(**self)[index]
}
}
impl IndexMut<usize> for RootMoveList {
#[inline]
fn index_mut(&mut self, index: usize) -> &mut RootMove {
&mut (**self)[index]
}
}
pub struct MoveIter<'a> {
movelist: &'a RootMoveList,
idx: usize,
len: usize
}
impl<'a> Iterator for MoveIter<'a> {
type Item = RootMove;
#[inline]
fn next(&mut self) -> Option<Self::Item> |
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len - self.idx, Some(self.len - self.idx))
}
}
impl<'a> IntoIterator for &'a RootMoveList {
type Item = RootMove;
type IntoIter = MoveIter<'a>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
MoveIter {
movelist: &self,
idx: 0,
len: self.len()
}
}
}
impl<'a> ExactSizeIterator for MoveIter<'a> {}
impl<'a> FusedIterator for MoveIter<'a> {}
unsafe impl<'a> TrustedLen for MoveIter<'a> {} | {
if self.idx >= self.len {
None
} else {
unsafe {
let m = *self.movelist.get_unchecked(self.idx);
self.idx += 1;
Some(m)
}
}
} | identifier_body |
root_moves_list.rs |
use std::slice;
use std::ops::{Deref,DerefMut,Index,IndexMut};
use std::iter::{Iterator,IntoIterator,FusedIterator,TrustedLen,ExactSizeIterator};
use std::ptr;
use std::mem;
use std::sync::atomic::{Ordering,AtomicUsize};
use pleco::{MoveList, BitMove};
use super::{RootMove, MAX_MOVES};
pub struct RootMoveList {
len: AtomicUsize,
moves: [RootMove; MAX_MOVES],
}
impl Clone for RootMoveList {
fn clone(&self) -> Self {
RootMoveList {
len: AtomicUsize::new(self.len.load(Ordering::SeqCst)),
moves: self.moves
}
}
}
unsafe impl Send for RootMoveList {}
unsafe impl Sync for RootMoveList {}
impl RootMoveList {
/// Creates an empty `RootMoveList`.
#[inline]
pub fn new() -> Self {
unsafe {
RootMoveList {
len: AtomicUsize::new(0),
moves: [mem::uninitialized(); MAX_MOVES],
}
}
}
/// Returns the length of the list.
#[inline(always)]
pub fn len(&self) -> usize {
self.len.load(Ordering::SeqCst)
}
/// Replaces the current `RootMoveList` with another `RootMoveList`.
pub fn clone_from_other(&mut self, other: &RootMoveList) {
self.len.store(other.len(), Ordering::SeqCst);
unsafe {
let self_moves: *mut [RootMove; MAX_MOVES] = self.moves.as_mut_ptr() as *mut [RootMove; MAX_MOVES];
let other_moves: *const [RootMove; MAX_MOVES] = other.moves.as_ptr() as *const [RootMove; MAX_MOVES];
ptr::copy_nonoverlapping(other_moves, self_moves, 1);
}
}
/// Replaces the current `RootMoveList` with the moves inside a `MoveList`.
pub fn | (&mut self, moves: &MoveList) {
self.len.store(moves.len(), Ordering::SeqCst);
for (i, mov) in moves.iter().enumerate() {
self[i] = RootMove::new(*mov);
}
}
/// Applies `RootMove::rollback()` to each `RootMove` inside.
#[inline]
pub fn rollback(&mut self) {
self.iter_mut()
.for_each(|b| b.prev_score = b.score);
}
/// Returns the first `RootMove` in the list.
///
/// # Safety
///
/// May return a nonsense `RootMove` if the list hasn't been initalized since the start.
#[inline]
pub fn first(&mut self) -> &mut RootMove {
unsafe {
self.get_unchecked_mut(0)
}
}
/// Converts to a `MoveList`.
pub fn to_list(&self) -> MoveList {
let vec = self.iter().map(|m| m.bit_move).collect::<Vec<BitMove>>();
MoveList::from(vec)
}
/// Returns the previous best score.
#[inline]
pub fn prev_best_score(&self) -> i32 {
unsafe {
self.get_unchecked(0).prev_score
}
}
#[inline]
pub fn insert_score_depth(&mut self, index: usize, score: i32, depth: i16) {
unsafe {
let rm: &mut RootMove = self.get_unchecked_mut(index);
rm.score = score;
rm.depth_reached = depth;
}
}
#[inline]
pub fn insert_score(&mut self, index: usize, score: i32) {
unsafe {
let rm: &mut RootMove = self.get_unchecked_mut(index);
rm.score = score;
}
}
pub fn find(&mut self, mov: BitMove) -> Option<&mut RootMove> {
self.iter_mut()
.find(|m| m.bit_move == mov)
}
}
impl Deref for RootMoveList {
type Target = [RootMove];
#[inline]
fn deref(&self) -> &[RootMove] {
unsafe {
let p = self.moves.as_ptr();
slice::from_raw_parts(p, self.len())
}
}
}
impl DerefMut for RootMoveList {
#[inline]
fn deref_mut(&mut self) -> &mut [RootMove] {
unsafe {
let p = self.moves.as_mut_ptr();
slice::from_raw_parts_mut(p, self.len())
}
}
}
impl Index<usize> for RootMoveList {
type Output = RootMove;
#[inline]
fn index(&self, index: usize) -> &RootMove {
&(**self)[index]
}
}
impl IndexMut<usize> for RootMoveList {
#[inline]
fn index_mut(&mut self, index: usize) -> &mut RootMove {
&mut (**self)[index]
}
}
pub struct MoveIter<'a> {
movelist: &'a RootMoveList,
idx: usize,
len: usize
}
impl<'a> Iterator for MoveIter<'a> {
type Item = RootMove;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.idx >= self.len {
None
} else {
unsafe {
let m = *self.movelist.get_unchecked(self.idx);
self.idx += 1;
Some(m)
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len - self.idx, Some(self.len - self.idx))
}
}
impl<'a> IntoIterator for &'a RootMoveList {
type Item = RootMove;
type IntoIter = MoveIter<'a>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
MoveIter {
movelist: &self,
idx: 0,
len: self.len()
}
}
}
impl<'a> ExactSizeIterator for MoveIter<'a> {}
impl<'a> FusedIterator for MoveIter<'a> {}
unsafe impl<'a> TrustedLen for MoveIter<'a> {} | replace | identifier_name |
root_moves_list.rs | use std::slice;
use std::ops::{Deref,DerefMut,Index,IndexMut};
use std::iter::{Iterator,IntoIterator,FusedIterator,TrustedLen,ExactSizeIterator};
use std::ptr;
use std::mem;
use std::sync::atomic::{Ordering,AtomicUsize};
use pleco::{MoveList, BitMove};
use super::{RootMove, MAX_MOVES};
pub struct RootMoveList {
len: AtomicUsize,
moves: [RootMove; MAX_MOVES],
}
impl Clone for RootMoveList {
fn clone(&self) -> Self {
RootMoveList {
len: AtomicUsize::new(self.len.load(Ordering::SeqCst)),
moves: self.moves
}
}
}
unsafe impl Send for RootMoveList {}
unsafe impl Sync for RootMoveList {}
impl RootMoveList {
/// Creates an empty `RootMoveList`.
#[inline]
pub fn new() -> Self {
unsafe {
RootMoveList {
len: AtomicUsize::new(0),
moves: [mem::uninitialized(); MAX_MOVES],
}
}
}
/// Returns the length of the list.
#[inline(always)]
pub fn len(&self) -> usize {
self.len.load(Ordering::SeqCst)
}
/// Replaces the current `RootMoveList` with another `RootMoveList`.
pub fn clone_from_other(&mut self, other: &RootMoveList) {
self.len.store(other.len(), Ordering::SeqCst);
unsafe {
let self_moves: *mut [RootMove; MAX_MOVES] = self.moves.as_mut_ptr() as *mut [RootMove; MAX_MOVES];
let other_moves: *const [RootMove; MAX_MOVES] = other.moves.as_ptr() as *const [RootMove; MAX_MOVES];
ptr::copy_nonoverlapping(other_moves, self_moves, 1);
}
}
/// Replaces the current `RootMoveList` with the moves inside a `MoveList`.
pub fn replace(&mut self, moves: &MoveList) {
self.len.store(moves.len(), Ordering::SeqCst);
for (i, mov) in moves.iter().enumerate() {
self[i] = RootMove::new(*mov);
}
}
/// Applies `RootMove::rollback()` to each `RootMove` inside.
#[inline]
pub fn rollback(&mut self) {
self.iter_mut()
.for_each(|b| b.prev_score = b.score);
}
/// Returns the first `RootMove` in the list.
///
/// # Safety
///
/// May return a nonsense `RootMove` if the list hasn't been initalized since the start.
#[inline]
pub fn first(&mut self) -> &mut RootMove {
unsafe {
self.get_unchecked_mut(0)
}
}
| /// Converts to a `MoveList`.
pub fn to_list(&self) -> MoveList {
let vec = self.iter().map(|m| m.bit_move).collect::<Vec<BitMove>>();
MoveList::from(vec)
}
/// Returns the previous best score.
#[inline]
pub fn prev_best_score(&self) -> i32 {
unsafe {
self.get_unchecked(0).prev_score
}
}
#[inline]
pub fn insert_score_depth(&mut self, index: usize, score: i32, depth: i16) {
unsafe {
let rm: &mut RootMove = self.get_unchecked_mut(index);
rm.score = score;
rm.depth_reached = depth;
}
}
#[inline]
pub fn insert_score(&mut self, index: usize, score: i32) {
unsafe {
let rm: &mut RootMove = self.get_unchecked_mut(index);
rm.score = score;
}
}
pub fn find(&mut self, mov: BitMove) -> Option<&mut RootMove> {
self.iter_mut()
.find(|m| m.bit_move == mov)
}
}
impl Deref for RootMoveList {
type Target = [RootMove];
#[inline]
fn deref(&self) -> &[RootMove] {
unsafe {
let p = self.moves.as_ptr();
slice::from_raw_parts(p, self.len())
}
}
}
impl DerefMut for RootMoveList {
#[inline]
fn deref_mut(&mut self) -> &mut [RootMove] {
unsafe {
let p = self.moves.as_mut_ptr();
slice::from_raw_parts_mut(p, self.len())
}
}
}
impl Index<usize> for RootMoveList {
type Output = RootMove;
#[inline]
fn index(&self, index: usize) -> &RootMove {
&(**self)[index]
}
}
impl IndexMut<usize> for RootMoveList {
#[inline]
fn index_mut(&mut self, index: usize) -> &mut RootMove {
&mut (**self)[index]
}
}
pub struct MoveIter<'a> {
movelist: &'a RootMoveList,
idx: usize,
len: usize
}
impl<'a> Iterator for MoveIter<'a> {
type Item = RootMove;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.idx >= self.len {
None
} else {
unsafe {
let m = *self.movelist.get_unchecked(self.idx);
self.idx += 1;
Some(m)
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len - self.idx, Some(self.len - self.idx))
}
}
impl<'a> IntoIterator for &'a RootMoveList {
type Item = RootMove;
type IntoIter = MoveIter<'a>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
MoveIter {
movelist: &self,
idx: 0,
len: self.len()
}
}
}
impl<'a> ExactSizeIterator for MoveIter<'a> {}
impl<'a> FusedIterator for MoveIter<'a> {}
unsafe impl<'a> TrustedLen for MoveIter<'a> {} | random_line_split |
|
root_moves_list.rs |
use std::slice;
use std::ops::{Deref,DerefMut,Index,IndexMut};
use std::iter::{Iterator,IntoIterator,FusedIterator,TrustedLen,ExactSizeIterator};
use std::ptr;
use std::mem;
use std::sync::atomic::{Ordering,AtomicUsize};
use pleco::{MoveList, BitMove};
use super::{RootMove, MAX_MOVES};
pub struct RootMoveList {
len: AtomicUsize,
moves: [RootMove; MAX_MOVES],
}
impl Clone for RootMoveList {
fn clone(&self) -> Self {
RootMoveList {
len: AtomicUsize::new(self.len.load(Ordering::SeqCst)),
moves: self.moves
}
}
}
unsafe impl Send for RootMoveList {}
unsafe impl Sync for RootMoveList {}
impl RootMoveList {
/// Creates an empty `RootMoveList`.
#[inline]
pub fn new() -> Self {
unsafe {
RootMoveList {
len: AtomicUsize::new(0),
moves: [mem::uninitialized(); MAX_MOVES],
}
}
}
/// Returns the length of the list.
#[inline(always)]
pub fn len(&self) -> usize {
self.len.load(Ordering::SeqCst)
}
/// Replaces the current `RootMoveList` with another `RootMoveList`.
pub fn clone_from_other(&mut self, other: &RootMoveList) {
self.len.store(other.len(), Ordering::SeqCst);
unsafe {
let self_moves: *mut [RootMove; MAX_MOVES] = self.moves.as_mut_ptr() as *mut [RootMove; MAX_MOVES];
let other_moves: *const [RootMove; MAX_MOVES] = other.moves.as_ptr() as *const [RootMove; MAX_MOVES];
ptr::copy_nonoverlapping(other_moves, self_moves, 1);
}
}
/// Replaces the current `RootMoveList` with the moves inside a `MoveList`.
pub fn replace(&mut self, moves: &MoveList) {
self.len.store(moves.len(), Ordering::SeqCst);
for (i, mov) in moves.iter().enumerate() {
self[i] = RootMove::new(*mov);
}
}
/// Applies `RootMove::rollback()` to each `RootMove` inside.
#[inline]
pub fn rollback(&mut self) {
self.iter_mut()
.for_each(|b| b.prev_score = b.score);
}
/// Returns the first `RootMove` in the list.
///
/// # Safety
///
/// May return a nonsense `RootMove` if the list hasn't been initalized since the start.
#[inline]
pub fn first(&mut self) -> &mut RootMove {
unsafe {
self.get_unchecked_mut(0)
}
}
/// Converts to a `MoveList`.
pub fn to_list(&self) -> MoveList {
let vec = self.iter().map(|m| m.bit_move).collect::<Vec<BitMove>>();
MoveList::from(vec)
}
/// Returns the previous best score.
#[inline]
pub fn prev_best_score(&self) -> i32 {
unsafe {
self.get_unchecked(0).prev_score
}
}
#[inline]
pub fn insert_score_depth(&mut self, index: usize, score: i32, depth: i16) {
unsafe {
let rm: &mut RootMove = self.get_unchecked_mut(index);
rm.score = score;
rm.depth_reached = depth;
}
}
#[inline]
pub fn insert_score(&mut self, index: usize, score: i32) {
unsafe {
let rm: &mut RootMove = self.get_unchecked_mut(index);
rm.score = score;
}
}
pub fn find(&mut self, mov: BitMove) -> Option<&mut RootMove> {
self.iter_mut()
.find(|m| m.bit_move == mov)
}
}
impl Deref for RootMoveList {
type Target = [RootMove];
#[inline]
fn deref(&self) -> &[RootMove] {
unsafe {
let p = self.moves.as_ptr();
slice::from_raw_parts(p, self.len())
}
}
}
impl DerefMut for RootMoveList {
#[inline]
fn deref_mut(&mut self) -> &mut [RootMove] {
unsafe {
let p = self.moves.as_mut_ptr();
slice::from_raw_parts_mut(p, self.len())
}
}
}
impl Index<usize> for RootMoveList {
type Output = RootMove;
#[inline]
fn index(&self, index: usize) -> &RootMove {
&(**self)[index]
}
}
impl IndexMut<usize> for RootMoveList {
#[inline]
fn index_mut(&mut self, index: usize) -> &mut RootMove {
&mut (**self)[index]
}
}
pub struct MoveIter<'a> {
movelist: &'a RootMoveList,
idx: usize,
len: usize
}
impl<'a> Iterator for MoveIter<'a> {
type Item = RootMove;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.idx >= self.len | else {
unsafe {
let m = *self.movelist.get_unchecked(self.idx);
self.idx += 1;
Some(m)
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len - self.idx, Some(self.len - self.idx))
}
}
impl<'a> IntoIterator for &'a RootMoveList {
type Item = RootMove;
type IntoIter = MoveIter<'a>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
MoveIter {
movelist: &self,
idx: 0,
len: self.len()
}
}
}
impl<'a> ExactSizeIterator for MoveIter<'a> {}
impl<'a> FusedIterator for MoveIter<'a> {}
unsafe impl<'a> TrustedLen for MoveIter<'a> {} | {
None
} | conditional_block |
cloudpipe.py | # Copyright 2011 Openstack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Connect your vlan to the world."""
import os
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova.api.openstack import extensions
from nova.auth import manager
from nova.cloudpipe import pipelib
from nova import compute
from nova.compute import vm_states
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova import utils
FLAGS = flags.FLAGS
LOG = logging.getLogger("nova.api.openstack.compute.contrib.cloudpipe")
authorize = extensions.extension_authorizer('compute', 'cloudpipe')
class CloudpipeTemplate(xmlutil.TemplateBuilder):
def construct(self):
return xmlutil.MasterTemplate(xmlutil.make_flat_dict('cloudpipe'), 1)
class CloudpipesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('cloudpipes')
elem = xmlutil.make_flat_dict('cloudpipe', selector='cloudpipes',
subselector='cloudpipe')
root.append(elem)
return xmlutil.MasterTemplate(root, 1)
class CloudpipeController(object):
"""Handle creating and listing cloudpipe instances."""
def __init__(self):
self.compute_api = compute.API()
self.auth_manager = manager.AuthManager()
self.cloudpipe = pipelib.CloudPipe()
self.setup()
def setup(self):
"""Ensure the keychains and folders exist."""
# NOTE(vish): One of the drawbacks of doing this in the api is
# the keys will only be on the api node that launched
# the cloudpipe.
if not os.path.exists(FLAGS.keys_path):
os.makedirs(FLAGS.keys_path)
def _get_cloudpipe_for_project(self, context, project_id):
"""Get the cloudpipe instance for a project ID."""
# NOTE(todd): this should probably change to compute_api.get_all
# or db.instance_get_project_vpn
for instance in db.instance_get_all_by_project(context, project_id):
if (instance['image_id'] == str(FLAGS.vpn_image_id)
and instance['vm_state'] != vm_states.DELETED):
return instance
def _vpn_dict(self, project, vpn_instance):
rv = {'project_id': project.id,
'public_ip': project.vpn_ip,
'public_port': project.vpn_port}
if vpn_instance:
|
else:
rv['state'] = 'pending'
return rv
@wsgi.serializers(xml=CloudpipeTemplate)
def create(self, req, body):
"""Create a new cloudpipe instance, if none exists.
Parameters: {cloudpipe: {project_id: XYZ}}
"""
ctxt = req.environ['nova.context']
authorize(ctxt)
params = body.get('cloudpipe', {})
project_id = params.get('project_id', ctxt.project_id)
instance = self._get_cloudpipe_for_project(ctxt, project_id)
if not instance:
proj = self.auth_manager.get_project(project_id)
user_id = proj.project_manager_id
try:
self.cloudpipe.launch_vpn_instance(project_id, user_id)
except db.NoMoreNetworks:
msg = _("Unable to claim IP for VPN instances, ensure it "
"isn't running, and try again in a few minutes")
raise exception.ApiError(msg)
instance = self._get_cloudpipe_for_project(ctxt, proj)
return {'instance_id': instance['uuid']}
@wsgi.serializers(xml=CloudpipesTemplate)
def index(self, req):
"""List running cloudpipe instances."""
context = req.environ['nova.context']
authorize(context)
vpns = []
# TODO(todd): could use compute_api.get_all with admin context?
for project in self.auth_manager.get_projects():
instance = self._get_cloudpipe_for_project(context, project.id)
vpns.append(self._vpn_dict(project, instance))
return {'cloudpipes': vpns}
class Cloudpipe(extensions.ExtensionDescriptor):
"""Adds actions to create cloudpipe instances.
When running with the Vlan network mode, you need a mechanism to route
from the public Internet to your vlans. This mechanism is known as a
cloudpipe.
At the time of creating this class, only OpenVPN is supported. Support for
a SSH Bastion host is forthcoming.
"""
name = "Cloudpipe"
alias = "os-cloudpipe"
namespace = "http://docs.openstack.org/compute/ext/cloudpipe/api/v1.1"
updated = "2011-12-16T00:00:00+00:00"
def get_resources(self):
resources = []
res = extensions.ResourceExtension('os-cloudpipe',
CloudpipeController())
resources.append(res)
return resources
| rv['instance_id'] = vpn_instance['uuid']
rv['created_at'] = utils.isotime(vpn_instance['created_at'])
address = vpn_instance.get('fixed_ip', None)
if address:
rv['internal_ip'] = address['address']
if project.vpn_ip and project.vpn_port:
if utils.vpn_ping(project.vpn_ip, project.vpn_port):
rv['state'] = 'running'
else:
rv['state'] = 'down'
else:
rv['state'] = 'invalid' | conditional_block |
cloudpipe.py | # Copyright 2011 Openstack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Connect your vlan to the world."""
import os
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova.api.openstack import extensions
from nova.auth import manager
from nova.cloudpipe import pipelib
from nova import compute
from nova.compute import vm_states
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova import utils
FLAGS = flags.FLAGS
LOG = logging.getLogger("nova.api.openstack.compute.contrib.cloudpipe")
authorize = extensions.extension_authorizer('compute', 'cloudpipe')
class CloudpipeTemplate(xmlutil.TemplateBuilder):
def construct(self):
return xmlutil.MasterTemplate(xmlutil.make_flat_dict('cloudpipe'), 1)
class CloudpipesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('cloudpipes')
elem = xmlutil.make_flat_dict('cloudpipe', selector='cloudpipes',
subselector='cloudpipe')
root.append(elem)
return xmlutil.MasterTemplate(root, 1)
class CloudpipeController(object):
"""Handle creating and listing cloudpipe instances."""
def __init__(self):
self.compute_api = compute.API()
self.auth_manager = manager.AuthManager()
self.cloudpipe = pipelib.CloudPipe()
self.setup()
def | (self):
"""Ensure the keychains and folders exist."""
# NOTE(vish): One of the drawbacks of doing this in the api is
# the keys will only be on the api node that launched
# the cloudpipe.
if not os.path.exists(FLAGS.keys_path):
os.makedirs(FLAGS.keys_path)
def _get_cloudpipe_for_project(self, context, project_id):
"""Get the cloudpipe instance for a project ID."""
# NOTE(todd): this should probably change to compute_api.get_all
# or db.instance_get_project_vpn
for instance in db.instance_get_all_by_project(context, project_id):
if (instance['image_id'] == str(FLAGS.vpn_image_id)
and instance['vm_state'] != vm_states.DELETED):
return instance
def _vpn_dict(self, project, vpn_instance):
rv = {'project_id': project.id,
'public_ip': project.vpn_ip,
'public_port': project.vpn_port}
if vpn_instance:
rv['instance_id'] = vpn_instance['uuid']
rv['created_at'] = utils.isotime(vpn_instance['created_at'])
address = vpn_instance.get('fixed_ip', None)
if address:
rv['internal_ip'] = address['address']
if project.vpn_ip and project.vpn_port:
if utils.vpn_ping(project.vpn_ip, project.vpn_port):
rv['state'] = 'running'
else:
rv['state'] = 'down'
else:
rv['state'] = 'invalid'
else:
rv['state'] = 'pending'
return rv
@wsgi.serializers(xml=CloudpipeTemplate)
def create(self, req, body):
"""Create a new cloudpipe instance, if none exists.
Parameters: {cloudpipe: {project_id: XYZ}}
"""
ctxt = req.environ['nova.context']
authorize(ctxt)
params = body.get('cloudpipe', {})
project_id = params.get('project_id', ctxt.project_id)
instance = self._get_cloudpipe_for_project(ctxt, project_id)
if not instance:
proj = self.auth_manager.get_project(project_id)
user_id = proj.project_manager_id
try:
self.cloudpipe.launch_vpn_instance(project_id, user_id)
except db.NoMoreNetworks:
msg = _("Unable to claim IP for VPN instances, ensure it "
"isn't running, and try again in a few minutes")
raise exception.ApiError(msg)
instance = self._get_cloudpipe_for_project(ctxt, proj)
return {'instance_id': instance['uuid']}
@wsgi.serializers(xml=CloudpipesTemplate)
def index(self, req):
"""List running cloudpipe instances."""
context = req.environ['nova.context']
authorize(context)
vpns = []
# TODO(todd): could use compute_api.get_all with admin context?
for project in self.auth_manager.get_projects():
instance = self._get_cloudpipe_for_project(context, project.id)
vpns.append(self._vpn_dict(project, instance))
return {'cloudpipes': vpns}
class Cloudpipe(extensions.ExtensionDescriptor):
"""Adds actions to create cloudpipe instances.
When running with the Vlan network mode, you need a mechanism to route
from the public Internet to your vlans. This mechanism is known as a
cloudpipe.
At the time of creating this class, only OpenVPN is supported. Support for
a SSH Bastion host is forthcoming.
"""
name = "Cloudpipe"
alias = "os-cloudpipe"
namespace = "http://docs.openstack.org/compute/ext/cloudpipe/api/v1.1"
updated = "2011-12-16T00:00:00+00:00"
def get_resources(self):
resources = []
res = extensions.ResourceExtension('os-cloudpipe',
CloudpipeController())
resources.append(res)
return resources
| setup | identifier_name |
cloudpipe.py | # Copyright 2011 Openstack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Connect your vlan to the world."""
import os
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova.api.openstack import extensions
from nova.auth import manager
from nova.cloudpipe import pipelib
from nova import compute
from nova.compute import vm_states
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova import utils
FLAGS = flags.FLAGS
LOG = logging.getLogger("nova.api.openstack.compute.contrib.cloudpipe")
authorize = extensions.extension_authorizer('compute', 'cloudpipe')
class CloudpipeTemplate(xmlutil.TemplateBuilder):
def construct(self):
return xmlutil.MasterTemplate(xmlutil.make_flat_dict('cloudpipe'), 1)
class CloudpipesTemplate(xmlutil.TemplateBuilder):
def construct(self):
|
class CloudpipeController(object):
"""Handle creating and listing cloudpipe instances."""
def __init__(self):
self.compute_api = compute.API()
self.auth_manager = manager.AuthManager()
self.cloudpipe = pipelib.CloudPipe()
self.setup()
def setup(self):
"""Ensure the keychains and folders exist."""
# NOTE(vish): One of the drawbacks of doing this in the api is
# the keys will only be on the api node that launched
# the cloudpipe.
if not os.path.exists(FLAGS.keys_path):
os.makedirs(FLAGS.keys_path)
def _get_cloudpipe_for_project(self, context, project_id):
"""Get the cloudpipe instance for a project ID."""
# NOTE(todd): this should probably change to compute_api.get_all
# or db.instance_get_project_vpn
for instance in db.instance_get_all_by_project(context, project_id):
if (instance['image_id'] == str(FLAGS.vpn_image_id)
and instance['vm_state'] != vm_states.DELETED):
return instance
def _vpn_dict(self, project, vpn_instance):
rv = {'project_id': project.id,
'public_ip': project.vpn_ip,
'public_port': project.vpn_port}
if vpn_instance:
rv['instance_id'] = vpn_instance['uuid']
rv['created_at'] = utils.isotime(vpn_instance['created_at'])
address = vpn_instance.get('fixed_ip', None)
if address:
rv['internal_ip'] = address['address']
if project.vpn_ip and project.vpn_port:
if utils.vpn_ping(project.vpn_ip, project.vpn_port):
rv['state'] = 'running'
else:
rv['state'] = 'down'
else:
rv['state'] = 'invalid'
else:
rv['state'] = 'pending'
return rv
@wsgi.serializers(xml=CloudpipeTemplate)
def create(self, req, body):
"""Create a new cloudpipe instance, if none exists.
Parameters: {cloudpipe: {project_id: XYZ}}
"""
ctxt = req.environ['nova.context']
authorize(ctxt)
params = body.get('cloudpipe', {})
project_id = params.get('project_id', ctxt.project_id)
instance = self._get_cloudpipe_for_project(ctxt, project_id)
if not instance:
proj = self.auth_manager.get_project(project_id)
user_id = proj.project_manager_id
try:
self.cloudpipe.launch_vpn_instance(project_id, user_id)
except db.NoMoreNetworks:
msg = _("Unable to claim IP for VPN instances, ensure it "
"isn't running, and try again in a few minutes")
raise exception.ApiError(msg)
instance = self._get_cloudpipe_for_project(ctxt, proj)
return {'instance_id': instance['uuid']}
@wsgi.serializers(xml=CloudpipesTemplate)
def index(self, req):
"""List running cloudpipe instances."""
context = req.environ['nova.context']
authorize(context)
vpns = []
# TODO(todd): could use compute_api.get_all with admin context?
for project in self.auth_manager.get_projects():
instance = self._get_cloudpipe_for_project(context, project.id)
vpns.append(self._vpn_dict(project, instance))
return {'cloudpipes': vpns}
class Cloudpipe(extensions.ExtensionDescriptor):
"""Adds actions to create cloudpipe instances.
When running with the Vlan network mode, you need a mechanism to route
from the public Internet to your vlans. This mechanism is known as a
cloudpipe.
At the time of creating this class, only OpenVPN is supported. Support for
a SSH Bastion host is forthcoming.
"""
name = "Cloudpipe"
alias = "os-cloudpipe"
namespace = "http://docs.openstack.org/compute/ext/cloudpipe/api/v1.1"
updated = "2011-12-16T00:00:00+00:00"
def get_resources(self):
resources = []
res = extensions.ResourceExtension('os-cloudpipe',
CloudpipeController())
resources.append(res)
return resources
| root = xmlutil.TemplateElement('cloudpipes')
elem = xmlutil.make_flat_dict('cloudpipe', selector='cloudpipes',
subselector='cloudpipe')
root.append(elem)
return xmlutil.MasterTemplate(root, 1) | identifier_body |
cloudpipe.py | # Copyright 2011 Openstack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the | # License for the specific language governing permissions and limitations
# under the License.
"""Connect your vlan to the world."""
import os
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova.api.openstack import extensions
from nova.auth import manager
from nova.cloudpipe import pipelib
from nova import compute
from nova.compute import vm_states
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova import utils
FLAGS = flags.FLAGS
LOG = logging.getLogger("nova.api.openstack.compute.contrib.cloudpipe")
authorize = extensions.extension_authorizer('compute', 'cloudpipe')
class CloudpipeTemplate(xmlutil.TemplateBuilder):
def construct(self):
return xmlutil.MasterTemplate(xmlutil.make_flat_dict('cloudpipe'), 1)
class CloudpipesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('cloudpipes')
elem = xmlutil.make_flat_dict('cloudpipe', selector='cloudpipes',
subselector='cloudpipe')
root.append(elem)
return xmlutil.MasterTemplate(root, 1)
class CloudpipeController(object):
"""Handle creating and listing cloudpipe instances."""
def __init__(self):
self.compute_api = compute.API()
self.auth_manager = manager.AuthManager()
self.cloudpipe = pipelib.CloudPipe()
self.setup()
def setup(self):
"""Ensure the keychains and folders exist."""
# NOTE(vish): One of the drawbacks of doing this in the api is
# the keys will only be on the api node that launched
# the cloudpipe.
if not os.path.exists(FLAGS.keys_path):
os.makedirs(FLAGS.keys_path)
def _get_cloudpipe_for_project(self, context, project_id):
"""Get the cloudpipe instance for a project ID."""
# NOTE(todd): this should probably change to compute_api.get_all
# or db.instance_get_project_vpn
for instance in db.instance_get_all_by_project(context, project_id):
if (instance['image_id'] == str(FLAGS.vpn_image_id)
and instance['vm_state'] != vm_states.DELETED):
return instance
def _vpn_dict(self, project, vpn_instance):
rv = {'project_id': project.id,
'public_ip': project.vpn_ip,
'public_port': project.vpn_port}
if vpn_instance:
rv['instance_id'] = vpn_instance['uuid']
rv['created_at'] = utils.isotime(vpn_instance['created_at'])
address = vpn_instance.get('fixed_ip', None)
if address:
rv['internal_ip'] = address['address']
if project.vpn_ip and project.vpn_port:
if utils.vpn_ping(project.vpn_ip, project.vpn_port):
rv['state'] = 'running'
else:
rv['state'] = 'down'
else:
rv['state'] = 'invalid'
else:
rv['state'] = 'pending'
return rv
@wsgi.serializers(xml=CloudpipeTemplate)
def create(self, req, body):
"""Create a new cloudpipe instance, if none exists.
Parameters: {cloudpipe: {project_id: XYZ}}
"""
ctxt = req.environ['nova.context']
authorize(ctxt)
params = body.get('cloudpipe', {})
project_id = params.get('project_id', ctxt.project_id)
instance = self._get_cloudpipe_for_project(ctxt, project_id)
if not instance:
proj = self.auth_manager.get_project(project_id)
user_id = proj.project_manager_id
try:
self.cloudpipe.launch_vpn_instance(project_id, user_id)
except db.NoMoreNetworks:
msg = _("Unable to claim IP for VPN instances, ensure it "
"isn't running, and try again in a few minutes")
raise exception.ApiError(msg)
instance = self._get_cloudpipe_for_project(ctxt, proj)
return {'instance_id': instance['uuid']}
@wsgi.serializers(xml=CloudpipesTemplate)
def index(self, req):
"""List running cloudpipe instances."""
context = req.environ['nova.context']
authorize(context)
vpns = []
# TODO(todd): could use compute_api.get_all with admin context?
for project in self.auth_manager.get_projects():
instance = self._get_cloudpipe_for_project(context, project.id)
vpns.append(self._vpn_dict(project, instance))
return {'cloudpipes': vpns}
class Cloudpipe(extensions.ExtensionDescriptor):
"""Adds actions to create cloudpipe instances.
When running with the Vlan network mode, you need a mechanism to route
from the public Internet to your vlans. This mechanism is known as a
cloudpipe.
At the time of creating this class, only OpenVPN is supported. Support for
a SSH Bastion host is forthcoming.
"""
name = "Cloudpipe"
alias = "os-cloudpipe"
namespace = "http://docs.openstack.org/compute/ext/cloudpipe/api/v1.1"
updated = "2011-12-16T00:00:00+00:00"
def get_resources(self):
resources = []
res = extensions.ResourceExtension('os-cloudpipe',
CloudpipeController())
resources.append(res)
return resources | random_line_split |
|
misc.py | # misc.py
# Copyright (C) 2012-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
"""
Assorted utility functions for yum.
"""
from __future__ import print_function, absolute_import
from __future__ import unicode_literals
from dnf.pycomp import base64_decodebytes, basestring, unicode
from stat import *
import libdnf.utils
import dnf.const
import dnf.crypto
import dnf.exceptions
import dnf.i18n
import errno
import glob
import io
import os
import os.path
import pwd
import re
import shutil
import tempfile
_default_checksums = ['sha256']
_re_compiled_glob_match = None
def re_glob(s):
""" Tests if a string is a shell wildcard. """
global _re_compiled_glob_match
if _re_compiled_glob_match is None:
_re_compiled_glob_match = re.compile(r'[*?]|\[.+\]').search
return _re_compiled_glob_match(s)
_re_compiled_full_match = None
def re_full_search_needed(s):
""" Tests if a string needs a full nevra match, instead of just name. """
global _re_compiled_full_match
if _re_compiled_full_match is None:
# A glob, or a "." or "-" separator, followed by something (the ".")
one = re.compile(r'.*([-.*?]|\[.+\]).').match
# Any epoch, for envra
two = re.compile('[0-9]+:').match
_re_compiled_full_match = (one, two)
for rec in _re_compiled_full_match:
if rec(s):
return True
return False
def get_default_chksum_type():
return _default_checksums[0]
class GenericHolder(object):
"""Generic Holder class used to hold other objects of known types
It exists purely to be able to do object.somestuff, object.someotherstuff
or object[key] and pass object to another function that will
understand it"""
def __init__(self, iter=None):
self.__iter = iter
def __iter__(self):
if self.__iter is not None:
return iter(self[self.__iter])
def __getitem__(self, item):
if hasattr(self, item):
return getattr(self, item)
else:
raise KeyError(item)
def all_lists(self):
"""Return a dictionary of all lists."""
return {key: list_ for key, list_ in vars(self).items()
if type(list_) is list}
def merge_lists(self, other):
""" Concatenate the list attributes from 'other' to ours. """
for (key, val) in other.all_lists().items():
vars(self).setdefault(key, []).extend(val)
return self
def procgpgkey(rawkey):
'''Convert ASCII-armored GPG key to binary
'''
# Normalise newlines
rawkey = re.sub(b'\r\n?', b'\n', rawkey)
# Extract block
block = io.BytesIO()
inblock = 0
pastheaders = 0
for line in rawkey.split(b'\n'):
if line.startswith(b'-----BEGIN PGP PUBLIC KEY BLOCK-----'):
inblock = 1
elif inblock and line.strip() == b'':
pastheaders = 1
elif inblock and line.startswith(b'-----END PGP PUBLIC KEY BLOCK-----'):
# Hit the end of the block, get out
break
elif pastheaders and line.startswith(b'='):
# Hit the CRC line, don't include this and stop
break
elif pastheaders:
block.write(line + b'\n')
# Decode and return
return base64_decodebytes(block.getvalue())
def keyInstalled(ts, keyid, timestamp):
'''
Return if the GPG key described by the given keyid and timestamp are
installed in the rpmdb.
The keyid and timestamp should both be passed as integers.
The ts is an rpm transaction set object
Return values:
- -1 key is not installed
- 0 key with matching ID and timestamp is installed
- 1 key with matching ID is installed but has an older timestamp
- 2 key with matching ID is installed but has a newer timestamp
No effort is made to handle duplicates. The first matching keyid is used to
calculate the return result.
'''
# Search
for hdr in ts.dbMatch('name', 'gpg-pubkey'):
if hdr['version'] == keyid:
installedts = int(hdr['release'], 16)
if installedts == timestamp:
return 0
elif installedts < timestamp:
return 1
else:
return 2
return -1
def import_key_to_pubring(rawkey, keyid, gpgdir=None, make_ro_copy=True):
if not os.path.exists(gpgdir):
os.makedirs(gpgdir)
with dnf.crypto.pubring_dir(gpgdir), dnf.crypto.Context() as ctx:
# import the key
with open(os.path.join(gpgdir, 'gpg.conf'), 'wb') as fp:
fp.write(b'')
ctx.op_import(rawkey)
if make_ro_copy:
rodir = gpgdir + '-ro'
if not os.path.exists(rodir):
os.makedirs(rodir, mode=0o755)
for f in glob.glob(gpgdir + '/*'):
basename = os.path.basename(f)
ro_f = rodir + '/' + basename
shutil.copy(f, ro_f)
os.chmod(ro_f, 0o755)
# yes it is this stupid, why do you ask?
opts = """lock-never
no-auto-check-trustdb
trust-model direct
no-expensive-trust-checks
no-permission-warning
preserve-permissions
"""
with open(os.path.join(rodir, 'gpg.conf'), 'w', 0o755) as fp:
fp.write(opts)
return True
def getCacheDir():
"""return a path to a valid and safe cachedir - only used when not running
as root or when --tempcache is set"""
uid = os.geteuid()
try:
usertup = pwd.getpwuid(uid)
username = dnf.i18n.ucd(usertup[0])
prefix = '%s-%s-' % (dnf.const.PREFIX, username)
except KeyError:
prefix = '%s-%s-' % (dnf.const.PREFIX, uid)
# check for /var/tmp/prefix-* -
dirpath = '%s/%s*' % (dnf.const.TMPDIR, prefix)
cachedirs = sorted(glob.glob(dirpath))
for thisdir in cachedirs:
stats = os.lstat(thisdir)
if S_ISDIR(stats[0]) and S_IMODE(stats[0]) == 448 and stats[4] == uid:
return thisdir
# make the dir (tempfile.mkdtemp())
cachedir = tempfile.mkdtemp(prefix=prefix, dir=dnf.const.TMPDIR)
return cachedir
def seq_max_split(seq, max_entries):
""" Given a seq, split into a list of lists of length max_entries each. """
ret = []
num = len(seq)
seq = list(seq) # Trying to use a set/etc. here is bad
beg = 0
while num > max_entries:
end = beg + max_entries
ret.append(seq[beg:end])
beg += max_entries
num -= max_entries
ret.append(seq[beg:])
return ret
def unlink_f(filename):
""" Call os.unlink, but don't die if the file isn't there. This is the main
difference between "rm -f" and plain "rm". """
try:
os.unlink(filename)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def stat_f(filename, ignore_EACCES=False):
""" Call os.stat(), but don't die if the file isn't there. Returns None. """
try:
return os.stat(filename)
except OSError as e:
if e.errno in (errno.ENOENT, errno.ENOTDIR):
return None
if ignore_EACCES and e.errno == errno.EACCES:
return None
raise
def _getloginuid():
""" Get the audit-uid/login-uid, if available. os.getuid() is returned
instead if there was a problem. Note that no caching is done here. """
# We might normally call audit.audit_getloginuid(), except that requires
# importing all of the audit module. And it doesn't work anyway: BZ 518721
try:
with open("/proc/self/loginuid") as fo:
data = fo.read()
return int(data)
except (IOError, ValueError):
return os.getuid()
_cached_getloginuid = None
def getloginuid():
""" Get the audit-uid/login-uid, if available. os.getuid() is returned
instead if there was a problem. The value is cached, so you don't
have to save it. """
global _cached_getloginuid
if _cached_getloginuid is None:
|
return _cached_getloginuid
def decompress(filename, dest=None, check_timestamps=False):
"""take a filename and decompress it into the same relative location.
When the compression type is not recognized (or file is not compressed),
the content of the file is copied to the destination"""
if dest:
out = dest
else:
out = None
dot_pos = filename.rfind('.')
if dot_pos > 0:
ext = filename[dot_pos:]
if ext in ('.zck', '.xz', '.bz2', '.gz', '.lzma', '.zst'):
out = filename[:dot_pos]
if out is None:
raise dnf.exceptions.MiscError("Could not determine destination filename")
if check_timestamps:
fi = stat_f(filename)
fo = stat_f(out)
if fi and fo and fo.st_mtime == fi.st_mtime:
return out
try:
# libdnf.utils.decompress either decompress file to the destination or
# copy the content if the compression type is not recognized
libdnf.utils.decompress(filename, out, 0o644)
except RuntimeError as e:
raise dnf.exceptions.MiscError(str(e))
if check_timestamps and fi:
os.utime(out, (fi.st_mtime, fi.st_mtime))
return out
def read_in_items_from_dot_dir(thisglob, line_as_list=True):
""" Takes a glob of a dir (like /etc/foo.d/\\*.foo) returns a list of all
the lines in all the files matching that glob, ignores comments and blank
lines, optional paramater 'line_as_list tells whether to treat each line
as a space or comma-separated list, defaults to True.
"""
results = []
for fname in glob.glob(thisglob):
with open(fname) as f:
for line in f:
if re.match(r'\s*(#|$)', line):
continue
line = line.rstrip() # no more trailing \n's
line = line.lstrip() # be nice
if not line:
continue
if line_as_list:
line = line.replace('\n', ' ')
line = line.replace(',', ' ')
results.extend(line.split())
continue
results.append(line)
return results
| _cached_getloginuid = _getloginuid() | conditional_block |
misc.py | # misc.py
# Copyright (C) 2012-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
"""
Assorted utility functions for yum.
"""
from __future__ import print_function, absolute_import
from __future__ import unicode_literals
from dnf.pycomp import base64_decodebytes, basestring, unicode
from stat import *
import libdnf.utils
import dnf.const
import dnf.crypto
import dnf.exceptions
import dnf.i18n
import errno
import glob
import io
import os
import os.path
import pwd
import re
import shutil
import tempfile
_default_checksums = ['sha256']
_re_compiled_glob_match = None
def re_glob(s):
""" Tests if a string is a shell wildcard. """
global _re_compiled_glob_match
if _re_compiled_glob_match is None:
_re_compiled_glob_match = re.compile(r'[*?]|\[.+\]').search
return _re_compiled_glob_match(s)
_re_compiled_full_match = None
def re_full_search_needed(s):
""" Tests if a string needs a full nevra match, instead of just name. """
global _re_compiled_full_match
if _re_compiled_full_match is None:
# A glob, or a "." or "-" separator, followed by something (the ".")
one = re.compile(r'.*([-.*?]|\[.+\]).').match
# Any epoch, for envra
two = re.compile('[0-9]+:').match
_re_compiled_full_match = (one, two)
for rec in _re_compiled_full_match:
if rec(s):
return True
return False
def get_default_chksum_type():
return _default_checksums[0]
class GenericHolder(object):
"""Generic Holder class used to hold other objects of known types
It exists purely to be able to do object.somestuff, object.someotherstuff
or object[key] and pass object to another function that will
understand it"""
def __init__(self, iter=None):
self.__iter = iter
def __iter__(self):
if self.__iter is not None:
return iter(self[self.__iter])
def __getitem__(self, item):
if hasattr(self, item):
return getattr(self, item)
else:
raise KeyError(item)
def all_lists(self):
"""Return a dictionary of all lists."""
return {key: list_ for key, list_ in vars(self).items()
if type(list_) is list}
def merge_lists(self, other):
""" Concatenate the list attributes from 'other' to ours. """
for (key, val) in other.all_lists().items():
vars(self).setdefault(key, []).extend(val)
return self
def procgpgkey(rawkey):
'''Convert ASCII-armored GPG key to binary
'''
# Normalise newlines
rawkey = re.sub(b'\r\n?', b'\n', rawkey)
# Extract block
block = io.BytesIO()
inblock = 0
pastheaders = 0
for line in rawkey.split(b'\n'):
if line.startswith(b'-----BEGIN PGP PUBLIC KEY BLOCK-----'):
inblock = 1
elif inblock and line.strip() == b'':
pastheaders = 1
elif inblock and line.startswith(b'-----END PGP PUBLIC KEY BLOCK-----'):
# Hit the end of the block, get out
break
elif pastheaders and line.startswith(b'='):
# Hit the CRC line, don't include this and stop
break
elif pastheaders:
block.write(line + b'\n')
# Decode and return
return base64_decodebytes(block.getvalue())
def keyInstalled(ts, keyid, timestamp):
|
def import_key_to_pubring(rawkey, keyid, gpgdir=None, make_ro_copy=True):
if not os.path.exists(gpgdir):
os.makedirs(gpgdir)
with dnf.crypto.pubring_dir(gpgdir), dnf.crypto.Context() as ctx:
# import the key
with open(os.path.join(gpgdir, 'gpg.conf'), 'wb') as fp:
fp.write(b'')
ctx.op_import(rawkey)
if make_ro_copy:
rodir = gpgdir + '-ro'
if not os.path.exists(rodir):
os.makedirs(rodir, mode=0o755)
for f in glob.glob(gpgdir + '/*'):
basename = os.path.basename(f)
ro_f = rodir + '/' + basename
shutil.copy(f, ro_f)
os.chmod(ro_f, 0o755)
# yes it is this stupid, why do you ask?
opts = """lock-never
no-auto-check-trustdb
trust-model direct
no-expensive-trust-checks
no-permission-warning
preserve-permissions
"""
with open(os.path.join(rodir, 'gpg.conf'), 'w', 0o755) as fp:
fp.write(opts)
return True
def getCacheDir():
"""return a path to a valid and safe cachedir - only used when not running
as root or when --tempcache is set"""
uid = os.geteuid()
try:
usertup = pwd.getpwuid(uid)
username = dnf.i18n.ucd(usertup[0])
prefix = '%s-%s-' % (dnf.const.PREFIX, username)
except KeyError:
prefix = '%s-%s-' % (dnf.const.PREFIX, uid)
# check for /var/tmp/prefix-* -
dirpath = '%s/%s*' % (dnf.const.TMPDIR, prefix)
cachedirs = sorted(glob.glob(dirpath))
for thisdir in cachedirs:
stats = os.lstat(thisdir)
if S_ISDIR(stats[0]) and S_IMODE(stats[0]) == 448 and stats[4] == uid:
return thisdir
# make the dir (tempfile.mkdtemp())
cachedir = tempfile.mkdtemp(prefix=prefix, dir=dnf.const.TMPDIR)
return cachedir
def seq_max_split(seq, max_entries):
""" Given a seq, split into a list of lists of length max_entries each. """
ret = []
num = len(seq)
seq = list(seq) # Trying to use a set/etc. here is bad
beg = 0
while num > max_entries:
end = beg + max_entries
ret.append(seq[beg:end])
beg += max_entries
num -= max_entries
ret.append(seq[beg:])
return ret
def unlink_f(filename):
""" Call os.unlink, but don't die if the file isn't there. This is the main
difference between "rm -f" and plain "rm". """
try:
os.unlink(filename)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def stat_f(filename, ignore_EACCES=False):
""" Call os.stat(), but don't die if the file isn't there. Returns None. """
try:
return os.stat(filename)
except OSError as e:
if e.errno in (errno.ENOENT, errno.ENOTDIR):
return None
if ignore_EACCES and e.errno == errno.EACCES:
return None
raise
def _getloginuid():
""" Get the audit-uid/login-uid, if available. os.getuid() is returned
instead if there was a problem. Note that no caching is done here. """
# We might normally call audit.audit_getloginuid(), except that requires
# importing all of the audit module. And it doesn't work anyway: BZ 518721
try:
with open("/proc/self/loginuid") as fo:
data = fo.read()
return int(data)
except (IOError, ValueError):
return os.getuid()
_cached_getloginuid = None
def getloginuid():
""" Get the audit-uid/login-uid, if available. os.getuid() is returned
instead if there was a problem. The value is cached, so you don't
have to save it. """
global _cached_getloginuid
if _cached_getloginuid is None:
_cached_getloginuid = _getloginuid()
return _cached_getloginuid
def decompress(filename, dest=None, check_timestamps=False):
"""take a filename and decompress it into the same relative location.
When the compression type is not recognized (or file is not compressed),
the content of the file is copied to the destination"""
if dest:
out = dest
else:
out = None
dot_pos = filename.rfind('.')
if dot_pos > 0:
ext = filename[dot_pos:]
if ext in ('.zck', '.xz', '.bz2', '.gz', '.lzma', '.zst'):
out = filename[:dot_pos]
if out is None:
raise dnf.exceptions.MiscError("Could not determine destination filename")
if check_timestamps:
fi = stat_f(filename)
fo = stat_f(out)
if fi and fo and fo.st_mtime == fi.st_mtime:
return out
try:
# libdnf.utils.decompress either decompress file to the destination or
# copy the content if the compression type is not recognized
libdnf.utils.decompress(filename, out, 0o644)
except RuntimeError as e:
raise dnf.exceptions.MiscError(str(e))
if check_timestamps and fi:
os.utime(out, (fi.st_mtime, fi.st_mtime))
return out
def read_in_items_from_dot_dir(thisglob, line_as_list=True):
""" Takes a glob of a dir (like /etc/foo.d/\\*.foo) returns a list of all
the lines in all the files matching that glob, ignores comments and blank
lines, optional paramater 'line_as_list tells whether to treat each line
as a space or comma-separated list, defaults to True.
"""
results = []
for fname in glob.glob(thisglob):
with open(fname) as f:
for line in f:
if re.match(r'\s*(#|$)', line):
continue
line = line.rstrip() # no more trailing \n's
line = line.lstrip() # be nice
if not line:
continue
if line_as_list:
line = line.replace('\n', ' ')
line = line.replace(',', ' ')
results.extend(line.split())
continue
results.append(line)
return results
| '''
Return if the GPG key described by the given keyid and timestamp are
installed in the rpmdb.
The keyid and timestamp should both be passed as integers.
The ts is an rpm transaction set object
Return values:
- -1 key is not installed
- 0 key with matching ID and timestamp is installed
- 1 key with matching ID is installed but has an older timestamp
- 2 key with matching ID is installed but has a newer timestamp
No effort is made to handle duplicates. The first matching keyid is used to
calculate the return result.
'''
# Search
for hdr in ts.dbMatch('name', 'gpg-pubkey'):
if hdr['version'] == keyid:
installedts = int(hdr['release'], 16)
if installedts == timestamp:
return 0
elif installedts < timestamp:
return 1
else:
return 2
return -1 | identifier_body |
misc.py | # misc.py
# Copyright (C) 2012-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
"""
Assorted utility functions for yum.
"""
from __future__ import print_function, absolute_import
from __future__ import unicode_literals
from dnf.pycomp import base64_decodebytes, basestring, unicode
from stat import *
import libdnf.utils
import dnf.const
import dnf.crypto
import dnf.exceptions
import dnf.i18n
import errno
import glob
import io
import os
import os.path
import pwd
import re
import shutil
import tempfile
_default_checksums = ['sha256']
_re_compiled_glob_match = None
def re_glob(s):
""" Tests if a string is a shell wildcard. """
global _re_compiled_glob_match
if _re_compiled_glob_match is None:
_re_compiled_glob_match = re.compile(r'[*?]|\[.+\]').search
return _re_compiled_glob_match(s)
_re_compiled_full_match = None
def re_full_search_needed(s):
""" Tests if a string needs a full nevra match, instead of just name. """
global _re_compiled_full_match
if _re_compiled_full_match is None:
# A glob, or a "." or "-" separator, followed by something (the ".")
one = re.compile(r'.*([-.*?]|\[.+\]).').match
# Any epoch, for envra
two = re.compile('[0-9]+:').match
_re_compiled_full_match = (one, two)
for rec in _re_compiled_full_match:
if rec(s):
return True
return False
def get_default_chksum_type():
return _default_checksums[0]
class GenericHolder(object):
"""Generic Holder class used to hold other objects of known types
It exists purely to be able to do object.somestuff, object.someotherstuff
or object[key] and pass object to another function that will
understand it"""
def __init__(self, iter=None):
self.__iter = iter
def __iter__(self):
if self.__iter is not None:
return iter(self[self.__iter])
def __getitem__(self, item):
if hasattr(self, item):
return getattr(self, item)
else:
raise KeyError(item)
def all_lists(self):
"""Return a dictionary of all lists."""
return {key: list_ for key, list_ in vars(self).items()
if type(list_) is list}
def merge_lists(self, other):
""" Concatenate the list attributes from 'other' to ours. """
for (key, val) in other.all_lists().items():
vars(self).setdefault(key, []).extend(val)
return self
def procgpgkey(rawkey):
'''Convert ASCII-armored GPG key to binary
'''
# Normalise newlines
rawkey = re.sub(b'\r\n?', b'\n', rawkey)
# Extract block
block = io.BytesIO()
inblock = 0
pastheaders = 0
for line in rawkey.split(b'\n'):
if line.startswith(b'-----BEGIN PGP PUBLIC KEY BLOCK-----'):
inblock = 1
elif inblock and line.strip() == b'':
pastheaders = 1
elif inblock and line.startswith(b'-----END PGP PUBLIC KEY BLOCK-----'):
# Hit the end of the block, get out
break
elif pastheaders and line.startswith(b'='):
# Hit the CRC line, don't include this and stop
break
elif pastheaders:
block.write(line + b'\n')
# Decode and return
return base64_decodebytes(block.getvalue())
def keyInstalled(ts, keyid, timestamp):
'''
Return if the GPG key described by the given keyid and timestamp are
installed in the rpmdb.
The keyid and timestamp should both be passed as integers.
The ts is an rpm transaction set object
Return values:
- -1 key is not installed
- 0 key with matching ID and timestamp is installed
- 1 key with matching ID is installed but has an older timestamp
- 2 key with matching ID is installed but has a newer timestamp
No effort is made to handle duplicates. The first matching keyid is used to
calculate the return result.
'''
# Search
for hdr in ts.dbMatch('name', 'gpg-pubkey'):
if hdr['version'] == keyid:
installedts = int(hdr['release'], 16)
if installedts == timestamp:
return 0
elif installedts < timestamp:
return 1
else:
return 2
return -1
def import_key_to_pubring(rawkey, keyid, gpgdir=None, make_ro_copy=True):
if not os.path.exists(gpgdir):
os.makedirs(gpgdir)
with dnf.crypto.pubring_dir(gpgdir), dnf.crypto.Context() as ctx:
# import the key
with open(os.path.join(gpgdir, 'gpg.conf'), 'wb') as fp:
fp.write(b'')
ctx.op_import(rawkey)
if make_ro_copy:
rodir = gpgdir + '-ro'
if not os.path.exists(rodir):
os.makedirs(rodir, mode=0o755)
for f in glob.glob(gpgdir + '/*'):
basename = os.path.basename(f)
ro_f = rodir + '/' + basename
shutil.copy(f, ro_f)
os.chmod(ro_f, 0o755)
# yes it is this stupid, why do you ask?
opts = """lock-never | no-permission-warning
preserve-permissions
"""
with open(os.path.join(rodir, 'gpg.conf'), 'w', 0o755) as fp:
fp.write(opts)
return True
def getCacheDir():
"""return a path to a valid and safe cachedir - only used when not running
as root or when --tempcache is set"""
uid = os.geteuid()
try:
usertup = pwd.getpwuid(uid)
username = dnf.i18n.ucd(usertup[0])
prefix = '%s-%s-' % (dnf.const.PREFIX, username)
except KeyError:
prefix = '%s-%s-' % (dnf.const.PREFIX, uid)
# check for /var/tmp/prefix-* -
dirpath = '%s/%s*' % (dnf.const.TMPDIR, prefix)
cachedirs = sorted(glob.glob(dirpath))
for thisdir in cachedirs:
stats = os.lstat(thisdir)
if S_ISDIR(stats[0]) and S_IMODE(stats[0]) == 448 and stats[4] == uid:
return thisdir
# make the dir (tempfile.mkdtemp())
cachedir = tempfile.mkdtemp(prefix=prefix, dir=dnf.const.TMPDIR)
return cachedir
def seq_max_split(seq, max_entries):
""" Given a seq, split into a list of lists of length max_entries each. """
ret = []
num = len(seq)
seq = list(seq) # Trying to use a set/etc. here is bad
beg = 0
while num > max_entries:
end = beg + max_entries
ret.append(seq[beg:end])
beg += max_entries
num -= max_entries
ret.append(seq[beg:])
return ret
def unlink_f(filename):
""" Call os.unlink, but don't die if the file isn't there. This is the main
difference between "rm -f" and plain "rm". """
try:
os.unlink(filename)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def stat_f(filename, ignore_EACCES=False):
""" Call os.stat(), but don't die if the file isn't there. Returns None. """
try:
return os.stat(filename)
except OSError as e:
if e.errno in (errno.ENOENT, errno.ENOTDIR):
return None
if ignore_EACCES and e.errno == errno.EACCES:
return None
raise
def _getloginuid():
""" Get the audit-uid/login-uid, if available. os.getuid() is returned
instead if there was a problem. Note that no caching is done here. """
# We might normally call audit.audit_getloginuid(), except that requires
# importing all of the audit module. And it doesn't work anyway: BZ 518721
try:
with open("/proc/self/loginuid") as fo:
data = fo.read()
return int(data)
except (IOError, ValueError):
return os.getuid()
_cached_getloginuid = None
def getloginuid():
""" Get the audit-uid/login-uid, if available. os.getuid() is returned
instead if there was a problem. The value is cached, so you don't
have to save it. """
global _cached_getloginuid
if _cached_getloginuid is None:
_cached_getloginuid = _getloginuid()
return _cached_getloginuid
def decompress(filename, dest=None, check_timestamps=False):
"""take a filename and decompress it into the same relative location.
When the compression type is not recognized (or file is not compressed),
the content of the file is copied to the destination"""
if dest:
out = dest
else:
out = None
dot_pos = filename.rfind('.')
if dot_pos > 0:
ext = filename[dot_pos:]
if ext in ('.zck', '.xz', '.bz2', '.gz', '.lzma', '.zst'):
out = filename[:dot_pos]
if out is None:
raise dnf.exceptions.MiscError("Could not determine destination filename")
if check_timestamps:
fi = stat_f(filename)
fo = stat_f(out)
if fi and fo and fo.st_mtime == fi.st_mtime:
return out
try:
# libdnf.utils.decompress either decompress file to the destination or
# copy the content if the compression type is not recognized
libdnf.utils.decompress(filename, out, 0o644)
except RuntimeError as e:
raise dnf.exceptions.MiscError(str(e))
if check_timestamps and fi:
os.utime(out, (fi.st_mtime, fi.st_mtime))
return out
def read_in_items_from_dot_dir(thisglob, line_as_list=True):
""" Takes a glob of a dir (like /etc/foo.d/\\*.foo) returns a list of all
the lines in all the files matching that glob, ignores comments and blank
lines, optional paramater 'line_as_list tells whether to treat each line
as a space or comma-separated list, defaults to True.
"""
results = []
for fname in glob.glob(thisglob):
with open(fname) as f:
for line in f:
if re.match(r'\s*(#|$)', line):
continue
line = line.rstrip() # no more trailing \n's
line = line.lstrip() # be nice
if not line:
continue
if line_as_list:
line = line.replace('\n', ' ')
line = line.replace(',', ' ')
results.extend(line.split())
continue
results.append(line)
return results | no-auto-check-trustdb
trust-model direct
no-expensive-trust-checks | random_line_split |
misc.py | # misc.py
# Copyright (C) 2012-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
"""
Assorted utility functions for yum.
"""
from __future__ import print_function, absolute_import
from __future__ import unicode_literals
from dnf.pycomp import base64_decodebytes, basestring, unicode
from stat import *
import libdnf.utils
import dnf.const
import dnf.crypto
import dnf.exceptions
import dnf.i18n
import errno
import glob
import io
import os
import os.path
import pwd
import re
import shutil
import tempfile
_default_checksums = ['sha256']
_re_compiled_glob_match = None
def re_glob(s):
""" Tests if a string is a shell wildcard. """
global _re_compiled_glob_match
if _re_compiled_glob_match is None:
_re_compiled_glob_match = re.compile(r'[*?]|\[.+\]').search
return _re_compiled_glob_match(s)
# Lazily-built matcher pair used by re_full_search_needed().
_re_compiled_full_match = None

def re_full_search_needed(s):
    """ Tests if a string needs a full nevra match, instead of just name. """
    global _re_compiled_full_match
    if _re_compiled_full_match is None:
        # A glob, or a "." or "-" separator, followed by something (the ".")
        glob_or_separator = re.compile(r'.*([-.*?]|\[.+\]).').match
        # Any epoch prefix, for an envra string
        epoch = re.compile('[0-9]+:').match
        _re_compiled_full_match = (glob_or_separator, epoch)
    return any(matcher(s) for matcher in _re_compiled_full_match)
def get_default_chksum_type():
    # Return the preferred checksum algorithm name: the first entry of the
    # module-level _default_checksums list (currently "sha256").
    return _default_checksums[0]
class GenericHolder(object):
    """Generic Holder class used to hold other objects of known types

       It exists purely to be able to do object.somestuff, object.someotherstuff
       or object[key] and pass object to another function that will
       understand it"""

    def __init__(self, iter=None):
        # `iter` names the attribute that __iter__() iterates over (or None
        # to make instances non-iterable).  The method name was lost to file
        # corruption ("def | (self, iter=None):") and is restored here.
        self.__iter = iter

    def __iter__(self):
        # Only iterable when an attribute name was supplied at construction.
        if self.__iter is not None:
            return iter(self[self.__iter])

    def __getitem__(self, item):
        # Dict-style access mapped onto attribute access.
        if hasattr(self, item):
            return getattr(self, item)
        else:
            raise KeyError(item)

    def all_lists(self):
        """Return a dictionary of all list-valued attributes."""
        return {key: list_ for key, list_ in vars(self).items()
                if type(list_) is list}

    def merge_lists(self, other):
        """ Concatenate the list attributes from 'other' to ours. """
        for (key, val) in other.all_lists().items():
            vars(self).setdefault(key, []).extend(val)
        return self
def procgpgkey(rawkey):
    '''Convert ASCII-armored GPG key to binary
    '''
    # Normalise newlines so CRLF-armored keys split cleanly on b'\n'.
    rawkey = re.sub(b'\r\n?', b'\n', rawkey)

    # Extract the base64 payload between the armor headers and the CRC line.
    payload = io.BytesIO()
    in_armor = False
    past_headers = False
    for line in rawkey.split(b'\n'):
        if line.startswith(b'-----BEGIN PGP PUBLIC KEY BLOCK-----'):
            in_armor = True
            continue
        if not in_armor:
            continue
        if line.strip() == b'':
            # The first blank line separates armor headers from the payload;
            # any later blank lines are simply skipped.
            past_headers = True
            continue
        if line.startswith(b'-----END PGP PUBLIC KEY BLOCK-----'):
            # End of the armored block; everything needed has been collected.
            break
        if past_headers:
            if line.startswith(b'='):
                # CRC-24 checksum line: stop without including it.
                break
            payload.write(line + b'\n')

    # Decode the accumulated base64 payload into raw key bytes.
    return base64_decodebytes(payload.getvalue())
def keyInstalled(ts, keyid, timestamp):
    '''Check whether the GPG key described by `keyid`/`timestamp` is in the rpmdb.

    The keyid and timestamp should both be passed as integers; `ts` is an rpm
    transaction set object.  Keys are stored as gpg-pubkey headers whose
    "version" field is the key id and whose "release" field is the hex-encoded
    timestamp.

    Return values:
        -1  key is not installed
         0  key with matching ID and timestamp is installed
         1  key with matching ID is installed but has an older timestamp
         2  key with matching ID is installed but has a newer timestamp

    Duplicates are not handled: only the first header with a matching key id
    determines the result.
    '''
    for hdr in ts.dbMatch('name', 'gpg-pubkey'):
        if hdr['version'] != keyid:
            continue
        installedts = int(hdr['release'], 16)
        if installedts == timestamp:
            return 0
        return 1 if installedts < timestamp else 2
    return -1
def import_key_to_pubring(rawkey, keyid, gpgdir=None, make_ro_copy=True):
    """Import a raw GPG key into the keyring directory `gpgdir`.

    :param rawkey: binary key material, as accepted by gpgme's op_import
    :param keyid: id of the key -- NOTE(review): not used in this body
    :param gpgdir: keyring directory -- NOTE(review): the None default would
        crash os.path.exists() below, so callers must always pass a path
    :param make_ro_copy: when true, also materialize a copy of the keyring
        in '<gpgdir>-ro' with a gpg.conf tuned for read-only use
    :return: True
    """
    if not os.path.exists(gpgdir):
        os.makedirs(gpgdir)
    with dnf.crypto.pubring_dir(gpgdir), dnf.crypto.Context() as ctx:
        # import the key
        with open(os.path.join(gpgdir, 'gpg.conf'), 'wb') as fp:
            fp.write(b'')
        ctx.op_import(rawkey)
        if make_ro_copy:

            rodir = gpgdir + '-ro'
            # Only build the read-only copy once; an existing dir is kept as-is.
            if not os.path.exists(rodir):
                os.makedirs(rodir, mode=0o755)
                for f in glob.glob(gpgdir + '/*'):
                    basename = os.path.basename(f)
                    ro_f = rodir + '/' + basename
                    shutil.copy(f, ro_f)
                    os.chmod(ro_f, 0o755)
                # yes it is this stupid, why do you ask?
                # (gpg.conf options that stop gpg from trying to write/lock
                # anything inside the read-only keyring.)
                opts = """lock-never
no-auto-check-trustdb
trust-model direct
no-expensive-trust-checks
no-permission-warning
preserve-permissions
"""
                # NOTE(review): the third positional arg of open() is
                # *buffering*, not a file mode -- 0o755 here looks unintended;
                # confirm whether an os.chmod() was meant instead.
                with open(os.path.join(rodir, 'gpg.conf'), 'w', 0o755) as fp:
                    fp.write(opts)
    return True
def getCacheDir():
    """return a path to a valid and safe cachedir - only used when not running
       as root or when --tempcache is set"""
    uid = os.geteuid()
    try:
        # Prefer the account name in the directory prefix when it resolves.
        usertup = pwd.getpwuid(uid)
        username = dnf.i18n.ucd(usertup[0])
        prefix = '%s-%s-' % (dnf.const.PREFIX, username)
    except KeyError:
        # Unknown uid (no passwd entry): fall back to the numeric id.
        prefix = '%s-%s-' % (dnf.const.PREFIX, uid)
    # check for /var/tmp/prefix-* -
    dirpath = '%s/%s*' % (dnf.const.TMPDIR, prefix)
    cachedirs = sorted(glob.glob(dirpath))
    for thisdir in cachedirs:
        stats = os.lstat(thisdir)
        # Reuse only a real directory with mode 0o700 (== 448) owned by this
        # euid (stats[0] is st_mode, stats[4] is st_uid) -- anything else
        # could be a symlink trap or another user's directory.
        if S_ISDIR(stats[0]) and S_IMODE(stats[0]) == 448 and stats[4] == uid:
            return thisdir
    # make the dir (tempfile.mkdtemp())
    cachedir = tempfile.mkdtemp(prefix=prefix, dir=dnf.const.TMPDIR)
    return cachedir
def seq_max_split(seq, max_entries):
    """ Given a seq, split into a list of lists of length max_entries each. """
    # Materialise first so slicing works for sets/iterators too.
    items = list(seq)
    chunks = [items[beg:beg + max_entries]
              for beg in range(0, len(items), max_entries)]
    # The historical contract yields [[]] for an empty sequence, not [].
    return chunks if chunks else [[]]
def unlink_f(filename):
    """ Call os.unlink, but don't die if the file isn't there. This is the main
        difference between "rm -f" and plain "rm". """
    try:
        os.unlink(filename)
    except OSError as e:
        # A missing file is fine ("rm -f" semantics); anything else is real.
        if e.errno == errno.ENOENT:
            return
        raise
def stat_f(filename, ignore_EACCES=False):
    """ Call os.stat(), but don't die if the file isn't there. Returns None. """
    try:
        return os.stat(filename)
    except OSError as e:
        # Missing path components are always tolerated; permission errors
        # only when the caller opted in.
        tolerated = {errno.ENOENT, errno.ENOTDIR}
        if ignore_EACCES:
            tolerated.add(errno.EACCES)
        if e.errno in tolerated:
            return None
        raise
def _getloginuid():
    """ Get the audit-uid/login-uid, if available. os.getuid() is returned
        instead if there was a problem. Note that no caching is done here. """
    #  We might normally call audit.audit_getloginuid(), except that requires
    # importing all of the audit module. And it doesn't work anyway: BZ 518721
    try:
        with open("/proc/self/loginuid") as fo:
            data = fo.read()
        return int(data)
    except (IOError, ValueError):
        # /proc file missing (non-Linux, containers) or unparsable content:
        # fall back to the plain process uid.
        return os.getuid()
# Process-wide cache for getloginuid(); the /proc lookup runs at most once.
_cached_getloginuid = None
def getloginuid():
    """ Get the audit-uid/login-uid, if available. os.getuid() is returned
        instead if there was a problem. The value is cached, so you don't
        have to save it. """
    global _cached_getloginuid
    if _cached_getloginuid is None:
        _cached_getloginuid = _getloginuid()
    return _cached_getloginuid
def decompress(filename, dest=None, check_timestamps=False):
    """take a filename and decompress it into the same relative location.
    When the compression type is not recognized (or file is not compressed),
    the content of the file is copied to the destination

    :param filename: path of the (possibly) compressed input file
    :param dest: explicit output path; when omitted, the destination is the
        input path with its known compression suffix stripped
    :param check_timestamps: when true, skip the work if the destination
        already has the same mtime as the source, and propagate the source
        mtime to the destination afterwards
    :raises dnf.exceptions.MiscError: when no destination can be derived or
        libdnf fails to decompress
    """
    if dest:
        out = dest
    else:
        out = None
        dot_pos = filename.rfind('.')
        if dot_pos > 0:
            ext = filename[dot_pos:]
            # Only these suffixes are treated as compression extensions.
            if ext in ('.zck', '.xz', '.bz2', '.gz', '.lzma', '.zst'):
                out = filename[:dot_pos]
        if out is None:
            raise dnf.exceptions.MiscError("Could not determine destination filename")
    if check_timestamps:
        fi = stat_f(filename)
        fo = stat_f(out)
        # Matching mtimes mean the destination is already up to date.
        if fi and fo and fo.st_mtime == fi.st_mtime:
            return out
    try:
        # libdnf.utils.decompress either decompress file to the destination or
        # copy the content if the compression type is not recognized
        libdnf.utils.decompress(filename, out, 0o644)
    except RuntimeError as e:
        raise dnf.exceptions.MiscError(str(e))
    if check_timestamps and fi:
        # Copy the source mtime so the short-circuit above works next time.
        os.utime(out, (fi.st_mtime, fi.st_mtime))
    return out
def read_in_items_from_dot_dir(thisglob, line_as_list=True):
    """Collect configuration items from every file matching `thisglob`
    (e.g. /etc/foo.d/\\*.foo).

    Comment lines (leading '#') and blank lines are ignored.  When
    `line_as_list` is true (the default) each line is treated as a comma or
    whitespace separated list and the individual tokens are returned;
    otherwise each stripped line is returned as a single item.
    """
    results = []
    for fname in glob.glob(thisglob):
        with open(fname) as f:
            for raw in f:
                if re.match(r'\s*(#|$)', raw):
                    continue  # comment or blank line
                item = raw.strip()
                if not item:
                    continue
                if line_as_list:
                    # Commas behave exactly like whitespace separators.
                    results.extend(item.replace(',', ' ').split())
                else:
                    results.append(item)
    return results
| __init__ | identifier_name |
rebuild_storage.py | #!python3
# -*- coding:utf-8 -*-
import os
import sys
import time
import ctypes
import shutil
import subprocess
IsPy3 = sys.version_info[0] >= 3
if IsPy3:
import winreg
else:
import codecs
import _winreg as winreg
BuildType = 'Release'
IsRebuild = True
Build = 'Rebuild'
Update = False
Copy = False
CleanAll = False
BuildTimeout = 30*60
MSBuild = None
IncrediBuild = None
UseMSBuild = True # build with MSBuild by default; when False, use IncrediBuild instead
# Only the following 5 variables need to change between projects
SlnFile = '../storage.sln' # path relative to this script
UpdateDir = [] # paths relative to this script; leave empty to skip `git pull`
ExecBatList = [] # bat scripts (relative to this script) run before building; each is executed from its own directory
MSBuildFirstProjects = [r'storage'] # project paths inside the .sln, as required by MSBuild's /t switch
# projects MSBuild builds first; leave empty for no explicit ordering
IncrediBuildFirstProjects = ['storage'] # IncrediBuild only needs project names
# projects IncrediBuild builds first; leave empty for no explicit ordering
class ConsoleColor():
    '''This class defines the values of color for printing on console window'''
    # Values are Win32 console character attributes as used by
    # SetConsoleTextAttribute (low 4 bits = foreground color).
    Black = 0
    DarkBlue = 1
    DarkGreen = 2
    DarkCyan = 3
    DarkRed = 4
    DarkMagenta = 5
    DarkYellow = 6
    Gray = 7
    DarkGray = 8
    Blue = 9
    Green = 10
    Cyan = 11
    Red = 12
    Magenta = 13
    Yellow = 14
    White = 15
class Coord(ctypes.Structure):
    # Mirrors the Win32 COORD structure (console cell coordinates).
    _fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]
class SmallRect(ctypes.Structure):
    # Mirrors the Win32 SMALL_RECT structure (console window rectangle).
    _fields_ = [('Left', ctypes.c_short),
                ('Top', ctypes.c_short),
                ('Right', ctypes.c_short),
                ('Bottom', ctypes.c_short),
                ]
class ConsoleScreenBufferInfo(ctypes.Structure):
    # Mirrors the Win32 CONSOLE_SCREEN_BUFFER_INFO structure filled in by
    # GetConsoleScreenBufferInfo; wAttributes carries the current text color.
    _fields_ = [('dwSize', Coord),
                ('dwCursorPosition', Coord),
                ('wAttributes', ctypes.c_uint),
                ('srWindow', SmallRect),
                ('dwMaximumWindowSize', Coord),
                ]
class Win32API():
    '''Some native methods for python calling'''
    # Win32 standard-handle id for stdout, i.e. GetStdHandle(-11).
    StdOutputHandle = -11
    ConsoleOutputHandle = None
    # Console attribute word captured on first SetConsoleColor() call and
    # restored by ResetConsoleColor().
    DefaultColor = None
    @staticmethod
    def SetConsoleColor(color):
        '''Change the text color on console window'''
        if not Win32API.DefaultColor:
            if not Win32API.ConsoleOutputHandle:
                Win32API.ConsoleOutputHandle = ctypes.windll.kernel32.GetStdHandle(Win32API.StdOutputHandle)
            # Remember the current attributes so the color can be restored.
            bufferInfo = ConsoleScreenBufferInfo()
            ctypes.windll.kernel32.GetConsoleScreenBufferInfo(Win32API.ConsoleOutputHandle, ctypes.byref(bufferInfo))
            Win32API.DefaultColor = int(bufferInfo.wAttributes & 0xFF)
        if IsPy3:
            sys.stdout.flush() # need flush stdout in python 3
        ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, color)
    @staticmethod
    def ResetConsoleColor():
        '''Reset the default text color on console window'''
        if IsPy3:
            sys.stdout.flush() # need flush stdout in python 3
        ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, Win32API.DefaultColor)
class Logger():
    """Tiny logging helper: writes to the console (optionally colored via
    Win32API) and appends to the file named by Logger.LogFile.

    This copy of the class had its `except` line lost to file corruption
    ("| sys.stdout.write(...)"); it is restored below.  The file handle is
    now also closed on the success path (the original only closed on error).
    """
    LogFile = '@AutomationLog.txt'
    LineSep = '\n'

    @staticmethod
    def Write(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
                      if consoleColor == -1, use default color
        '''
        if printToStdout:
            isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White)
            if isValidColor:
                Win32API.SetConsoleColor(consoleColor)
            try:
                sys.stdout.write(log)
            except UnicodeError as e:
                # Console encoding cannot represent the message; emit a red
                # diagnostic instead and make sure the color gets reset.
                Win32API.SetConsoleColor(ConsoleColor.Red)
                isValidColor = True
                sys.stdout.write(str(type(e)) + ' can\'t print the log!\n')
            if isValidColor:
                Win32API.ResetConsoleColor()
        if not writeToFile:
            return
        if IsPy3:
            logFile = open(Logger.LogFile, 'a+', encoding = 'utf-8')
        else:
            logFile = codecs.open(Logger.LogFile, 'a+', 'utf-8')
        try:
            logFile.write(log)
        except Exception as ex:
            sys.stdout.write('can not write log with exception: {0} {1}'.format(type(ex), ex))
        finally:
            # Always release the handle, not only on the error path.
            logFile.close()

    @staticmethod
    def WriteLine(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
                      if consoleColor == -1, use default color
        '''
        Logger.Write(log + Logger.LineSep, consoleColor, writeToFile, printToStdout)

    @staticmethod
    def Log(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
                      if consoleColor == -1, use default color
        '''
        # Prefix every entry with a local timestamp: YYYY-MM-DD HH:MM:SS - msg
        t = time.localtime()
        log = '{0}-{1:02}-{2:02} {3:02}:{4:02}:{5:02} - {6}{7}'.format(t.tm_year, t.tm_mon, t.tm_mday,
            t.tm_hour, t.tm_min, t.tm_sec, log, Logger.LineSep)
        Logger.Write(log, consoleColor, writeToFile, printToStdout)

    @staticmethod
    def DeleteLog():
        if os.path.exists(Logger.LogFile):
            os.remove(Logger.LogFile)
def GetMSBuildPath():
    # Locate msbuild.exe by sourcing the VS2013 (VS120) environment and then
    # running `where msbuild`, via a throw-away batch file (Windows only).
    cmd = 'call "%VS120COMNTOOLS%..\\..\\VC\\vcvarsall.bat" x86\nwhere msbuild'
    ftemp = open('GetMSBuildPath.bat', 'wt')
    ftemp.write(cmd)
    ftemp.close()
    p = subprocess.Popen('GetMSBuildPath.bat', stdout = subprocess.PIPE)
    p.wait()
    lines = p.stdout.read().decode().splitlines()
    os.remove('GetMSBuildPath.bat')
    # Return the first line naming MSBuild.exe; implicitly None when absent.
    for line in lines:
        if 'MSBuild.exe' in line:
            return line
def GetIncrediBuildPath():
    # Derive BuildConsole.exe's path from IncrediBuild's registry file
    # association (Windows only); returns None (implicitly) when not found.
    try:
        key=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Classes\IncrediBuild.MonitorFile\shell\open\command')
        value, typeId = winreg.QueryValueEx(key, '')
        if value:
            # The value looks like: "C:\...\IncrediBuild.exe" "%1" -- extract
            # the quoted executable path.
            start = value.find('"')
            end = value.find('"', start + 1)
            path = value[start+1:end]
            buildConsole = os.path.join(os.path.dirname(path), 'BuildConsole.exe')
            return buildConsole
    except FileNotFoundError as e:
        Logger.WriteLine('can not find IncrediBuild', ConsoleColor.Red)
def UpdateCode():
    """Run `git pull` in every directory listed in UpdateDir.

    Returns True when all pulls succeed, False when git is missing or any
    pull fails.  (The original returned the undefined name `false`, which
    raised NameError on every failure path; the message string also contained
    an accidental '\\b' backspace escape.)
    """
    # git must be reachable on PATH first
    if not shutil.which('git.exe'):
        Logger.Log('找不到git.exe. 请确认安装git时将git\\bin目录路径加入到环境变量path中!!!\n, 跳过更新代码!!!', ConsoleColor.Yellow)
        return False
    oldDir = os.getcwd()
    for dir in UpdateDir:
        os.chdir(dir)
        ret = os.system('git pull')
        os.chdir(oldDir)
        if ret != 0:
            Logger.Log('update {0} failed'.format(dir), ConsoleColor.Yellow)
            return False
    return True
def BuildProject(cmd):
    # Run one build command, retrying up to 6 times; returns True on success.
    for i in range(6):
        Logger.WriteLine(cmd, ConsoleColor.Cyan)
        buildFailed = True
        startTime = time.time()
        p = subprocess.Popen(cmd) # IncrediBuild must not be run with stdout=subprocess.PIPE, otherwise p.wait() never returns (appears to be an IncrediBuild bug)
        if IsPy3:
            try:
                buildFailed = p.wait(BuildTimeout)
            except subprocess.TimeoutExpired as e:
                Logger.Log('{0}'.format(e), ConsoleColor.Yellow)
                p.kill()
        else:
            buildFailed = p.wait()
        if not UseMSBuild:
            # IncrediBuild's exit status does not reflect build success; parse its log output instead
            fin = open('IncrediBuild.log')
            for line in fin:
                if line.startswith('=========='):
                    Logger.Write(line, ConsoleColor.Cyan, writeToFile = True if IsPy3 else False)
                    if IsPy3:
                        start = line.find('失败') + 3 # the summary line reads "========== 生成: 成功 N 个,失败 M 个,..." -- index just past the "failed" label
                    else:# special-casing for py2 compatibility; admittedly ugly
                        # scan to the second numeric token (the failure count)
                        start = 0
                        n2 = 0
                        while 1:
                            if line[start].isdigit():
                                n2 += 1
                                if n2 == 2:
                                    break
                            start = line.find(' ', start)
                            start += 1
                    end = line.find(' ', start)
                    failCount = int(line[start:end])
                    buildFailed = failCount > 0
                else:
                    Logger.Write(line, ConsoleColor.Red, writeToFile = True if IsPy3 else False, printToStdout = True if ' error ' in line else False)
            fin.close()
        costTime = time.time() - startTime
        Logger.WriteLine('build cost time: {0:.1f}s\n'.format(costTime), ConsoleColor.Green)
        if not buildFailed:
            return True
    return False
def BuildAllProjects():
    # Compose the clean/build command list for the selected tool and run each
    # command in order; returns True only when every step succeeds.
    buildSuccess = False
    cmds = []
    if UseMSBuild:
        if IsRebuild:
            if CleanAll:
                # Clean both configurations before a full rebuild.
                cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Debug'))
                cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Release'))
            else:
                cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
        # Priority projects first, then the whole solution.
        for project in MSBuildFirstProjects:
            cmds.append('{0} {1} /t:{2} /p:Configuration={3} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, project, BuildType))
        cmds.append('{0} {1} /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
    else: #IncrediBuild
        if IsRebuild:
            if CleanAll:
                cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Debug'))
                cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Release'))
            else:
                cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))
        for project in IncrediBuildFirstProjects:
            cmds.append('"{0}" {1} /build /prj={2} /cfg="{3}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, project, BuildType))
        cmds.append('"{0}" {1} /build /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))
    for cmd in cmds:
        buildSuccess = BuildProject(cmd)
        if not buildSuccess:
            break
    return buildSuccess
def main():
    """Drive the whole build: verify the build tool exists, optionally git-pull
    and run pre-build bat scripts, then build.  Returns a process exit code
    (0 on success, 1 on failure)."""
    if UseMSBuild:
        if not os.path.exists(MSBuild):
            Logger.Log('can not find msbuild.exe', ConsoleColor.Red)
            return 1
    else:
        if not os.path.exists(IncrediBuild):
            # was 'can not find msbuild.exe' -- copy/paste error; this branch
            # checks for IncrediBuild's BuildConsole.exe
            Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)
            return 1
    dir = os.path.dirname(__file__)
    if dir:
        oldDir = os.getcwd()
        os.chdir(dir)
    if Update:
        if not UpdateCode():
            return 1
        Logger.Log('git update succeed', ConsoleColor.Green)
    if Copy:
        for bat in ExecBatList:
            oldBatDir = os.getcwd()
            batDir = os.path.dirname(bat)
            batName = os.path.basename(bat)
            if batDir:
                os.chdir(batDir)
            # time.clock() was removed in Python 3.8; time.time() matches the
            # wall-clock timing used everywhere else in this script.
            start = time.time()
            os.system(batName)
            Logger.Log('run "{}" cost {:.1f} seconds'.format(batName, time.time() - start), ConsoleColor.Green)
            if batDir:
                os.chdir(oldBatDir)
    buildSuccess = BuildAllProjects()
    if buildSuccess:
        Logger.Log('build succeed', ConsoleColor.Green)
    else:
        Logger.Log('build failed', ConsoleColor.Red)
    if dir:
        os.chdir(oldDir)
    return 0 if buildSuccess else 1
if __name__ == '__main__':
    # The tail of this copy was corrupted by stray residue after `exit(ret)`
    # ("| except Exception as ex: ..."), which belongs to Logger.Write and is
    # dropped here.
    Logger.Log('run with argv ' + str(sys.argv), ConsoleColor.Green)
    sys.argv = [x.lower() for x in sys.argv]
    start_time = time.time()
    # Command-line switches (case-insensitive):
    #   debug        build Debug instead of Release
    #   build        incremental build instead of a full rebuild
    #   update       git-pull the directories in UpdateDir first
    #   copy         run the pre-build bat scripts in ExecBatList
    #   clean        clean both Debug and Release before rebuilding
    #   incredibuild build with IncrediBuild instead of MSBuild
    if 'debug' in sys.argv:
        BuildType = 'Debug'
    if 'build' in sys.argv:
        IsRebuild = False
        Build = 'Build'
    if 'update' in sys.argv:
        Update = True
    if 'copy' in sys.argv:
        Copy = True
    if 'clean' in sys.argv:
        CleanAll = True
    if 'incredibuild' in sys.argv:
        UseMSBuild = False
    if UseMSBuild:
        MSBuild = GetMSBuildPath()
        if not MSBuild:
            Logger.Log('can not find MSBuild.exe', ConsoleColor.Red)
            exit(1)
    else:
        IncrediBuild = GetIncrediBuildPath()
        if not IncrediBuild:
            Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)
            exit(1)
    cwd = os.getcwd()
    Logger.WriteLine('current dir is: {0}, {1}: {2}'.format(cwd, Build, BuildType))
    ret = main()
    end_time = time.time()
    cost_time = end_time-start_time
    Logger.WriteLine('all build cost time: {0:.2f} seconds'.format(cost_time), ConsoleColor.Green)
    exit(ret)
rebuild_storage.py | #!python3
# -*- coding:utf-8 -*-
import os
import sys
import time
import ctypes
import shutil
import subprocess
IsPy3 = sys.version_info[0] >= 3
if IsPy3:
import winreg
else:
import codecs
import _winreg as winreg
BuildType = 'Release'
IsRebuild = True
Build = 'Rebuild'
Update = False
Copy = False
CleanAll = False
BuildTimeout = 30*60
MSBuild = None
IncrediBuild = None
UseMSBuild = True #默认用MSBuild编译,如果为False则用IncrediBuild编译
#不同项目只需修改下面5个变量
SlnFile = '../storage.sln' #相对于本py脚本路径的相对路径
UpdateDir = [] #相对于本py脚本路径的相对路径,填空不更新
ExecBatList = [] #相对于本py脚本路径的相对路径,编译前调用的脚本,可填空,执行bat会先cd到bat目录再执行
MSBuildFirstProjects = [r'storage'] #使用MSBuild需要工程文件在解决方案sln中的路径
# MSBuild首先编译的项目,填空不指定顺序
IncrediBuildFirstProjects = ['storage'] #使用IncrediBuild只需工程名字
#IncrediBuild首先编译的项目,填空不指定顺序
class ConsoleColor():
'''This class defines the values of color for printing on console window'''
Black = 0
DarkBlue = 1
DarkGreen = 2
DarkCyan = 3
DarkRed = 4
DarkMagenta = 5
DarkYellow = 6
Gray = 7
DarkGray = 8
Blue = 9
Green = 10
Cyan = 11
Red = 12
Magenta = 13
Yellow = 14
White = 15
class Coord(ctypes.Structure):
_fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]
class SmallRect(ctypes.Structure):
_fields_ = [('Left', ctypes.c_short),
('Top', ctypes.c_short),
('Right', ctypes.c_short),
('Bottom', ctypes.c_short),
]
class ConsoleScreenBufferInfo(ctypes.Structure):
_fields_ = [('dwSize', Coord),
('dwCursorPosition', Coord),
('wAttributes', ctypes.c_uint),
('srWindow', SmallRect),
('dwMaximumWindowSize', Coord),
]
class Win32API():
'''Some native methods for python calling'''
StdOutputHandle = -11
ConsoleOutputHandle = None
DefaultColor = None
@staticmethod
def SetConsoleColor(color):
'''Change the text color on console window'''
if not Win32API.DefaultColor:
if not Win32API.ConsoleOutputHandle:
Win32API.ConsoleOutputHandle = ctypes.windll.kernel32.GetStdHandle(Win32API.StdOutputHandle)
bufferInfo = ConsoleScreenBufferInfo()
ctypes.windll.kernel32.GetConsoleScreenBufferInfo(Win32API.ConsoleOutputHandle, ctypes.byref(bufferInfo))
Win32API.DefaultColor = int(bufferInfo.wAttributes & 0xFF)
if IsPy3:
sys.stdout.flush() # need flush stdout in python 3
ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, color)
@staticmethod
def ResetConsoleColor():
'''Reset the default text color on console window'''
if IsPy3:
sys.stdout.flush() # need flush stdout in python 3
ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, Win32API.DefaultColor)
class Logger():
    """Tiny logging helper: writes to the console (optionally colored via
    Win32API) and appends to the file named by Logger.LogFile."""
    LogFile = '@AutomationLog.txt'
    LineSep = '\n'

    @staticmethod
    def Write(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
                      if consoleColor == -1, use default color
        '''
        if printToStdout:
            isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White)
            if isValidColor:
                Win32API.SetConsoleColor(consoleColor)
            try:
                sys.stdout.write(log)
            except UnicodeError as e:
                # Console encoding cannot represent the message; emit a red
                # diagnostic instead and make sure the color gets reset.
                Win32API.SetConsoleColor(ConsoleColor.Red)
                isValidColor = True
                sys.stdout.write(str(type(e)) + ' can\'t print the log!\n')
            if isValidColor:
                Win32API.ResetConsoleColor()
        if not writeToFile:
            return
        if IsPy3:
            logFile = open(Logger.LogFile, 'a+', encoding = 'utf-8')
        else:
            logFile = codecs.open(Logger.LogFile, 'a+', 'utf-8')
        try:
            logFile.write(log)
        except Exception as ex:
            sys.stdout.write('can not write log with exception: {0} {1}'.format(type(ex), ex))
        finally:
            # Fix: always release the handle -- the original closed the file
            # only on the error path and leaked it after every successful write.
            logFile.close()

    @staticmethod
    def WriteLine(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
                      if consoleColor == -1, use default color
        '''
        Logger.Write(log + Logger.LineSep, consoleColor, writeToFile, printToStdout)

    @staticmethod
    def Log(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
                      if consoleColor == -1, use default color
        '''
        # Prefix every entry with a local timestamp: YYYY-MM-DD HH:MM:SS - msg
        t = time.localtime()
        log = '{0}-{1:02}-{2:02} {3:02}:{4:02}:{5:02} - {6}{7}'.format(t.tm_year, t.tm_mon, t.tm_mday,
            t.tm_hour, t.tm_min, t.tm_sec, log, Logger.LineSep)
        Logger.Write(log, consoleColor, writeToFile, printToStdout)

    @staticmethod
    def DeleteLog():
        if os.path.exists(Logger.LogFile):
            os.remove(Logger.LogFile)
def GetMSBuildPath():
cmd = 'call "%VS120COMNTOOLS%..\\..\\VC\\vcvarsall.bat" x86\nwhere msbuild'
ftemp = open('GetMSBuildPath.bat', 'wt')
ftemp.write(cmd)
ftemp.close()
p = subprocess.Popen('GetMSBuildPath.bat', stdout = subprocess.PIPE)
p.wait()
lines = p.stdout.read().decode().splitlines()
os.remove('GetMSBuildPath.bat')
for line in lines:
if 'MSBuild.exe' in line:
return line
def GetIncrediBuildPath():
try:
key=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Classes\IncrediBuild.MonitorFile\shell\open\command')
value, typeId = winreg.QueryValueEx(key, '')
if value:
start = value.find('"')
end = value.find('"', start + 1)
path = value[start+1:end]
buildConsole = os.path.join(os.path.dirname(path), 'BuildConsole.exe')
return buildConsole
except FileNotFoundError as e:
Logger.WriteLine('can not find IncrediBuild', ConsoleColor.Red)
def UpdateCode():
    """Run `git pull` in every directory listed in UpdateDir.

    Returns True when all pulls succeed, False when git is missing or any
    pull fails.  (The original returned the undefined name `false`, which
    raised NameError on every failure path; the message string also contained
    an accidental '\\b' backspace escape.)
    """
    # git must be reachable on PATH first
    if not shutil.which('git.exe'):
        Logger.Log('找不到git.exe. 请确认安装git时将git\\bin目录路径加入到环境变量path中!!!\n, 跳过更新代码!!!', ConsoleColor.Yellow)
        return False
    oldDir = os.getcwd()
    for dir in UpdateDir:
        os.chdir(dir)
        ret = os.system('git pull')
        os.chdir(oldDir)
        if ret != 0:
            Logger.Log('update {0} failed'.format(dir), ConsoleColor.Yellow)
            return False
    return True
def BuildProject(cmd):
    """Run one build command, retrying up to 6 times; True on success.

    This copy was corrupted mid-function (the `if IsPy3: try: buildFailed =
    p.wait(BuildTimeout)` lines were displaced to the end of the file); they
    are restored here from the intact copy of the script.
    """
    for i in range(6):
        Logger.WriteLine(cmd, ConsoleColor.Cyan)
        buildFailed = True
        startTime = time.time()
        p = subprocess.Popen(cmd) # IncrediBuild must not be run with stdout=subprocess.PIPE, otherwise p.wait() never returns (appears to be an IncrediBuild bug)
        if IsPy3:
            try:
                buildFailed = p.wait(BuildTimeout)
            except subprocess.TimeoutExpired as e:
                Logger.Log('{0}'.format(e), ConsoleColor.Yellow)
                p.kill()
        else:
            buildFailed = p.wait()
        if not UseMSBuild:
            # IncrediBuild's exit status does not reflect build success; parse its log output instead
            fin = open('IncrediBuild.log')
            for line in fin:
                if line.startswith('=========='):
                    Logger.Write(line, ConsoleColor.Cyan, writeToFile = True if IsPy3 else False)
                    if IsPy3:
                        start = line.find('失败') + 3 # the summary line reads "========== 生成: 成功 N 个,失败 M 个,..." -- index just past the "failed" label
                    else:# special-casing for py2 compatibility; admittedly ugly
                        # scan to the second numeric token (the failure count)
                        start = 0
                        n2 = 0
                        while 1:
                            if line[start].isdigit():
                                n2 += 1
                                if n2 == 2:
                                    break
                            start = line.find(' ', start)
                            start += 1
                    end = line.find(' ', start)
                    failCount = int(line[start:end])
                    buildFailed = failCount > 0
                else:
                    Logger.Write(line, ConsoleColor.Red, writeToFile = True if IsPy3 else False, printToStdout = True if ' error ' in line else False)
            fin.close()
        costTime = time.time() - startTime
        Logger.WriteLine('build cost time: {0:.1f}s\n'.format(costTime), ConsoleColor.Green)
        if not buildFailed:
            return True
    return False
def BuildAllProjects():
buildSuccess = False
cmds = []
if UseMSBuild:
if IsRebuild:
if CleanAll:
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Debug'))
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Release'))
else:
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
for project in MSBuildFirstProjects:
cmds.append('{0} {1} /t:{2} /p:Configuration={3} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, project, BuildType))
cmds.append('{0} {1} /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
else: #IncrediBuild
if IsRebuild:
if CleanAll:
cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Debug'))
cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Release'))
else:
cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))
for project in IncrediBuildFirstProjects:
cmds.append('"{0}" {1} /build /prj={2} /cfg="{3}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, project, BuildType))
cmds.append('"{0}" {1} /build /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))
for cmd in cmds:
buildSuccess = BuildProject(cmd)
if not buildSuccess:
break
return buildSuccess
def main():
    """Drive the whole build: verify the build tool exists, optionally git-pull
    and run pre-build bat scripts, then build.  Returns a process exit code
    (0 on success, 1 on failure)."""
    if UseMSBuild:
        if not os.path.exists(MSBuild):
            Logger.Log('can not find msbuild.exe', ConsoleColor.Red)
            return 1
    else:
        if not os.path.exists(IncrediBuild):
            # was 'can not find msbuild.exe' -- copy/paste error; this branch
            # checks for IncrediBuild's BuildConsole.exe
            Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)
            return 1
    dir = os.path.dirname(__file__)
    if dir:
        oldDir = os.getcwd()
        os.chdir(dir)
    if Update:
        if not UpdateCode():
            return 1
        Logger.Log('git update succeed', ConsoleColor.Green)
    if Copy:
        for bat in ExecBatList:
            oldBatDir = os.getcwd()
            batDir = os.path.dirname(bat)
            batName = os.path.basename(bat)
            if batDir:
                os.chdir(batDir)
            # time.clock() was removed in Python 3.8; time.time() matches the
            # wall-clock timing used everywhere else in this script.
            start = time.time()
            os.system(batName)
            Logger.Log('run "{}" cost {:.1f} seconds'.format(batName, time.time() - start), ConsoleColor.Green)
            if batDir:
                os.chdir(oldBatDir)
    buildSuccess = BuildAllProjects()
    if buildSuccess:
        Logger.Log('build succeed', ConsoleColor.Green)
    else:
        Logger.Log('build failed', ConsoleColor.Red)
    if dir:
        os.chdir(oldDir)
    return 0 if buildSuccess else 1
if __name__ == '__main__':
    # The lines after `exit(ret)` in this copy were stray residue displaced
    # from BuildProject by file corruption; they are dropped here.
    Logger.Log('run with argv ' + str(sys.argv), ConsoleColor.Green)
    sys.argv = [x.lower() for x in sys.argv]
    start_time = time.time()
    # Command-line switches (case-insensitive):
    #   debug        build Debug instead of Release
    #   build        incremental build instead of a full rebuild
    #   update       git-pull the directories in UpdateDir first
    #   copy         run the pre-build bat scripts in ExecBatList
    #   clean        clean both Debug and Release before rebuilding
    #   incredibuild build with IncrediBuild instead of MSBuild
    if 'debug' in sys.argv:
        BuildType = 'Debug'
    if 'build' in sys.argv:
        IsRebuild = False
        Build = 'Build'
    if 'update' in sys.argv:
        Update = True
    if 'copy' in sys.argv:
        Copy = True
    if 'clean' in sys.argv:
        CleanAll = True
    if 'incredibuild' in sys.argv:
        UseMSBuild = False
    if UseMSBuild:
        MSBuild = GetMSBuildPath()
        if not MSBuild:
            Logger.Log('can not find MSBuild.exe', ConsoleColor.Red)
            exit(1)
    else:
        IncrediBuild = GetIncrediBuildPath()
        if not IncrediBuild:
            Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)
            exit(1)
    cwd = os.getcwd()
    Logger.WriteLine('current dir is: {0}, {1}: {2}'.format(cwd, Build, BuildType))
    ret = main()
    end_time = time.time()
    cost_time = end_time-start_time
    Logger.WriteLine('all build cost time: {0:.2f} seconds'.format(cost_time), ConsoleColor.Green)
    exit(ret)
rebuild_storage.py | #!python3
# -*- coding:utf-8 -*-
import os
import sys
import time
import ctypes
import shutil
import subprocess
IsPy3 = sys.version_info[0] >= 3
if IsPy3:
import winreg
else:
import codecs
import _winreg as winreg
BuildType = 'Release'
IsRebuild = True
Build = 'Rebuild'
Update = False
Copy = False
CleanAll = False
BuildTimeout = 30*60
MSBuild = None
IncrediBuild = None
UseMSBuild = True #默认用MSBuild编译,如果为False则用IncrediBuild编译
#不同项目只需修改下面5个变量
SlnFile = '../storage.sln' #相对于本py脚本路径的相对路径
UpdateDir = [] #相对于本py脚本路径的相对路径,填空不更新
ExecBatList = [] #相对于本py脚本路径的相对路径,编译前调用的脚本,可填空,执行bat会先cd到bat目录再执行
MSBuildFirstProjects = [r'storage'] #使用MSBuild需要工程文件在解决方案sln中的路径
# MSBuild首先编译的项目,填空不指定顺序
IncrediBuildFirstProjects = ['storage'] #使用IncrediBuild只需工程名字
#IncrediBuild首先编译的项目,填空不指定顺序
class ConsoleColor():
    '''This class defines the values of color for printing on console window'''
    # Values are Win32 console character attributes (low 4 bits = foreground).
    # This copy was corrupted mid-class ("Ma | rt),"); ConsoleColor's tail and
    # the Coord/SmallRect structures are restored from the intact copy above.
    Black = 0
    DarkBlue = 1
    DarkGreen = 2
    DarkCyan = 3
    DarkRed = 4
    DarkMagenta = 5
    DarkYellow = 6
    Gray = 7
    DarkGray = 8
    Blue = 9
    Green = 10
    Cyan = 11
    Red = 12
    Magenta = 13
    Yellow = 14
    White = 15

class Coord(ctypes.Structure):
    # Mirrors the Win32 COORD structure (console cell coordinates).
    _fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]

class SmallRect(ctypes.Structure):
    # Mirrors the Win32 SMALL_RECT structure (console window rectangle).
    _fields_ = [('Left', ctypes.c_short),
                ('Top', ctypes.c_short),
                ('Right', ctypes.c_short),
                ('Bottom', ctypes.c_short),
                ]
class ConsoleScreenBufferInfo(ctypes.Structure):
_fields_ = [('dwSize', Coord),
('dwCursorPosition', Coord),
('wAttributes', ctypes.c_uint),
('srWindow', SmallRect),
('dwMaximumWindowSize', Coord),
]
class Win32API():
'''Some native methods for python calling'''
StdOutputHandle = -11
ConsoleOutputHandle = None
DefaultColor = None
@staticmethod
def SetConsoleColor(color):
'''Change the text color on console window'''
if not Win32API.DefaultColor:
if not Win32API.ConsoleOutputHandle:
Win32API.ConsoleOutputHandle = ctypes.windll.kernel32.GetStdHandle(Win32API.StdOutputHandle)
bufferInfo = ConsoleScreenBufferInfo()
ctypes.windll.kernel32.GetConsoleScreenBufferInfo(Win32API.ConsoleOutputHandle, ctypes.byref(bufferInfo))
Win32API.DefaultColor = int(bufferInfo.wAttributes & 0xFF)
if IsPy3:
sys.stdout.flush() # need flush stdout in python 3
ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, color)
@staticmethod
def ResetConsoleColor():
'''Reset the default text color on console window'''
if IsPy3:
sys.stdout.flush() # need flush stdout in python 3
ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, Win32API.DefaultColor)
class Logger():
    """Tiny logging helper: writes to the console (optionally colored via
    Win32API) and appends to the file named by Logger.LogFile."""
    LogFile = '@AutomationLog.txt'
    LineSep = '\n'

    @staticmethod
    def Write(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
                      if consoleColor == -1, use default color
        '''
        if printToStdout:
            isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White)
            if isValidColor:
                Win32API.SetConsoleColor(consoleColor)
            try:
                sys.stdout.write(log)
            except UnicodeError as e:
                # Console encoding cannot represent the message; emit a red
                # diagnostic instead and make sure the color gets reset.
                Win32API.SetConsoleColor(ConsoleColor.Red)
                isValidColor = True
                sys.stdout.write(str(type(e)) + ' can\'t print the log!\n')
            if isValidColor:
                Win32API.ResetConsoleColor()
        if not writeToFile:
            return
        if IsPy3:
            logFile = open(Logger.LogFile, 'a+', encoding = 'utf-8')
        else:
            logFile = codecs.open(Logger.LogFile, 'a+', 'utf-8')
        try:
            logFile.write(log)
        except Exception as ex:
            sys.stdout.write('can not write log with exception: {0} {1}'.format(type(ex), ex))
        finally:
            # Fix: always release the handle -- the original closed the file
            # only on the error path and leaked it after every successful write.
            logFile.close()

    @staticmethod
    def WriteLine(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
                      if consoleColor == -1, use default color
        '''
        Logger.Write(log + Logger.LineSep, consoleColor, writeToFile, printToStdout)

    @staticmethod
    def Log(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
                      if consoleColor == -1, use default color
        '''
        # Prefix every entry with a local timestamp: YYYY-MM-DD HH:MM:SS - msg
        t = time.localtime()
        log = '{0}-{1:02}-{2:02} {3:02}:{4:02}:{5:02} - {6}{7}'.format(t.tm_year, t.tm_mon, t.tm_mday,
            t.tm_hour, t.tm_min, t.tm_sec, log, Logger.LineSep)
        Logger.Write(log, consoleColor, writeToFile, printToStdout)

    @staticmethod
    def DeleteLog():
        if os.path.exists(Logger.LogFile):
            os.remove(Logger.LogFile)
def GetMSBuildPath():
cmd = 'call "%VS120COMNTOOLS%..\\..\\VC\\vcvarsall.bat" x86\nwhere msbuild'
ftemp = open('GetMSBuildPath.bat', 'wt')
ftemp.write(cmd)
ftemp.close()
p = subprocess.Popen('GetMSBuildPath.bat', stdout = subprocess.PIPE)
p.wait()
lines = p.stdout.read().decode().splitlines()
os.remove('GetMSBuildPath.bat')
for line in lines:
if 'MSBuild.exe' in line:
return line
def GetIncrediBuildPath():
try:
key=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Classes\IncrediBuild.MonitorFile\shell\open\command')
value, typeId = winreg.QueryValueEx(key, '')
if value:
start = value.find('"')
end = value.find('"', start + 1)
path = value[start+1:end]
buildConsole = os.path.join(os.path.dirname(path), 'BuildConsole.exe')
return buildConsole
except FileNotFoundError as e:
Logger.WriteLine('can not find IncrediBuild', ConsoleColor.Red)
def UpdateCode():
# put git to path first
if not shutil.which('git.exe'):
Logger.Log('找不到git.exe. 请确认安装git时将git\bin目录路径加入到环境变量path中!!!\n, 跳过更新代码!!!', ConsoleColor.Yellow)
return false
oldDir = os.getcwd()
for dir in UpdateDir:
os.chdir(dir)
ret = os.system('git pull')
os.chdir(oldDir)
if ret != 0:
Logger.Log('update {0} failed'.format(dir), ConsoleColor.Yellow)
return false
return True
def BuildProject(cmd):
for i in range(6):
Logger.WriteLine(cmd, ConsoleColor.Cyan)
buildFailed = True
startTime = time.time()
p = subprocess.Popen(cmd) #IncrediBuild不能使用stdout=subprocess.PIPE,否则会导致p.wait()不返回,可能是IncrediBuild的bug
if IsPy3:
try:
buildFailed = p.wait(BuildTimeout)
except subprocess.TimeoutExpired as e:
Logger.Log('{0}'.format(e), ConsoleColor.Yellow)
p.kill()
else:
buildFailed = p.wait()
if not UseMSBuild:
#IncrediBuild的返回值不能说明编译是否成功,需要提取输出判断
fin = open('IncrediBuild.log')
for line in fin:
if line.startswith('=========='):
Logger.Write(line, ConsoleColor.Cyan, writeToFile = True if IsPy3 else False)
if IsPy3:
start = line.find('失败') + 3 #========== 生成: 成功 1 个,失败 0 个,最新 0 个,跳过 0 个 ==========
else:#为了兼容py2做的特殊处理,很恶心
start = 0
n2 = 0
while 1:
if line[start].isdigit():
n2 += 1
if n2 == 2:
break
start = line.find(' ', start)
start += 1
end = line.find(' ', start)
failCount = int(line[start:end])
buildFailed = failCount > 0
else:
Logger.Write(line, ConsoleColor.Red, writeToFile = True if IsPy3 else False, printToStdout = True if ' error ' in line else False)
fin.close()
costTime = time.time() - startTime
Logger.WriteLine('build cost time: {0:.1f}s\n'.format(costTime), ConsoleColor.Green)
if not buildFailed:
return True
return False
def BuildAllProjects():
buildSuccess = False
cmds = []
if UseMSBuild:
if IsRebuild:
if CleanAll:
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Debug'))
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Release'))
else:
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
for project in MSBuildFirstProjects:
cmds.append('{0} {1} /t:{2} /p:Configuration={3} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, project, BuildType))
cmds.append('{0} {1} /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
else: #IncrediBuild
if IsRebuild:
if CleanAll:
cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Debug'))
cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Release'))
else:
cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))
for project in IncrediBuildFirstProjects:
cmds.append('"{0}" {1} /build /prj={2} /cfg="{3}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, project, BuildType))
cmds.append('"{0}" {1} /build /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))
for cmd in cmds:
buildSuccess = BuildProject(cmd)
if not buildSuccess:
break
return buildSuccess
def main():
if UseMSBuild:
if not os.path.exists(MSBuild):
Logger.Log('can not find msbuild.exe', ConsoleColor.Red)
return 1
else:
if not os.path.exists(IncrediBuild):
Logger.Log('can not find msbuild.exe', ConsoleColor.Red)
return 1
dir = os.path.dirname(__file__)
if dir:
oldDir = os.getcwd()
os.chdir(dir)
if Update:
if not UpdateCode():
return 1
Logger.Log('git update succeed', ConsoleColor.Green)
if Copy:
for bat in ExecBatList:
oldBatDir = os.getcwd()
batDir = os.path.dirname(bat)
batName = os.path.basename(bat)
if batDir:
os.chdir(batDir)
start = time.clock()
os.system(batName)
Logger.Log('run "{}" cost {:.1f} seconds'.format(batName, time.clock() - start), ConsoleColor.Green)
if batDir:
os.chdir(oldBatDir)
buildSuccess = BuildAllProjects()
if buildSuccess:
Logger.Log('build succeed', ConsoleColor.Green)
else:
Logger.Log('build failed', ConsoleColor.Red)
if dir:
os.chdir(oldDir)
return 0 if buildSuccess else 1
if __name__ == '__main__':
Logger.Log('run with argv ' + str(sys.argv), ConsoleColor.Green)
sys.argv = [x.lower() for x in sys.argv]
start_time = time.time()
if 'debug' in sys.argv:
BuildType = 'Debug'
if 'build' in sys.argv:
IsRebuild = False
Build = 'Build'
if 'update' in sys.argv:
Update = True
if 'copy' in sys.argv:
Copy = True
if 'clean' in sys.argv:
CleanAll = True
if 'incredibuild' in sys.argv:
UseMSBuild = False
if UseMSBuild:
MSBuild = GetMSBuildPath()
if not MSBuild:
Logger.Log('can not find MSBuild.exe', ConsoleColor.Red)
exit(1)
else:
IncrediBuild = GetIncrediBuildPath()
if not IncrediBuild:
Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)
exit(1)
cwd = os.getcwd()
Logger.WriteLine('current dir is: {0}, {1}: {2}'.format(cwd, Build, BuildType))
ret = main()
end_time = time.time()
cost_time = end_time-start_time
Logger.WriteLine('all build cost time: {0:.2f} seconds'.format(cost_time), ConsoleColor.Green)
exit(ret)
| genta = 13
Yellow = 14
White = 15
class Coord(ctypes.Structure):
_fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]
class SmallRect(ctypes.Structure):
_fields_ = [('Left', ctypes.c_short),
('Top', ctypes.c_short),
('Right', ctypes.c_short),
('Bottom', ctypes.c_sho | identifier_body |
rebuild_storage.py | #!python3
# -*- coding:utf-8 -*-
import os
import sys
import time
import ctypes
import shutil
import subprocess
IsPy3 = sys.version_info[0] >= 3
if IsPy3:
import winreg
else:
import codecs
import _winreg as winreg
BuildType = 'Release'
IsRebuild = True
Build = 'Rebuild'
Update = False
Copy = False
CleanAll = False
BuildTimeout = 30*60
MSBuild = None
IncrediBuild = None
UseMSBuild = True #默认用MSBuild编译,如果为False则用IncrediBuild编译
#不同项目只需修改下面5个变量
SlnFile = '../storage.sln' #相对于本py脚本路径的相对路径
UpdateDir = [] #相对于本py脚本路径的相对路径,填空不更新
ExecBatList = [] #相对于本py脚本路径的相对路径,编译前调用的脚本,可填空,执行bat会先cd到bat目录再执行
MSBuildFirstProjects = [r'storage'] #使用MSBuild需要工程文件在解决方案sln中的路径
# MSBuild首先编译的项目,填空不指定顺序
IncrediBuildFirstProjects = ['storage'] #使用IncrediBuild只需工程名字
#IncrediBuild首先编译的项目,填空不指定顺序
class ConsoleColor():
'''This class defines the values of color for printing on console window'''
Black = 0
DarkBlue = 1
DarkGreen = 2
DarkCyan = 3
DarkRed = 4
DarkMagenta = 5
DarkYellow = 6
Gray = 7
DarkGray = 8
Blue = 9
Green = 10
Cyan = 11
Red = 12
Magenta = 13
Yellow = 14
White = 15
class Coord(ctypes.Structure):
_fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]
class SmallRect(ctypes.Structure):
_fields_ = [('Left', ctypes.c_short),
('Top', ctypes.c_short),
('Right', ctypes.c_short),
('Bottom', ctypes.c_short),
]
class ConsoleScreenBufferInfo(ctypes.Structure):
_fields_ = [('dwSize', Coord),
('dwCursorPosition', Coord),
('wAttributes', ctypes.c_uint),
('srWindow', SmallRect),
('dwMaximumWindowSize', Coord),
]
class Win32API():
'''Some native methods for python calling'''
StdOutputHandle = -11
ConsoleOutputHandle = None
DefaultColor = None
@staticmethod
def SetConsoleColor(color):
'''Change the text color on console window'''
if not Win32API.DefaultColor:
if not Win32API.ConsoleOutputHandle:
Win32API.ConsoleOutputHandle = ctypes.windll.kernel32.GetStdHandle(Win32API.StdOutputHandle)
bufferInfo = ConsoleScreenBufferInfo()
ctypes.windll.kernel32.GetConsoleScreenBufferInfo(Win32API.ConsoleOutputHandle, ctypes.byref(bufferInfo))
Win32API.DefaultColor = int(bufferInfo.wAttributes & 0xFF)
if IsPy3:
sys.stdout.flush() # need flush stdout in python 3
ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, color)
@staticmethod
def ResetConsoleColor():
'''Reset the default text color on console window'''
if IsPy3:
sys.stdout.flush() # need flush stdout in python 3
ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, Win32API.DefaultColor)
class Logger():
LogFile = '@AutomationLog.txt'
LineSep = '\n'
@staticmethod
def Write(log, consoleColor = -1, writeToFile = True, printToStdout = True):
'''
consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
if consoleColor == -1, use default color
'''
if printToStdout:
isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White)
if isValidColor:
Win32API.SetConsoleColor(consoleColor)
try:
sys.stdout.write(log)
except UnicodeError as e:
Win32API.SetConsoleColor(ConsoleColor.Red)
isValidColor = True
sys.stdout.write(str(type(e)) + ' can\'t print the log!\n')
if isValidColor:
Win32API.ResetConsoleColor()
if not writeToFile:
return
if IsPy3:
logFile = open(Logger.LogFile, 'a+', encoding = 'utf-8')
else:
logFile = codecs.open(Logger.LogFile, 'a+', 'utf-8')
try:
logFile.write(log)
# logFile.flush() # need flush in python 3, otherwise log won't be saved
except Exception as ex:
logFile.close()
sys.stdout.write('can not write log with exception: {0} {1}'.format(type(ex), ex))
@staticmethod
def WriteLine(log, consoleColor = -1, writeToFile = True, printToStdout = True):
'''
consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
if consoleColor == -1, use default color
'''
Logger.Write(log + Logger.LineSep, consoleColor, writeToFile, printToStdout)
@staticmethod
def Log(log, consoleColor = -1, writeToFile = True, printToStdout = True):
'''
consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
if consoleColor == -1, use default color
'''
t = time.localtime()
log = '{0}-{1:02}-{2:02} {3:02}:{4:02}:{5:02} - {6}{7}'.format(t.tm_year, t.tm_mon, t.tm_mday,
t.tm_hour, t.tm_min, t.tm_sec, log, Logger.LineSep)
Logger.Write(log, consoleColor, writeToFile, printToStdout)
@staticmethod
def DeleteLog():
if os.path.exists(Logger.LogFile):
os.remove(Logger.LogFile)
def GetMSBuildPath():
cmd = 'call "%VS120COMNTOOLS%..\\..\\VC\\vcvarsall.bat" x86\nwhere msbuild'
ftemp = open('GetMSBuildPath.bat', 'wt')
ftemp.write(cmd)
ftemp.close()
p = subprocess.Popen('GetMSBuildPath.bat', stdout = subprocess.PIPE)
p.wait()
lines = p.stdout.read().decode().splitlines()
os.remove('GetMSBuildPath.bat')
for line in lines:
if 'MSBuild.exe' in line:
return line
def GetIncrediBuildPath():
try:
key=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Classes\IncrediBuild.MonitorFile\shell\open\command')
value, typeId = winreg.QueryValueEx(key, '')
if value:
start = value.find('"')
end = value.find('"', start + 1)
path = value[start+1:end]
buildConsole = os.path.join(os.path.dirname(path), 'BuildConsole.exe')
return buildConsole
except FileNotFoundError as e:
Logger.WriteLine('can not find IncrediBuild', ConsoleColor.Red)
def UpdateCode():
# put git to path first
if not shutil.which('git.exe'):
Logger.Log('找不到git.exe. 请确认安装git时将git\bin目录路径加入到环境变量path中!!!\n, 跳过更新代码!!!', ConsoleColor.Yellow)
return false
oldDir = os.getcwd()
for dir in UpdateDir:
os.chdir(dir)
ret = os. | t pull')
os.chdir(oldDir)
if ret != 0:
Logger.Log('update {0} failed'.format(dir), ConsoleColor.Yellow)
return false
return True
def BuildProject(cmd):
for i in range(6):
Logger.WriteLine(cmd, ConsoleColor.Cyan)
buildFailed = True
startTime = time.time()
p = subprocess.Popen(cmd) #IncrediBuild不能使用stdout=subprocess.PIPE,否则会导致p.wait()不返回,可能是IncrediBuild的bug
if IsPy3:
try:
buildFailed = p.wait(BuildTimeout)
except subprocess.TimeoutExpired as e:
Logger.Log('{0}'.format(e), ConsoleColor.Yellow)
p.kill()
else:
buildFailed = p.wait()
if not UseMSBuild:
#IncrediBuild的返回值不能说明编译是否成功,需要提取输出判断
fin = open('IncrediBuild.log')
for line in fin:
if line.startswith('=========='):
Logger.Write(line, ConsoleColor.Cyan, writeToFile = True if IsPy3 else False)
if IsPy3:
start = line.find('失败') + 3 #========== 生成: 成功 1 个,失败 0 个,最新 0 个,跳过 0 个 ==========
else:#为了兼容py2做的特殊处理,很恶心
start = 0
n2 = 0
while 1:
if line[start].isdigit():
n2 += 1
if n2 == 2:
break
start = line.find(' ', start)
start += 1
end = line.find(' ', start)
failCount = int(line[start:end])
buildFailed = failCount > 0
else:
Logger.Write(line, ConsoleColor.Red, writeToFile = True if IsPy3 else False, printToStdout = True if ' error ' in line else False)
fin.close()
costTime = time.time() - startTime
Logger.WriteLine('build cost time: {0:.1f}s\n'.format(costTime), ConsoleColor.Green)
if not buildFailed:
return True
return False
def BuildAllProjects():
buildSuccess = False
cmds = []
if UseMSBuild:
if IsRebuild:
if CleanAll:
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Debug'))
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Release'))
else:
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
for project in MSBuildFirstProjects:
cmds.append('{0} {1} /t:{2} /p:Configuration={3} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, project, BuildType))
cmds.append('{0} {1} /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
else: #IncrediBuild
if IsRebuild:
if CleanAll:
cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Debug'))
cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Release'))
else:
cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))
for project in IncrediBuildFirstProjects:
cmds.append('"{0}" {1} /build /prj={2} /cfg="{3}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, project, BuildType))
cmds.append('"{0}" {1} /build /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))
for cmd in cmds:
buildSuccess = BuildProject(cmd)
if not buildSuccess:
break
return buildSuccess
def main():
if UseMSBuild:
if not os.path.exists(MSBuild):
Logger.Log('can not find msbuild.exe', ConsoleColor.Red)
return 1
else:
if not os.path.exists(IncrediBuild):
Logger.Log('can not find msbuild.exe', ConsoleColor.Red)
return 1
dir = os.path.dirname(__file__)
if dir:
oldDir = os.getcwd()
os.chdir(dir)
if Update:
if not UpdateCode():
return 1
Logger.Log('git update succeed', ConsoleColor.Green)
if Copy:
for bat in ExecBatList:
oldBatDir = os.getcwd()
batDir = os.path.dirname(bat)
batName = os.path.basename(bat)
if batDir:
os.chdir(batDir)
start = time.clock()
os.system(batName)
Logger.Log('run "{}" cost {:.1f} seconds'.format(batName, time.clock() - start), ConsoleColor.Green)
if batDir:
os.chdir(oldBatDir)
buildSuccess = BuildAllProjects()
if buildSuccess:
Logger.Log('build succeed', ConsoleColor.Green)
else:
Logger.Log('build failed', ConsoleColor.Red)
if dir:
os.chdir(oldDir)
return 0 if buildSuccess else 1
if __name__ == '__main__':
Logger.Log('run with argv ' + str(sys.argv), ConsoleColor.Green)
sys.argv = [x.lower() for x in sys.argv]
start_time = time.time()
if 'debug' in sys.argv:
BuildType = 'Debug'
if 'build' in sys.argv:
IsRebuild = False
Build = 'Build'
if 'update' in sys.argv:
Update = True
if 'copy' in sys.argv:
Copy = True
if 'clean' in sys.argv:
CleanAll = True
if 'incredibuild' in sys.argv:
UseMSBuild = False
if UseMSBuild:
MSBuild = GetMSBuildPath()
if not MSBuild:
Logger.Log('can not find MSBuild.exe', ConsoleColor.Red)
exit(1)
else:
IncrediBuild = GetIncrediBuildPath()
if not IncrediBuild:
Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)
exit(1)
cwd = os.getcwd()
Logger.WriteLine('current dir is: {0}, {1}: {2}'.format(cwd, Build, BuildType))
ret = main()
end_time = time.time()
cost_time = end_time-start_time
Logger.WriteLine('all build cost time: {0:.2f} seconds'.format(cost_time), ConsoleColor.Green)
exit(ret)
| system('gi | identifier_name |
validate-uncompressed.js | /**
* @copyright Copyright (C) 2005 - 2012 Open Source Matters, Inc. All rights reserved.
* @license GNU General Public License version 2 or later; see LICENSE.txt
*/
Object.append(Browser.Features, {
inputemail: (function() {
var i = document.createElement("input");
i.setAttribute("type", "email");
return i.type !== "text";
})()
});
/**
* Unobtrusive Form Validation library
*
* Inspired by: Chris Campbell <www.particletree.com>
*
* @package Joomla.Framework
* @subpackage Forms
* @since 1.5
*/
var JFormValidator = new Class({
initialize: function()
{
// Initialize variables
this.handlers = Object();
this.custom = Object();
// Default handlers
this.setHandler('username',
function (value) {
regex = new RegExp("[\<|\>|\"|\'|\%|\;|\(|\)|\&]", "i");
return !regex.test(value);
}
);
this.setHandler('password',
function (value) {
regex=/^\S[\S ]{2,98}\S$/;
return regex.test(value);
}
);
this.setHandler('numeric',
function (value) {
regex=/^(\d|-)?(\d|,)*\.?\d*$/;
return regex.test(value);
}
);
this.setHandler('email',
function (value) {
regex=/^[a-zA-Z0-9._-]+(\+[a-zA-Z0-9._-]+)*@([a-zA-Z0-9.-]+\.)+[a-zA-Z0-9.-]{2,4}$/;
return regex.test(value);
}
);
// Attach to forms with class 'form-validate'
var forms = $$('form.form-validate');
forms.each(function(form){ this.attachToForm(form); }, this);
},
setHandler: function(name, fn, en)
{
en = (en == '') ? true : en;
this.handlers[name] = { enabled: en, exec: fn };
},
attachToForm: function(form)
{
// Iterate through the form object and attach the validate method to all input fields.
form.getElements('input,textarea,select,button').each(function(el){
if (el.hasClass('required')) {
el.set('aria-required', 'true');
el.set('required', 'required');
}
if ((document.id(el).get('tag') == 'input' || document.id(el).get('tag') == 'button') && document.id(el).get('type') == 'submit') {
if (el.hasClass('validate')) {
el.onclick = function(){return document.formvalidator.isValid(this.form);};
}
} else {
el.addEvent('blur', function(){return document.formvalidator.validate(this);});
if (el.hasClass('validate-email') && Browser.Features.inputemail) {
el.type = 'email';
}
}
});
},
validate: function(el)
{
el = document.id(el);
// Ignore the element if its currently disabled, because are not submitted for the http-request. For those case return always true.
if(el.get('disabled')) {
this.handleResponse(true, el);
return true;
}
// If the field is required make sure it has a value
if (el.hasClass('required')) {
if (el.get('tag')=='fieldset' && (el.hasClass('radio') || el.hasClass('checkboxes'))) {
for(var i=0;;i++) {
if (document.id(el.get('id')+i)) {
if (document.id(el.get('id')+i).checked) {
break;
}
}
else {
this.handleResponse(false, el);
return false;
}
}
}
else if (!(el.get('value'))) {
this.handleResponse(false, el);
return false;
}
}
// Only validate the field if the validate class is set
var handler = (el.className && el.className.search(/validate-([a-zA-Z0-9\_\-]+)/) != -1) ? el.className.match(/validate-([a-zA-Z0-9\_\-]+)/)[1] : "";
if (handler == '') {
this.handleResponse(true, el);
return true;
}
// Check the additional validation types
if ((handler) && (handler != 'none') && (this.handlers[handler]) && el.get('value')) {
// Execute the validation handler and return result
if (this.handlers[handler].exec(el.get('value')) != true) {
this.handleResponse(false, el);
return false;
}
}
// Return validation state
this.handleResponse(true, el);
return true;
},
isValid: function(form)
{
var valid = true;
// Validate form fields
var elements = form.getElements('fieldset').concat(Array.from(form.elements));
for (var i=0;i < elements.length; i++) { | if (this.validate(elements[i]) == false) {
valid = false;
}
}
// Run custom form validators if present
new Hash(this.custom).each(function(validator){
if (validator.exec() != true) {
valid = false;
}
});
return valid;
},
handleResponse: function(state, el)
{
// Find the label object for the given field if it exists
if (!(el.labelref)) {
var labels = $$('label');
labels.each(function(label){
if (label.get('for') == el.get('id')) {
el.labelref = label;
}
});
}
// Set the element and its label (if exists) invalid state
if (state == false) {
el.addClass('invalid');
el.set('aria-invalid', 'true');
if (el.labelref) {
document.id(el.labelref).addClass('invalid');
document.id(el.labelref).set('aria-invalid', 'true');
}
} else {
el.removeClass('invalid');
el.set('aria-invalid', 'false');
if (el.labelref) {
document.id(el.labelref).removeClass('invalid');
document.id(el.labelref).set('aria-invalid', 'false');
}
}
}
});
document.formvalidator = null;
window.addEvent('domready', function(){
document.formvalidator = new JFormValidator();
}); | random_line_split |
|
validate-uncompressed.js | /**
* @copyright Copyright (C) 2005 - 2012 Open Source Matters, Inc. All rights reserved.
* @license GNU General Public License version 2 or later; see LICENSE.txt
*/
Object.append(Browser.Features, {
inputemail: (function() {
var i = document.createElement("input");
i.setAttribute("type", "email");
return i.type !== "text";
})()
});
/**
* Unobtrusive Form Validation library
*
* Inspired by: Chris Campbell <www.particletree.com>
*
* @package Joomla.Framework
* @subpackage Forms
* @since 1.5
*/
var JFormValidator = new Class({
initialize: function()
{
// Initialize variables
this.handlers = Object();
this.custom = Object();
// Default handlers
this.setHandler('username',
function (value) {
regex = new RegExp("[\<|\>|\"|\'|\%|\;|\(|\)|\&]", "i");
return !regex.test(value);
}
);
this.setHandler('password',
function (value) {
regex=/^\S[\S ]{2,98}\S$/;
return regex.test(value);
}
);
this.setHandler('numeric',
function (value) {
regex=/^(\d|-)?(\d|,)*\.?\d*$/;
return regex.test(value);
}
);
this.setHandler('email',
function (value) {
regex=/^[a-zA-Z0-9._-]+(\+[a-zA-Z0-9._-]+)*@([a-zA-Z0-9.-]+\.)+[a-zA-Z0-9.-]{2,4}$/;
return regex.test(value);
}
);
// Attach to forms with class 'form-validate'
var forms = $$('form.form-validate');
forms.each(function(form){ this.attachToForm(form); }, this);
},
setHandler: function(name, fn, en)
{
en = (en == '') ? true : en;
this.handlers[name] = { enabled: en, exec: fn };
},
attachToForm: function(form)
{
// Iterate through the form object and attach the validate method to all input fields.
form.getElements('input,textarea,select,button').each(function(el){
if (el.hasClass('required')) {
el.set('aria-required', 'true');
el.set('required', 'required');
}
if ((document.id(el).get('tag') == 'input' || document.id(el).get('tag') == 'button') && document.id(el).get('type') == 'submit') {
if (el.hasClass('validate')) {
el.onclick = function(){return document.formvalidator.isValid(this.form);};
}
} else {
el.addEvent('blur', function(){return document.formvalidator.validate(this);});
if (el.hasClass('validate-email') && Browser.Features.inputemail) {
el.type = 'email';
}
}
});
},
validate: function(el)
{
el = document.id(el);
// Ignore the element if its currently disabled, because are not submitted for the http-request. For those case return always true.
if(el.get('disabled')) {
this.handleResponse(true, el);
return true;
}
// If the field is required make sure it has a value
if (el.hasClass('required')) {
if (el.get('tag')=='fieldset' && (el.hasClass('radio') || el.hasClass('checkboxes'))) |
else if (!(el.get('value'))) {
this.handleResponse(false, el);
return false;
}
}
// Only validate the field if the validate class is set
var handler = (el.className && el.className.search(/validate-([a-zA-Z0-9\_\-]+)/) != -1) ? el.className.match(/validate-([a-zA-Z0-9\_\-]+)/)[1] : "";
if (handler == '') {
this.handleResponse(true, el);
return true;
}
// Check the additional validation types
if ((handler) && (handler != 'none') && (this.handlers[handler]) && el.get('value')) {
// Execute the validation handler and return result
if (this.handlers[handler].exec(el.get('value')) != true) {
this.handleResponse(false, el);
return false;
}
}
// Return validation state
this.handleResponse(true, el);
return true;
},
isValid: function(form)
{
var valid = true;
// Validate form fields
var elements = form.getElements('fieldset').concat(Array.from(form.elements));
for (var i=0;i < elements.length; i++) {
if (this.validate(elements[i]) == false) {
valid = false;
}
}
// Run custom form validators if present
new Hash(this.custom).each(function(validator){
if (validator.exec() != true) {
valid = false;
}
});
return valid;
},
handleResponse: function(state, el)
{
// Find the label object for the given field if it exists
if (!(el.labelref)) {
var labels = $$('label');
labels.each(function(label){
if (label.get('for') == el.get('id')) {
el.labelref = label;
}
});
}
// Set the element and its label (if exists) invalid state
if (state == false) {
el.addClass('invalid');
el.set('aria-invalid', 'true');
if (el.labelref) {
document.id(el.labelref).addClass('invalid');
document.id(el.labelref).set('aria-invalid', 'true');
}
} else {
el.removeClass('invalid');
el.set('aria-invalid', 'false');
if (el.labelref) {
document.id(el.labelref).removeClass('invalid');
document.id(el.labelref).set('aria-invalid', 'false');
}
}
}
});
document.formvalidator = null;
window.addEvent('domready', function(){
document.formvalidator = new JFormValidator();
}); | {
for(var i=0;;i++) {
if (document.id(el.get('id')+i)) {
if (document.id(el.get('id')+i).checked) {
break;
}
}
else {
this.handleResponse(false, el);
return false;
}
}
} | conditional_block |
index.d.ts | // Type definitions for retry 0.10
// Project: https://github.com/tim-kos/node-retry
// Definitions by: Stan Goldmann <https://github.com/krenor>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
export interface RetryOperation {
/**
* Defines the function that is to be retried and executes it for the first time right away.
*
* @param callback The function that is to be retried
* @param callback.current Number of attempts callback has been executed so far.
* @param [options.timeout] A timeout in milliseconds.
* @param [options.callback] Callback to execute when the operation takes longer than the timeout.
*
*/
attempt(callback: (current: number) => void, options?: AttemptOptions): void;
/**
* Returns false when no error value is given, or the maximum amount of retries has been reached.
* Otherwise it returns true, and retries the operation after the timeout for the current attempt number.
*
*
*/
retry(err?: Error): boolean;
/**
* The number of attempts it took to call the retrying function before it was successful.
*
*/
attempts(): number;
/**
* A reference to the error object that occured most frequently.
* Errors are compared using the error.message property.
*
* @return If no errors occured so far the value will be null.
*/
mainError(): Error | null;
/**
* Returns an array of all errors that have been passed to RetryOperation.retry() so far.
*
*/
errors(): Error[];
/**
* Stops the operation being retried. Useful for aborting the operation on a fatal error etc.
*/
stop(): void;
}
export interface AttemptOptions {
timeout?: number;
callback?(): void;
}
/**
* Create a new RetryOperation object.
*
* @param [options.retries=10] The maximum amount of times to retry the operation.
* @param [options.factor=2] The exponential factor to use.
* @param [options.minTimeout=1000] The number of milliseconds before starting the first retry.
* @param [options.maxTimeout=Infinity] The maximum number of milliseconds between two retries.
* @param [options.randomize=false] Randomizes the timeouts by multiplying a factor between 1-2.
* @param [options.forever=false] Wether to retry forever.
* @param [options.unref=false] Wether to unref the setTimeout's.
*
*/
export function operation(options?: OperationOptions): RetryOperation;
export interface OperationOptions {
/**
* The maximum amount of times to retry the operation.
* @default 10
*/
retries?: number;
/**
* The exponential factor to use.
* @default 2
*/
factor?: number;
/**
* The number of milliseconds before starting the first retry.
* @default 1000
*/
minTimeout?: number;
/**
* The maximum number of milliseconds between two retries.
* @default Infinity
*/
maxTimeout?: number;
/**
* Randomizes the timeouts by multiplying a factor between 1-2.
* @default false
*/
randomize?: boolean;
forever?: boolean;
unref?: boolean;
}
/** Get an array with timeouts and their return values in milliseconds. */
export function timeouts(options?: TimeoutsOptions): number[];
export interface TimeoutsOptions {
retries?: number;
factor?: number;
minTimeout?: number;
maxTimeout?: number;
randomize?: boolean;
}
/**
* Create a new timeout (in milliseconds) based on the given parameters.
*
* @param attempt Representing for which retry the timeout should be calculated.
* @return timeout
*/
export function createTimeout(attempt: number, options?: CreateTimeoutOptions): number;
export interface CreateTimeoutOptions {
/**
* The exponential factor to use.
* @default 2
*/
factor?: number;
/**
* The number of milliseconds before starting the first retry.
* @default 1000
*/
minTimeout?: number;
/**
* The maximum number of milliseconds between two retries.
* @default Infinity
*/
maxTimeout?: number;
/**
* Randomizes the timeouts by multiplying a factor between 1-2.
* @default false
*/
randomize?: boolean;
}
/**
* Wrap all functions of the object with retry.
*
* @param object The object to be wrapped
* @param methods Methods which need to be wrapped
*
*/
export function wrap(object: object, options?: WrapOptions, methods?: string[]): void;
export interface WrapOptions {
/**
* The maximum amount of times to retry the operation.
* @default 10
*/
retries?: number;
/**
* The exponential factor to use.
* @default 2
*/
factor?: number;
/**
* The number of milliseconds before starting the first retry.
* @default 1000
*/
minTimeout?: number;
/**
* The maximum number of milliseconds between two retries.
* @default Infinity
*/
maxTimeout?: number;
/**
* Randomizes the timeouts by multiplying a factor between 1-2.
* @default false
*/ | forever?: boolean;
/**
* Whether to unref the setTimeout's.
* @default false
*/
unref?: boolean;
} | randomize?: boolean;
/**
* Whether to retry forever.
* @default false
*/ | random_line_split |
v1.ts | /**
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {AxiosPromise} from 'axios';
import {Compute, JWT, OAuth2Client, UserRefreshClient} from 'google-auth-library';
import {BodyResponseCallback, createAPIRequest, GlobalOptions, GoogleConfigurable, MethodOptions} from 'googleapis-common';
// tslint:disable: no-any
// tslint:disable: class-name
// tslint:disable: variable-name
// tslint:disable: jsdoc-format
// tslint:disable: no-namespace
export namespace acceleratedmobilepageurl_v1 {
export interface Options extends GlobalOptions {
version: 'v1';
}
/**
* Accelerated Mobile Pages (AMP) URL API
*
* Retrieves the list of AMP URLs (and equivalent AMP Cache URLs) for a given
* list of public URL(s).
*
* @example
* const {google} = require('googleapis');
* const acceleratedmobilepageurl = google.acceleratedmobilepageurl('v1');
*
* @namespace acceleratedmobilepageurl
* @type {Function}
* @version v1
* @variation v1
* @param {object=} options Options for Acceleratedmobilepageurl
*/
export class Acceleratedmobilepageurl {
_options: GlobalOptions;
google?: GoogleConfigurable;
root = this;
ampUrls: Resource$Ampurls;
constructor(options: GlobalOptions, google?: GoogleConfigurable) {
this._options = options || {};
this.google = google;
this.getRoot.bind(this);
this.ampUrls = new Resource$Ampurls(this);
}
getRoot() {
return this.root;
}
}
/**
* AMP URL response for a requested URL.
*/
export interface Schema$AmpUrl {
/**
* The AMP URL pointing to the publisher's web server.
*/
ampUrl?: string;
/**
* The [AMP Cache URL](/amp/cache/overview#amp-cache-url-format) pointing to
* the cached document in the Google AMP Cache.
*/
cdnAmpUrl?: string;
/**
* The original non-AMP URL.
*/
originalUrl?: string;
}
/**
* AMP URL Error resource for a requested URL that couldn't be found.
*/
export interface Schema$AmpUrlError {
/**
* The error code of an API call.
*/
errorCode?: string;
/**
* An optional descriptive error message.
*/
errorMessage?: string;
/**
* The original non-AMP URL.
*/
originalUrl?: string;
}
/**
* AMP URL request for a batch of URLs.
*/
export interface Schema$BatchGetAmpUrlsRequest {
/**
* The lookup_strategy being requested.
*/
lookupStrategy?: string;
/**
* List of URLs to look up for the paired AMP URLs. The URLs are
* case-sensitive. Up to 50 URLs per lookup (see [Usage
* Limits](/amp/cache/reference/limits)).
*/
urls?: string[];
}
/**
* Batch AMP URL response.
*/
export interface Schema$BatchGetAmpUrlsResponse {
/**
* For each URL in BatchAmpUrlsRequest, the URL response. The response might
* not be in the same order as URLs in the batch request. If
* BatchAmpUrlsRequest contains duplicate URLs, AmpUrl is generated only
* once.
*/
ampUrls?: Schema$AmpUrl[];
/**
* The errors for requested URLs that have no AMP URL.
*/
urlErrors?: Schema$AmpUrlError[];
}
export class Resource$Ampurls {
root: Acceleratedmobilepageurl;
constructor(root: Acceleratedmobilepageurl) {
this.root = root;
this.getRoot.bind(this);
}
getRoot() |
/**
* acceleratedmobilepageurl.ampUrls.batchGet
* @desc Returns AMP URL(s) and equivalent [AMP Cache
* URL(s)](/amp/cache/overview#amp-cache-url-format).
* @alias acceleratedmobilepageurl.ampUrls.batchGet
* @memberOf! ()
*
* @param {object} params Parameters for request
* @param {().BatchGetAmpUrlsRequest} params.resource Request body data
* @param {object} [options] Optionally override request options, such as `url`, `method`, and `encoding`.
* @param {callback} callback The callback that handles the response.
* @return {object} Request object
*/
batchGet(
params?: Params$Resource$Ampurls$Batchget,
options?: MethodOptions): AxiosPromise<Schema$BatchGetAmpUrlsResponse>;
batchGet(
params: Params$Resource$Ampurls$Batchget,
options: MethodOptions|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>): void;
batchGet(
params: Params$Resource$Ampurls$Batchget,
callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>): void;
batchGet(callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>):
void;
batchGet(
paramsOrCallback?: Params$Resource$Ampurls$Batchget|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
optionsOrCallback?: MethodOptions|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
callback?: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>):
void|AxiosPromise<Schema$BatchGetAmpUrlsResponse> {
let params = (paramsOrCallback || {}) as Params$Resource$Ampurls$Batchget;
let options = (optionsOrCallback || {}) as MethodOptions;
if (typeof paramsOrCallback === 'function') {
callback = paramsOrCallback;
params = {} as Params$Resource$Ampurls$Batchget;
options = {};
}
if (typeof optionsOrCallback === 'function') {
callback = optionsOrCallback;
options = {};
}
const rootUrl =
options.rootUrl || 'https://acceleratedmobilepageurl.googleapis.com/';
const parameters = {
options: Object.assign(
{
url: (rootUrl + '/v1/ampUrls:batchGet')
.replace(/([^:]\/)\/+/g, '$1'),
method: 'POST'
},
options),
params,
requiredParams: [],
pathParams: [],
context: this.getRoot()
};
if (callback) {
createAPIRequest<Schema$BatchGetAmpUrlsResponse>(parameters, callback);
} else {
return createAPIRequest<Schema$BatchGetAmpUrlsResponse>(parameters);
}
}
}
export interface Params$Resource$Ampurls$Batchget {
/**
* Auth client or API Key for the request
*/
auth?: string|OAuth2Client|JWT|Compute|UserRefreshClient;
/**
* Request body metadata
*/
requestBody?: Schema$BatchGetAmpUrlsRequest;
}
}
| {
return this.root;
} | identifier_body |
v1.ts | /**
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {AxiosPromise} from 'axios';
import {Compute, JWT, OAuth2Client, UserRefreshClient} from 'google-auth-library';
import {BodyResponseCallback, createAPIRequest, GlobalOptions, GoogleConfigurable, MethodOptions} from 'googleapis-common';
// tslint:disable: no-any
// tslint:disable: class-name
// tslint:disable: variable-name
// tslint:disable: jsdoc-format
// tslint:disable: no-namespace
export namespace acceleratedmobilepageurl_v1 {
export interface Options extends GlobalOptions {
version: 'v1';
}
/**
* Accelerated Mobile Pages (AMP) URL API
*
* Retrieves the list of AMP URLs (and equivalent AMP Cache URLs) for a given
* list of public URL(s).
*
* @example
* const {google} = require('googleapis');
* const acceleratedmobilepageurl = google.acceleratedmobilepageurl('v1');
*
* @namespace acceleratedmobilepageurl
* @type {Function}
* @version v1
* @variation v1
* @param {object=} options Options for Acceleratedmobilepageurl
*/
export class Acceleratedmobilepageurl {
_options: GlobalOptions;
google?: GoogleConfigurable;
root = this;
ampUrls: Resource$Ampurls;
constructor(options: GlobalOptions, google?: GoogleConfigurable) {
this._options = options || {};
this.google = google;
this.getRoot.bind(this);
this.ampUrls = new Resource$Ampurls(this);
}
getRoot() {
return this.root;
}
}
/**
* AMP URL response for a requested URL.
*/
export interface Schema$AmpUrl {
/**
* The AMP URL pointing to the publisher's web server.
*/
ampUrl?: string;
/**
* The [AMP Cache URL](/amp/cache/overview#amp-cache-url-format) pointing to
* the cached document in the Google AMP Cache.
*/
cdnAmpUrl?: string;
/**
* The original non-AMP URL.
*/
originalUrl?: string;
}
/**
* AMP URL Error resource for a requested URL that couldn't be found.
*/
export interface Schema$AmpUrlError {
/**
* The error code of an API call.
*/
errorCode?: string;
/**
* An optional descriptive error message.
*/
errorMessage?: string;
/**
* The original non-AMP URL.
*/
originalUrl?: string;
}
/**
* AMP URL request for a batch of URLs.
*/
export interface Schema$BatchGetAmpUrlsRequest {
/**
* The lookup_strategy being requested.
*/
lookupStrategy?: string;
/**
* List of URLs to look up for the paired AMP URLs. The URLs are
* case-sensitive. Up to 50 URLs per lookup (see [Usage
* Limits](/amp/cache/reference/limits)).
*/
urls?: string[];
}
/**
* Batch AMP URL response.
*/
export interface Schema$BatchGetAmpUrlsResponse {
/**
* For each URL in BatchAmpUrlsRequest, the URL response. The response might
* not be in the same order as URLs in the batch request. If
* BatchAmpUrlsRequest contains duplicate URLs, AmpUrl is generated only
* once.
*/
ampUrls?: Schema$AmpUrl[];
/**
* The errors for requested URLs that have no AMP URL.
*/
urlErrors?: Schema$AmpUrlError[];
}
export class Resource$Ampurls {
root: Acceleratedmobilepageurl;
constructor(root: Acceleratedmobilepageurl) {
this.root = root;
this.getRoot.bind(this);
}
getRoot() {
return this.root;
}
/**
* acceleratedmobilepageurl.ampUrls.batchGet
* @desc Returns AMP URL(s) and equivalent [AMP Cache
* URL(s)](/amp/cache/overview#amp-cache-url-format).
* @alias acceleratedmobilepageurl.ampUrls.batchGet
* @memberOf! ()
*
* @param {object} params Parameters for request
* @param {().BatchGetAmpUrlsRequest} params.resource Request body data
* @param {object} [options] Optionally override request options, such as `url`, `method`, and `encoding`.
* @param {callback} callback The callback that handles the response.
* @return {object} Request object
*/
batchGet(
params?: Params$Resource$Ampurls$Batchget,
options?: MethodOptions): AxiosPromise<Schema$BatchGetAmpUrlsResponse>;
batchGet(
params: Params$Resource$Ampurls$Batchget,
options: MethodOptions|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>): void;
batchGet(
params: Params$Resource$Ampurls$Batchget,
callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>): void;
batchGet(callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>):
void;
batchGet(
paramsOrCallback?: Params$Resource$Ampurls$Batchget|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
optionsOrCallback?: MethodOptions|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
callback?: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>):
void|AxiosPromise<Schema$BatchGetAmpUrlsResponse> {
let params = (paramsOrCallback || {}) as Params$Resource$Ampurls$Batchget;
let options = (optionsOrCallback || {}) as MethodOptions;
if (typeof paramsOrCallback === 'function') {
callback = paramsOrCallback;
params = {} as Params$Resource$Ampurls$Batchget;
options = {};
}
if (typeof optionsOrCallback === 'function') {
callback = optionsOrCallback;
options = {};
}
const rootUrl =
options.rootUrl || 'https://acceleratedmobilepageurl.googleapis.com/';
const parameters = {
options: Object.assign(
{
url: (rootUrl + '/v1/ampUrls:batchGet')
.replace(/([^:]\/)\/+/g, '$1'),
method: 'POST'
},
options),
params,
requiredParams: [],
pathParams: [],
context: this.getRoot()
};
if (callback) {
createAPIRequest<Schema$BatchGetAmpUrlsResponse>(parameters, callback);
} else |
}
}
export interface Params$Resource$Ampurls$Batchget {
/**
* Auth client or API Key for the request
*/
auth?: string|OAuth2Client|JWT|Compute|UserRefreshClient;
/**
* Request body metadata
*/
requestBody?: Schema$BatchGetAmpUrlsRequest;
}
}
| {
return createAPIRequest<Schema$BatchGetAmpUrlsResponse>(parameters);
} | conditional_block |
v1.ts | /**
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {AxiosPromise} from 'axios';
import {Compute, JWT, OAuth2Client, UserRefreshClient} from 'google-auth-library';
import {BodyResponseCallback, createAPIRequest, GlobalOptions, GoogleConfigurable, MethodOptions} from 'googleapis-common';
// tslint:disable: no-any
// tslint:disable: class-name
// tslint:disable: variable-name
// tslint:disable: jsdoc-format
// tslint:disable: no-namespace
export namespace acceleratedmobilepageurl_v1 {
export interface Options extends GlobalOptions {
version: 'v1';
}
/**
* Accelerated Mobile Pages (AMP) URL API
*
* Retrieves the list of AMP URLs (and equivalent AMP Cache URLs) for a given
* list of public URL(s).
*
* @example
* const {google} = require('googleapis');
* const acceleratedmobilepageurl = google.acceleratedmobilepageurl('v1');
*
* @namespace acceleratedmobilepageurl
* @type {Function}
* @version v1
* @variation v1
* @param {object=} options Options for Acceleratedmobilepageurl
*/
export class Acceleratedmobilepageurl {
_options: GlobalOptions;
google?: GoogleConfigurable;
root = this;
ampUrls: Resource$Ampurls;
| (options: GlobalOptions, google?: GoogleConfigurable) {
this._options = options || {};
this.google = google;
this.getRoot.bind(this);
this.ampUrls = new Resource$Ampurls(this);
}
getRoot() {
return this.root;
}
}
/**
* AMP URL response for a requested URL.
*/
export interface Schema$AmpUrl {
/**
* The AMP URL pointing to the publisher's web server.
*/
ampUrl?: string;
/**
* The [AMP Cache URL](/amp/cache/overview#amp-cache-url-format) pointing to
* the cached document in the Google AMP Cache.
*/
cdnAmpUrl?: string;
/**
* The original non-AMP URL.
*/
originalUrl?: string;
}
/**
* AMP URL Error resource for a requested URL that couldn't be found.
*/
export interface Schema$AmpUrlError {
/**
* The error code of an API call.
*/
errorCode?: string;
/**
* An optional descriptive error message.
*/
errorMessage?: string;
/**
* The original non-AMP URL.
*/
originalUrl?: string;
}
/**
* AMP URL request for a batch of URLs.
*/
export interface Schema$BatchGetAmpUrlsRequest {
/**
* The lookup_strategy being requested.
*/
lookupStrategy?: string;
/**
* List of URLs to look up for the paired AMP URLs. The URLs are
* case-sensitive. Up to 50 URLs per lookup (see [Usage
* Limits](/amp/cache/reference/limits)).
*/
urls?: string[];
}
/**
* Batch AMP URL response.
*/
export interface Schema$BatchGetAmpUrlsResponse {
/**
* For each URL in BatchAmpUrlsRequest, the URL response. The response might
* not be in the same order as URLs in the batch request. If
* BatchAmpUrlsRequest contains duplicate URLs, AmpUrl is generated only
* once.
*/
ampUrls?: Schema$AmpUrl[];
/**
* The errors for requested URLs that have no AMP URL.
*/
urlErrors?: Schema$AmpUrlError[];
}
export class Resource$Ampurls {
root: Acceleratedmobilepageurl;
constructor(root: Acceleratedmobilepageurl) {
this.root = root;
this.getRoot.bind(this);
}
getRoot() {
return this.root;
}
/**
* acceleratedmobilepageurl.ampUrls.batchGet
* @desc Returns AMP URL(s) and equivalent [AMP Cache
* URL(s)](/amp/cache/overview#amp-cache-url-format).
* @alias acceleratedmobilepageurl.ampUrls.batchGet
* @memberOf! ()
*
* @param {object} params Parameters for request
* @param {().BatchGetAmpUrlsRequest} params.resource Request body data
* @param {object} [options] Optionally override request options, such as `url`, `method`, and `encoding`.
* @param {callback} callback The callback that handles the response.
* @return {object} Request object
*/
batchGet(
params?: Params$Resource$Ampurls$Batchget,
options?: MethodOptions): AxiosPromise<Schema$BatchGetAmpUrlsResponse>;
batchGet(
params: Params$Resource$Ampurls$Batchget,
options: MethodOptions|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>): void;
batchGet(
params: Params$Resource$Ampurls$Batchget,
callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>): void;
batchGet(callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>):
void;
batchGet(
paramsOrCallback?: Params$Resource$Ampurls$Batchget|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
optionsOrCallback?: MethodOptions|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
callback?: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>):
void|AxiosPromise<Schema$BatchGetAmpUrlsResponse> {
let params = (paramsOrCallback || {}) as Params$Resource$Ampurls$Batchget;
let options = (optionsOrCallback || {}) as MethodOptions;
if (typeof paramsOrCallback === 'function') {
callback = paramsOrCallback;
params = {} as Params$Resource$Ampurls$Batchget;
options = {};
}
if (typeof optionsOrCallback === 'function') {
callback = optionsOrCallback;
options = {};
}
const rootUrl =
options.rootUrl || 'https://acceleratedmobilepageurl.googleapis.com/';
const parameters = {
options: Object.assign(
{
url: (rootUrl + '/v1/ampUrls:batchGet')
.replace(/([^:]\/)\/+/g, '$1'),
method: 'POST'
},
options),
params,
requiredParams: [],
pathParams: [],
context: this.getRoot()
};
if (callback) {
createAPIRequest<Schema$BatchGetAmpUrlsResponse>(parameters, callback);
} else {
return createAPIRequest<Schema$BatchGetAmpUrlsResponse>(parameters);
}
}
}
export interface Params$Resource$Ampurls$Batchget {
/**
* Auth client or API Key for the request
*/
auth?: string|OAuth2Client|JWT|Compute|UserRefreshClient;
/**
* Request body metadata
*/
requestBody?: Schema$BatchGetAmpUrlsRequest;
}
}
| constructor | identifier_name |
v1.ts | /**
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {AxiosPromise} from 'axios';
import {Compute, JWT, OAuth2Client, UserRefreshClient} from 'google-auth-library';
import {BodyResponseCallback, createAPIRequest, GlobalOptions, GoogleConfigurable, MethodOptions} from 'googleapis-common';
// tslint:disable: no-any
// tslint:disable: class-name
// tslint:disable: variable-name
// tslint:disable: jsdoc-format
// tslint:disable: no-namespace
export namespace acceleratedmobilepageurl_v1 {
export interface Options extends GlobalOptions {
version: 'v1';
}
/**
* Accelerated Mobile Pages (AMP) URL API
*
* Retrieves the list of AMP URLs (and equivalent AMP Cache URLs) for a given
* list of public URL(s).
*
* @example
* const {google} = require('googleapis');
* const acceleratedmobilepageurl = google.acceleratedmobilepageurl('v1');
*
* @namespace acceleratedmobilepageurl
* @type {Function}
* @version v1
* @variation v1
* @param {object=} options Options for Acceleratedmobilepageurl
*/
export class Acceleratedmobilepageurl {
_options: GlobalOptions;
google?: GoogleConfigurable;
root = this;
ampUrls: Resource$Ampurls;
constructor(options: GlobalOptions, google?: GoogleConfigurable) {
this._options = options || {};
this.google = google;
this.getRoot.bind(this);
this.ampUrls = new Resource$Ampurls(this);
}
getRoot() {
return this.root;
}
}
/**
* AMP URL response for a requested URL.
*/
export interface Schema$AmpUrl {
/**
* The AMP URL pointing to the publisher's web server.
*/
ampUrl?: string;
/**
* The [AMP Cache URL](/amp/cache/overview#amp-cache-url-format) pointing to
* the cached document in the Google AMP Cache.
*/
cdnAmpUrl?: string;
/**
* The original non-AMP URL.
*/
originalUrl?: string;
}
/**
* AMP URL Error resource for a requested URL that couldn't be found.
*/
export interface Schema$AmpUrlError {
/**
* The error code of an API call.
*/ | errorCode?: string;
/**
* An optional descriptive error message.
*/
errorMessage?: string;
/**
* The original non-AMP URL.
*/
originalUrl?: string;
}
/**
* AMP URL request for a batch of URLs.
*/
export interface Schema$BatchGetAmpUrlsRequest {
/**
* The lookup_strategy being requested.
*/
lookupStrategy?: string;
/**
* List of URLs to look up for the paired AMP URLs. The URLs are
* case-sensitive. Up to 50 URLs per lookup (see [Usage
* Limits](/amp/cache/reference/limits)).
*/
urls?: string[];
}
/**
* Batch AMP URL response.
*/
export interface Schema$BatchGetAmpUrlsResponse {
/**
* For each URL in BatchAmpUrlsRequest, the URL response. The response might
* not be in the same order as URLs in the batch request. If
* BatchAmpUrlsRequest contains duplicate URLs, AmpUrl is generated only
* once.
*/
ampUrls?: Schema$AmpUrl[];
/**
* The errors for requested URLs that have no AMP URL.
*/
urlErrors?: Schema$AmpUrlError[];
}
export class Resource$Ampurls {
root: Acceleratedmobilepageurl;
constructor(root: Acceleratedmobilepageurl) {
this.root = root;
this.getRoot.bind(this);
}
getRoot() {
return this.root;
}
/**
* acceleratedmobilepageurl.ampUrls.batchGet
* @desc Returns AMP URL(s) and equivalent [AMP Cache
* URL(s)](/amp/cache/overview#amp-cache-url-format).
* @alias acceleratedmobilepageurl.ampUrls.batchGet
* @memberOf! ()
*
* @param {object} params Parameters for request
* @param {().BatchGetAmpUrlsRequest} params.resource Request body data
* @param {object} [options] Optionally override request options, such as `url`, `method`, and `encoding`.
* @param {callback} callback The callback that handles the response.
* @return {object} Request object
*/
batchGet(
params?: Params$Resource$Ampurls$Batchget,
options?: MethodOptions): AxiosPromise<Schema$BatchGetAmpUrlsResponse>;
batchGet(
params: Params$Resource$Ampurls$Batchget,
options: MethodOptions|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>): void;
batchGet(
params: Params$Resource$Ampurls$Batchget,
callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>): void;
batchGet(callback: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>):
void;
batchGet(
paramsOrCallback?: Params$Resource$Ampurls$Batchget|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
optionsOrCallback?: MethodOptions|
BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>,
callback?: BodyResponseCallback<Schema$BatchGetAmpUrlsResponse>):
void|AxiosPromise<Schema$BatchGetAmpUrlsResponse> {
let params = (paramsOrCallback || {}) as Params$Resource$Ampurls$Batchget;
let options = (optionsOrCallback || {}) as MethodOptions;
if (typeof paramsOrCallback === 'function') {
callback = paramsOrCallback;
params = {} as Params$Resource$Ampurls$Batchget;
options = {};
}
if (typeof optionsOrCallback === 'function') {
callback = optionsOrCallback;
options = {};
}
const rootUrl =
options.rootUrl || 'https://acceleratedmobilepageurl.googleapis.com/';
const parameters = {
options: Object.assign(
{
url: (rootUrl + '/v1/ampUrls:batchGet')
.replace(/([^:]\/)\/+/g, '$1'),
method: 'POST'
},
options),
params,
requiredParams: [],
pathParams: [],
context: this.getRoot()
};
if (callback) {
createAPIRequest<Schema$BatchGetAmpUrlsResponse>(parameters, callback);
} else {
return createAPIRequest<Schema$BatchGetAmpUrlsResponse>(parameters);
}
}
}
export interface Params$Resource$Ampurls$Batchget {
/**
* Auth client or API Key for the request
*/
auth?: string|OAuth2Client|JWT|Compute|UserRefreshClient;
/**
* Request body metadata
*/
requestBody?: Schema$BatchGetAmpUrlsRequest;
}
} | random_line_split |
|
Modal.api.js | /**
* modal api
*/
export default {
methods: {
/**
* 点击 Full 的导航按钮
*/
clickFullNav() {
if (this.commit) {
this.no()
} else {
this.hide()
}
},
/**
* 显示pop
*
* @param {Number} - 当前页码
* @return {Object}
*/
show() {
this.modalDisplay = true
return this.$nextTick(() => {
this.$refs.fadeTransition.enter()
this.$refs.pop.show()
return this
})
},
/**
* 隐藏pop
*
* @return {Object}
*/
hide() {
this.$refs.fadeTransition.leave()
this.$refs.pop.hide({
cb: () => {
this.modalDisplay = false
this.isMousedown = false
}
})
return this
},
/**
* 鼠标mouseDown 弹窗头部触发的事件
*
* @return {Object}
*/
mouseDown(event) {
this.isMousedown = true
this.pointStart = {
x: event.clientX,
y: event.clientY
}
return this
},
/**
* 鼠标mouseMove 弹窗头部触发的事件
*
* @return {Object, Boolean}
*/
mouseMove(event) {
event.preventDefault()
if (!this.isMousedown) {
return false
}
this.$refs.pop.computePosition()
this.pointStart = {
x: event.clientX,
y: event.clientY
}
return this
},
/**
* 鼠标mouseUp 弹窗头部触发的事件
*
* @return {Object, Boolean}
*/
mouseUp(event) {
event.preventDefault()
if (!this.isMousedown) {
return false
}
this.isMousedown = false
return this
},
/**
* 弹窗点击确定触发的函数
*
* @return {Object}
*/
ok() {
this.$emit('ok')
if (this.okCbFun) {
if (typeof this.okCbFun === 'function') {
this.okCbFun(this)
}
return this
}
return this.hide()
},
/**
* 弹窗点击取消触发的函数
*
* @return {Object}
*/
no() {
this.$emit('no')
if (this.noCbFun) {
if (typeof this.noCbFun === 'function') {
this.noCbFun(this)
}
return this
}
this.hide()
},
/**
* 获取 / 设置 弹窗的title名
*
* @return {Object, Boolean}
*/
title(text) {
if (text === '' || text) {
this.stateHeader = text
}
return this
},
/**
* alert, confirm 弹窗的文字信息
*
* @param {String} - 需要设置的值
* @return {Object, String}
*/
info(text) {
if (text === '' || text) {
this.stateMessage = text
}
return this
},
/**
* 设置各个组件的配置数据
*
* @param {Object} opt - 选项
* {Function} okCb - 点击的回调函数
* {Function} noCb - 取消的回调函数
* {Function} showCb - 显示之后的回调函数
* {Function} hideCb - 隐藏之后的回调函数
* {String} title - 模态框标题
* {Function} message - 需要展示的信息
*/
set({
okCb,
noCb,
showCb,
hideCb,
title = '',
message = '',
ui = this.ui,
theme = this.theme
} = {}) {
this.okCbFun = okCb
this.noCbFun = noCb
this.showCb = showCb
this.hideCb = hideCb
this.stateHeader = title
| his.stateMessage = message
this.stateUI = ui
this.stateTheme = theme
return this
}
}
}
| t | identifier_name |
Modal.api.js | /**
* modal api
*/
export default {
methods: {
/**
* 点击 Full 的导航按钮
*/
clickFullNav() {
if (this.commit) {
this.no()
} else {
this.hide()
}
},
/**
* 显示pop
*
* @param {Number} - 当前页码
* @return {Object}
*/
show() {
this.modalDisplay = true
return this.$nextTick(() => {
this.$refs.fadeTransition.enter()
this.$refs.pop.show()
return this
})
},
/**
* 隐藏pop
*
* @return {Object}
*/
hide() {
this.$refs.fadeTransition.leave()
this.$refs.pop.hide({
cb: () => {
this.modalDisplay = false
this.isMousedown = false
}
})
return this
},
/**
* 鼠标mouseDown 弹窗头部触发的事件
*
* @return {Object}
*/
mouseDown(event) {
this.isMousedown = true
this.pointStart = {
x: event.clientX,
y: event.clientY
}
return this
},
/**
* 鼠标mouseMove 弹窗头部触发的事件
*
* @return {Object, Boolean}
*/
mouseMove(event) {
event.preventDefault()
if (!this.isMousedown) {
return false
}
this.$refs.pop.computePosition()
this.pointStart = {
x: event.clientX,
y: event.clientY
}
return this
},
/**
* 鼠标mouseUp 弹窗头部触发的事件
*
* @return {Object, Boolean}
*/
mouseUp(event) {
event.preventDefault()
if (!this.isMousedown) {
return false
}
this.isMousedown = false
return this
},
/** | */
ok() {
this.$emit('ok')
if (this.okCbFun) {
if (typeof this.okCbFun === 'function') {
this.okCbFun(this)
}
return this
}
return this.hide()
},
/**
* 弹窗点击取消触发的函数
*
* @return {Object}
*/
no() {
this.$emit('no')
if (this.noCbFun) {
if (typeof this.noCbFun === 'function') {
this.noCbFun(this)
}
return this
}
this.hide()
},
/**
* 获取 / 设置 弹窗的title名
*
* @return {Object, Boolean}
*/
title(text) {
if (text === '' || text) {
this.stateHeader = text
}
return this
},
/**
* alert, confirm 弹窗的文字信息
*
* @param {String} - 需要设置的值
* @return {Object, String}
*/
info(text) {
if (text === '' || text) {
this.stateMessage = text
}
return this
},
/**
* 设置各个组件的配置数据
*
* @param {Object} opt - 选项
* {Function} okCb - 点击的回调函数
* {Function} noCb - 取消的回调函数
* {Function} showCb - 显示之后的回调函数
* {Function} hideCb - 隐藏之后的回调函数
* {String} title - 模态框标题
* {Function} message - 需要展示的信息
*/
set({
okCb,
noCb,
showCb,
hideCb,
title = '',
message = '',
ui = this.ui,
theme = this.theme
} = {}) {
this.okCbFun = okCb
this.noCbFun = noCb
this.showCb = showCb
this.hideCb = hideCb
this.stateHeader = title
this.stateMessage = message
this.stateUI = ui
this.stateTheme = theme
return this
}
}
} | * 弹窗点击确定触发的函数
*
* @return {Object} | random_line_split |
Modal.api.js | /**
* modal api
*/
export default {
methods: {
/**
* 点击 Full 的导航按钮
*/
clickFullNav() {
if (this.commit) {
this.no()
} else {
this.hide()
}
},
/**
* 显示pop
*
* @param {Number} - 当前页码
* @return {Object}
*/
show() {
this.modalDisplay = true
return this.$nextTick(() => {
this.$refs.fadeTransition.enter()
this.$refs.pop.show()
return this
})
},
/**
* 隐藏pop
*
* @return {Object}
*/
hide() {
this.$refs.fadeTransition.leave()
this.$refs.pop.hide({
cb: () => {
this.modalDisplay = false
this.isMousedown = false
}
})
return this
},
/**
* 鼠标mouseDown 弹窗头部触发的事件
*
* @return {Object}
*/
mouseDown(event) {
this.isMousedown = true
this.pointStart = {
x: event.clientX,
y: event.clientY
}
return this
},
/**
* 鼠标mouseMove 弹窗头部触发的事件
*
* @return {Object, Boolean}
*/
mouseMove(event) {
event.preventDefault()
if (!this.isMousedown) {
return false
}
this.$refs.pop.computePosition()
this.pointStart = {
x: event.clientX,
y: event.clientY
}
return this
},
/**
* 鼠标mouseUp 弹窗头部触发的事件
*
* @return {Object, Boolean}
*/
mouseUp(event) {
event.preventDefault()
if (!this.isMousedown) {
return false
}
| ('ok')
if (this.okCbFun) {
if (typeof this.okCbFun === 'function') {
this.okCbFun(this)
}
return this
}
return this.hide()
},
/**
* 弹窗点击取消触发的函数
*
* @return {Object}
*/
no() {
this.$emit('no')
if (this.noCbFun) {
if (typeof this.noCbFun === 'function') {
this.noCbFun(this)
}
return this
}
this.hide()
},
/**
* 获取 / 设置 弹窗的title名
*
* @return {Object, Boolean}
*/
title(text) {
if (text === '' || text) {
this.stateHeader = text
}
return this
},
/**
* alert, confirm 弹窗的文字信息
*
* @param {String} - 需要设置的值
* @return {Object, String}
*/
info(text) {
if (text === '' || text) {
this.stateMessage = text
}
return this
},
/**
* 设置各个组件的配置数据
*
* @param {Object} opt - 选项
* {Function} okCb - 点击的回调函数
* {Function} noCb - 取消的回调函数
* {Function} showCb - 显示之后的回调函数
* {Function} hideCb - 隐藏之后的回调函数
* {String} title - 模态框标题
* {Function} message - 需要展示的信息
*/
set({
okCb,
noCb,
showCb,
hideCb,
title = '',
message = '',
ui = this.ui,
theme = this.theme
} = {}) {
this.okCbFun = okCb
this.noCbFun = noCb
this.showCb = showCb
this.hideCb = hideCb
this.stateHeader = title
this.stateMessage = message
this.stateUI = ui
this.stateTheme = theme
return this
}
}
}
| this.isMousedown = false
return this
},
/**
* 弹窗点击确定触发的函数
*
* @return {Object}
*/
ok() {
this.$emit | identifier_body |
Modal.api.js | /**
* modal api
*/
export default {
methods: {
/**
* 点击 Full 的导航按钮
*/
clickFullNav() {
if (this.commit) {
this | this.hide()
}
},
/**
* 显示pop
*
* @param {Number} - 当前页码
* @return {Object}
*/
show() {
this.modalDisplay = true
return this.$nextTick(() => {
this.$refs.fadeTransition.enter()
this.$refs.pop.show()
return this
})
},
/**
* 隐藏pop
*
* @return {Object}
*/
hide() {
this.$refs.fadeTransition.leave()
this.$refs.pop.hide({
cb: () => {
this.modalDisplay = false
this.isMousedown = false
}
})
return this
},
/**
* 鼠标mouseDown 弹窗头部触发的事件
*
* @return {Object}
*/
mouseDown(event) {
this.isMousedown = true
this.pointStart = {
x: event.clientX,
y: event.clientY
}
return this
},
/**
* 鼠标mouseMove 弹窗头部触发的事件
*
* @return {Object, Boolean}
*/
mouseMove(event) {
event.preventDefault()
if (!this.isMousedown) {
return false
}
this.$refs.pop.computePosition()
this.pointStart = {
x: event.clientX,
y: event.clientY
}
return this
},
/**
* 鼠标mouseUp 弹窗头部触发的事件
*
* @return {Object, Boolean}
*/
mouseUp(event) {
event.preventDefault()
if (!this.isMousedown) {
return false
}
this.isMousedown = false
return this
},
/**
* 弹窗点击确定触发的函数
*
* @return {Object}
*/
ok() {
this.$emit('ok')
if (this.okCbFun) {
if (typeof this.okCbFun === 'function') {
this.okCbFun(this)
}
return this
}
return this.hide()
},
/**
* 弹窗点击取消触发的函数
*
* @return {Object}
*/
no() {
this.$emit('no')
if (this.noCbFun) {
if (typeof this.noCbFun === 'function') {
this.noCbFun(this)
}
return this
}
this.hide()
},
/**
* 获取 / 设置 弹窗的title名
*
* @return {Object, Boolean}
*/
title(text) {
if (text === '' || text) {
this.stateHeader = text
}
return this
},
/**
* alert, confirm 弹窗的文字信息
*
* @param {String} - 需要设置的值
* @return {Object, String}
*/
info(text) {
if (text === '' || text) {
this.stateMessage = text
}
return this
},
/**
* 设置各个组件的配置数据
*
* @param {Object} opt - 选项
* {Function} okCb - 点击的回调函数
* {Function} noCb - 取消的回调函数
* {Function} showCb - 显示之后的回调函数
* {Function} hideCb - 隐藏之后的回调函数
* {String} title - 模态框标题
* {Function} message - 需要展示的信息
*/
set({
okCb,
noCb,
showCb,
hideCb,
title = '',
message = '',
ui = this.ui,
theme = this.theme
} = {}) {
this.okCbFun = okCb
this.noCbFun = noCb
this.showCb = showCb
this.hideCb = hideCb
this.stateHeader = title
this.stateMessage = message
this.stateUI = ui
this.stateTheme = theme
return this
}
}
}
| .no()
} else {
| conditional_block |
ppapi_ppp_instance.js | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
function startsWith(str, prefix) {
return (str.indexOf(prefix) === 0);
}
function setupTests(tester, plugin) {
//////////////////////////////////////////////////////////////////////////////
// Test Helpers
//////////////////////////////////////////////////////////////////////////////
var numMessages = 0;
function addTestListeners(numListeners, test, testFunction, runCheck) {
var messageListener = test.wrap(function(message) {
if (!startsWith(message.data, testFunction)) return;
test.log(message.data);
numMessages++;
plugin.removeEventListener('message', messageListener, false);
test.assertEqual(message.data, testFunction + ':PASSED');
if (runCheck) test.assert(runCheck());
if (numMessages < numListeners) {
plugin.addEventListener('message', messageListener, false);
} else {
numMessages = 0;
test.pass();
}
});
plugin.addEventListener('message', messageListener, false);
}
function | (test, testFunction, runCheck) {
return addTestListeners(1, test, testFunction, runCheck);
}
//////////////////////////////////////////////////////////////////////////////
// Tests
//////////////////////////////////////////////////////////////////////////////
tester.addTest('PPP_Instance::DidCreate', function() {
assertEqual(plugin.lastError, '');
});
tester.addAsyncTest('PPP_Instance::DidChangeView', function(test) {
// The .cc file hardcodes an expected 15x20 size.
plugin.width = 15;
plugin.height = 20;
addTestListener(test, 'DidChangeView');
});
// This test does not appear to be reliable on the bots.
// http://crbug.com/329511
/*
tester.addAsyncTest('PPP_Instance::DidChangeFocus', function(test) {
// TODO(polina): How can I simulate focusing on Windows?
// For now just pass explicitely.
if (startsWith(navigator.platform, 'Win')) {
test.log('skipping test on ' + navigator.platform);
test.pass();
return;
}
addTestListeners(2, test, 'DidChangeFocus');
plugin.tabIndex = 0;
plugin.focus();
plugin.blur();
});
*/
// PPP_Instance::HandleDocumentLoad is only used with full-frame plugins.
// This is tested in tests/ppapi_browser/extension_mime_handler/
// PPP_Instance::DidDestroy is never invoked in the untrusted code.
// We could wait for a crash event from it, but CallOnMainThread semantics
// on shutdown are still buggy, so it might never come even if the function
// triggered. Plus waiting for something not to happen makes the test flaky.
}
| addTestListener | identifier_name |
ppapi_ppp_instance.js | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
function startsWith(str, prefix) {
return (str.indexOf(prefix) === 0);
}
function setupTests(tester, plugin) {
//////////////////////////////////////////////////////////////////////////////
// Test Helpers
//////////////////////////////////////////////////////////////////////////////
var numMessages = 0;
function addTestListeners(numListeners, test, testFunction, runCheck) {
var messageListener = test.wrap(function(message) {
if (!startsWith(message.data, testFunction)) return;
test.log(message.data);
numMessages++;
plugin.removeEventListener('message', messageListener, false);
test.assertEqual(message.data, testFunction + ':PASSED');
if (runCheck) test.assert(runCheck());
if (numMessages < numListeners) {
plugin.addEventListener('message', messageListener, false);
} else {
numMessages = 0;
test.pass();
}
});
plugin.addEventListener('message', messageListener, false);
}
function addTestListener(test, testFunction, runCheck) {
return addTestListeners(1, test, testFunction, runCheck);
}
//////////////////////////////////////////////////////////////////////////////
// Tests
//////////////////////////////////////////////////////////////////////////////
tester.addTest('PPP_Instance::DidCreate', function() {
assertEqual(plugin.lastError, '');
});
| plugin.width = 15;
plugin.height = 20;
addTestListener(test, 'DidChangeView');
});
// This test does not appear to be reliable on the bots.
// http://crbug.com/329511
/*
tester.addAsyncTest('PPP_Instance::DidChangeFocus', function(test) {
// TODO(polina): How can I simulate focusing on Windows?
// For now just pass explicitely.
if (startsWith(navigator.platform, 'Win')) {
test.log('skipping test on ' + navigator.platform);
test.pass();
return;
}
addTestListeners(2, test, 'DidChangeFocus');
plugin.tabIndex = 0;
plugin.focus();
plugin.blur();
});
*/
// PPP_Instance::HandleDocumentLoad is only used with full-frame plugins.
// This is tested in tests/ppapi_browser/extension_mime_handler/
// PPP_Instance::DidDestroy is never invoked in the untrusted code.
// We could wait for a crash event from it, but CallOnMainThread semantics
// on shutdown are still buggy, so it might never come even if the function
// triggered. Plus waiting for something not to happen makes the test flaky.
} | tester.addAsyncTest('PPP_Instance::DidChangeView', function(test) {
// The .cc file hardcodes an expected 15x20 size. | random_line_split |
ppapi_ppp_instance.js | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
function startsWith(str, prefix) |
function setupTests(tester, plugin) {
//////////////////////////////////////////////////////////////////////////////
// Test Helpers
//////////////////////////////////////////////////////////////////////////////
var numMessages = 0;
function addTestListeners(numListeners, test, testFunction, runCheck) {
var messageListener = test.wrap(function(message) {
if (!startsWith(message.data, testFunction)) return;
test.log(message.data);
numMessages++;
plugin.removeEventListener('message', messageListener, false);
test.assertEqual(message.data, testFunction + ':PASSED');
if (runCheck) test.assert(runCheck());
if (numMessages < numListeners) {
plugin.addEventListener('message', messageListener, false);
} else {
numMessages = 0;
test.pass();
}
});
plugin.addEventListener('message', messageListener, false);
}
function addTestListener(test, testFunction, runCheck) {
return addTestListeners(1, test, testFunction, runCheck);
}
//////////////////////////////////////////////////////////////////////////////
// Tests
//////////////////////////////////////////////////////////////////////////////
tester.addTest('PPP_Instance::DidCreate', function() {
assertEqual(plugin.lastError, '');
});
tester.addAsyncTest('PPP_Instance::DidChangeView', function(test) {
// The .cc file hardcodes an expected 15x20 size.
plugin.width = 15;
plugin.height = 20;
addTestListener(test, 'DidChangeView');
});
// This test does not appear to be reliable on the bots.
// http://crbug.com/329511
/*
tester.addAsyncTest('PPP_Instance::DidChangeFocus', function(test) {
// TODO(polina): How can I simulate focusing on Windows?
// For now just pass explicitely.
if (startsWith(navigator.platform, 'Win')) {
test.log('skipping test on ' + navigator.platform);
test.pass();
return;
}
addTestListeners(2, test, 'DidChangeFocus');
plugin.tabIndex = 0;
plugin.focus();
plugin.blur();
});
*/
// PPP_Instance::HandleDocumentLoad is only used with full-frame plugins.
// This is tested in tests/ppapi_browser/extension_mime_handler/
// PPP_Instance::DidDestroy is never invoked in the untrusted code.
// We could wait for a crash event from it, but CallOnMainThread semantics
// on shutdown are still buggy, so it might never come even if the function
// triggered. Plus waiting for something not to happen makes the test flaky.
}
| {
return (str.indexOf(prefix) === 0);
} | identifier_body |
flags.ts | //
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
///<reference path='typescript.ts' />
module TypeScript {
export function hasFlag(val: number, flag: number) {
| export enum ErrorRecoverySet {
None = 0,
Comma = 1, // Comma
SColon = 1 << 1, // SColon
Asg = 1 << 2, // Asg
BinOp = 1 << 3, // Lsh, Rsh, Rs2, Le, Ge, INSTANCEOF, EQ, NE, Eqv, NEqv, LogAnd, LogOr, AsgMul, AsgDiv
// AsgMod, AsgAdd, AsgSub, AsgLsh, AsgRsh, AsgRs2, AsgAnd, AsgXor, AsgOr, QMark, Mult, Div,
// Pct, GT, LT, And, Xor, Or
RBrack = 1 << 4, // RBrack
RCurly = 1 << 5, // RCurly
RParen = 1 << 6, // RParen
Dot = 1 << 7, // Dot
Colon = 1 << 8, // Colon
PrimType = 1 << 9, // number, string, bool
AddOp = 1 << 10, // Add, Sub
LCurly = 1 << 11, // LCurly
PreOp = 1 << 12, // Tilde, Bang, Inc, Dec
RegExp = 1 << 13, // RegExp
LParen = 1 << 14, // LParen
LBrack = 1 << 15, // LBrack
Scope = 1 << 16, // Scope
In = 1 << 17, // IN
SCase = 1 << 18, // CASE, DEFAULT
Else = 1 << 19, // ELSE
Catch = 1 << 20, // CATCH, FINALLY
Var = 1 << 21, //
Stmt = 1 << 22, // BREAK, RETURN, THROW, DEBUGGER, FOR, SWITCH, DO, IF, TRY, WITH
While = 1 << 23, // WHILE
ID = 1 << 24, // ID
Prefix = 1 << 25, // VOID, DELETE, TYPEOF, AWAIT
Literal = 1 << 26, // IntCon, FltCon, StrCon
RLit = 1 << 27, // THIS, TRUE, FALSE, NULL
Func = 1 << 28, // FUNCTION
EOF = 1 << 29, // EOF
// REVIEW: Name this something clearer.
TypeScriptS = 1 << 30, // PROPERTY, PRIVATE, STATIC, INTERFACE, CLASS, MODULE, EXPORT, IMPORT
ExprStart = SColon | AddOp | LCurly | PreOp | RegExp | LParen | LBrack | ID | Prefix | RLit | Func | Literal,
StmtStart = ExprStart | SColon | Var | Stmt | While | TypeScriptS,
Postfix = Dot | LParen | LBrack,
}
export enum AllowedElements {
None = 0,
ModuleDeclarations = 1 << 2,
ClassDeclarations = 1 << 3,
InterfaceDeclarations = 1 << 4,
AmbientDeclarations = 1 << 10,
Properties = 1 << 11,
Global = ModuleDeclarations | ClassDeclarations | InterfaceDeclarations | AmbientDeclarations,
QuickParse = Global | Properties,
}
export enum Modifiers {
None = 0,
Private = 1,
Public = 1 << 1,
Readonly = 1 << 2,
Ambient = 1 << 3,
Exported = 1 << 4,
Getter = 1 << 5,
Setter = 1 << 6,
Static = 1 << 7,
}
export enum ASTFlags {
None = 0,
ExplicitSemicolon = 1, // statment terminated by an explicit semicolon
AutomaticSemicolon = 1 << 1, // statment terminated by an automatic semicolon
Writeable = 1 << 2, // node is lhs that can be modified
Error = 1 << 3, // node has an error
DotLHSPartial = 1 << 4, // node is the lhs of an incomplete dot expr at cursor
DotLHS = 1 << 5, // node is the lhs of a dot expr
IsStatement = 1 << 6, // node is a statement
StrictMode = 1 << 7, // node is in the strict mode environment
PossibleOptionalParameter = 1 << 8,
ClassBaseConstructorCall = 1 << 9,
OptionalName = 1 << 10,
// REVIEW: This flag is to mark lambda nodes to note that the LParen of an expression has already been matched in the lambda header.
// The flag is used to communicate this piece of information to the calling parseTerm, which intern will remove it.
// Once we have a better way to associate information with nodes, this flag should not be used.
SkipNextRParen = 1 << 11,
}
export enum DeclFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
}
export enum ModuleFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
IsEnum = 1 << 8,
ShouldEmitModuleDecl = 1 << 9,
IsWholeFile = 1 << 10,
IsDynamic = 1 << 11,
MustCaptureThis = 1 << 12,
}
export enum SymbolFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
Property = 1 << 8,
Readonly = 1 << 9,
ModuleMember = 1 << 10,
InterfaceMember = 1 << 11,
ClassMember = 1 << 12,
BuiltIn = 1 << 13,
TypeSetDuringScopeAssignment = 1 << 14,
Constant = 1 << 15,
Optional = 1 << 16,
RecursivelyReferenced = 1 << 17,
Bound = 1 << 18,
CompilerGenerated = 1 << 19,
}
export enum VarFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
AutoInit = 1 << 8,
Property = 1 << 9,
Readonly = 1 << 10,
Class = 1 << 11,
ClassProperty = 1 << 12,
ClassBodyProperty = 1 << 13,
ClassConstructorProperty = 1 << 14,
ClassSuperMustBeFirstCallInConstructor = 1 << 15,
Constant = 1 << 16,
MustCaptureThis = 1 << 17,
}
export enum FncFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
Signature = 1 << 9,
Method = 1 << 10,
HasReturnExpression = 1 << 11,
CallMember = 1 << 12,
ConstructMember = 1 << 13,
HasSelfReference = 1 << 14,
IsFatArrowFunction = 1 << 15,
IndexerMember = 1 << 16,
IsFunctionExpression = 1 << 17,
ClassMethod = 1 << 18,
ClassPropertyMethodExported = 1 << 19,
HasSuperReferenceInFatArrowFunction = 1 << 20,
IsPropertyBound = 1 << 21,
}
export enum SignatureFlags {
None = 0,
IsIndexer = 1,
IsStringIndexer = 1 << 1,
IsNumberIndexer = 1 << 2,
}
export function ToDeclFlags(fncFlags: FncFlags) : DeclFlags;
export function ToDeclFlags(varFlags: VarFlags) : DeclFlags;
export function ToDeclFlags(symFlags: SymbolFlags): DeclFlags;
export function ToDeclFlags(moduleFlags: ModuleFlags): DeclFlags;
export function ToDeclFlags(fncOrVarOrSymbolOrModuleFlags: any) {
return <DeclFlags>fncOrVarOrSymbolOrModuleFlags;
}
export enum TypeFlags {
None = 0,
HasImplementation = 1,
HasSelfReference = 1 << 1,
MergeResult = 1 << 2,
IsEnum = 1 << 3,
BuildingName = 1 << 4,
HasBaseType = 1 << 5,
HasBaseTypeOfObject = 1 << 6,
IsClass = 1 << 7,
}
export enum TypeRelationshipFlags {
SuccessfulComparison = 0,
SourceIsNullTargetIsVoidOrUndefined = 1,
RequiredPropertyIsMissing = 1 << 1,
IncompatibleSignatures = 1 << 2,
SourceSignatureHasTooManyParameters = 3,
IncompatibleReturnTypes = 1 << 4,
IncompatiblePropertyTypes = 1 << 5,
IncompatibleParameterTypes = 1 << 6,
}
export enum CodeGenTarget {
ES3 = 0,
ES5 = 1,
}
export enum ModuleGenTarget {
Synchronous = 0,
Asynchronous = 1,
Local = 1 << 1,
}
// Compiler defaults to generating ES5-compliant code for
// - getters and setters
export var codeGenTarget: CodeGenTarget = CodeGenTarget.ES3;
export var moduleGenTarget: ModuleGenTarget = ModuleGenTarget.Synchronous;
export var optimizeModuleCodeGen = true;
export function flagsToString(e, flags: number): string {
var builder = "";
for (var i = 1; i < (1 << 31) ; i = i << 1) {
if ((flags & i) != 0) {
for (var k in e) {
if (e[k] == i) {
if (builder.length > 0) {
builder += "|";
}
builder += k;
break;
}
}
}
}
return builder;
}
} | return (val & flag) != 0;
}
| identifier_body |
flags.ts | //
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
///<reference path='typescript.ts' />
module TypeScript {
export function hasFlag(val: number, flag: number) {
return (val & flag) != 0;
}
export enum ErrorRecoverySet {
None = 0,
Comma = 1, // Comma
SColon = 1 << 1, // SColon
Asg = 1 << 2, // Asg
BinOp = 1 << 3, // Lsh, Rsh, Rs2, Le, Ge, INSTANCEOF, EQ, NE, Eqv, NEqv, LogAnd, LogOr, AsgMul, AsgDiv
// AsgMod, AsgAdd, AsgSub, AsgLsh, AsgRsh, AsgRs2, AsgAnd, AsgXor, AsgOr, QMark, Mult, Div,
// Pct, GT, LT, And, Xor, Or
RBrack = 1 << 4, // RBrack
RCurly = 1 << 5, // RCurly
RParen = 1 << 6, // RParen
Dot = 1 << 7, // Dot
Colon = 1 << 8, // Colon
PrimType = 1 << 9, // number, string, bool
AddOp = 1 << 10, // Add, Sub
LCurly = 1 << 11, // LCurly
PreOp = 1 << 12, // Tilde, Bang, Inc, Dec
RegExp = 1 << 13, // RegExp
LParen = 1 << 14, // LParen
LBrack = 1 << 15, // LBrack
Scope = 1 << 16, // Scope
In = 1 << 17, // IN
SCase = 1 << 18, // CASE, DEFAULT
Else = 1 << 19, // ELSE
Catch = 1 << 20, // CATCH, FINALLY
Var = 1 << 21, //
Stmt = 1 << 22, // BREAK, RETURN, THROW, DEBUGGER, FOR, SWITCH, DO, IF, TRY, WITH
While = 1 << 23, // WHILE
ID = 1 << 24, // ID
Prefix = 1 << 25, // VOID, DELETE, TYPEOF, AWAIT
Literal = 1 << 26, // IntCon, FltCon, StrCon
RLit = 1 << 27, // THIS, TRUE, FALSE, NULL
Func = 1 << 28, // FUNCTION
EOF = 1 << 29, // EOF
// REVIEW: Name this something clearer.
TypeScriptS = 1 << 30, // PROPERTY, PRIVATE, STATIC, INTERFACE, CLASS, MODULE, EXPORT, IMPORT
ExprStart = SColon | AddOp | LCurly | PreOp | RegExp | LParen | LBrack | ID | Prefix | RLit | Func | Literal,
StmtStart = ExprStart | SColon | Var | Stmt | While | TypeScriptS,
Postfix = Dot | LParen | LBrack,
}
export enum AllowedElements {
None = 0,
ModuleDeclarations = 1 << 2,
ClassDeclarations = 1 << 3,
InterfaceDeclarations = 1 << 4,
AmbientDeclarations = 1 << 10,
Properties = 1 << 11,
Global = ModuleDeclarations | ClassDeclarations | InterfaceDeclarations | AmbientDeclarations,
QuickParse = Global | Properties,
}
export enum Modifiers {
None = 0,
Private = 1,
Public = 1 << 1,
Readonly = 1 << 2,
Ambient = 1 << 3,
Exported = 1 << 4,
Getter = 1 << 5,
Setter = 1 << 6,
Static = 1 << 7,
}
export enum ASTFlags {
None = 0,
ExplicitSemicolon = 1, // statment terminated by an explicit semicolon
AutomaticSemicolon = 1 << 1, // statment terminated by an automatic semicolon
Writeable = 1 << 2, // node is lhs that can be modified
Error = 1 << 3, // node has an error
DotLHSPartial = 1 << 4, // node is the lhs of an incomplete dot expr at cursor
DotLHS = 1 << 5, // node is the lhs of a dot expr
IsStatement = 1 << 6, // node is a statement
StrictMode = 1 << 7, // node is in the strict mode environment
PossibleOptionalParameter = 1 << 8,
ClassBaseConstructorCall = 1 << 9,
OptionalName = 1 << 10,
// REVIEW: This flag is to mark lambda nodes to note that the LParen of an expression has already been matched in the lambda header.
// The flag is used to communicate this piece of information to the calling parseTerm, which intern will remove it.
// Once we have a better way to associate information with nodes, this flag should not be used.
SkipNextRParen = 1 << 11,
}
export enum DeclFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
}
export enum ModuleFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
IsEnum = 1 << 8,
ShouldEmitModuleDecl = 1 << 9,
IsWholeFile = 1 << 10,
IsDynamic = 1 << 11,
MustCaptureThis = 1 << 12,
}
export enum SymbolFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
Property = 1 << 8,
Readonly = 1 << 9,
ModuleMember = 1 << 10,
InterfaceMember = 1 << 11,
ClassMember = 1 << 12,
BuiltIn = 1 << 13,
TypeSetDuringScopeAssignment = 1 << 14,
Constant = 1 << 15,
Optional = 1 << 16,
RecursivelyReferenced = 1 << 17,
Bound = 1 << 18,
CompilerGenerated = 1 << 19,
}
export enum VarFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
AutoInit = 1 << 8,
Property = 1 << 9,
Readonly = 1 << 10,
Class = 1 << 11,
ClassProperty = 1 << 12,
ClassBodyProperty = 1 << 13,
ClassConstructorProperty = 1 << 14,
ClassSuperMustBeFirstCallInConstructor = 1 << 15,
Constant = 1 << 16,
MustCaptureThis = 1 << 17,
}
export enum FncFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
Signature = 1 << 9,
Method = 1 << 10,
HasReturnExpression = 1 << 11,
CallMember = 1 << 12,
ConstructMember = 1 << 13,
HasSelfReference = 1 << 14,
IsFatArrowFunction = 1 << 15,
IndexerMember = 1 << 16,
IsFunctionExpression = 1 << 17,
ClassMethod = 1 << 18,
ClassPropertyMethodExported = 1 << 19,
HasSuperReferenceInFatArrowFunction = 1 << 20,
IsPropertyBound = 1 << 21,
}
export enum SignatureFlags {
None = 0,
IsIndexer = 1,
IsStringIndexer = 1 << 1,
IsNumberIndexer = 1 << 2,
}
export function ToDeclFlags(fncFlags: FncFlags) : DeclFlags;
export function ToDeclFlags(varFlags: VarFlags) : DeclFlags;
export function ToDeclFlags(symFlags: SymbolFlags): DeclFlags;
export function ToDeclFlags(moduleFlags: ModuleFlags): DeclFlags;
export function ToDeclFlags(fncOrVarOrSymbolOrModuleFlags: any) {
return <DeclFlags>fncOrVarOrSymbolOrModuleFlags;
}
export enum TypeFlags {
None = 0,
HasImplementation = 1,
HasSelfReference = 1 << 1,
MergeResult = 1 << 2,
IsEnum = 1 << 3,
BuildingName = 1 << 4,
HasBaseType = 1 << 5,
HasBaseTypeOfObject = 1 << 6,
IsClass = 1 << 7,
}
export enum TypeRelationshipFlags {
SuccessfulComparison = 0,
SourceIsNullTargetIsVoidOrUndefined = 1,
RequiredPropertyIsMissing = 1 << 1,
IncompatibleSignatures = 1 << 2,
SourceSignatureHasTooManyParameters = 3,
IncompatibleReturnTypes = 1 << 4,
IncompatiblePropertyTypes = 1 << 5,
IncompatibleParameterTypes = 1 << 6,
}
export enum CodeGenTarget {
ES3 = 0,
ES5 = 1,
}
export enum ModuleGenTarget {
Synchronous = 0,
Asynchronous = 1,
Local = 1 << 1,
}
// Compiler defaults to generating ES5-compliant code for
// - getters and setters
export var codeGenTarget: CodeGenTarget = CodeGenTarget.ES3;
export var moduleGenTarget: ModuleGenTarget = ModuleGenTarget.Synchronous;
export var optimizeModuleCodeGen = true;
export function flagsToString(e, flags: number): string {
var builder = "";
for (var i = 1; i < (1 << 31) ; i = i << 1) {
| return builder;
}
} | if ((flags & i) != 0) {
for (var k in e) {
if (e[k] == i) {
if (builder.length > 0) {
builder += "|";
}
builder += k;
break;
}
}
}
}
| conditional_block |
flags.ts | //
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
///<reference path='typescript.ts' />
module TypeScript {
export function hasFlag(val: number, flag: number) {
return (val & flag) != 0;
}
export enum ErrorRecoverySet {
None = 0,
Comma = 1, // Comma
SColon = 1 << 1, // SColon
Asg = 1 << 2, // Asg
BinOp = 1 << 3, // Lsh, Rsh, Rs2, Le, Ge, INSTANCEOF, EQ, NE, Eqv, NEqv, LogAnd, LogOr, AsgMul, AsgDiv
// AsgMod, AsgAdd, AsgSub, AsgLsh, AsgRsh, AsgRs2, AsgAnd, AsgXor, AsgOr, QMark, Mult, Div,
// Pct, GT, LT, And, Xor, Or
RBrack = 1 << 4, // RBrack
RCurly = 1 << 5, // RCurly
RParen = 1 << 6, // RParen
Dot = 1 << 7, // Dot
Colon = 1 << 8, // Colon
PrimType = 1 << 9, // number, string, bool
AddOp = 1 << 10, // Add, Sub
LCurly = 1 << 11, // LCurly
PreOp = 1 << 12, // Tilde, Bang, Inc, Dec
RegExp = 1 << 13, // RegExp
LParen = 1 << 14, // LParen
LBrack = 1 << 15, // LBrack
Scope = 1 << 16, // Scope
In = 1 << 17, // IN
SCase = 1 << 18, // CASE, DEFAULT
Else = 1 << 19, // ELSE
Catch = 1 << 20, // CATCH, FINALLY
Var = 1 << 21, //
Stmt = 1 << 22, // BREAK, RETURN, THROW, DEBUGGER, FOR, SWITCH, DO, IF, TRY, WITH
While = 1 << 23, // WHILE
ID = 1 << 24, // ID
Prefix = 1 << 25, // VOID, DELETE, TYPEOF, AWAIT
Literal = 1 << 26, // IntCon, FltCon, StrCon
RLit = 1 << 27, // THIS, TRUE, FALSE, NULL
Func = 1 << 28, // FUNCTION
EOF = 1 << 29, // EOF
// REVIEW: Name this something clearer.
TypeScriptS = 1 << 30, // PROPERTY, PRIVATE, STATIC, INTERFACE, CLASS, MODULE, EXPORT, IMPORT
ExprStart = SColon | AddOp | LCurly | PreOp | RegExp | LParen | LBrack | ID | Prefix | RLit | Func | Literal,
StmtStart = ExprStart | SColon | Var | Stmt | While | TypeScriptS,
Postfix = Dot | LParen | LBrack,
}
export enum AllowedElements {
None = 0,
ModuleDeclarations = 1 << 2,
ClassDeclarations = 1 << 3,
InterfaceDeclarations = 1 << 4,
AmbientDeclarations = 1 << 10,
Properties = 1 << 11,
Global = ModuleDeclarations | ClassDeclarations | InterfaceDeclarations | AmbientDeclarations,
QuickParse = Global | Properties,
}
export enum Modifiers {
None = 0,
Private = 1,
Public = 1 << 1,
Readonly = 1 << 2,
Ambient = 1 << 3,
Exported = 1 << 4,
Getter = 1 << 5,
Setter = 1 << 6,
Static = 1 << 7,
}
export enum ASTFlags {
None = 0,
ExplicitSemicolon = 1, // statment terminated by an explicit semicolon
AutomaticSemicolon = 1 << 1, // statment terminated by an automatic semicolon
Writeable = 1 << 2, // node is lhs that can be modified
Error = 1 << 3, // node has an error
DotLHSPartial = 1 << 4, // node is the lhs of an incomplete dot expr at cursor
DotLHS = 1 << 5, // node is the lhs of a dot expr
IsStatement = 1 << 6, // node is a statement
StrictMode = 1 << 7, // node is in the strict mode environment
PossibleOptionalParameter = 1 << 8,
ClassBaseConstructorCall = 1 << 9,
OptionalName = 1 << 10,
// REVIEW: This flag is to mark lambda nodes to note that the LParen of an expression has already been matched in the lambda header.
// The flag is used to communicate this piece of information to the calling parseTerm, which intern will remove it.
// Once we have a better way to associate information with nodes, this flag should not be used.
SkipNextRParen = 1 << 11,
}
export enum DeclFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
}
export enum ModuleFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
IsEnum = 1 << 8,
ShouldEmitModuleDecl = 1 << 9,
IsWholeFile = 1 << 10,
IsDynamic = 1 << 11,
MustCaptureThis = 1 << 12,
}
export enum SymbolFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
Property = 1 << 8,
Readonly = 1 << 9,
ModuleMember = 1 << 10,
InterfaceMember = 1 << 11,
ClassMember = 1 << 12,
BuiltIn = 1 << 13,
TypeSetDuringScopeAssignment = 1 << 14,
Constant = 1 << 15,
Optional = 1 << 16,
RecursivelyReferenced = 1 << 17,
Bound = 1 << 18,
CompilerGenerated = 1 << 19,
}
export enum VarFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
AutoInit = 1 << 8,
Property = 1 << 9,
Readonly = 1 << 10,
Class = 1 << 11,
ClassProperty = 1 << 12,
ClassBodyProperty = 1 << 13,
ClassConstructorProperty = 1 << 14,
ClassSuperMustBeFirstCallInConstructor = 1 << 15,
Constant = 1 << 16,
MustCaptureThis = 1 << 17,
}
export enum FncFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
Signature = 1 << 9,
Method = 1 << 10,
HasReturnExpression = 1 << 11,
CallMember = 1 << 12,
ConstructMember = 1 << 13,
HasSelfReference = 1 << 14,
IsFatArrowFunction = 1 << 15,
IndexerMember = 1 << 16,
IsFunctionExpression = 1 << 17,
ClassMethod = 1 << 18,
ClassPropertyMethodExported = 1 << 19,
HasSuperReferenceInFatArrowFunction = 1 << 20,
IsPropertyBound = 1 << 21,
}
export enum SignatureFlags {
None = 0,
IsIndexer = 1,
IsStringIndexer = 1 << 1,
IsNumberIndexer = 1 << 2,
}
export function ToDeclFlags(fncFlags: FncFlags) : DeclFlags;
export function ToDeclFlags(varFlags: VarFlags) : DeclFlags;
export function ToDeclFlags(symFlags: SymbolFlags): DeclFlags;
export function ToDeclFlags(moduleFlags: ModuleFlags): DeclFlags;
export function ToDe | OrVarOrSymbolOrModuleFlags: any) {
return <DeclFlags>fncOrVarOrSymbolOrModuleFlags;
}
export enum TypeFlags {
None = 0,
HasImplementation = 1,
HasSelfReference = 1 << 1,
MergeResult = 1 << 2,
IsEnum = 1 << 3,
BuildingName = 1 << 4,
HasBaseType = 1 << 5,
HasBaseTypeOfObject = 1 << 6,
IsClass = 1 << 7,
}
export enum TypeRelationshipFlags {
SuccessfulComparison = 0,
SourceIsNullTargetIsVoidOrUndefined = 1,
RequiredPropertyIsMissing = 1 << 1,
IncompatibleSignatures = 1 << 2,
SourceSignatureHasTooManyParameters = 3,
IncompatibleReturnTypes = 1 << 4,
IncompatiblePropertyTypes = 1 << 5,
IncompatibleParameterTypes = 1 << 6,
}
export enum CodeGenTarget {
ES3 = 0,
ES5 = 1,
}
export enum ModuleGenTarget {
Synchronous = 0,
Asynchronous = 1,
Local = 1 << 1,
}
// Compiler defaults to generating ES5-compliant code for
// - getters and setters
export var codeGenTarget: CodeGenTarget = CodeGenTarget.ES3;
export var moduleGenTarget: ModuleGenTarget = ModuleGenTarget.Synchronous;
export var optimizeModuleCodeGen = true;
export function flagsToString(e, flags: number): string {
var builder = "";
for (var i = 1; i < (1 << 31) ; i = i << 1) {
if ((flags & i) != 0) {
for (var k in e) {
if (e[k] == i) {
if (builder.length > 0) {
builder += "|";
}
builder += k;
break;
}
}
}
}
return builder;
}
} | clFlags(fnc | identifier_name |
flags.ts | //
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
///<reference path='typescript.ts' />
module TypeScript {
export function hasFlag(val: number, flag: number) {
return (val & flag) != 0;
}
export enum ErrorRecoverySet {
None = 0,
Comma = 1, // Comma
SColon = 1 << 1, // SColon
Asg = 1 << 2, // Asg
BinOp = 1 << 3, // Lsh, Rsh, Rs2, Le, Ge, INSTANCEOF, EQ, NE, Eqv, NEqv, LogAnd, LogOr, AsgMul, AsgDiv
// AsgMod, AsgAdd, AsgSub, AsgLsh, AsgRsh, AsgRs2, AsgAnd, AsgXor, AsgOr, QMark, Mult, Div,
// Pct, GT, LT, And, Xor, Or
RBrack = 1 << 4, // RBrack
RCurly = 1 << 5, // RCurly
RParen = 1 << 6, // RParen
Dot = 1 << 7, // Dot
Colon = 1 << 8, // Colon
PrimType = 1 << 9, // number, string, bool
AddOp = 1 << 10, // Add, Sub
LCurly = 1 << 11, // LCurly
PreOp = 1 << 12, // Tilde, Bang, Inc, Dec
RegExp = 1 << 13, // RegExp
LParen = 1 << 14, // LParen
LBrack = 1 << 15, // LBrack
Scope = 1 << 16, // Scope
In = 1 << 17, // IN
SCase = 1 << 18, // CASE, DEFAULT
Else = 1 << 19, // ELSE
Catch = 1 << 20, // CATCH, FINALLY
Var = 1 << 21, //
Stmt = 1 << 22, // BREAK, RETURN, THROW, DEBUGGER, FOR, SWITCH, DO, IF, TRY, WITH
While = 1 << 23, // WHILE
ID = 1 << 24, // ID
Prefix = 1 << 25, // VOID, DELETE, TYPEOF, AWAIT
Literal = 1 << 26, // IntCon, FltCon, StrCon
RLit = 1 << 27, // THIS, TRUE, FALSE, NULL
Func = 1 << 28, // FUNCTION
EOF = 1 << 29, // EOF
// REVIEW: Name this something clearer.
TypeScriptS = 1 << 30, // PROPERTY, PRIVATE, STATIC, INTERFACE, CLASS, MODULE, EXPORT, IMPORT
ExprStart = SColon | AddOp | LCurly | PreOp | RegExp | LParen | LBrack | ID | Prefix | RLit | Func | Literal,
StmtStart = ExprStart | SColon | Var | Stmt | While | TypeScriptS,
Postfix = Dot | LParen | LBrack,
}
export enum AllowedElements {
None = 0,
ModuleDeclarations = 1 << 2,
ClassDeclarations = 1 << 3,
InterfaceDeclarations = 1 << 4,
AmbientDeclarations = 1 << 10,
Properties = 1 << 11,
Global = ModuleDeclarations | ClassDeclarations | InterfaceDeclarations | AmbientDeclarations,
QuickParse = Global | Properties,
}
export enum Modifiers {
None = 0,
Private = 1,
Public = 1 << 1,
Readonly = 1 << 2,
Ambient = 1 << 3,
Exported = 1 << 4,
Getter = 1 << 5,
Setter = 1 << 6,
Static = 1 << 7,
}
export enum ASTFlags {
None = 0,
ExplicitSemicolon = 1, // statment terminated by an explicit semicolon
AutomaticSemicolon = 1 << 1, // statment terminated by an automatic semicolon
Writeable = 1 << 2, // node is lhs that can be modified
Error = 1 << 3, // node has an error
DotLHSPartial = 1 << 4, // node is the lhs of an incomplete dot expr at cursor
DotLHS = 1 << 5, // node is the lhs of a dot expr
IsStatement = 1 << 6, // node is a statement
StrictMode = 1 << 7, // node is in the strict mode environment
PossibleOptionalParameter = 1 << 8,
ClassBaseConstructorCall = 1 << 9,
OptionalName = 1 << 10,
// REVIEW: This flag is to mark lambda nodes to note that the LParen of an expression has already been matched in the lambda header.
// The flag is used to communicate this piece of information to the calling parseTerm, which intern will remove it.
// Once we have a better way to associate information with nodes, this flag should not be used.
SkipNextRParen = 1 << 11,
}
export enum DeclFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
}
export enum ModuleFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
IsEnum = 1 << 8,
ShouldEmitModuleDecl = 1 << 9,
IsWholeFile = 1 << 10,
IsDynamic = 1 << 11,
MustCaptureThis = 1 << 12,
}
export enum SymbolFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
Property = 1 << 8,
Readonly = 1 << 9,
ModuleMember = 1 << 10,
InterfaceMember = 1 << 11,
ClassMember = 1 << 12,
BuiltIn = 1 << 13,
TypeSetDuringScopeAssignment = 1 << 14,
Constant = 1 << 15,
Optional = 1 << 16,
RecursivelyReferenced = 1 << 17,
Bound = 1 << 18,
CompilerGenerated = 1 << 19,
}
export enum VarFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
AutoInit = 1 << 8,
Property = 1 << 9,
Readonly = 1 << 10,
Class = 1 << 11,
ClassProperty = 1 << 12,
ClassBodyProperty = 1 << 13,
ClassConstructorProperty = 1 << 14,
ClassSuperMustBeFirstCallInConstructor = 1 << 15,
Constant = 1 << 16,
MustCaptureThis = 1 << 17,
}
export enum FncFlags {
None = 0,
Exported = 1,
Private = 1 << 1,
Public = 1 << 2,
Ambient = 1 << 3,
Static = 1 << 4,
LocalStatic = 1 << 5,
GetAccessor = 1 << 6,
SetAccessor = 1 << 7,
Signature = 1 << 9,
Method = 1 << 10,
HasReturnExpression = 1 << 11,
CallMember = 1 << 12,
ConstructMember = 1 << 13,
HasSelfReference = 1 << 14,
IsFatArrowFunction = 1 << 15,
IndexerMember = 1 << 16,
IsFunctionExpression = 1 << 17,
ClassMethod = 1 << 18,
ClassPropertyMethodExported = 1 << 19,
HasSuperReferenceInFatArrowFunction = 1 << 20,
IsPropertyBound = 1 << 21,
}
export enum SignatureFlags {
None = 0,
IsIndexer = 1,
IsStringIndexer = 1 << 1,
IsNumberIndexer = 1 << 2,
}
export function ToDeclFlags(fncFlags: FncFlags) : DeclFlags;
export function ToDeclFlags(varFlags: VarFlags) : DeclFlags;
export function ToDeclFlags(symFlags: SymbolFlags): DeclFlags;
export function ToDeclFlags(moduleFlags: ModuleFlags): DeclFlags;
export function ToDeclFlags(fncOrVarOrSymbolOrModuleFlags: any) {
return <DeclFlags>fncOrVarOrSymbolOrModuleFlags;
}
export enum TypeFlags {
None = 0,
HasImplementation = 1,
HasSelfReference = 1 << 1,
MergeResult = 1 << 2,
IsEnum = 1 << 3,
BuildingName = 1 << 4,
HasBaseType = 1 << 5,
HasBaseTypeOfObject = 1 << 6,
IsClass = 1 << 7,
}
export enum TypeRelationshipFlags {
SuccessfulComparison = 0,
SourceIsNullTargetIsVoidOrUndefined = 1,
RequiredPropertyIsMissing = 1 << 1,
IncompatibleSignatures = 1 << 2,
SourceSignatureHasTooManyParameters = 3,
IncompatibleReturnTypes = 1 << 4,
IncompatiblePropertyTypes = 1 << 5,
IncompatibleParameterTypes = 1 << 6,
}
export enum CodeGenTarget {
ES3 = 0,
ES5 = 1,
}
export enum ModuleGenTarget {
Synchronous = 0,
Asynchronous = 1,
Local = 1 << 1,
}
// Compiler defaults to generating ES5-compliant code for
// - getters and setters
export var codeGenTarget: CodeGenTarget = CodeGenTarget.ES3;
export var moduleGenTarget: ModuleGenTarget = ModuleGenTarget.Synchronous;
export var optimizeModuleCodeGen = true;
export function flagsToString(e, flags: number): string {
var builder = "";
for (var i = 1; i < (1 << 31) ; i = i << 1) {
if ((flags & i) != 0) {
for (var k in e) {
| }
builder += k;
break;
}
}
}
}
return builder;
}
} | if (e[k] == i) {
if (builder.length > 0) {
builder += "|";
| random_line_split |
bfe_photo_resources_brief.py | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the | ##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints brief HTML picture and links to resources
"""
__revision__ = "$Id$"
def format_element(bfo):
"""
Prints html image and link to photo resources.
"""
from invenio.config import CFG_SITE_URL, CFG_SITE_RECORD
resources = bfo.fields("8564_")
out = ""
for resource in resources:
if resource.get("x", "") == "icon":
out += '<a href="'+CFG_SITE_URL+'/'+ CFG_SITE_RECORD +'/'+bfo.control_field("001")+ \
'?ln='+ bfo.lang + '"><img src="' + resource.get("u", "").replace(" ","") \
+ '" alt="" border="0"/></a>'
return out
def escape_values(bfo):
"""
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0 | ## License, or (at your option) any later version. | random_line_split |
bfe_photo_resources_brief.py | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints brief HTML picture and links to resources
"""
__revision__ = "$Id$"
def format_element(bfo):
"""
Prints html image and link to photo resources.
"""
from invenio.config import CFG_SITE_URL, CFG_SITE_RECORD
resources = bfo.fields("8564_")
out = ""
for resource in resources:
if resource.get("x", "") == "icon":
out += '<a href="'+CFG_SITE_URL+'/'+ CFG_SITE_RECORD +'/'+bfo.control_field("001")+ \
'?ln='+ bfo.lang + '"><img src="' + resource.get("u", "").replace(" ","") \
+ '" alt="" border="0"/></a>'
return out
def | (bfo):
"""
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0
| escape_values | identifier_name |
bfe_photo_resources_brief.py | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints brief HTML picture and links to resources
"""
__revision__ = "$Id$"
def format_element(bfo):
"""
Prints html image and link to photo resources.
"""
from invenio.config import CFG_SITE_URL, CFG_SITE_RECORD
resources = bfo.fields("8564_")
out = ""
for resource in resources:
|
return out
def escape_values(bfo):
"""
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0
| if resource.get("x", "") == "icon":
out += '<a href="'+CFG_SITE_URL+'/'+ CFG_SITE_RECORD +'/'+bfo.control_field("001")+ \
'?ln='+ bfo.lang + '"><img src="' + resource.get("u", "").replace(" ","") \
+ '" alt="" border="0"/></a>' | conditional_block |
bfe_photo_resources_brief.py | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints brief HTML picture and links to resources
"""
__revision__ = "$Id$"
def format_element(bfo):
"""
Prints html image and link to photo resources.
"""
from invenio.config import CFG_SITE_URL, CFG_SITE_RECORD
resources = bfo.fields("8564_")
out = ""
for resource in resources:
if resource.get("x", "") == "icon":
out += '<a href="'+CFG_SITE_URL+'/'+ CFG_SITE_RECORD +'/'+bfo.control_field("001")+ \
'?ln='+ bfo.lang + '"><img src="' + resource.get("u", "").replace(" ","") \
+ '" alt="" border="0"/></a>'
return out
def escape_values(bfo):
| """
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0 | identifier_body |
|
unix.rs | use libloading::Library;
use std::fs::ReadDir;
use types::Identifier;
/// Grabs all `Library` entries found within a given directory
pub(crate) struct LibraryIterator {
directory: ReadDir,
}
impl LibraryIterator {
pub(crate) fn new(directory: ReadDir) -> LibraryIterator { LibraryIterator { directory } }
}
impl Iterator for LibraryIterator {
// The `Identifier` is the name of the namespace for which values may be pulled.
// The `Library` is a handle to dynamic library loaded into memory.
type Item = (Identifier, Library);
fn next(&mut self) -> Option<(Identifier, Library)> |
}
| {
while let Some(entry) = self.directory.next() {
let entry = if let Ok(entry) = entry { entry } else { continue };
let path = entry.path();
// An entry is a library if it is a file with a 'so' extension.
if path.is_file() && path.extension().map_or(false, |ext| ext == "so") {
// The identifier will be the file name of that file, without the extension.
let identifier = match path.file_stem().unwrap().to_str() {
Some(filename) => Identifier::from(filename),
None => {
eprintln!("ion: namespace plugin has invalid filename");
continue;
}
};
// This will attempt to load the library into memory.
match Library::new(path.as_os_str()) {
Ok(library) => return Some((identifier, library)),
Err(why) => {
eprintln!("ion: failed to load library: {:?}, {:?}", path, why);
continue;
}
}
} else {
continue;
}
}
None
} | identifier_body |
unix.rs | use libloading::Library;
use std::fs::ReadDir;
use types::Identifier;
/// Grabs all `Library` entries found within a given directory
pub(crate) struct LibraryIterator {
directory: ReadDir,
}
impl LibraryIterator {
pub(crate) fn new(directory: ReadDir) -> LibraryIterator { LibraryIterator { directory } }
}
impl Iterator for LibraryIterator {
// The `Identifier` is the name of the namespace for which values may be pulled.
// The `Library` is a handle to dynamic library loaded into memory.
type Item = (Identifier, Library);
fn next(&mut self) -> Option<(Identifier, Library)> {
while let Some(entry) = self.directory.next() {
let entry = if let Ok(entry) = entry { entry } else { continue };
let path = entry.path();
// An entry is a library if it is a file with a 'so' extension.
if path.is_file() && path.extension().map_or(false, |ext| ext == "so") {
// The identifier will be the file name of that file, without the extension.
let identifier = match path.file_stem().unwrap().to_str() {
Some(filename) => Identifier::from(filename),
None => {
eprintln!("ion: namespace plugin has invalid filename");
continue;
}
};
// This will attempt to load the library into memory.
match Library::new(path.as_os_str()) {
Ok(library) => return Some((identifier, library)),
Err(why) => {
eprintln!("ion: failed to load library: {:?}, {:?}", path, why);
continue;
}
}
} else |
}
None
}
}
| {
continue;
} | conditional_block |
unix.rs | use libloading::Library;
use std::fs::ReadDir;
use types::Identifier;
/// Grabs all `Library` entries found within a given directory
pub(crate) struct LibraryIterator {
directory: ReadDir,
}
impl LibraryIterator {
pub(crate) fn new(directory: ReadDir) -> LibraryIterator { LibraryIterator { directory } }
}
impl Iterator for LibraryIterator {
// The `Identifier` is the name of the namespace for which values may be pulled.
// The `Library` is a handle to dynamic library loaded into memory.
type Item = (Identifier, Library);
fn next(&mut self) -> Option<(Identifier, Library)> {
while let Some(entry) = self.directory.next() {
let entry = if let Ok(entry) = entry { entry } else { continue };
let path = entry.path();
// An entry is a library if it is a file with a 'so' extension.
if path.is_file() && path.extension().map_or(false, |ext| ext == "so") {
// The identifier will be the file name of that file, without the extension.
let identifier = match path.file_stem().unwrap().to_str() {
Some(filename) => Identifier::from(filename),
None => {
eprintln!("ion: namespace plugin has invalid filename");
continue;
}
};
// This will attempt to load the library into memory.
match Library::new(path.as_os_str()) {
Ok(library) => return Some((identifier, library)),
Err(why) => { | }
} else {
continue;
}
}
None
}
} | eprintln!("ion: failed to load library: {:?}, {:?}", path, why);
continue;
} | random_line_split |
unix.rs | use libloading::Library;
use std::fs::ReadDir;
use types::Identifier;
/// Grabs all `Library` entries found within a given directory
pub(crate) struct | {
directory: ReadDir,
}
impl LibraryIterator {
pub(crate) fn new(directory: ReadDir) -> LibraryIterator { LibraryIterator { directory } }
}
impl Iterator for LibraryIterator {
// The `Identifier` is the name of the namespace for which values may be pulled.
// The `Library` is a handle to dynamic library loaded into memory.
type Item = (Identifier, Library);
fn next(&mut self) -> Option<(Identifier, Library)> {
while let Some(entry) = self.directory.next() {
let entry = if let Ok(entry) = entry { entry } else { continue };
let path = entry.path();
// An entry is a library if it is a file with a 'so' extension.
if path.is_file() && path.extension().map_or(false, |ext| ext == "so") {
// The identifier will be the file name of that file, without the extension.
let identifier = match path.file_stem().unwrap().to_str() {
Some(filename) => Identifier::from(filename),
None => {
eprintln!("ion: namespace plugin has invalid filename");
continue;
}
};
// This will attempt to load the library into memory.
match Library::new(path.as_os_str()) {
Ok(library) => return Some((identifier, library)),
Err(why) => {
eprintln!("ion: failed to load library: {:?}, {:?}", path, why);
continue;
}
}
} else {
continue;
}
}
None
}
}
| LibraryIterator | identifier_name |
win.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Windows console handling
// FIXME (#13400): this is only a tiny fraction of the Windows console api
extern crate libc;
use std::io;
use std::io::prelude::*;
use attr;
use color;
use {Terminal,UnwrappableTerminal};
/// A Terminal implementation which uses the Win32 Console API.
pub struct WinConsole<T> {
buf: T,
def_foreground: color::Color,
def_background: color::Color,
foreground: color::Color,
background: color::Color,
}
#[allow(non_snake_case)]
#[repr(C)]
struct CONSOLE_SCREEN_BUFFER_INFO {
dwSize: [libc::c_short; 2],
dwCursorPosition: [libc::c_short; 2],
wAttributes: libc::WORD,
srWindow: [libc::c_short; 4],
dwMaximumWindowSize: [libc::c_short; 2],
}
#[allow(non_snake_case)]
#[link(name = "kernel32")]
extern "system" {
fn SetConsoleTextAttribute(handle: libc::HANDLE, attr: libc::WORD) -> libc::BOOL;
fn GetStdHandle(which: libc::DWORD) -> libc::HANDLE;
fn GetConsoleScreenBufferInfo(handle: libc::HANDLE,
info: *mut CONSOLE_SCREEN_BUFFER_INFO) -> libc::BOOL;
}
fn color_to_bits(color: color::Color) -> u16 {
// magic numbers from mingw-w64's wincon.h
let bits = match color % 8 {
color::BLACK => 0,
color::BLUE => 0x1,
color::GREEN => 0x2,
color::RED => 0x4,
color::YELLOW => 0x2 | 0x4,
color::MAGENTA => 0x1 | 0x4,
color::CYAN => 0x1 | 0x2,
color::WHITE => 0x1 | 0x2 | 0x4,
_ => unreachable!()
};
if color >= 8 {
bits | 0x8
} else {
bits
}
}
fn bits_to_color(bits: u16) -> color::Color {
let color = match bits & 0x7 {
0 => color::BLACK,
0x1 => color::BLUE,
0x2 => color::GREEN,
0x4 => color::RED,
0x6 => color::YELLOW,
0x5 => color::MAGENTA,
0x3 => color::CYAN,
0x7 => color::WHITE,
_ => unreachable!()
};
color | (bits & 0x8) // copy the hi-intensity bit
}
impl<T: Write+Send+'static> WinConsole<T> {
fn apply(&mut self) {
let _unused = self.buf.flush();
let mut accum: libc::WORD = 0;
accum |= color_to_bits(self.foreground);
accum |= color_to_bits(self.background) << 4;
unsafe {
// Magic -11 means stdout, from
// http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231%28v=vs.85%29.aspx
//
// You may be wondering, "but what about stderr?", and the answer
// to that is that setting terminal attributes on the stdout
// handle also sets them for stderr, since they go to the same
// terminal! Admittedly, this is fragile, since stderr could be
// redirected to a different console. This is good enough for
// rustc though. See #13400.
let out = GetStdHandle(-11);
SetConsoleTextAttribute(out, accum);
}
}
/// Returns `None` whenever the terminal cannot be created for some
/// reason.
pub fn new(out: T) -> Option<Box<Terminal<T>+Send+'static>> {
let fg;
let bg;
unsafe {
let mut buffer_info = ::std::mem::uninitialized();
if GetConsoleScreenBufferInfo(GetStdHandle(-11), &mut buffer_info) != 0 {
fg = bits_to_color(buffer_info.wAttributes);
bg = bits_to_color(buffer_info.wAttributes >> 4);
} else {
fg = color::WHITE;
bg = color::BLACK;
}
}
Some(box WinConsole { buf: out,
def_foreground: fg, def_background: bg,
foreground: fg, background: bg } as Box<Terminal<T>+Send>)
}
}
impl<T: Write> Write for WinConsole<T> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.buf.write(buf)
}
fn flush(&mut self) -> io::Result<()> {
self.buf.flush()
} | self.foreground = color;
self.apply();
Ok(true)
}
fn bg(&mut self, color: color::Color) -> io::Result<bool> {
self.background = color;
self.apply();
Ok(true)
}
fn attr(&mut self, attr: attr::Attr) -> io::Result<bool> {
match attr {
attr::ForegroundColor(f) => {
self.foreground = f;
self.apply();
Ok(true)
},
attr::BackgroundColor(b) => {
self.background = b;
self.apply();
Ok(true)
},
_ => Ok(false)
}
}
fn supports_attr(&self, attr: attr::Attr) -> bool {
// it claims support for underscore and reverse video, but I can't get
// it to do anything -cmr
match attr {
attr::ForegroundColor(_) | attr::BackgroundColor(_) => true,
_ => false
}
}
fn reset(&mut self) -> io::Result<()> {
self.foreground = self.def_foreground;
self.background = self.def_background;
self.apply();
Ok(())
}
fn get_ref<'a>(&'a self) -> &'a T { &self.buf }
fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.buf }
}
impl<T: Write+Send+'static> UnwrappableTerminal<T> for WinConsole<T> {
fn unwrap(self) -> T { self.buf }
} | }
impl<T: Write+Send+'static> Terminal<T> for WinConsole<T> {
fn fg(&mut self, color: color::Color) -> io::Result<bool> { | random_line_split |
win.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Windows console handling
// FIXME (#13400): this is only a tiny fraction of the Windows console api
extern crate libc;
use std::io;
use std::io::prelude::*;
use attr;
use color;
use {Terminal,UnwrappableTerminal};
/// A Terminal implementation which uses the Win32 Console API.
pub struct WinConsole<T> {
buf: T,
def_foreground: color::Color,
def_background: color::Color,
foreground: color::Color,
background: color::Color,
}
#[allow(non_snake_case)]
#[repr(C)]
struct CONSOLE_SCREEN_BUFFER_INFO {
dwSize: [libc::c_short; 2],
dwCursorPosition: [libc::c_short; 2],
wAttributes: libc::WORD,
srWindow: [libc::c_short; 4],
dwMaximumWindowSize: [libc::c_short; 2],
}
#[allow(non_snake_case)]
#[link(name = "kernel32")]
extern "system" {
fn SetConsoleTextAttribute(handle: libc::HANDLE, attr: libc::WORD) -> libc::BOOL;
fn GetStdHandle(which: libc::DWORD) -> libc::HANDLE;
fn GetConsoleScreenBufferInfo(handle: libc::HANDLE,
info: *mut CONSOLE_SCREEN_BUFFER_INFO) -> libc::BOOL;
}
fn color_to_bits(color: color::Color) -> u16 {
// magic numbers from mingw-w64's wincon.h
let bits = match color % 8 {
color::BLACK => 0,
color::BLUE => 0x1,
color::GREEN => 0x2,
color::RED => 0x4,
color::YELLOW => 0x2 | 0x4,
color::MAGENTA => 0x1 | 0x4,
color::CYAN => 0x1 | 0x2,
color::WHITE => 0x1 | 0x2 | 0x4,
_ => unreachable!()
};
if color >= 8 {
bits | 0x8
} else {
bits
}
}
fn bits_to_color(bits: u16) -> color::Color {
let color = match bits & 0x7 {
0 => color::BLACK,
0x1 => color::BLUE,
0x2 => color::GREEN,
0x4 => color::RED,
0x6 => color::YELLOW,
0x5 => color::MAGENTA,
0x3 => color::CYAN,
0x7 => color::WHITE,
_ => unreachable!()
};
color | (bits & 0x8) // copy the hi-intensity bit
}
impl<T: Write+Send+'static> WinConsole<T> {
fn apply(&mut self) {
let _unused = self.buf.flush();
let mut accum: libc::WORD = 0;
accum |= color_to_bits(self.foreground);
accum |= color_to_bits(self.background) << 4;
unsafe {
// Magic -11 means stdout, from
// http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231%28v=vs.85%29.aspx
//
// You may be wondering, "but what about stderr?", and the answer
// to that is that setting terminal attributes on the stdout
// handle also sets them for stderr, since they go to the same
// terminal! Admittedly, this is fragile, since stderr could be
// redirected to a different console. This is good enough for
// rustc though. See #13400.
let out = GetStdHandle(-11);
SetConsoleTextAttribute(out, accum);
}
}
/// Returns `None` whenever the terminal cannot be created for some
/// reason.
pub fn new(out: T) -> Option<Box<Terminal<T>+Send+'static>> |
}
impl<T: Write> Write for WinConsole<T> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.buf.write(buf)
}
fn flush(&mut self) -> io::Result<()> {
self.buf.flush()
}
}
impl<T: Write+Send+'static> Terminal<T> for WinConsole<T> {
fn fg(&mut self, color: color::Color) -> io::Result<bool> {
self.foreground = color;
self.apply();
Ok(true)
}
fn bg(&mut self, color: color::Color) -> io::Result<bool> {
self.background = color;
self.apply();
Ok(true)
}
fn attr(&mut self, attr: attr::Attr) -> io::Result<bool> {
match attr {
attr::ForegroundColor(f) => {
self.foreground = f;
self.apply();
Ok(true)
},
attr::BackgroundColor(b) => {
self.background = b;
self.apply();
Ok(true)
},
_ => Ok(false)
}
}
fn supports_attr(&self, attr: attr::Attr) -> bool {
// it claims support for underscore and reverse video, but I can't get
// it to do anything -cmr
match attr {
attr::ForegroundColor(_) | attr::BackgroundColor(_) => true,
_ => false
}
}
fn reset(&mut self) -> io::Result<()> {
self.foreground = self.def_foreground;
self.background = self.def_background;
self.apply();
Ok(())
}
fn get_ref<'a>(&'a self) -> &'a T { &self.buf }
fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.buf }
}
impl<T: Write+Send+'static> UnwrappableTerminal<T> for WinConsole<T> {
fn unwrap(self) -> T { self.buf }
}
| {
let fg;
let bg;
unsafe {
let mut buffer_info = ::std::mem::uninitialized();
if GetConsoleScreenBufferInfo(GetStdHandle(-11), &mut buffer_info) != 0 {
fg = bits_to_color(buffer_info.wAttributes);
bg = bits_to_color(buffer_info.wAttributes >> 4);
} else {
fg = color::WHITE;
bg = color::BLACK;
}
}
Some(box WinConsole { buf: out,
def_foreground: fg, def_background: bg,
foreground: fg, background: bg } as Box<Terminal<T>+Send>)
} | identifier_body |
win.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Windows console handling
// FIXME (#13400): this is only a tiny fraction of the Windows console api
extern crate libc;
use std::io;
use std::io::prelude::*;
use attr;
use color;
use {Terminal,UnwrappableTerminal};
/// A Terminal implementation which uses the Win32 Console API.
pub struct WinConsole<T> {
buf: T,
def_foreground: color::Color,
def_background: color::Color,
foreground: color::Color,
background: color::Color,
}
#[allow(non_snake_case)]
#[repr(C)]
struct CONSOLE_SCREEN_BUFFER_INFO {
dwSize: [libc::c_short; 2],
dwCursorPosition: [libc::c_short; 2],
wAttributes: libc::WORD,
srWindow: [libc::c_short; 4],
dwMaximumWindowSize: [libc::c_short; 2],
}
#[allow(non_snake_case)]
#[link(name = "kernel32")]
extern "system" {
fn SetConsoleTextAttribute(handle: libc::HANDLE, attr: libc::WORD) -> libc::BOOL;
fn GetStdHandle(which: libc::DWORD) -> libc::HANDLE;
fn GetConsoleScreenBufferInfo(handle: libc::HANDLE,
info: *mut CONSOLE_SCREEN_BUFFER_INFO) -> libc::BOOL;
}
fn color_to_bits(color: color::Color) -> u16 {
// magic numbers from mingw-w64's wincon.h
let bits = match color % 8 {
color::BLACK => 0,
color::BLUE => 0x1,
color::GREEN => 0x2,
color::RED => 0x4,
color::YELLOW => 0x2 | 0x4,
color::MAGENTA => 0x1 | 0x4,
color::CYAN => 0x1 | 0x2,
color::WHITE => 0x1 | 0x2 | 0x4,
_ => unreachable!()
};
if color >= 8 {
bits | 0x8
} else {
bits
}
}
fn | (bits: u16) -> color::Color {
let color = match bits & 0x7 {
0 => color::BLACK,
0x1 => color::BLUE,
0x2 => color::GREEN,
0x4 => color::RED,
0x6 => color::YELLOW,
0x5 => color::MAGENTA,
0x3 => color::CYAN,
0x7 => color::WHITE,
_ => unreachable!()
};
color | (bits & 0x8) // copy the hi-intensity bit
}
impl<T: Write+Send+'static> WinConsole<T> {
fn apply(&mut self) {
let _unused = self.buf.flush();
let mut accum: libc::WORD = 0;
accum |= color_to_bits(self.foreground);
accum |= color_to_bits(self.background) << 4;
unsafe {
// Magic -11 means stdout, from
// http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231%28v=vs.85%29.aspx
//
// You may be wondering, "but what about stderr?", and the answer
// to that is that setting terminal attributes on the stdout
// handle also sets them for stderr, since they go to the same
// terminal! Admittedly, this is fragile, since stderr could be
// redirected to a different console. This is good enough for
// rustc though. See #13400.
let out = GetStdHandle(-11);
SetConsoleTextAttribute(out, accum);
}
}
/// Returns `None` whenever the terminal cannot be created for some
/// reason.
pub fn new(out: T) -> Option<Box<Terminal<T>+Send+'static>> {
let fg;
let bg;
unsafe {
let mut buffer_info = ::std::mem::uninitialized();
if GetConsoleScreenBufferInfo(GetStdHandle(-11), &mut buffer_info) != 0 {
fg = bits_to_color(buffer_info.wAttributes);
bg = bits_to_color(buffer_info.wAttributes >> 4);
} else {
fg = color::WHITE;
bg = color::BLACK;
}
}
Some(box WinConsole { buf: out,
def_foreground: fg, def_background: bg,
foreground: fg, background: bg } as Box<Terminal<T>+Send>)
}
}
impl<T: Write> Write for WinConsole<T> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.buf.write(buf)
}
fn flush(&mut self) -> io::Result<()> {
self.buf.flush()
}
}
impl<T: Write+Send+'static> Terminal<T> for WinConsole<T> {
fn fg(&mut self, color: color::Color) -> io::Result<bool> {
self.foreground = color;
self.apply();
Ok(true)
}
fn bg(&mut self, color: color::Color) -> io::Result<bool> {
self.background = color;
self.apply();
Ok(true)
}
fn attr(&mut self, attr: attr::Attr) -> io::Result<bool> {
match attr {
attr::ForegroundColor(f) => {
self.foreground = f;
self.apply();
Ok(true)
},
attr::BackgroundColor(b) => {
self.background = b;
self.apply();
Ok(true)
},
_ => Ok(false)
}
}
fn supports_attr(&self, attr: attr::Attr) -> bool {
// it claims support for underscore and reverse video, but I can't get
// it to do anything -cmr
match attr {
attr::ForegroundColor(_) | attr::BackgroundColor(_) => true,
_ => false
}
}
fn reset(&mut self) -> io::Result<()> {
self.foreground = self.def_foreground;
self.background = self.def_background;
self.apply();
Ok(())
}
fn get_ref<'a>(&'a self) -> &'a T { &self.buf }
fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.buf }
}
impl<T: Write+Send+'static> UnwrappableTerminal<T> for WinConsole<T> {
fn unwrap(self) -> T { self.buf }
}
| bits_to_color | identifier_name |
primitive_functions.js | Board.scheme_primitive_functions = (function ($) {
var m_canvas = null;
var m_context = null;
var m_main_frame = null;
var m_board_main = Board.board_main;
var m_editor = Board.editor;
var m_div_repl = Board.div_repl;
var JS_functions = {
'set-painter-frame': function(args){
// (make-frame)
console.log(args);
var x, y, width;
x = car(args).value;
y = cadr(args).value;
width = caddr(args).value;
m_main_frame = new PainterFrame(x, y, width);
return T;
},
'set-color': function(args) {
console.log(args);
var x, y, colors;
x = car(args).value;
y = cadr(args).value;
colors = caddr(args);
var r = car(colors).value;
var g = cadr(colors).value;
var b = caddr(colors).value;
m_board_main.set_color(x, y, r, g, b);
return T;
},
'draw-line': function(args){
console.log(args);
var x1, y1, x2, y2;
x1 = car(args).value;
y1 = cadr(args).value;
x2 = car(cddr(args)).value;
y2 = cadr(cddr(args)).value;
m_board_main.draw_line(x1, y1, x2, y2);
return T;
},
'board-reset': function(){
m_board_main.reset();
return T;
},
'draw-x': function(){
return T;
},
'load': function(args){
console.log('load');
console.log('(begin ' + m_editor.get_val() + ')');
m_div_repl.output_eval_result('(begin ' + m_editor.get_val() + ')');
return T;
}
};
return {
init: function(){
m_canvas = $('#canvas_main').get(0);
m_context = m_canvas.getContext('2d'); | var createBind = function (variable, value) {
var result = {};
result.variable = variable;
result.value = value;
return result;
};
var builtinFunc = function (variable, value) {
result.push(createBind(variable, new LPrimitiveFunc(value)));
};
console.log(JS_functions);
for(key in JS_functions) {
console.log(key);
console.log(JS_functions[key]);
builtinFunc(String(key), JS_functions[key]);
}
return result;
}
};
})(jQuery); | m_editor = Board.editor;
},
install_JS_functions: function(result){ | random_line_split |
email-input.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = undefined;
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _react = require("react");
var _react2 = _interopRequireDefault(_react);
var _propTypes = require("prop-types");
var _propTypes2 = _interopRequireDefault(_propTypes);
var _textInput = require("./text-input");
var _textInput2 = _interopRequireDefault(_textInput);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function | (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
function validateEmail(email) {
var re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
return re.test(email);
}
var EmailInput = function (_TextInput) {
_inherits(EmailInput, _TextInput);
function EmailInput(props) {
_classCallCheck(this, EmailInput);
return _possibleConstructorReturn(this, (EmailInput.__proto__ || Object.getPrototypeOf(EmailInput)).call(this, props));
}
_createClass(EmailInput, [{
key: "onValid",
value: function onValid(e) {
if (!!this.props.onValid) {
this.props.onValid(validateEmail(e.target.value), e);
}
}
}]);
return EmailInput;
}(_textInput2.default);
exports.default = EmailInput;
EmailInput.propTypes = {
type: _propTypes2.default.string.isRequired
};
EmailInput.defaultProps = {
type: "email"
}; | _classCallCheck | identifier_name |
email-input.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = undefined;
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _react = require("react");
var _react2 = _interopRequireDefault(_react);
var _propTypes = require("prop-types");
var _propTypes2 = _interopRequireDefault(_propTypes);
var _textInput = require("./text-input");
var _textInput2 = _interopRequireDefault(_textInput);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) |
function validateEmail(email) {
var re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
return re.test(email);
}
var EmailInput = function (_TextInput) {
_inherits(EmailInput, _TextInput);
function EmailInput(props) {
_classCallCheck(this, EmailInput);
return _possibleConstructorReturn(this, (EmailInput.__proto__ || Object.getPrototypeOf(EmailInput)).call(this, props));
}
_createClass(EmailInput, [{
key: "onValid",
value: function onValid(e) {
if (!!this.props.onValid) {
this.props.onValid(validateEmail(e.target.value), e);
}
}
}]);
return EmailInput;
}(_textInput2.default);
exports.default = EmailInput;
EmailInput.propTypes = {
type: _propTypes2.default.string.isRequired
};
EmailInput.defaultProps = {
type: "email"
}; | { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } | identifier_body |
email-input.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = undefined;
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _react = require("react");
var _react2 = _interopRequireDefault(_react);
var _propTypes = require("prop-types");
var _propTypes2 = _interopRequireDefault(_propTypes);
var _textInput = require("./text-input");
var _textInput2 = _interopRequireDefault(_textInput);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
function validateEmail(email) {
var re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
return re.test(email);
}
var EmailInput = function (_TextInput) {
_inherits(EmailInput, _TextInput);
function EmailInput(props) {
_classCallCheck(this, EmailInput);
return _possibleConstructorReturn(this, (EmailInput.__proto__ || Object.getPrototypeOf(EmailInput)).call(this, props));
}
_createClass(EmailInput, [{
key: "onValid",
value: function onValid(e) {
if (!!this.props.onValid) {
this.props.onValid(validateEmail(e.target.value), e);
} | }
}]);
return EmailInput;
}(_textInput2.default);
exports.default = EmailInput;
EmailInput.propTypes = {
type: _propTypes2.default.string.isRequired
};
EmailInput.defaultProps = {
type: "email"
}; | random_line_split |
|
email-input.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = undefined;
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _react = require("react");
var _react2 = _interopRequireDefault(_react);
var _propTypes = require("prop-types");
var _propTypes2 = _interopRequireDefault(_propTypes);
var _textInput = require("./text-input");
var _textInput2 = _interopRequireDefault(_textInput);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) | subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
function validateEmail(email) {
var re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
return re.test(email);
}
var EmailInput = function (_TextInput) {
_inherits(EmailInput, _TextInput);
function EmailInput(props) {
_classCallCheck(this, EmailInput);
return _possibleConstructorReturn(this, (EmailInput.__proto__ || Object.getPrototypeOf(EmailInput)).call(this, props));
}
_createClass(EmailInput, [{
key: "onValid",
value: function onValid(e) {
if (!!this.props.onValid) {
this.props.onValid(validateEmail(e.target.value), e);
}
}
}]);
return EmailInput;
}(_textInput2.default);
exports.default = EmailInput;
EmailInput.propTypes = {
type: _propTypes2.default.string.isRequired
};
EmailInput.defaultProps = {
type: "email"
}; | { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } | conditional_block |
setup.py | # vim: set fileencoding=utf-8 :
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='endymion',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='1.4.1',
description='A small tool to check the link validity of external Vagrant boxes on Atlas',
long_description=long_description,
# The project's main homepage.
url='https://github.com/lpancescu/endymion',
# Author details
author='Laurențiu Păncescu',
author_email='[email protected]',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Utilities',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
# What does your project relate to?
keywords='vagrant atlas',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=[]),
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
# py_modules=["my_module"],
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=[],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
data_files=[],
| # To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'endymion=endymion:main',
],
},
) | random_line_split |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.