code
string | signature
string | docstring
string | loss_without_docstring
float64 | loss_with_docstring
float64 | factor
float64 |
---|---|---|---|---|---|
# Check and prepare options dict.
# Normalize the `match` and `exclude` options in place: a comma separated
# string becomes a list of stripped fnmatch patterns, a missing value
# becomes an empty list, and an existing list passes through unchanged.
match = opts.get("match")
if match and isinstance(match, str):
    opts["match"] = [pat.strip() for pat in match.split(",")]
elif match:
    # Already a pattern list: accept as-is
    assert isinstance(match, list)
else:
    opts["match"] = []

exclude = opts.get("exclude")
if exclude and isinstance(exclude, str):
    opts["exclude"] = [pat.strip() for pat in exclude.split(",")]
elif exclude:
    assert isinstance(exclude, list)
else:
    # opts["exclude"] = DEFAULT_OMIT
    opts["exclude"] = []
|
def process_options(opts)
|
Check and prepare options dict.
| 2.652212 | 2.528428 | 1.048957 |
# Return True if `entry` passes the `match`/`exclude` filters in `opts`.
# Entries listed in ALWAYS_OMIT are rejected unconditionally.
if entry.name in ALWAYS_OMIT:
    return False
# TODO: currently we use fnmatch syntax and match against names.
# We also might allow glob syntax and match against the whole relative path instead
# path = entry.get_rel_path()
path = entry.name
ok = True
match = opts.get("match")
exclude = opts.get("exclude")
# `match` patterns are only applied to files (directories always pass)
if entry.is_file() and match:
    assert type(match) is list
    ok = False
    for pat in match:
        if fnmatch.fnmatch(path, pat):
            ok = True
            break
# `exclude` patterns can veto an otherwise matching entry
if ok and exclude:
    assert type(exclude) is list
    for pat in exclude:
        if fnmatch.fnmatch(path, pat):
            ok = False
            break
# write("match", ok, entry)
return ok
|
def match_path(entry, opts)
|
Return True if `path` matches `match` and `exclude` options.
| 4.20781 | 4.03785 | 1.042092 |
assert isinstance(local, FileEntry) and isinstance(remote, FileEntry)
if not local or not remote:
write(" Files cannot be compared ({} != {}).".format(local, remote))
return False
elif local.size != remote.size:
write(
" Files are different (size {:,d} != {:,d}).".format(
local.size, remote.size
)
)
return False
with local.target.open_readable(
local.name
) as fp_src, remote.target.open_readable(remote.name) as fp_dest:
res, ofs = byte_compare(fp_src, fp_dest)
if not res:
write(" Files are different at offset {:,d}.".format(ofs))
else:
write(" Files are equal.")
return res
|
def _compare_file(self, local, remote)
|
Byte compare two files (early out on first difference).
| 3.419009 | 3.170278 | 1.078457 |
# Write progress info and move the cursor back to the beginning of the
# line, so the next tick overwrites it in place.
if (self.verbose >= 3 and not IS_REDIRECTED) or self.options.get("progress"):
    stats = self.get_stats()
    prefix = DRY_RUN_PREFIX if self.dry_run else ""
    # Trailing '\r' (no newline) keeps the output on one updating line
    sys.stdout.write(
        "{}Touched {}/{} entries in {} directories...\r".format(
            prefix,
            stats["entries_touched"],
            stats["entries_seen"],
            stats["local_dirs"],
        )
    )
sys.stdout.flush()
return
|
def _tick(self)
|
Write progress info and move cursor to beginning of line.
| 6.214265 | 5.810861 | 1.069422 |
# Handle an error that occurred while processing `pair`.
# Returning True would ignore the error; returning False lets it
# propagate and terminate the application.
RED = ansi_code("Fore.LIGHTRED_EX")
R = ansi_code("Style.RESET_ALL")
# any_entry = pair.any_entry
write((RED + "ERROR: {}\n {}" + R).format(e, pair))
# Return True to ignore this error (instead of raising and terminating the app)
if "[Errno 92] Illegal byte sequence" in "{}".format(e) and compat.PY2:
    write(RED + "This _may_ be solved by using Python 3." + R)
    # return True
return False
|
def on_error(self, e, pair)
|
Called when an error occurred while processing a pair.
Return True to ignore the error; return False to re-raise it and terminate the app.
| 10.54443 | 10.390789 | 1.014786 |
# Called when the local resource should be copied to remote: just log the
# pending action, tagged with the remote side's classification.
self._log_action("copy", pair.remote_classification, ">", pair.local)
|
def on_copy_local(self, pair)
|
Called when the local resource should be copied to remote.
| 26.544292 | 26.381571 | 1.006168 |
# Called when the remote resource should be copied to local: just log the
# pending action, tagged with the local side's classification.
self._log_action("copy", pair.local_classification, "<", pair.remote)
|
def on_copy_remote(self, pair)
|
Called when the remote resource should be copied to local.
| 26.993656 | 28.082483 | 0.961228 |
# Re-classify `pair` using peer information (mtime/size heuristics),
# then dispatch to the matching `on_<operation>` handler.
# print("on_need_compare", pair)
# If no metadata is available, we could only classify file entries as
# 'existing'.
# Now we use peer information to improve this classification.
c_pair = (pair.local_classification, pair.remote_classification)
org_pair = c_pair
org_operation = pair.operation
# print("need_compare", pair)
if pair.is_dir:
    # For directores, we cannot compare existing peer entries.
    # Instead, we simply log (and traverse the children later).
    pair.local_classification = pair.remote_classification = "existing"
    pair.operation = "equal"
    self._log_action("", "visit", "?", pair.local, min_level=4)
    # self._log_action("", "equal", "=", pair.local, min_level=4)
    return
elif c_pair == ("existing", "existing"):
    # Naive classification derived from file time and size
    time_cmp = eps_compare(
        pair.local.mtime, pair.remote.mtime, FileEntry.EPS_TIME
    )
    if time_cmp < 0:
        c_pair = ("unmodified", "modified")  # remote is newer
    elif time_cmp > 0:
        c_pair = ("modified", "unmodified")  # local is newer
    elif pair.local.size == pair.remote.size:
        c_pair = ("unmodified", "unmodified")  # equal
    else:
        c_pair = ("modified", "modified")  # conflict!
elif c_pair == ("new", "new"):
    # Naive classification derived from file time and size
    time_cmp = eps_compare(
        pair.local.mtime, pair.remote.mtime, FileEntry.EPS_TIME
    )
    if time_cmp == 0 and pair.local.size == pair.remote.size:
        c_pair = ("unmodified", "unmodified")  # equal
    else:
        c_pair = ("modified", "modified")  # conflict!
# elif c_pair == ("unmodified", "unmodified"):
pair.local_classification = c_pair[0]
pair.remote_classification = c_pair[1]
pair.operation = operation_map.get(c_pair)
# print("on_need_compare {} => {}".format(org_pair, pair))
if not pair.operation:
    raise RuntimeError(
        "Undefined operation for pair classification {}".format(c_pair)
    )
elif pair.operation == org_operation:
    # Re-classification must produce a *different* operation, otherwise
    # dispatching below would loop back into this handler
    raise RuntimeError("Could not re-classify {}".format(org_pair))
handler = getattr(self, "on_" + pair.operation, None)
res = handler(pair)
# self._log_action("", "different", "?", pair.local, min_level=2)
return res
|
def on_need_compare(self, pair)
|
Re-classify pair based on file attributes and options.
| 3.632404 | 3.533932 | 1.027865 |
# Resolve a conflicting pair, either via the configured policy or by
# prompting the user, then copy / restore / delete accordingly.
# Return False to prevent visiting of children.
# self._log_action("skip", "conflict", "!", pair.local, min_level=2)
# print("on_conflict", pair)
any_entry = pair.any_entry
if not self._test_match_or_print(any_entry):
    return
resolve = self._interactive_resolve(pair)
if resolve == "skip":
    self._log_action("skip", "conflict", "*?*", any_entry)
    self._inc_stat("conflict_files_skipped")
    return
if pair.local and pair.remote:
    # Both sides exist: copy in the winning direction
    assert pair.local.is_file()
    is_newer = pair.local > pair.remote
    if (
        resolve == "local"
        or (is_newer and resolve == "new")
        or (not is_newer and resolve == "old")
    ):
        self._log_action("copy", "conflict", "*>*", pair.local)
        self._copy_file(self.local, self.remote, pair.local)
    elif (
        resolve == "remote"
        or (is_newer and resolve == "old")
        or (not is_newer and resolve == "new")
    ):
        self._log_action("copy", "conflict", "*<*", pair.local)
        self._copy_file(self.remote, self.local, pair.remote)
    else:
        raise NotImplementedError
elif pair.local:
    # Only the local side exists (remote was deleted)
    assert pair.local.is_file()
    if resolve == "local":
        self._log_action("restore", "conflict", "*>x", pair.local)
        self._copy_file(self.local, self.remote, pair.local)
    elif resolve == "remote":
        self._log_action("delete", "conflict", "*<x", pair.local)
        self._remove_file(pair.local)
    else:
        raise NotImplementedError
else:
    # Only the remote side exists (local was deleted)
    assert pair.remote.is_file()
    if resolve == "local":
        self._log_action("delete", "conflict", "x>*", pair.remote)
        self._remove_file(pair.remote)
    elif resolve == "remote":
        self._log_action("restore", "conflict", "x<*", pair.remote)
        self._copy_file(self.remote, self.local, pair.remote)
    else:
        raise NotImplementedError
return
|
def on_conflict(self, pair)
|
Return False to prevent visiting of children.
| 2.339897 | 2.357968 | 0.992336 |
# Pair did not pass the match/exclude filters.
# With --delete-unmatched, remove the unmatched *remote* resource;
# otherwise just log and skip.
remote_entry = pair.remote
if self.options.get("delete_unmatched") and remote_entry:
    self._log_action("delete", "unmatched", ">", remote_entry)
    if remote_entry.is_dir():
        self._remove_dir(remote_entry)
    else:
        self._remove_file(remote_entry)
else:
    self._log_action("skip", "unmatched", "-", pair.any_entry, min_level=4)
|
def on_mismatch(self, pair)
|
Called for pairs that don't match `match` and `exclude` filters.
If --delete-unmatched is on, remove the remote resource.
| 4.328741 | 3.903232 | 1.109014 |
# Decide how to resolve a conflicting pair: return 'remote' or 'skip'
# (this variant never produces 'local' -- see the assert below),
# prompting the user when --resolve=ask is active.
if self.resolve_all:
    # A previous upper-case answer applies to all remaining conflicts
    if self.verbose >= 5:
        self._print_pair_diff(pair)
    return self.resolve_all
resolve = self.options.get("resolve", "skip")
assert resolve in ("remote", "ask", "skip")
if resolve == "ask" or self.verbose >= 5:
    self._print_pair_diff(pair)
if resolve in ("remote", "skip"):
    # Non-interactive policy: use it directly
    # self.resolve_all = resolve
    return resolve
# RED = ansi_code("Fore.LIGHTRED_EX")
M = ansi_code("Style.BRIGHT") + ansi_code("Style.UNDERLINE")
R = ansi_code("Style.RESET_ALL")
# self._print_pair_diff(pair)
self._inc_stat("interactive_ask")
while True:
    # Highlight the hot-key of every choice in the prompt
    prompt = (
        "Use "
        + M
        + "R"
        + R
        + "emote, "
        + M
        + "S"
        + R
        + "kip, "
        + M
        + "B"
        + R
        + "inary compare, "
        + M
        + "H"
        + R
        + "elp? "
    )
    r = compat.console_input(prompt).strip()
    if r in ("h", "H", "?"):
        print("The following keys are supported:")
        print(" 'b': Binary compare")
        print(" 'r': Download remote file")
        print(" 's': Skip this file (leave both targets unchanged)")
        print(
            "Hold Shift (upper case letters) to apply choice for all "
            "remaining conflicts."
        )
        print("Hit Ctrl+C to abort.")
        continue
    elif r in ("B", "b"):
        self._compare_file(pair.local, pair.remote)
        continue
    elif r in ("R", "S"):
        # Upper case: remember the choice for all remaining conflicts
        r = self._resolve_shortcuts[r.lower()]
        self.resolve_all = r
        break
    elif r in ("r", "s"):
        r = self._resolve_shortcuts[r]
        break
return r
|
def _interactive_resolve(self, pair)
|
Return 'remote' or 'skip' to use the remote resource or skip the pair.
| 4.226531 | 4.135592 | 1.021989 |
# Pair did not pass the match/exclude filters.
# With --delete-unmatched, remove the unmatched *local* resource;
# otherwise just log and skip.
local_entry = pair.local
if self.options.get("delete_unmatched") and local_entry:
    self._log_action("delete", "unmatched", "<", local_entry)
    if local_entry.is_dir():
        self._remove_dir(local_entry)
    else:
        self._remove_file(local_entry)
else:
    self._log_action("skip", "unmatched", "-", pair.any_entry, min_level=4)
|
def on_mismatch(self, pair)
|
Called for pairs that don't match `match` and `exclude` filters.
If --delete-unmatched is on, remove the local resource.
| 4.514347 | 3.959639 | 1.140091 |
global _logger
prev_logger = _logger
if logger is True:
logging.basicConfig(level=logging.INFO)
_logger = logging.getLogger("pyftpsync")
_logger.setLevel(logging.DEBUG)
else:
_logger = logger
return prev_logger
|
def set_pyftpsync_logger(logger=True)
|
Define target for common output.
Args:
logger (bool | None | logging.Logger):
Pass None to use `print()` to stdout instead of logging.
Pass True to create a simple standard logger.
| 2.328737 | 2.65346 | 0.877623 |
# Redirectable wrapper for print(): forwards to the module logger when one
# is installed, otherwise falls back to plain print().
is_debug = kwargs.pop("debug", None)
is_warning = kwargs.pop("warning", None)
if not _logger:
    print(*args, **kwargs)
else:
    # Loggers do not understand print()-style keyword arguments
    kwargs.pop("end", None)
    kwargs.pop("file", None)
    if is_debug:
        _logger.debug(*args, **kwargs)
    elif is_warning:
        _logger.warning(*args, **kwargs)
    else:
        _logger.info(*args, **kwargs)
|
def write(*args, **kwargs)
|
Redirectable wrapper for print statements.
| 2.088764 | 1.950638 | 1.070811 |
# Redirectable wrapper for error output: use the module logger when one is
# installed, otherwise print to sys.stderr.
if not _logger:
    print(*args, file=sys.stderr, **kwargs)
else:
    # Drop print()-specific keyword arguments the logger cannot handle
    kwargs.pop("end", None)
    kwargs.pop("file", None)
    _logger.error(*args, **kwargs)
|
def write_error(*args, **kwargs)
|
Redirectable wrapper for print sys.stderr statements.
| 2.987449 | 2.435468 | 1.226643 |
d = {}
for k, v in o.__dict__.items():
if not callable(v):
d[k] = v
return d
|
def namespace_to_dict(o)
|
Convert an argparse namespace object to a dictionary.
| 2.430953 | 2.357845 | 1.031006 |
res = f1 - f2
if abs(res) <= eps: # '<=',so eps == 0 works as expected
return 0
elif res < 0:
return -1
return 1
|
def eps_compare(f1, f2, eps)
|
Return 0 if |f1-f2| <= eps, -1 if f1 < f2, else +1.
| 6.926091 | 7.221646 | 0.959074 |
# Resolve a configuration value: environment variable first, then the
# [section]/opt_name entry of the .pyftpsyncrc parser, then `default`.
val = os.environ.get(env_name)
if val is None:
    try:
        val = _pyftpsyncrc_parser.get(section, opt_name)
    except (compat.configparser.NoSectionError, compat.configparser.NoOptionError):
        # Missing section or option: fall through to `default`
        pass
if val is None:
    val = default
return val
|
def get_option(env_name, section, opt_name, default=None)
|
Return a configuration setting from environment var or .pyftpsyncrc
| 3.019324 | 2.314331 | 1.304621 |
args = sys.argv[1:]
verbose = default + args.count("--verbose") - args.count("--quiet")
for arg in args:
if arg.startswith("-") and not arg.startswith("--"):
verbose += arg[1:].count("v")
verbose -= arg[1:].count("q")
return verbose
|
def check_cli_verbose(default=3)
|
Check for presence of `--verbose`/`--quiet` or `-v`/`-q` without using argparse.
| 3.282152 | 2.875954 | 1.141239 |
# Prompt for username (unless one was passed) and password on the console.
# Returns (username, password) or None; Ctrl+C raises KeyboardInterrupt.
if user is None:
    default_user = default_user or getpass.getuser()
    while user is None:
        user = compat.console_input(
            "Enter username for {} [{}]: ".format(url, default_user)
        )
        if user.strip() == "" and default_user:
            # Empty input selects the suggested default user
            user = default_user
if user:
    pw = getpass.getpass(
        "Enter password for {}@{} (Ctrl+C to abort): ".format(user, url)
    )
    # Accept empty passwords as well
    if pw or pw == "":
        return (user, pw)
return None
|
def prompt_for_password(url, user=None, default_user=None)
|
Prompt for username and password.
If a user name is passed, only prompt for a password.
Args:
url (str): hostname
user (str, optional):
Pass a valid name to skip prompting for a user name
default_user (str, optional):
Pass a valid name that is used as default when prompting
for a user name
Raises:
KeyboardInterrupt if user hits Ctrl-C
Returns:
(username, password) or None
| 2.627899 | 2.623061 | 1.001844 |
# Lookup credentials for `url` in this order: keyring, .netrc, interactive
# prompt (when allowed). Returns a 2-tuple (username, password) or None.
creds = None
verbose = int(opts.get("verbose"))
force_prompt = opts.get("prompt", False)
allow_prompt = not opts.get("no_prompt", True)
# A forced distinct user invalidates stored credentials:
allow_keyring = not opts.get("no_keyring", False) and not force_user
allow_netrc = not opts.get("no_netrc", False) and not force_user
# print("get_credentials_for_url", force_user, allow_prompt)
if force_user and not allow_prompt:
    raise RuntimeError(
        "Cannot get credentials for a distinct user ({}) from keyring or .netrc and "
        "prompting is disabled.".format(force_user)
    )
# Lookup our own pyftpsync 1.x credential store. This is deprecated with 2.x
home_path = os.path.expanduser("~")
file_path = os.path.join(home_path, DEFAULT_CREDENTIAL_STORE)
if os.path.isfile(file_path):
    raise RuntimeError(
        "Custom password files are no longer supported. Delete {} and use .netrc instead.".format(
            file_path
        )
    )
# Query keyring database
if creds is None and keyring and allow_keyring:
    try:
        # Note: we pass the url as `username` and username:password as `password`
        c = keyring.get_password("pyftpsync", url)
        if c is not None:
            creds = c.split(":", 1)
            write(
                "Using credentials from keyring('pyftpsync', '{}'): {}:***.".format(
                    url, creds[0]
                )
            )
        else:
            if verbose >= 4:
                write(
                    "No credentials found in keyring('pyftpsync', '{}').".format(
                        url
                    )
                )
    # except keyring.errors.TransientKeyringError:
    except Exception as e:
        # e.g. user clicked 'no'
        write_error("Could not get password from keyring {}".format(e))
# Query .netrc file
# print(opts)
if creds is None and allow_netrc:
    try:
        # Pre-initialize, so the `if authenticators:` check below is safe
        # even when netrc() raises
        authenticators = None
        authenticators = netrc.netrc().authenticators(url)
    except CompatFileNotFoundError:
        if verbose >= 4:
            write("Could not get password (no .netrc file).")
    except Exception as e:
        write_error("Could not read .netrc: {}.".format(e))
    if authenticators:
        creds = (authenticators[0], authenticators[2])
        write("Using credentials from .netrc file: {}:***.".format(creds[0]))
    else:
        if verbose >= 4:
            write("Could not find entry for '{}' in .netrc file.".format(url))
# Prompt for password if we don't have credentials yet, or --prompt was set.
if allow_prompt:
    if creds is None:
        creds = prompt_for_password(url)
    elif force_prompt:
        # --prompt was set but we can provide a default for the user name
        creds = prompt_for_password(url, default_user=creds[0])
return creds
|
def get_credentials_for_url(url, opts, force_user=None)
|
Lookup credentials for a given target in keyring and .netrc.
Optionally prompts for credentials if not found.
Returns:
2-tuple (username, password) or None
| 3.451052 | 3.433864 | 1.005005 |
# Store credentials in the system keyring; passing password=None deletes
# the stored entry instead. No-op (with a message) if keyring is missing.
if keyring:
    if ":" in username:
        # ':' is the separator in our stored "username:password" value
        raise RuntimeError(
            "Unable to store credentials if username contains a ':' ({}).".format(
                username
            )
        )
    try:
        # Note: we pass the url as `username` and username:password as `password`
        if password is None:
            keyring.delete_password("pyftpsync", url)
            write("Delete credentials from keyring ({})".format(url))
        else:
            keyring.set_password(
                "pyftpsync", url, "{}:{}".format(username, password)
            )
            write(
                "Store credentials in keyring ({}, {}:***).".format(url, username)
            )
    # except keyring.errors.TransientKeyringError:
    except Exception as e:
        write("Could not delete/set password {}.".format(e))
        pass  # e.g. user clicked 'no'
else:
    write("Could not store credentials (missing keyring support).")
return
|
def save_password(url, username, password)
|
Store credentials in keyring.
| 4.596746 | 4.372054 | 1.051393 |
val = str(val).lower().strip()
if val in ("1", "true", "on", "yes"):
return True
elif val in ("0", "false", "off", "no"):
return False
raise ValueError(
"Invalid value '{}'"
"(expected '1', '0', 'true', 'false', 'on', 'off', 'yes', 'no').".format(val)
)
|
def str_to_bool(val)
|
Return a boolean for '0', 'false', 'on', ...
| 2.306208 | 2.19431 | 1.050995 |
# Resolve a dotted name like "Fore.LIGHTRED_EX" on the colorama module.
# Returns '' when colorama (or the attribute) is unavailable, so output
# degrades gracefully to plain text.
try:
    obj = colorama
    for part in name.split("."):
        obj = getattr(obj, part)
    return obj
except AttributeError:
    return ""
|
def ansi_code(name)
|
Return ansi color or style codes or '' if colorama is not available.
| 3.94727 | 3.602238 | 1.095783 |
# Byte compare two streams chunk-wise, stopping at the first difference.
# Returns (equal, ofs): `ofs` is the offset of the first mismatch, or the
# total common length when the streams are equal.
bufsize = 16 * 1024
equal = True
ofs = 0
while True:
    b1 = stream_a.read(bufsize)
    b2 = stream_b.read(bufsize)
    if b1 != b2:
        equal = False
        if b1 and b2:
            # we have two different buffers: find first mismatch
            for a, b in zip(b1, b2):
                if a != b:
                    break
                ofs += 1
        break
    ofs += len(b1)
    if not b1:  # both buffers empty
        break
return (equal, ofs)
|
def byte_compare(stream_a, stream_b)
|
Byte compare two files (early out on first difference).
Returns:
(bool, int): offset of first mismatch or 0 if equal
| 2.913311 | 2.96174 | 0.983648 |
# Convert all dict keys to unicode, recursing into nested dicts.
# Python 2 only: Py2 `str` keys are byte strings that need decoding.
assert compat.PY2
res = {}
for k, v in d.items():  #
    if type(k) is str:
        k = k.decode(coding)
    if type(v) is dict:
        v = decode_dict_keys(v, coding)
    res[k] = v
return res
|
def decode_dict_keys(d, coding="utf-8")
|
Convert all keys to unicode (recursively).
| 2.625807 | 2.510577 | 1.045898 |
res = {}
for k, v in d.items(): #
k = compat.to_native(k)
if type(v) is dict:
v = make_native_dict_keys(v)
res[k] = v
return res
|
def make_native_dict_keys(d)
|
Convert all keys to native `str` type (recursively).
| 2.726465 | 2.563956 | 1.063382 |
# Factory: create an FTP(S) target for ftp:// / ftps:// URLs, or a local
# filesystem target for anything else.
# debug = extra_opts.get("debug", 1)
parts = compat.urlparse(url, allow_fragments=False)
# scheme is case-insensitive according to https://tools.ietf.org/html/rfc3986
scheme = parts.scheme.lower()
if scheme in ["ftp", "ftps"]:
    creds = parts.username, parts.password
    tls = scheme == "ftps"
    # Import lazily, so FTP support is only required for FTP targets
    from ftpsync import ftp_target

    target = ftp_target.FtpTarget(
        parts.path,
        parts.hostname,
        parts.port,
        username=creds[0],
        password=creds[1],
        tls=tls,
        timeout=None,
        extra_opts=extra_opts,
    )
else:
    target = FsTarget(url, extra_opts)
return target
|
def make_target(url, extra_opts=None)
|
Factory that creates `_Target` objects from URLs.
FTP targets must begin with the scheme ``ftp://`` or ``ftps://`` for TLS.
Note:
TLS is only supported on Python 2.7/3.2+.
Args:
url (str):
extra_opts (dict, optional): Passed to Target constructor. Default: None.
Returns:
:class:`_Target`
| 3.214273 | 3.188133 | 1.008199 |
# Determine the effective encoding setting inside constructors:
# an explicit extra_opts["encoding"] wins over `default`; the result is
# canonicalized via codecs.lookup. Returns None when nothing is set.
encoding = default
# if synchronizer and "encoding" in synchronizer.options:
#     encoding = synchronizer.options.get("encoding")
if extra_opts and "encoding" in extra_opts:
    encoding = extra_opts.get("encoding")
if encoding:
    # Normalize name (e.g. 'UTF8' => 'utf-8')
    encoding = codecs.lookup(encoding).name
# print("_get_encoding_opt", encoding)
return encoding or None
|
def _get_encoding_opt(synchronizer, extra_opts, default)
|
Helper to figure out encoding setting inside constructors.
| 3.048483 | 2.999303 | 1.016397 |
d = self.synchronizer.options if self.synchronizer else {}
d.update(self.extra_opts)
return d
|
def get_options_dict(self)
|
Return options from synchronizer (possibly overridden by own extra_opts).
| 7.537112 | 3.267929 | 2.306388 |
if self.synchronizer:
return self.extra_opts.get(key, self.synchronizer.options.get(key, default))
return self.extra_opts.get(key, default)
|
def get_option(self, key, default=None)
|
Return option from synchronizer (possibly overridden by target extra_opts).
| 3.948779 | 2.251791 | 1.753617 |
# Raise RuntimeError if writing cur_dir/name is not allowed.
# pyftpsync's own metadata and lock files are exempt, so bookkeeping
# still works on read-only targets.
assert compat.is_native(name)
if self.readonly and name not in (
    DirMetadata.META_FILE_NAME,
    DirMetadata.LOCK_FILE_NAME,
):
    raise RuntimeError("Target is read-only: {} + {} / ".format(self, name))
|
def check_write(self, name)
|
Raise exception if writing cur_dir/name is not allowed.
| 9.504508 | 8.414015 | 1.129604 |
# Return stored sync state (mtime/size/...) for `name` from the last
# synchronization of the current directory. The "peer_sync" records always
# live in the *local* side's directory metadata.
peer_target = self.peer
if self.is_local():
    info = self.cur_dir_meta.dir["peer_sync"].get(peer_target.get_id())
else:
    info = peer_target.cur_dir_meta.dir["peer_sync"].get(self.get_id())
if name is not None:
    info = info.get(name) if info else None
if info and key:
    # Drill down to a single attribute (e.g. "m" for mtime)
    info = info.get(key)
return info
|
def get_sync_info(self, name, key=None)
|
Get mtime/size when this target's current dir was last synchronized with remote.
| 3.690705 | 3.244529 | 1.137516 |
# Depth-first generator over all entries of the current directory.
# `pred(entry)` returning False skips that entry (and its children).
for entry in self.get_dir():
    if pred and pred(entry) is False:
        continue
    yield entry
    if recursive:
        if isinstance(entry, DirectoryEntry):
            # Descend, yield the children, then restore the working dir
            self.cwd(entry.name)
            for e in self.walk(pred):
                yield e
            self.cwd("..")
return
|
def walk(self, pred=None, recursive=True)
|
Iterate over all target entries recursively.
Args:
pred (function, optional):
Callback(:class:`ftpsync.resources._Resource`) should return `False` to
ignore entry. Default: `None`.
recursive (bool, optional):
Pass `False` to generate top level entries only. Default: `True`.
Yields:
:class:`ftpsync.resources._Resource`
| 4.034485 | 4.146935 | 0.972884 |
with self.open_readable(name) as fp:
res = fp.read() # StringIO or file object
# try:
# res = fp.getvalue() # StringIO returned by FtpTarget
# except AttributeError:
# res = fp.read() # file object returned by FsTarget
res = res.decode("utf-8")
return res
|
def read_text(self, name)
|
Read text string from cur_dir/name using open_readable().
| 4.869162 | 4.34556 | 1.120491 |
# Encode string `s` and store it as cur_dir/name using write_file().
self.write_file(name, io.BytesIO(compat.to_bytes(s)))
|
def write_text(self, name, s)
|
Write string data to cur_dir/name using write_file().
| 5.41339 | 4.950258 | 1.093557 |
if not self.is_local():
return self.peer.set_sync_info(name, mtime, size)
return self.cur_dir_meta.set_sync_info(name, mtime, size)
|
def set_sync_info(self, name, mtime, size)
|
Store mtime/size when this resource was last synchronized with remote.
| 3.428133 | 3.350153 | 1.023277 |
# Recursively remove the directory cur_dir/dir_name on the local
# filesystem (raises if the target is read-only).
self.check_write(dir_name)
path = normpath_url(join_url(self.cur_dir, dir_name))
# write("REMOVE %r" % path)
shutil.rmtree(path)
|
def rmdir(self, dir_name)
|
Remove cur_dir/name.
| 6.655527 | 5.799504 | 1.147603 |
# Delete cur_dir/name from the local filesystem (raises if read-only).
self.check_write(name)
os.remove(os.path.join(self.cur_dir, name))
|
def remove_file(self, name)
|
Remove cur_dir/name.
| 3.907951 | 2.995676 | 1.304531 |
# Set the modification time of cur_dir/name; `size` is accepted for API
# symmetry with other targets but unused here.
self.check_write(name)
# NOTE(review): atime is passed as -1 (presumably "don't care") -- confirm
os.utime(os.path.join(self.cur_dir, name), (-1, mtime))
|
def set_mtime(self, name, mtime, size)
|
Set modification time on file.
| 4.118561 | 3.79884 | 1.084163 |
def _lock(self, break_existing=False):
    """Write a special lock file to the target root folder.

    On failure, `self.lock_data` is set to False so _unlock() knows there
    is nothing to remove.
    NOTE(review): `break_existing` is currently unused here -- confirm.
    """
    # write("_lock")
    data = {"lock_time": time.time(), "lock_holder": None}
    try:
        assert self.cur_dir == self.root_dir
        self.write_text(DirMetadata.LOCK_FILE_NAME, json.dumps(data))
        self.lock_data = data
        self.lock_write_time = time.time()
    except Exception as e:
        errmsg = "{}".format(e)
        write_error("Could not write lock file: {}".format(errmsg))
        if errmsg.startswith("550") and self.ftp.passiveserver:
            # A 550 in passive mode may mean the server wants active mode
            try:
                self.ftp.makepasv()
            except Exception:
                write_error(
                    "The server probably requires FTP Active mode. "
                    "Try passing the --ftp-active option."
                )
        # Set to False, so we don't try to remove later
        self.lock_data = False
|
Write a special file to the target root folder.
| null | null | null |
|
def _unlock(self, closing=False):
    """Remove the lock file from the target root folder.

    Args:
        closing (bool): True when called during shutdown; allows changing
            back to the root folder first.
    """
    # write("_unlock", closing)
    try:
        if self.cur_dir != self.root_dir:
            if closing:
                write(
                    "Changing to ftp root folder to remove lock file: {}".format(
                        self.root_dir
                    )
                )
                self.cwd(self.root_dir)
            else:
                write_error(
                    "Could not remove lock file, because CWD != ftp root: {}".format(
                        self.cur_dir
                    )
                )
                return
        if self.lock_data is False:
            # The lock file was never written (see _lock): nothing to remove
            if self.get_option("verbose", 3) >= 4:
                write("Skip remove lock file (was not written).")
        else:
            # direct delete, without updating metadata or checking for target access:
            try:
                self.ftp.delete(DirMetadata.LOCK_FILE_NAME)
                # self.remove_file(DirMetadata.LOCK_FILE_NAME)
            except Exception as e:
                # I have seen '226 Closing data connection' responses here,
                # probably when a previous command threw another error.
                # However here, 2xx response should be Ok(?):
                # A 226 reply code is sent by the server before closing the
                # data connection after successfully processing the previous client command
                if e.args[0][:3] == "226":
                    write_error("Ignoring 226 response for ftp.delete() lockfile")
                else:
                    raise
        self.lock_data = None
    except Exception as e:
        write_error("Could not remove lock file: {}".format(e))
        raise
|
Remove lock file to the target root folder.
| null | null | null |
|
def _probe_lock_file(self, reported_mtime):
    """Called by get_dir: estimate the server's clock offset.

    Compares the mtime the server reports for our own lock file with the
    local time stamp we stored when writing it.
    """
    delta = reported_mtime - self.lock_data["lock_time"]
    # delta2 = reported_mtime - self.lock_write_time
    self.server_time_ofs = delta
    if self.get_option("verbose", 3) >= 4:
        write("Server time offset: {:.2f} seconds.".format(delta))
|
Called by get_dir
| null | null | null |
|
def open_readable(self, name):
    """Open cur_dir/name for reading.

    Note: we read everything into a buffer that supports .read().

    Args:
        name (str): file name, located in self.curdir
    Returns:
        file-like (must support read() method)
    """
    # print("FTP open_readable({})".format(name))
    assert compat.is_native(name)
    # Spool to memory first; overflows to a temp file beyond MAX_SPOOL_MEM
    out = SpooledTemporaryFile(max_size=self.MAX_SPOOL_MEM, mode="w+b")
    self.ftp.retrbinary(
        "RETR {}".format(name), out.write, FtpTarget.DEFAULT_BLOCKSIZE
    )
    # Rewind, so the caller can read from the start
    out.seek(0)
    return out
|
Open cur_dir/name for reading.
Note: we read everything into a buffer that supports .read().
Args:
name (str): file name, located in self.curdir
Returns:
file-like (must support read() method)
| null | null | null |
|
def write_file(self, name, fp_src, blocksize=DEFAULT_BLOCKSIZE, callback=None):
    """Write file-like `fp_src` to cur_dir/name.

    Args:
        name (str): file name, located in self.curdir
        fp_src (file-like): must support read() method
        blocksize (int, optional):
        callback (function, optional):
            Called like `func(buf)` for every written chunk
    """
    # print("FTP write_file({})".format(name), blocksize)
    assert compat.is_native(name)
    self.check_write(name)
    self.ftp.storbinary("STOR {}".format(name), fp_src, blocksize, callback)
|
Write file-like `fp_src` to cur_dir/name.
Args:
name (str): file name, located in self.curdir
fp_src (file-like): must support read() method
blocksize (int, optional):
callback (function, optional):
Called like `func(buf)` for every written chunk
| null | null | null |
|
def copy_to_file(self, name, fp_dest, callback=None):
    """Write cur_dir/name to file-like `fp_dest`.

    Args:
        name (str): file name, located in self.curdir
        fp_dest (file-like): must support write() method
        callback (function, optional):
            Called like `func(buf)` for every written chunk
    """
    assert compat.is_native(name)

    def _write_to_file(data):
        # print("_write_to_file() {} bytes.".format(len(data)))
        fp_dest.write(data)
        if callback:
            callback(data)

    self.ftp.retrbinary(
        "RETR {}".format(name), _write_to_file, FtpTarget.DEFAULT_BLOCKSIZE
    )
|
Write cur_dir/name to file-like `fp_dest`.
Args:
name (str): file name, located in self.curdir
fp_dest (file-like): must support write() method
callback (function, optional):
Called like `func(buf)` for every written chunk
| null | null | null |
|
def remove_file(self, name):
    """Remove cur_dir/name on the FTP server and drop its sync info."""
    assert compat.is_native(name)
    self.check_write(name)
    # self.cur_dir_meta.remove(name)
    self.ftp.delete(name)
    self.remove_sync_info(name)
|
Remove cur_dir/name.
| null | null | null |
|
def _ftp_pwd(self):
    """Variant of `self.ftp.pwd()` that supports encoding-fallback.

    Returns:
        Current working directory as native string.
    """
    try:
        return self.ftp.pwd()
    except UnicodeEncodeError:
        if compat.PY2 or self.ftp.encoding != "utf-8":
            raise  # should not happen, since Py2 does not try to encode
        # TODO: this is NOT THREAD-SAFE!
        prev_encoding = self.ftp.encoding
        try:
            write("ftp.pwd() failed with utf-8: trying Cp1252...", warning=True)
            # BUGFIX: actually switch the encoding before retrying; the
            # previous code retried with the unchanged utf-8 encoding,
            # so the fallback could never succeed.
            self.ftp.encoding = "cp1252"
            return self.ftp.pwd()
        finally:
            # Always restore the original encoding
            self.ftp.encoding = prev_encoding
|
Variant of `self.ftp.pwd()` that supports encoding-fallback.
Returns:
Current working directory as native string.
| null | null | null |
|
def _ftp_nlst(self, dir_name):
    """Variant of `self.ftp.nlst()` that supports encoding-fallback."""
    assert compat.is_native(dir_name)
    lines = []

    def _add_line(status, line):
        # `status` reports how the line was decoded (see _ftp_retrlines_native);
        # we only collect the text here
        lines.append(line)

    cmd = "NLST " + dir_name
    self._ftp_retrlines_native(cmd, _add_line, self.encoding)
    # print(cmd, lines)
    return lines
|
Variant of `self.ftp.nlst()` that supports encoding-fallback.
| null | null | null |
|
def _ftp_retrlines_native(self, command, callback, encoding):
    """Re-implementation of ftp.retrlines() that returns native `str` lines.

    Processes the raw binary response, so a single undecodable line can fall
    back to Cp1252 instead of failing the whole request (Py3), or be
    re-encoded (Py2).

    Args:
        command (str):
            A valid FTP command like 'NLST', 'MLSD', ...
        callback (function):
            Called for every line as callback(status, line), where status is
            0 (decoded ok), 1 (fallback encoding used), or 2 (decode failed)
            and line is the result decoded using `encoding`.
        encoding (str):
            Coding used to convert the FTP response to `str`.
            If 'utf-8', a fallback to cp1252 is accepted.
    Returns:
        None
    """
    LF = b"\n"
    # Leftover bytes after the last LF of the previous chunk. Stored in a
    # dict because the nested callback must rebind it across calls
    # (no `nonlocal` on Python 2).
    local_var = {"buffer": b""}
    fallback_enc = "cp1252" if encoding == "utf-8" else None

    def _on_read_line(line):
        # Line is a byte string
        # print("    line ", line)
        status = 2  # fault
        line_decoded = None
        try:
            line_decoded = line.decode(encoding)
            status = 0  # successfully decoded
        except UnicodeDecodeError:
            if fallback_enc:
                try:
                    line_decoded = line.decode(fallback_enc)
                    status = 1  # used fallback encoding
                except UnicodeDecodeError:
                    raise
        if compat.PY2:
            # line is a native binary `str`.
            if status == 1:
                # We used a fallback: re-encode
                callback(status, line_decoded.encode(encoding))
            else:
                callback(status, line)
        else:
            # line_decoded is a native text `str`.
            callback(status, line_decoded)

    def _on_read_chunk(chunk):
        buffer = local_var["buffer"]
        # Normalize line endings
        chunk = chunk.replace(b"\r\n", LF)
        chunk = chunk.replace(b"\r", LF)
        chunk = buffer + chunk
        try:
            # Emit every complete line; split() raising ValueError means
            # no LF is left in the chunk
            while True:
                item, chunk = chunk.split(LF, 1)
                _on_read_line(item)  # + LF)
        except ValueError:
            pass
        # Keep the unterminated remainder for the next chunk
        local_var["buffer"] = chunk

    self.ftp.retrbinary(command, _on_read_chunk)
    # BUGFIX: flush a trailing unterminated line. The leftover lives in
    # local_var["buffer"]; the previous code tested a stale function-level
    # `buffer` variable (always b""), silently dropping the last line.
    if local_var["buffer"]:
        _on_read_line(local_var["buffer"])
    return
|
A re-implementation of ftp.retrlines that returns lines as native `str`.
This is needed on Python 3, where `ftp.retrlines()` returns unicode `str`
by decoding the incoming command response using `ftp.encoding`.
This would fail for the whole request if a single line of the MLSD listing
cannot be decoded.
FtpTarget wants to fall back to Cp1252 if UTF-8 fails for a single line,
so we need to process the raw original binary input lines.
On Python 2, the response is already bytes, but we try to decode in
order to check validity and optionally re-encode from Cp1252.
Args:
command (str):
A valid FTP command like 'NLST', 'MLSD', ...
callback (function):
Called for every line with these args:
status (int): 0:ok 1:fallback used, 2:decode failed
line (str): result line decoded using `encoding`.
If `encoding` is 'utf-8', a fallback to cp1252
is accepted.
encoding (str):
Coding that is used to convert the FTP response to `str`.
Returns:
None
| null | null | null |
|
# True if both entries exist and their mtimes compare equal within the
# FileEntry epsilon (see FileEntry._eps_compare).
return (
    self.local
    and self.remote
    and FileEntry._eps_compare(self.local.mtime, self.remote.mtime) == 0
)
|
def is_same_time(self)
|
Return True if local.mtime == remote.mtime.
| 9.858768 | 5.47793 | 1.799725 |
# Force-override the operation decided by classify(); `reason` is stored
# for later logging/diagnostics.
prev_class = (self.local_classification, self.remote_classification)
prev_op = self.operation
# Overriding with the same operation would be a logic error upstream
assert operation != prev_op
assert operation in PAIR_OPERATIONS
if self.any_entry.target.synchronizer.verbose > 3:
    write(
        "override_operation({}, {}) -> {} ({})".format(
            prev_class, prev_op, operation, reason
        ),
        debug=True,
    )
self.operation = operation
self.re_class_reason = reason
|
def override_operation(self, operation, reason)
|
Re-Classify entry pair.
| 7.457001 | 6.18925 | 1.204831 |
# Classify both sides of this entry pair and derive the sync operation.
assert self.operation is None
# write("CLASSIFIY", self, peer_dir_meta)
# Note: We pass False if the entry is not listed in the metadata.
# We pass None if we don't have metadata all.
peer_entry_meta = peer_dir_meta.get(self.name, False) if peer_dir_meta else None
# write("=>", self, peer_entry_meta)
if self.local:
    self.local.classify(peer_dir_meta)
    self.local_classification = self.local.classification
elif peer_entry_meta:
    # No local entry, but the metadata knows it: it was deleted locally
    self.local_classification = "deleted"
else:
    self.local_classification = "missing"
if self.remote:
    self.remote.classify(peer_dir_meta)
    self.remote_classification = self.remote.classification
elif peer_entry_meta:
    self.remote_classification = "deleted"
else:
    self.remote_classification = "missing"
# Map the classification pair to the sync operation
c_pair = (self.local_classification, self.remote_classification)
self.operation = operation_map.get(c_pair)
if not self.operation:
    raise RuntimeError(
        "Undefined operation for pair classification {}".format(c_pair)
    )
if PRINT_CLASSIFICATIONS:
    write("classify {}".format(self))
# if not entry.meta:
#     assert self.classification in PAIR_CLASSIFICATIONS
assert self.operation in PAIR_OPERATIONS
return self.operation
|
def classify(self, peer_dir_meta)
|
Classify entry pair.
| 3.723721 | 3.608946 | 1.031803 |
assert self.classification is None
peer_entry_meta = None
if peer_dir_meta:
# Metadata is generally available, so we can detect 'new' or 'modified'
peer_entry_meta = peer_dir_meta.get(self.name, False)
if self.is_dir():
# Directories are considered 'unmodified' (would require deep traversal
# to check otherwise)
if peer_entry_meta:
self.classification = "unmodified"
else:
self.classification = "new"
elif peer_entry_meta:
# File entries can be classified as modified/unmodified
self.ps_size = peer_entry_meta.get("s")
self.ps_mtime = peer_entry_meta.get("m")
self.ps_utime = peer_entry_meta.get("u")
if (
self.size == self.ps_size
and FileEntry._eps_compare(self.mtime, self.ps_mtime) == 0
):
self.classification = "unmodified"
else:
self.classification = "modified"
else:
# A new file entry
self.classification = "new"
else:
# No metadata available:
if self.is_dir():
# Directories are considered 'unmodified' (would require deep traversal
# to check otherwise)
self.classification = "unmodified"
else:
# That's all we know, but EntryPair.classify() may adjust this
self.classification = "existing"
if PRINT_CLASSIFICATIONS:
write("classify {}".format(self))
assert self.classification in ENTRY_CLASSIFICATIONS
return self.classification
|
def classify(self, peer_dir_meta)
|
Classify this entry as 'new', 'unmodified', or 'modified'.
| 3.87553 | 3.606245 | 1.074672 |
info = self.get_sync_info()
if not info:
return None
if self.size != info["s"]:
return True
if self.mtime > info["m"]:
return True
return False
|
def was_modified_since_last_sync(self)
|
Return True if this resource was modified since last sync.
None is returned if we don't know (because of missing meta data).
| 4.49623 | 3.743382 | 1.201115 |
ut = time.time() # UTC time stamp
if self.target.server_time_ofs:
# We add the estimated time offset, so the stored 'u' time stamp matches
# better the mtime value that the server will generate for that file
ut += self.target.server_time_ofs
self.list[filename] = {"m": mtime, "s": size, "u": ut}
if self.PRETTY:
self.list[filename].update(
{"mtime_str": pretty_stamp(mtime), "uploaded_str": pretty_stamp(ut)}
)
# print("set_mtime", self.list[filename])
self.modified_list = True
|
def set_mtime(self, filename, mtime, size)
|
Store real file mtime in meta data.
This is needed on FTP targets, because FTP servers don't allow to set
file mtime, but use to the upload time instead.
We also record size and upload time, so we can detect if the file was
changed by other means and we have to discard our meta data.
| 7.444598 | 7.030345 | 1.058924 |
assert self.target.is_local()
remote_target = self.target.peer
ps = self.dir["peer_sync"].setdefault(remote_target.get_id(), {})
ut = time.time() # UTC time stamp
ps[":last_sync"] = ut # this is an invalid file name to avoid conflicts
pse = ps[filename] = {"m": mtime, "s": size, "u": ut}
if self.PRETTY:
ps[":last_sync_str"] = pretty_stamp(
ut
) # use an invalid file name to avoid conflicts
pse["mtime_str"] = pretty_stamp(mtime) if mtime else "(directory)"
pse["uploaded_str"] = pretty_stamp(ut)
self.modified_sync = True
|
def set_sync_info(self, filename, mtime, size)
|
Store mtime/size when local and remote file was last synchronized.
This is stored in the local file's folder as meta data.
The information is used to detect conflicts, i.e. if both source and
remote had been modified by other means since last synchronization.
| 7.18601 | 7.068354 | 1.016646 |
if self.list.pop(filename, None):
self.modified_list = True
if self.target.peer: # otherwise `scan` command
if self.target.is_local():
remote_target = self.target.peer
if remote_target.get_id() in self.dir["peer_sync"]:
rid = remote_target.get_id()
self.modified_sync = bool(
self.dir["peer_sync"][rid].pop(filename, None)
)
return
|
def remove(self, filename)
|
Remove any data for the given file name.
| 7.455098 | 7.394687 | 1.00817 |
assert self.path == self.target.cur_dir
try:
self.modified_list = False
self.modified_sync = False
is_valid_file = False
s = self.target.read_text(self.filename)
# print("s", s)
if self.target.synchronizer:
self.target.synchronizer._inc_stat("meta_bytes_read", len(s))
self.was_read = True # True if a file exists (even invalid)
self.dir = json.loads(s)
# import pprint
# print("dir")
# print(pprint.pformat(self.dir))
self.dir = make_native_dict_keys(self.dir)
# print(pprint.pformat(self.dir))
self.list = self.dir["mtimes"]
self.peer_sync = self.dir["peer_sync"]
is_valid_file = True
# write"DirMetadata: read(%s)" % (self.filename, ), self.dir)
# except IncompatibleMetadataVersion:
# raise # We want version errors to terminate the app
except Exception as e:
write_error("Could not read meta info {}: {!r}".format(self, e))
# If the version is incompatible, we stop, unless:
# if --migrate is set, we simply ignore this file (and probably replace it
# with a current version)
if is_valid_file and self.dir.get("_file_version", 0) != self.VERSION:
if not self.target or not self.target.get_option("migrate"):
raise IncompatibleMetadataVersion(
"Invalid meta data version: {} (expected {}).\n"
"Consider passing --migrate to discard old data.".format(
self.dir.get("_file_version"), self.VERSION
)
)
#
write(
"Migrating meta data version from {} to {} (discarding old): {}".format(
self.dir.get("_file_version"), self.VERSION, self.filename
)
)
self.list = {}
self.peer_sync = {}
return
|
def read(self)
|
Initialize self from .pyftpsync-meta.json file.
| 5.627221 | 5.431005 | 1.036129 |
# We DO write meta files even on read-only targets, but not in dry-run mode
# if self.target.readonly:
# write("DirMetadata.flush(%s): read-only; nothing to do" % self.target)
# return
assert self.path == self.target.cur_dir
if self.target.dry_run:
# write("DirMetadata.flush(%s): dry-run; nothing to do" % self.target)
pass
elif self.was_read and len(self.list) == 0 and len(self.peer_sync) == 0:
write("Remove empty meta data file: {}".format(self.target))
self.target.remove_file(self.filename)
elif not self.modified_list and not self.modified_sync:
# write("DirMetadata.flush(%s): unmodified; nothing to do" % self.target)
pass
else:
self.dir["_disclaimer"] = "Generated by https://github.com/mar10/pyftpsync"
self.dir["_time_str"] = pretty_stamp(time.time())
self.dir["_file_version"] = self.VERSION
self.dir["_version"] = __version__
self.dir["_time"] = time.mktime(time.gmtime())
# We always save utf-8 encoded.
# `ensure_ascii` would escape all bytes >127 as `\x12` or `\u1234`,
# which makes it hard to read, so we set it to false.
# `sort_keys` converts binary keys to unicode using utf-8, so we
# must make sure that we don't pass cp1225 or other encoded data.
data = self.dir
opts = {"indent": 4, "sort_keys": True, "ensure_ascii": False}
if compat.PY2:
# The `encoding` arg defaults to utf-8 on Py2 and was removed in Py3
# opts["encoding"] = "utf-8"
# Python 2 has problems with mixed keys (str/unicode)
data = decode_dict_keys(data, "utf-8")
if not self.PRETTY:
opts["indent"] = None
opts["separators"] = (",", ":")
s = json.dumps(data, **opts)
self.target.write_text(self.filename, s)
if self.target.synchronizer:
self.target.synchronizer._inc_stat("meta_bytes_written", len(s))
self.modified_list = False
self.modified_sync = False
|
def flush(self)
|
Write self to .pyftpsync-meta.json.
| 5.118718 | 4.938423 | 1.036509 |
opts = namespace_to_dict(args)
opts.update({"ftp_debug": args.verbose >= 6})
target = make_target(args.target, opts)
target.readonly = True
root_depth = target.root_dir.count("/")
start = time.time()
dir_count = 1
file_count = 0
processed_files = set()
opts = namespace_to_dict(args)
process_options(opts)
def _pred(entry):
if not match_path(entry, opts):
return False
try:
target.open()
for e in target.walk(recursive=args.recursive, pred=_pred):
is_dir = isinstance(e, DirectoryEntry)
indent = " " * (target.cur_dir.count("/") - root_depth)
if is_dir:
dir_count += 1
else:
file_count += 1
if args.list:
if is_dir:
print(indent, "[{e.name}]".format(e=e))
else:
delta = e.mtime_org - e.mtime
dt_modified = pretty_stamp(e.mtime)
if delta:
prefix = "+" if delta > 0 else ""
print(
indent,
"{e.name:<40} {dt_modified} (system: {prefix}{delta})".format(
e=e,
prefix=prefix,
delta=timedelta(seconds=delta),
dt_modified=dt_modified,
),
)
else:
print(
indent,
"{e.name:<40} {dt_modified}".format(
e=e, dt_modified=dt_modified
),
)
if (
args.remove_meta
and target.cur_dir_meta
and target.cur_dir_meta.was_read
):
fspec = target.cur_dir_meta.get_full_path()
if fspec not in processed_files:
processed_files.add(fspec)
print("DELETE {}".format(fspec))
if (
args.remove_locks
and not is_dir
and e.name == DirMetadata.LOCK_FILE_NAME
):
fspec = e.get_rel_path()
print("DELETE {}".format(fspec))
finally:
target.close()
print(
"Scanning {:,} files in {:,} directories took {:02.2f} seconds.".format(
file_count, dir_count, time.time() - start
)
)
|
def scan_handler(parser, args)
|
Implement `scan` sub-command.
| 3.253333 | 3.237754 | 1.004812 |
'''
The format specification according to the values of `align` and `width`
'''
return u"{{:{align}{width}}}".format(align=self.align, width=self.width)
|
def get_format_spec(self)
|
The format specification according to the values of `align` and `width`
| 6.591732 | 2.954604 | 2.231004 |
'''
compute and set the column width for all colls in the table
'''
# skip tables with no row
if not self.rows:
return
# determine row height
for row in self.rows:
max_row_height = max((len(cell.get_cell_lines()) for cell in row.columns)) if row.columns else 1
for cell in row.columns:
cell.height = max_row_height
# determine maximum number of columns
max_columns = max([len(row.columns) for row in self.rows])
for column_idx in range(max_columns):
# determine max_column_width
row_cell_lines = [row.get_cell_lines(column_idx) for row in self.rows]
max_column_width = max((len(line) for line in chain(*row_cell_lines)))
# set column width in all rows
for row in self.rows:
if len(row.columns) > column_idx:
row.columns[column_idx].width = max_column_width
|
def compute_column_width_and_height(self)
|
compute and set the column width for all colls in the table
| 2.817847 | 2.409241 | 1.169599 |
'''
::returns:
a rendered string representation of the given table
'''
self.compute_column_width_and_height()
return '\n'.join((row.get_text() for row in self.rows))
|
def get_text(self)
|
::returns:
a rendered string representation of the given table
| 11.393432 | 5.521908 | 2.063314 |
'''
''returns:
the lines of the cell specified by the column_idx or an empty list if the column does not exist
'''
return [] if column_idx >= len(self.columns) else self.columns[column_idx].get_cell_lines()
|
def get_cell_lines(self, column_idx)
|
''returns:
the lines of the cell specified by the column_idx or an empty list if the column does not exist
| 5.66795 | 2.716547 | 2.086454 |
'''
::returns:
a rendered string representation of the given row
'''
row_lines = []
for line in zip_longest(*[column.get_cell_lines() for column in self.columns], fillvalue=' '):
row_lines.append(' '.join(line))
return '\n'.join(row_lines)
|
def get_text(self)
|
::returns:
a rendered string representation of the given row
| 5.967585 | 3.619001 | 1.648959 |
'''
::param: html_content
::returns:
a text representation of the html content.
'''
html_content = html_content.strip()
if not html_content:
return ""
# strip XML declaration, if necessary
if html_content.startswith('<?xml '):
html_content = RE_STRIP_XML_DECLARATION.sub('', html_content, count=1)
html_tree = fromstring(html_content)
parser = Inscriptis(html_tree, display_images=display_images, deduplicate_captions=deduplicate_captions, display_links=display_links)
return parser.get_text()
|
def get_text(html_content, display_images=False, deduplicate_captions=False, display_links=False)
|
::param: html_content
::returns:
a text representation of the html content.
| 3.348003 | 2.754209 | 1.215595 |
parser = argparse.ArgumentParser(description='Converts HTML from file or url to a clean text version')
parser.add_argument('input', nargs='?', default=None, help='Html input either from a file or an url (default:stdin)')
parser.add_argument('-o', '--output', type=str, help='Output file (default:stdout).')
parser.add_argument('-e', '--encoding', type=str, help='Content encoding for files (default:utf-8)', default='utf-8')
parser.add_argument('-i', '--display-image-captions', action='store_true', default=False, help='Display image captions (default:false).')
parser.add_argument('-l', '--display-link-targets', action='store_true', default=False, help='Display link targets (default:false).')
parser.add_argument('-d', '--deduplicate-image-captions', action='store_true', default=False, help='Deduplicate image captions (default:false).')
return parser
|
def get_parser()
|
Parses the arguments if script is run directly via console
| 2.482992 | 2.453495 | 1.012023 |
'''
Writes the current line to the buffer, provided that there is any
data to write.
::returns:
True, if a line has been writer, otherwise False
'''
# only break the line if there is any relevant content
if not force and (not self.current_line[-1].content or self.current_line[-1].content.isspace()):
self.current_line[-1].margin_before = max(self.current_line[-1].margin_before,
self.current_tag[-1].margin_before)
return False
line = self.current_line[-1].get_text()
self.clean_text_lines[-1].append(line)
self.current_line[-1] = self.next_line[-1]
self.next_line[-1] = Line()
return True
|
def write_line(self, force=False)
|
Writes the current line to the buffer, provided that there is any
data to write.
::returns:
True, if a line has been writer, otherwise False
| 4.701275 | 3.137564 | 1.498384 |
'''
::return: \
a clone of the current HtmlElement
'''
return HtmlElement(self.tag, self.prefix, self.suffix, self.display,
self.margin_before, self.margin_after, self.padding,
self.whitespace)
|
def clone(self)
|
::return: \
a clone of the current HtmlElement
| 8.651446 | 5.25836 | 1.645275 |
'''
::param: style_directive \
The attribute value of the given style sheet.
Example: display: none
::param: html_element: \
The HtmlElement to which the given style is applied
::returns:
A HtmlElement that merges the given element with
the style attributes specified.
'''
custome_html_element = html_element.clone()
for style_directive in style_attribute.lower().split(';'):
if ':' not in style_directive:
continue
key, value = (s.strip() for s in style_directive.split(':', 1))
try:
apply_style = getattr(CssParse, "_attr_" +
key.replace('-webkit-', '')
.replace("-", "_"))
apply_style(value, custome_html_element)
except AttributeError:
pass
return custome_html_element
|
def get_style_attribute(style_attribute, html_element)
|
::param: style_directive \
The attribute value of the given style sheet.
Example: display: none
::param: html_element: \
The HtmlElement to which the given style is applied
::returns:
A HtmlElement that merges the given element with
the style attributes specified.
| 5.668501 | 2.62621 | 2.158434 |
'''
::param: length \
the length specified in the CSS.
::return:
the length in em's.
'''
m = CssParse.RE_UNIT.search(length)
value = float(m.group(1))
unit = m.group(2)
if unit not in ('em', 'qem', 'rem'):
return int(round(value/8))
else:
return int(round(value))
|
def _get_em(length)
|
::param: length \
the length specified in the CSS.
::return:
the length in em's.
| 7.16722 | 3.805842 | 1.883215 |
'''
Set the display value
'''
if value == 'block':
html_element.display = Display.block
elif value == 'none':
html_element.display = Display.none
else:
html_element.display = Display.inline
|
def _attr_display(value, html_element)
|
Set the display value
| 3.085904 | 2.712909 | 1.137489 |
self.set_xlim(np.min(x), np.max(x))
self.set_ylim(np.min(y), np.max(y))
scatter = ScatterDensityArtist(self, x, y, dpi=dpi, downres_factor=downres_factor,
color=color, cmap=cmap,
alpha=alpha, norm=norm, **kwargs)
self.add_artist(scatter)
return scatter
|
def scatter_density(self, x, y, dpi=72, downres_factor=4, color=None, cmap=None,
alpha=1.0, norm=None, **kwargs)
|
Make a density plot of the (x, y) scatter data.
Parameters
----------
x, y : iterable
The data to plot
dpi : int or `None`
The number of dots per inch to include in the density map. To use
the native resolution of the drawing device, set this to None.
downres_factor : int
For interactive devices, when panning, the density map will
automatically be made at a lower resolution and including only a
subset of the points. The new dpi of the figure when panning will
then be dpi / downres_factor, and the number of elements in the
arrays will be reduced by downres_factor**2.
cmap : `matplotlib.colors.Colormap`
The colormap to use for the density map.
color : str or tuple
The color to use for the density map. This can be any valid
Matplotlib color. If specified, this takes precedence over the
colormap.
alpha : float
Transparency of the density map
norm : `matplotlib.colors.Normalize`
The normalization class for the density map.
| 1.874303 | 2.238967 | 0.837129 |
if not os.path.exists(file_path):
write_file(file_path, default_content)
handler = open(file_path, 'r')
content = handler.read()
handler.close()
return content or default_content
|
def read_file(file_path, default_content='')
|
Read file at the specified path.
If file doesn't exist, it will be created with default-content.
Returns the file content.
| 2.208074 | 2.433754 | 0.907271 |
handler = open(file_path, 'w+')
handler.write(content)
handler.close()
|
def write_file(file_path, content)
|
Write file at the specified path with content.
If file exists, it will be overwritten.
| 3.222637 | 3.501075 | 0.920471 |
# If maintenance mode is defined in settings, it can't be changed.
if settings.MAINTENANCE_MODE is not None:
raise ImproperlyConfigured(
'Maintenance mode cannot be set dynamically '
'if defined in settings.')
if not isinstance(value, bool):
raise TypeError('value argument type is not boolean')
backend = get_maintenance_mode_backend()
backend.set_value(value)
|
def set_maintenance_mode(value)
|
Set maintenance_mode state to state file.
| 4.647648 | 4.550117 | 1.021435 |
if settings.MAINTENANCE_MODE_REDIRECT_URL:
return redirect(settings.MAINTENANCE_MODE_REDIRECT_URL)
context = {}
if settings.MAINTENANCE_MODE_GET_TEMPLATE_CONTEXT:
try:
get_request_context_func = import_string(
settings.MAINTENANCE_MODE_GET_TEMPLATE_CONTEXT)
except ImportError:
raise ImproperlyConfigured(
'settings.MAINTENANCE_MODE_GET_TEMPLATE_CONTEXT '
'is not a valid function path.'
)
context = get_request_context_func(request=request)
if django.VERSION < (1, 8):
kwargs = {'context_instance': RequestContext(request, context)}
else:
kwargs = {'context': context}
response = render(request, settings.MAINTENANCE_MODE_TEMPLATE,
status=settings.MAINTENANCE_MODE_STATUS_CODE,
**kwargs)
response['Retry-After'] = settings.MAINTENANCE_MODE_RETRY_AFTER
add_never_cache_headers(response)
return response
|
def get_maintenance_response(request)
|
Return a '503 Service Unavailable' maintenance response.
| 2.192456 | 2.143042 | 1.023058 |
try:
view_match = resolve(request.path)
view_func = view_match[0]
view_dict = view_func.__dict__
view_force_maintenance_mode_off = view_dict.get(
'force_maintenance_mode_off', False)
if view_force_maintenance_mode_off:
# view has 'force_maintenance_mode_off' decorator
return False
view_force_maintenance_mode_on = view_dict.get(
'force_maintenance_mode_on', False)
if view_force_maintenance_mode_on:
# view has 'force_maintenance_mode_on' decorator
return True
except Resolver404:
pass
if not get_maintenance_mode():
return False
try:
url_off = reverse('maintenance_mode_off')
resolve(url_off)
if url_off == request.path_info:
return False
except NoReverseMatch:
# maintenance_mode.urls not added
pass
if hasattr(request, 'user'):
if django.VERSION < (1, 10):
if settings.MAINTENANCE_MODE_IGNORE_ANONYMOUS_USER \
and request.user.is_anonymous():
return False
if settings.MAINTENANCE_MODE_IGNORE_AUTHENTICATED_USER \
and request.user.is_authenticated():
return False
else:
if settings.MAINTENANCE_MODE_IGNORE_ANONYMOUS_USER \
and request.user.is_anonymous:
return False
if settings.MAINTENANCE_MODE_IGNORE_AUTHENTICATED_USER \
and request.user.is_authenticated:
return False
if settings.MAINTENANCE_MODE_IGNORE_STAFF \
and request.user.is_staff:
return False
if settings.MAINTENANCE_MODE_IGNORE_SUPERUSER \
and request.user.is_superuser:
return False
if settings.MAINTENANCE_MODE_IGNORE_ADMIN_SITE:
try:
request_path = request.path if request.path else ''
if not request_path.endswith('/'):
request_path += '/'
admin_url = reverse('admin:index')
if request_path.startswith(admin_url):
return False
except NoReverseMatch:
# admin.urls not added
pass
if settings.MAINTENANCE_MODE_IGNORE_TESTS:
is_testing = False
if (len(sys.argv) > 0 and 'runtests' in sys.argv[0]) \
or (len(sys.argv) > 1 and sys.argv[1] == 'test'):
# python runtests.py | python manage.py test | python
# setup.py test | django-admin.py test
is_testing = True
if is_testing:
return False
if settings.MAINTENANCE_MODE_IGNORE_IP_ADDRESSES:
if settings.MAINTENANCE_MODE_GET_CLIENT_IP_ADDRESS:
try:
get_client_ip_address_func = import_string(
settings.MAINTENANCE_MODE_GET_CLIENT_IP_ADDRESS)
except ImportError:
raise ImproperlyConfigured(
'settings.MAINTENANCE_MODE_GET_CLIENT_IP_ADDRESS '
'is not a valid function path.')
else:
client_ip_address = get_client_ip_address_func(request)
else:
client_ip_address = get_client_ip_address(request)
for ip_address in settings.MAINTENANCE_MODE_IGNORE_IP_ADDRESSES:
ip_address_re = re.compile(ip_address)
if ip_address_re.match(client_ip_address):
return False
if settings.MAINTENANCE_MODE_IGNORE_URLS:
for url in settings.MAINTENANCE_MODE_IGNORE_URLS:
if not isinstance(url, pattern_class):
url = str(url)
url_re = re.compile(url)
if url_re.match(request.path_info):
return False
if settings.MAINTENANCE_MODE_REDIRECT_URL:
redirect_url_re = re.compile(
settings.MAINTENANCE_MODE_REDIRECT_URL)
if redirect_url_re.match(request.path_info):
return False
return True
|
def need_maintenance_response(request)
|
Tells if the given request needs a maintenance response or not.
| 1.932573 | 1.928709 | 1.002003 |
'Validate the username/password data against ldap directory'
ldap_mgr = current_app.ldap_login_manager
username = self.username.data
password = self.password.data
try:
userdata = ldap_mgr.ldap_login(username, password)
except ldap.INVALID_CREDENTIALS:
flash("Invalid LDAP credentials", 'danger')
return False
except ldap.LDAPError as err:
if isinstance(err.message, dict):
message = err.message.get('desc', str(err))
else:
message = str(err.message)
flash(message, 'danger')
return False
if userdata is None:
flash("Invalid LDAP credentials", 'danger')
return False
self.user = ldap_mgr._save_user(username, userdata)
return True
|
def validate_ldap(self)
|
Validate the username/password data against ldap directory
| 3.013541 | 2.681871 | 1.123671 |
valid = Form.validate(self, *args, **kwargs)
if not valid: return valid
return self.validate_ldap()
|
def validate(self, *args, **kwargs)
|
Validates the form by calling `validate` on each field, passing any
extra `Form.validate_<fieldname>` validators to the field validator.
also calls `validate_ldap`
| 6.298809 | 3.740377 | 1.684004 |
if isinstance(value, (list, tuple)) and len(value) == 1:
return value[0]
return value
|
def scalar(value)
|
Take return a value[0] if `value` is a list of length 1
| 2.721572 | 2.201975 | 1.235969 |
'''
Configures an application. This registers an `after_request` call, and
attaches this `LoginManager` to it as `app.login_manager`.
'''
self._config = app.config.get('LDAP', {})
app.ldap_login_manager = self
self.config.setdefault('BIND_DN', '')
self.config.setdefault('BIND_AUTH', '')
self.config.setdefault('URI', 'ldap://127.0.0.1')
self.config.setdefault('OPTIONS', {})
# Referrals are disabled by default
self.config['OPTIONS'].setdefault(ldap.OPT_REFERRALS, ldap.OPT_OFF)
if self.config.get('USER_SEARCH') and not isinstance(self.config['USER_SEARCH'], list):
self.config['USER_SEARCH'] = [self.config['USER_SEARCH']]
|
def init_app(self, app)
|
Configures an application. This registers an `after_request` call, and
attaches this `LoginManager` to it as `app.login_manager`.
| 3.526235 | 2.53907 | 1.38879 |
if not results:
return None
userdn = results[0][0]
userobj = results[0][1]
userobj['dn'] = userdn
keymap = self.config.get('KEY_MAP')
if keymap:
return {key:scalar(userobj.get(value)) for key, value in keymap.items() if _is_utf8(scalar(userobj.get(value))) }
else:
return {key:scalar(value) for key, value in userobj.items() if _is_utf8(scalar(value)) }
|
def format_results(self, results)
|
Format the ldap results object into somthing that is reasonable
| 3.988055 | 3.577126 | 1.114877 |
'Transform the KEY_MAP paramiter into an attrlist for ldap filters'
keymap = self.config.get('KEY_MAP')
if keymap:
# https://github.com/ContinuumIO/flask-ldap-login/issues/11
# https://continuumsupport.zendesk.com/agent/tickets/393
return [s.encode('utf-8') for s in keymap.values()]
else:
return None
|
def attrlist(self)
|
Transform the KEY_MAP paramiter into an attrlist for ldap filters
| 8.918612 | 4.612531 | 1.933562 |
log.debug("Performing bind/search")
ctx = {'username':username, 'password':password}
user = self.config['BIND_DN'] % ctx
bind_auth = self.config['BIND_AUTH']
try:
log.debug("Binding with the BIND_DN %s" % user)
self.conn.simple_bind_s(user, bind_auth)
except ldap.INVALID_CREDENTIALS:
msg = "Could not connect bind with the BIND_DN=%s" % user
log.debug(msg)
if self._raise_errors:
raise ldap.INVALID_CREDENTIALS(msg)
return None
user_search = self.config.get('USER_SEARCH')
results = None
found_user = False
for search in user_search:
base = search['base']
filt = search['filter'] % ctx
scope = search.get('scope', ldap.SCOPE_SUBTREE)
log.debug("Search for base=%s filter=%s" % (base, filt))
results = self.conn.search_s(base, scope, filt, attrlist=self.attrlist)
if results:
found_user = True
log.debug("User with DN=%s found" % results[0][0])
try:
self.conn.simple_bind_s(results[0][0], password)
except ldap.INVALID_CREDENTIALS:
self.conn.simple_bind_s(user, bind_auth)
log.debug("Username/password mismatch, continue search...")
results = None
continue
else:
log.debug("Username/password OK")
break
if not results and self._raise_errors:
msg = "No users found matching search criteria: {}".format(user_search)
if found_user:
msg = "Username/password mismatch"
raise ldap.INVALID_CREDENTIALS(msg)
log.debug("Unbind")
self.conn.unbind_s()
return self.format_results(results)
|
def bind_search(self, username, password)
|
Bind to BIND_DN/BIND_AUTH then search for user to perform lookup.
| 2.691476 | 2.603657 | 1.033729 |
log.debug("Performing direct bind")
ctx = {'username':username, 'password':password}
scope = self.config.get('SCOPE', ldap.SCOPE_SUBTREE)
user = self.config['BIND_DN'] % ctx
try:
log.debug("Binding with the BIND_DN %s" % user)
self.conn.simple_bind_s(user, password)
except ldap.INVALID_CREDENTIALS:
if self._raise_errors:
raise ldap.INVALID_CREDENTIALS("Unable to do a direct bind with BIND_DN %s" % user)
return None
results = self.conn.search_s(user, scope, attrlist=self.attrlist)
self.conn.unbind_s()
return self.format_results(results)
|
def direct_bind(self, username, password)
|
Bind to username/password directly
| 3.30759 | 3.355417 | 0.985746 |
'initialize ldap connection and set options'
log.debug("Connecting to ldap server %s" % self.config['URI'])
self.conn = ldap.initialize(self.config['URI'])
# There are some settings that can't be changed at runtime without a context restart.
# It's possible to refresh the context and apply the settings by setting OPT_X_TLS_NEWCTX
# to 0, but this needs to be the last option set, and since the config dictionary is not
# sorted, this is not necessarily true. Sort the list of options so that if OPT_X_TLS_NEWCTX
# is present, it is applied last.
options = self.config.get('OPTIONS', {}).items()
options.sort(key=lambda x: x[0] == 'OPT_X_TLS_NEWCTX')
for opt, value in options:
if isinstance(opt, str):
opt = getattr(ldap, opt)
try:
if isinstance(value, str):
value = getattr(ldap, value)
except AttributeError:
pass
self.conn.set_option(opt, value)
if self.config.get('START_TLS'):
log.debug("Starting TLS")
self.conn.start_tls_s()
|
def connect(self)
|
initialize ldap connection and set options
| 4.059239 | 3.823936 | 1.061534 |
self.connect()
if self.config.get('USER_SEARCH'):
result = self.bind_search(username, password)
else:
result = self.direct_bind(username, password)
return result
|
def ldap_login(self, username, password)
|
Authenticate a user using ldap. This will return a userdata dict
if successfull.
ldap_login will return None if the user does not exist or if the credentials are invalid
| 4.51449 | 5.176373 | 0.872134 |
def sim(self, args):
if not self._started:
raise ApplicationNotStarted("BACnet stack not running - use startApp()")
# with self.this_application._lock: if use lock...won't be able to call read...
args = args.split()
addr, obj_type, obj_inst, prop_id, value = args[:5]
if self.read("{} {} {} outOfService".format(addr, obj_type, obj_inst)):
self.write(
"{} {} {} {} {}".format(addr, obj_type, obj_inst, prop_id, value)
)
else:
try:
self.write(
"{} {} {} outOfService True".format(addr, obj_type, obj_inst)
)
except NoResponseFromController:
pass
try:
if self.read("{} {} {} outOfService".format(addr, obj_type, obj_inst)):
self.write(
"{} {} {} {} {}".format(
addr, obj_type, obj_inst, prop_id, value
)
)
else:
raise OutOfServiceNotSet()
except NoResponseFromController:
pass
|
Simulate I/O points by setting the Out_Of_Service property, then doing a
WriteProperty to the point's Present_Value.
:param args: String with <addr> <type> <inst> <prop> <value> [ <indx> ] [ <priority> ]
| null | null | null |
|
def out_of_service(self, args):
if not self._started:
raise ApplicationNotStarted("BACnet stack not running - use startApp()")
# with self.this_application._lock: if use lock...won't be able to call read...
args = args.split()
addr, obj_type, obj_inst = args[:3]
try:
self.write("{} {} {} outOfService True".format(addr, obj_type, obj_inst))
except NoResponseFromController:
pass
|
Set the Out_Of_Service property so the Present_Value of an I/O may be written.
:param args: String with <addr> <type> <inst> <prop> <value> [ <indx> ] [ <priority> ]
| null | null | null |
|
def release(self, args):
if not self._started:
raise ApplicationNotStarted("BACnet stack not running - use startApp()")
args = args.split()
addr, obj_type, obj_inst = args[:3]
try:
self.write("{} {} {} outOfService False".format(addr, obj_type, obj_inst))
except NoResponseFromController:
pass
try:
if self.read("{} {} {} outOfService".format(addr, obj_type, obj_inst)):
raise OutOfServiceSet()
else:
pass # Everything is ok"
except NoResponseFromController:
pass
|
Set the Out_Of_Service property to False - to release the I/O point back to
the controller's control.
:param args: String with <addr> <type> <inst>
| null | null | null |
|
return "{}/{}".format(
self.interface.ip.compressed, self.interface.exploded.split("/")[-1]
)
|
def ip_address_subnet(self)
|
IP Address/subnet
| 9.972961 | 8.644051 | 1.153737 |
port = ""
if self._port:
port = ":{}".format(self._port)
return Address(
"{}/{}{}".format(
self.interface.ip.compressed,
self.interface.exploded.split("/")[-1],
port,
)
)
|
def address(self)
|
IP Address using bacpypes Address format
| 6.124613 | 5.917717 | 1.034962 |
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.connect(("google.com", 0))
addr = s.getsockname()[0]
# print('Using ip : {addr}'.format(addr=addr))
s.close()
except socket.error:
raise NetworkInterfaceException(
"Impossible to retrieve IP, please provide one manually"
)
return addr
|
def _findIPAddr(self)
|
Retrieve the IP address of the interface connected to the internet; used as
a default IP address when defining a Script.
:returns: IP Address as String
| 3.605309 | 3.849922 | 0.936463 |
ip = ip
if "win32" in sys.platform:
try:
proc = subprocess.Popen("ipconfig", stdout=subprocess.PIPE)
while True:
line = proc.stdout.readline()
if ip.encode() in line:
break
mask = (
proc.stdout.readline()
.rstrip()
.split(b":")[-1]
.replace(b" ", b"")
.decode()
)
except:
raise NetworkInterfaceException("Cannot read IP parameters from OS")
else:
pattern = re.compile(r"(255.\d{1,3}.\d{1,3}.\d{1,3})")
try:
proc = subprocess.Popen("ifconfig", stdout=subprocess.PIPE)
while True:
line = proc.stdout.readline()
if ip.encode() in line:
break
mask = re.findall(pattern, line.decode())[0]
except:
mask = "255.255.255.255"
# self._log.debug('Mask found : %s' % mask)
return mask
|
def _findSubnetMask(self, ip)
|
Retrieve the subnet mask associated with the given IP address, used to build
the default broadcast address when defining a Script. The mask is read from
ipconfig (Windows) or ifconfig (Linux or macOS) output.
:param ip: (str) IP address whose subnet mask should be found.
:returns: subnet mask as String
| 2.781819 | 2.826221 | 0.984289 |
with contextlib.closing(sqlite3.connect("{}.db".format(db_name))) as con:
return sql.read_sql(sql=request, con=con)
|
def _read_from_sql(self, request, db_name)
|
Using the contextlib, I hope to close the connection to database when
not in use
| 3.902027 | 3.196124 | 1.220862 |
pprops = {}
for each in self.points:
p = each.properties.asdict.copy()
p.pop("device", None)
p.pop("network", None)
p.pop("simulated", None)
p.pop("overridden", None)
pprops[each.properties.name] = p
df = pd.DataFrame(pprops)
return df
|
def points_properties_df(self)
|
Return a dictionary of point/point_properties in preparation for storage in SQL.
| 3.956575 | 3.712181 | 1.065836 |
backup = {}
for point in self.points:
if point.history.dtypes == object:
backup[point.properties.name] = (
point.history.replace(["inactive", "active"], [0, 1])
.resample("1s")
.mean()
)
else:
backup[point.properties.name] = point.history.resample("1s").mean()
df = pd.DataFrame(dict([(k, pd.Series(v)) for k, v in backup.items()]))
return df.fillna(method="ffill")
|
def backup_histories_df(self)
|
Build a dataframe of the point histories
| 3.337381 | 2.958607 | 1.128025 |
# Persist point histories to a sqlite3 database and the device/point
# properties to a pickle file, so the device can be reloaded offline.
if filename:
    if ".db" in filename:
        # Keep only the part before the first dot as the base name.
        filename = filename.split(".")[0]
    self.properties.db_name = filename
else:
    self.properties.db_name = "{}".format(self.properties.name)
# Does file exist? If so, append data
if os.path.isfile("{}.db".format(self.properties.db_name)):
    his = self._read_from_sql(
        'select * from "{}"'.format("history"), self.properties.db_name
    )
    his.index = his["index"].apply(Timestamp)
    try:
        # Only back up rows newer than the last stored timestamp.
        last = his.index[-1]
        df_to_backup = self.backup_histories_df()[last:]
    except IndexError:
        # Existing history table is empty: back up everything.
        df_to_backup = self.backup_histories_df()
else:
    self._log.debug("Creating a new backup database")
    df_to_backup = self.backup_histories_df()
# DataFrames that will be saved to SQL
with contextlib.closing(
    sqlite3.connect("{}.db".format(self.properties.db_name))
) as con:
    sql.to_sql(
        df_to_backup,
        name="history",
        con=con,
        index_label="index",
        index=True,
        if_exists="append",
    )
# Saving other properties to a pickle file...
# NOTE(review): pickle files are only safe to reload from trusted sources.
prop_backup = {}
prop_backup["device"] = self.dev_properties_df()
prop_backup["points"] = self.points_properties_df()
with open("{}.bin".format(self.properties.db_name), "wb") as file:
    pickle.dump(prop_backup, file)
self._log.info("Device saved to {}.db".format(self.properties.db_name))
|
def save(self, filename=None)
|
Save the point histories to sqlite3 database.
Save the device object properties to a pickle file so the device can be reloaded.
| 3.275593 | 2.986445 | 1.09682 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.