code (string) | signature (string) | docstring (string) | loss_without_docstring (float64) | loss_with_docstring (float64) | factor (float64) |
---|---|---|---|---|---|
if hasattr(magic, 'open'):
magic_wrapper = magic._libraries['magic']
elif hasattr(magic, 'from_buffer'):
magic_wrapper = magic.libmagic
else:
raise Exception('Unknown magic API')
if not hasattr(magic_wrapper, 'magic_version'):
# The magic_version function was introduced in libmagic 5.13;
# if it's not present we can't tell, so let's assume False
return False
return magic_wrapper.magic_version >= version | def libmagic_version_at_least(version) | checks if the installed libmagic library is at least as recent as a given
version.
:param version: minimum version expected in the form XYY (i.e. 5.14 -> 514)
with XYY >= 513 | 5.247622 | 5.695603 | 0.921346 |
with open(path, 'rb') as f:
content = f.read()
if not ctype:
ctype = guess_mimetype(content)
# libmagic < 5.12 incorrectly detects excel/powerpoint files as
# 'application/msword' (see #179 and #186 in libmagic bugtracker)
# This is a workaround, based on file extension, useful as long
# as distributions still ship libmagic 5.11.
if (ctype == 'application/msword' and
not libmagic_version_at_least(513)):
mimetype, _ = mimetypes.guess_type(path)
if mimetype:
ctype = mimetype
maintype, subtype = ctype.split('/', 1)
if maintype == 'text':
part = MIMEText(content.decode(guess_encoding(content), 'replace'),
_subtype=subtype,
_charset='utf-8')
elif maintype == 'image':
part = MIMEImage(content, _subtype=subtype)
elif maintype == 'audio':
part = MIMEAudio(content, _subtype=subtype)
else:
part = MIMEBase(maintype, subtype)
part.set_payload(content)
# Encode the payload using Base64
email.encoders.encode_base64(part)
# Set the filename parameter
if not filename:
filename = os.path.basename(path)
part.add_header('Content-Disposition', 'attachment',
filename=filename)
return part | def mimewrap(path, filename=None, ctype=None) | Take the contents of the given path and wrap them into an email MIME
part according to the content type. The content type is auto detected from
the actual file contents and the file name if it is not given.
:param path: the path to the file contents
:type path: str
:param filename: the file name to use in the generated MIME part
:type filename: str or None
:param ctype: the content type of the file contents in path
:type ctype: str or None
:returns: the message MIME part storing the data from path
:rtype: subclasses of email.mime.base.MIMEBase | 2.822806 | 2.851906 | 0.989796 |
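A hypothetical usage sketch of `mimewrap`; the file paths and the import location are assumptions, not given by the row above:

```python
# Hypothetical usage; module path and file names are assumptions.
from alot.helper import mimewrap

part = mimewrap('/tmp/report.pdf')   # content type guessed from the file contents
print(part.get_content_type())       # e.g. 'application/pdf'
print(part.get_filename())           # 'report.pdf'

# explicit filename and content type, skipping auto-detection
part = mimewrap('/tmp/notes', filename='notes.txt', ctype='text/plain')
```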
for factor, format_string in ((1, '%i'),
(1024, '%iKiB'),
(1024 * 1024, '%.1fMiB')):
if size / factor < 1024:
return format_string % (size / factor)
return format_string % (size / factor) | def humanize_size(size) | Create a nice human readable representation of the given number
(understood as bytes) using the "KiB" and "MiB" suffixes to indicate
kibibytes and mebibytes. A kibibyte is defined as 1024 bytes (as opposed to
a kilobyte which is 1000 bytes) and a mibibyte is 1024**2 bytes (as opposed
to a megabyte which is 1000**2 bytes).
:param size: the number to convert
:type size: int
:returns: the human readable representation of size
:rtype: str | 3.279661 | 3.450397 | 0.950517 |
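A small worked example of the rounding behaviour described above (a sketch; the import path is an assumption):

```python
# Hypothetical usage; import path assumed.
from alot.helper import humanize_size

print(humanize_size(512))             # '512'       -- plain byte count
print(humanize_size(20 * 1024))       # '20KiB'
print(humanize_size(3 * 1024 ** 2))   # '3.0MiB'
print(humanize_size(5 * 1024 ** 3))   # '5120.0MiB' -- the final return keeps MiB
```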
nt_list = tmplate.split('%s')
template_prefix = ''
template_suffix = ''
if len(nt_list) == 2:
template_suffix = nt_list[1]
template_prefix = nt_list[0]
else:
template_suffix = tmplate
return (template_prefix, template_suffix) | def parse_mailcap_nametemplate(tmplate='%s') | this returns a prefix and suffix to be used
in the tempfile module for a given mailcap nametemplate string | 2.488853 | 2.280304 | 1.091457 |
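A minimal sketch of what the split on `'%s'` yields (import path assumed):

```python
# Hypothetical usage; import path assumed.
from alot.helper import parse_mailcap_nametemplate

print(parse_mailcap_nametemplate('alot.%s.html'))   # ('alot.', '.html')
print(parse_mailcap_nametemplate('%s.pdf'))         # ('', '.pdf')
print(parse_mailcap_nametemplate('noplaceholder'))  # ('', 'noplaceholder')
```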
if mailto_str.startswith('mailto:'):
import urllib.parse
to_str, parms_str = mailto_str[7:].partition('?')[::2]
headers = {}
body = u''
to = urllib.parse.unquote(to_str)
if to:
headers['To'] = [to]
for s in parms_str.split('&'):
key, value = s.partition('=')[::2]
key = key.capitalize()
if key == 'Body':
body = urllib.parse.unquote(value)
elif value:
headers[key] = [urllib.parse.unquote(value)]
return (headers, body)
else:
return (None, None) | def parse_mailto(mailto_str) | Interpret mailto-string
:param mailto_str: the string to interpret. Must conform to :rfc:2368.
:type mailto_str: str
:return: the header fields and the body found in the mailto link as a tuple
of length two
:rtype: tuple(dict(str->list(str)), str) | 2.466221 | 2.377586 | 1.03728 |
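A hypothetical example of the header/body split; the address, parameters, and import path are made up:

```python
# Hypothetical usage; address and import path are made up.
from alot.helper import parse_mailto

headers, body = parse_mailto(
    'mailto:alice%40example.com?subject=Hello%20world&body=How%20are%20you%3F')
print(headers)  # {'To': ['alice@example.com'], 'Subject': ['Hello world']}
print(body)     # 'How are you?'
```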
from alot.db.envelope import Envelope
headers, body = parse_mailto(mailto_str)
return Envelope(bodytext=body, headers=headers) | def mailto_to_envelope(mailto_str) | Interpret mailto-string into a :class:`alot.db.envelope.Envelope` | 8.312203 | 3.760185 | 2.210584 |
text = re.sub("\r?\n", "\r\n", text)
text = re.sub("^From ", "From=20", text, flags=re.MULTILINE)
return text | def RFC3156_canonicalize(text) | Canonicalizes plain text (MIME-encoded usually) according to RFC3156.
This function works as follows (in that order):
1. Convert all line endings to \\\\r\\\\n (DOS line endings).
2. Encode all occurrences of "From " at the beginning of a line
to "From=20" in order to prevent other mail programs to replace
this with "> From" (to avoid MBox conflicts) and thus invalidate
the signature.
:param text: text to canonicalize (already encoded as quoted-printable)
:rtype: str | 4.709352 | 3.315403 | 1.420447 |
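A short sketch showing both canonicalization steps (import path assumed):

```python
# Hypothetical usage; import path assumed.
from alot.crypto import RFC3156_canonicalize

text = 'From Alice\nhello\n'
print(repr(RFC3156_canonicalize(text)))
# 'From=20Alice\r\nhello\r\n'  -- line endings converted, leading "From " escaped
```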
env = os.environ.get(env_name)
return env if env else fallback | def get_xdg_env(env_name, fallback) | Used for XDG_* env variables to return fallback if unset *or* empty | 3.213256 | 3.48797 | 0.921239 |
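A hypothetical use for cache-directory lookup (import path assumed); note that an empty `XDG_CACHE_HOME` also triggers the fallback:

```python
# Hypothetical usage; import path assumed.
import os
from alot.helper import get_xdg_env

cache_dir = get_xdg_env('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
```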
res = []
query = re.compile('.*%s.*' % re.escape(query), self.reflags)
for name, email in self.get_contacts():
if query.match(name) or query.match(email):
res.append((name, email))
return res | def lookup(self, query='') | looks up all contacts where name or address match query | 3.883124 | 3.595094 | 1.080118 |
"Set the focus in the underlying body widget."
logging.debug('setting focus to %s ', pos)
self.body.set_focus(pos) | def set_focus(self, pos) | Set the focus in the underlying body widget. | 10.928678 | 5.992333 | 1.823777 |
mid = self.get_selected_mid()
newpos = self._tree.parent_position(mid)
if newpos is not None:
newpos = self._sanitize_position((newpos,))
self.body.set_focus(newpos) | def focus_parent(self) | move focus to parent of currently focussed message | 6.777732 | 6.146414 | 1.102713 |
mid = self.get_selected_mid()
newpos = self._tree.first_child_position(mid)
if newpos is not None:
newpos = self._sanitize_position((newpos,))
self.body.set_focus(newpos) | def focus_first_reply(self) | move focus to first reply to currently focussed message | 6.611762 | 5.675159 | 1.165036 |
mid = self.get_selected_mid()
newpos = self._tree.last_child_position(mid)
if newpos is not None:
newpos = self._sanitize_position((newpos,))
self.body.set_focus(newpos) | def focus_last_reply(self) | move focus to last reply to currently focussed message | 6.675682 | 5.688576 | 1.173524 |
mid = self.get_selected_mid()
newpos = self._tree.next_sibling_position(mid)
if newpos is not None:
newpos = self._sanitize_position((newpos,))
self.body.set_focus(newpos) | def focus_next_sibling(self) | focus next sibling of currently focussed message in thread tree | 6.285106 | 5.648629 | 1.112678 |
mid = self.get_selected_mid()
localroot = self._sanitize_position((mid,))
if localroot == self.get_focus()[1]:
newpos = self._tree.prev_sibling_position(mid)
if newpos is not None:
newpos = self._sanitize_position((newpos,))
else:
newpos = localroot
if newpos is not None:
self.body.set_focus(newpos) | def focus_prev_sibling(self) | focus previous sibling of currently focussed message in thread tree | 4.806347 | 4.695998 | 1.023498 |
mid = self.get_selected_mid()
newpos = self._tree.next_position(mid)
if newpos is not None:
newpos = self._sanitize_position((newpos,))
self.body.set_focus(newpos) | def focus_next(self) | focus next message in depth first order | 6.727513 | 6.142136 | 1.095305 |
mid = self.get_selected_mid()
localroot = self._sanitize_position((mid,))
if localroot == self.get_focus()[1]:
newpos = self._tree.prev_position(mid)
if newpos is not None:
newpos = self._sanitize_position((newpos,))
else:
newpos = localroot
if newpos is not None:
self.body.set_focus(newpos) | def focus_prev(self) | focus previous message in depth first order | 5.011666 | 4.872224 | 1.02862 |
newpos = self.get_selected_mid()
newpos = direction(newpos)
while newpos is not None:
MT = self._tree[newpos]
if prop(MT):
newpos = self._sanitize_position((newpos,))
self.body.set_focus(newpos)
break
newpos = direction(newpos) | def focus_property(self, prop, direction) | does a walk in the given direction and focuses the
first message tree that matches the given property | 6.5945 | 6.024095 | 1.094687 |
self.focus_property(lambda x: x._message.matches(querystring),
self._tree.next_position) | def focus_next_matching(self, querystring) | focus next matching message in depth first order | 20.165548 | 15.332022 | 1.315257 |
self.focus_property(lambda x: x._message.matches(querystring),
self._tree.prev_position) | def focus_prev_matching(self, querystring) | focus previous matching message in depth first order | 20.544886 | 15.25019 | 1.347189 |
self.focus_property(lambda x: not x.is_collapsed(x.root),
self._tree.next_position) | def focus_next_unfolded(self) | focus next unfolded message in depth first order | 17.064932 | 14.236121 | 1.198707 |
self.focus_property(lambda x: not x.is_collapsed(x.root),
self._tree.prev_position) | def focus_prev_unfolded(self) | focus previous unfolded message in depth first order | 17.727304 | 15.435942 | 1.148443 |
MT = self._tree[msgpos]
MT.expand(MT.root) | def expand(self, msgpos) | expand message at given position | 19.302757 | 19.351433 | 0.997485 |
MT = self._tree[msgpos]
MT.collapse(MT.root)
self.focus_selected_message() | def collapse(self, msgpos) | collapse message at given position | 19.626493 | 20.542688 | 0.9554 |
for MT in self.messagetrees():
MT.collapse(MT.root)
self.focus_selected_message() | def collapse_all(self) | collapse all messages in thread | 20.041468 | 15.366904 | 1.304197 |
first = None
for MT in self.messagetrees():
msg = MT._message
if msg.matches(querystring):
MT.expand(MT.root)
if first is None:
first = (self._tree.position_of_messagetree(MT), MT.root)
self.body.set_focus(first)
else:
MT.collapse(MT.root)
self.body.refresh() | def unfold_matching(self, querystring, focus_first=True) | expand all messages that match a given querystring.
:param querystring: query to match
:type querystring: str
:param focus_first: set the focus to the first matching message
:type focus_first: bool | 6.908522 | 6.408489 | 1.078027 |
if not isinstance(other, TagWidget):
return NotImplemented
self_len = len(self.translated)
oth_len = len(other.translated)
if (self_len == 1) is not (oth_len == 1):
return comparitor(self_len, oth_len)
return comparitor(self.translated.lower(), other.translated.lower()) | def __cmp(self, other, comparitor) | Shared comparison method. | 3.455588 | 3.471139 | 0.99552 |
'''Add pseudo headers to the mail indicating whether the signature
verification was successful.
:param mail: :class:`email.message.Message` the message to entitle
:param sigs: list of :class:`gpg.results.Signature`
:param error_msg: An error message if there is one, or None
:type error_msg: :class:`str` or `None`
'''
sig_from = ''
sig_known = True
uid_trusted = False
assert error_msg is None or isinstance(error_msg, str)
if not sigs:
error_msg = error_msg or u'no signature found'
elif not error_msg:
try:
key = crypto.get_key(sigs[0].fpr)
for uid in key.uids:
if crypto.check_uid_validity(key, uid.email):
sig_from = uid.uid
uid_trusted = True
break
else:
# No trusted uid found, since we did not break from the loop.
sig_from = key.uids[0].uid
except GPGProblem:
sig_from = sigs[0].fpr
sig_known = False
if error_msg:
msg = 'Invalid: {}'.format(error_msg)
elif uid_trusted:
msg = 'Valid: {}'.format(sig_from)
else:
msg = 'Untrusted: {}'.format(sig_from)
mail.add_header(X_SIGNATURE_VALID_HEADER,
'False' if (error_msg or not sig_known) else 'True')
mail.add_header(X_SIGNATURE_MESSAGE_HEADER, msg) | def add_signature_headers(mail, sigs, error_msg) | Add pseudo headers to the mail indicating whether the signature
verification was successful.
:param mail: :class:`email.message.Message` the message to entitle
:param sigs: list of :class:`gpg.results.Signature`
:param error_msg: An error message if there is one, or None
:type error_msg: :class:`str` or `None` | 3.649087 | 2.626386 | 1.389394 |
'''Get Content-Type parameters as dict.
RFC 2045 specifies that parameter names are case-insensitive, so
we normalize them here.
:param mail: :class:`email.message.Message`
:param failobj: object to return if no such header is found
:param header: the header to search for parameters, default
:param unquote: unquote the values
:returns: a `dict` containing the parameters
'''
failobj = failobj or []
return {k.lower(): v for k, v in mail.get_params(failobj, header, unquote)} | def get_params(mail, failobj=None, header='content-type', unquote=True) | Get Content-Type parameters as dict.
RFC 2045 specifies that parameter names are case-insensitive, so
we normalize them here.
:param mail: :class:`email.message.Message`
:param failobj: object to return if no such header is found
:param header: the header to search for parameters, default
:param unquote: unquote the values
:returns: a `dict` containing the parameters | 4.00449 | 1.637803 | 2.445038 |
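A sketch of the parameter-name normalization; the message content and the import path are assumptions:

```python
# Hypothetical usage; message content and import path are made up.
import email
from alot.db.utils import get_params

msg = email.message_from_string(
    'Content-Type: multipart/signed; PROTOCOL="application/pgp-signature";'
    ' micalg=pgp-sha256\n\n')
print(get_params(msg))
# {'multipart/signed': '', 'protocol': 'application/pgp-signature',
#  'micalg': 'pgp-sha256'}
```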
malformed = None
if len(message.get_payload()) != 2:
malformed = u'expected exactly two messages, got {0}'.format(
len(message.get_payload()))
else:
ct = message.get_payload(1).get_content_type()
if ct != _APP_PGP_SIG:
malformed = u'expected Content-Type: {0}, got: {1}'.format(
_APP_PGP_SIG, ct)
# TODO: RFC 3156 says the alg has to be lower case, but I've seen a message
# with 'PGP-'. maybe we should be more permissive here, or maybe not, this
# is crypto stuff...
if not params.get('micalg', 'nothing').startswith('pgp-'):
malformed = u'expected micalg=pgp-..., got: {0}'.format(
params.get('micalg', 'nothing'))
sigs = []
if not malformed:
try:
sigs = crypto.verify_detached(
message.get_payload(0).as_bytes(policy=email.policy.SMTP),
message.get_payload(1).get_payload(decode=True))
except GPGProblem as e:
malformed = str(e)
add_signature_headers(original, sigs, malformed) | def _handle_signatures(original, message, params) | Shared code for handling message signatures.
RFC 3156 is quite strict:
* exactly two messages
* the second is of type 'application/pgp-signature'
* the second contains the detached signature
:param original: The original top-level mail. This is required to attach
special headers to
:type original: :class:`email.message.Message`
:param message: The multipart/signed payload to verify
:type message: :class:`email.message.Message`
:param params: the message parameters as returned by :func:`get_params`
:type params: dict[str, str] | 4.609241 | 4.012705 | 1.148662 |
malformed = False
ct = message.get_payload(0).get_content_type()
if ct != _APP_PGP_ENC:
malformed = u'expected Content-Type: {0}, got: {1}'.format(
_APP_PGP_ENC, ct)
want = 'application/octet-stream'
ct = message.get_payload(1).get_content_type()
if ct != want:
malformed = u'expected Content-Type: {0}, got: {1}'.format(want, ct)
if not malformed:
# This should be safe because PGP uses US-ASCII characters only
payload = message.get_payload(1).get_payload().encode('ascii')
try:
sigs, d = crypto.decrypt_verify(payload, session_keys)
except GPGProblem as e:
# signature verification failures end up here too if the combined
# method is used, currently this prevents the interpretation of the
# recovered plain text mail. maybe that's a feature.
malformed = str(e)
else:
n = decrypted_message_from_bytes(d, session_keys)
# add the decrypted message to message. note that n contains all
# the attachments, no need to walk over n here.
original.attach(n)
original.defects.extend(n.defects)
# there are two methods for both signed and encrypted data, one is
# called 'RFC 1847 Encapsulation' by RFC 3156, and one is the
# 'Combined method'.
if not sigs:
# 'RFC 1847 Encapsulation', the signature is a detached
# signature found in the recovered mime message of type
# multipart/signed.
if X_SIGNATURE_VALID_HEADER in n:
for k in (X_SIGNATURE_VALID_HEADER,
X_SIGNATURE_MESSAGE_HEADER):
original[k] = n[k]
else:
# 'Combined method', the signatures are returned by the
# decrypt_verify function.
# note that if we reached this point, we know the signatures
# are valid. if they were not valid, the else block of the
# current try would not have been executed
add_signature_headers(original, sigs, '')
if malformed:
msg = u'Malformed OpenPGP message: {0}'.format(malformed)
content = email.message_from_string(msg, policy=email.policy.SMTP)
content.set_charset('utf-8')
original.attach(content) | def _handle_encrypted(original, message, session_keys=None) | Handle encrypted messages helper.
RFC 3156 is quite strict:
* exactly two messages
* the first is of type 'application/pgp-encrypted'
* the first contains 'Version: 1'
* the second is of type 'application/octet-stream'
* the second contains the encrypted and possibly signed data
:param original: The original top-level mail. This is required to attach
special headers to
:type original: :class:`email.message.Message`
:param message: The multipart/encrypted payload to decrypt
:type message: :class:`email.message.Message`
:param session_keys: a list of OpenPGP session keys
:type session_keys: [str] | 5.929532 | 5.644768 | 1.050447 |
'''Detect and decrypt OpenPGP encrypted data in an email object. If this
succeeds, any mime messages found in the recovered plaintext
message are added to the returned message object.
:param m: an email object
:param session_keys: a list of OpenPGP session keys
:returns: :class:`email.message.Message` possibly augmented with
decrypted data
'''
# make sure no one smuggles a token in (data from m is untrusted)
del m[X_SIGNATURE_VALID_HEADER]
del m[X_SIGNATURE_MESSAGE_HEADER]
if m.is_multipart():
p = get_params(m)
# handle OpenPGP signed data
if (m.get_content_subtype() == 'signed' and
p.get('protocol') == _APP_PGP_SIG):
_handle_signatures(m, m, p)
# handle OpenPGP encrypted data
elif (m.get_content_subtype() == 'encrypted' and
p.get('protocol') == _APP_PGP_ENC and
'Version: 1' in m.get_payload(0).get_payload()):
_handle_encrypted(m, m, session_keys)
# It is also possible to put either of the above into a multipart/mixed
# segment
elif m.get_content_subtype() == 'mixed':
sub = m.get_payload(0)
if sub.is_multipart():
p = get_params(sub)
if (sub.get_content_subtype() == 'signed' and
p.get('protocol') == _APP_PGP_SIG):
_handle_signatures(m, sub, p)
elif (sub.get_content_subtype() == 'encrypted' and
p.get('protocol') == _APP_PGP_ENC):
_handle_encrypted(m, sub, session_keys)
return m | def decrypted_message_from_message(m, session_keys=None) | Detect and decrypt OpenPGP encrypted data in an email object. If this
succeeds, any mime messages found in the recovered plaintext
message are added to the returned message object.
:param m: an email object
:param session_keys: a list of OpenPGP session keys
:returns: :class:`email.message.Message` possibly augmented with
decrypted data | 4.180449 | 2.88791 | 1.447569 |
return decrypted_message_from_message(
email.message_from_bytes(bytestring, policy=email.policy.SMTP),
session_keys) | def decrypted_message_from_bytes(bytestring, session_keys=None) | Create a Message from bytes.
:param bytes bytestring: an email message as raw bytes
:param session_keys: a list of OpenPGP session keys
headertext = u''
if headers is None:
headers = mail.keys()
for key in headers:
value = u''
if key in mail:
value = decode_header(mail.get(key, ''))
headertext += '%s: %s\n' % (key, value)
return headertext | def extract_headers(mail, headers=None) | returns a subset of this message's headers in human-readable format:
all header values are decoded, the resulting string has
one line "KEY: VALUE" for each requested header present in the mail.
:param mail: the mail to use
:type mail: :class:`email.Message`
:param headers: headers to extract
:type headers: list of str | 2.767918 | 2.769957 | 0.999264 |
ctype = part.get_content_type()
raw_payload = remove_cte(part)
rendered_payload = None
# get mime handler
_, entry = settings.mailcap_find_match(ctype, key=field_key)
if entry is not None:
tempfile_name = None
stdin = None
handler_raw_commandstring = entry['view']
# in case the mailcap-defined command contains no '%s',
# we pipe the file's content to the handling command via stdin
if '%s' in handler_raw_commandstring:
# open tempfile, respect mailcaps nametemplate
nametemplate = entry.get('nametemplate', '%s')
prefix, suffix = parse_mailcap_nametemplate(nametemplate)
with tempfile.NamedTemporaryFile(
delete=False, prefix=prefix, suffix=suffix) \
as tmpfile:
tmpfile.write(raw_payload)
tempfile_name = tmpfile.name
else:
stdin = raw_payload
# read parameter, create handler command
parms = tuple('='.join(p) for p in part.get_params())
# create and call external command
cmd = mailcap.subst(entry['view'], ctype,
filename=tempfile_name, plist=parms)
logging.debug('command: %s', cmd)
logging.debug('parms: %s', str(parms))
cmdlist = split_commandstring(cmd)
# call handler
stdout, _, _ = helper.call_cmd(cmdlist, stdin=stdin)
if stdout:
rendered_payload = stdout
# remove tempfile
if tempfile_name:
os.unlink(tempfile_name)
return rendered_payload | def render_part(part, field_key='copiousoutput') | renders a non-multipart email part into displayable plaintext by piping its
payload through an external script. The handler itself is determined by
the mailcap entry for this part's ctype. | 6.09903 | 5.361257 | 1.137612 |
enc = part.get_content_charset() or 'ascii'
cte = str(part.get('content-transfer-encoding', '7bit')).lower().strip()
payload = part.get_payload()
sp = '' # string variant of return value
bp = b'' # bytestring variant
logging.debug('Content-Transfer-Encoding: "{}"'.format(cte))
if cte not in ['quoted-printable', 'base64', '7bit', '8bit', 'binary']:
logging.info('Unknown Content-Transfer-Encoding: "{}"'.format(cte))
# switch through all sensible cases
# starting with those where payload is already a str
if '7bit' in cte or 'binary' in cte:
logging.debug('assuming Content-Transfer-Encoding: 7bit')
sp = payload
if as_string:
return sp
bp = payload.encode('utf-8')
return bp
# the remaining cases need decoding and define only bp;
# decoding into a str is done at the end if requested
elif '8bit' in cte:
logging.debug('assuming Content-Transfer-Encoding: 8bit')
# Python's mail library may decode 8bit as raw-unicode-escape, so
# we need to encode that back to bytes so we can decode it using
# the correct encoding, or it might not, in which case assume that
# the str representation we got is correct.
bp = payload.encode('raw-unicode-escape')
elif 'quoted-printable' in cte:
logging.debug('assuming Content-Transfer-Encoding: quoted-printable')
bp = quopri.decodestring(payload.encode('ascii'))
elif 'base64' in cte:
logging.debug('assuming Content-Transfer-Encoding: base64')
bp = base64.b64decode(payload)
else:
logging.debug('failed to interpret Content-Transfer-Encoding: '
'"{}"'.format(cte))
# by now, bp is defined, sp is not.
if as_string:
try:
sp = bp.decode(enc)
except LookupError:
# enc is unknown;
# fall back to guessing the correct encoding using libmagic
sp = helper.try_decode(bp)
except UnicodeDecodeError as emsg:
# the mail contains chars that are not enc-encoded.
# libmagic works better than just ignoring those
logging.debug('Decoding failure: {}'.format(emsg))
sp = helper.try_decode(bp)
return sp
return bp | def remove_cte(part, as_string=False) | Interpret MIME-part according to its Content-Transfer-Encoding.
This returns the payload of `part` as string or bytestring for display, or
to be passed to an external program. In the raw file the payload may be
encoded, e.g. in base64, quoted-printable, 7bit, or 8bit. This method will
look for a Content-Transfer-Encoding header with one of the above values and interpret
the payload accordingly.
Incorrect header values (common in spam messages) will be interpreted as
leniently as possible and will result in INFO-level log messages.
..Note:: All this may be deprecated in favour of
`email.contentmanager.raw_data_manager` (v3.6+)
:param email.Message part: The part to decode
:param bool as_string: If true return a str, otherwise return bytes
:returns: The mail with any Content-Transfer-Encoding removed
:rtype: Union[str, bytes] | 4.270075 | 4.099331 | 1.041652 |
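A minimal sketch decoding a base64-encoded part; the message content and the import path are assumptions:

```python
# Hypothetical usage; message content and import path are made up.
import email
from alot.db.utils import remove_cte

part = email.message_from_string(
    'Content-Type: text/plain; charset=utf-8\n'
    'Content-Transfer-Encoding: base64\n'
    '\n'
    'aMOpbGxv\n')
print(remove_cte(part, as_string=True))   # 'héllo'
print(remove_cte(part))                   # b'h\xc3\xa9llo'
```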
preferred = 'text/plain' if settings.get(
'prefer_plaintext') else 'text/html'
has_preferred = False
# see if the mail has our preferred type
if types is None:
has_preferred = list(typed_subpart_iterator(
mail, *preferred.split('/')))
body_parts = []
for part in mail.walk():
# skip non-leaf nodes in the mail tree
if part.is_multipart():
continue
ctype = part.get_content_type()
if types is not None:
if ctype not in types:
continue
cd = part.get('Content-Disposition', '')
if cd.startswith('attachment'):
continue
# if the mail has our preferred type, we only keep this type
# note that if types != None, has_preferred always stays False
if has_preferred and ctype != preferred:
continue
if ctype == 'text/plain':
body_parts.append(string_sanitize(remove_cte(part, as_string=True)))
else:
rendered_payload = render_part(part)
if rendered_payload: # handler had output
body_parts.append(string_sanitize(rendered_payload))
# mark as attachment
elif cd:
part.replace_header('Content-Disposition', 'attachment; ' + cd)
else:
part.add_header('Content-Disposition', 'attachment;')
return u'\n\n'.join(body_parts) | def extract_body(mail, types=None, field_key='copiousoutput') | Returns a string view of a Message.
If the `types` argument is set then any content types listed there will be used
as the preferred types to extract. If `types` is None then
:ref:`prefer_plaintext <prefer-plaintext>` will be consulted; if it is True
then text/plain parts will be returned, if it is false then text/html will
be returned if present or text/plain if there are no text/html parts.
:param mail: the mail to use
:type mail: :class:`email.Message`
:param types: mime content types to use for body string
:type types: list[str]
:returns: The combined text of any parts to be used
:rtype: str | 4.794396 | 4.69993 | 1.020099 |
# some mailers send out incorrectly escaped headers
# and double quote the escaped realname part again. remove those
# RFC: 2047
regex = r'"(=\?.+?\?.+?\?[^ ?]+\?=)"'
value = re.sub(regex, r'\1', header)
logging.debug("unquoted header: |%s|", value)
# otherwise we interpret RFC2822 encoding escape sequences
valuelist = email.header.decode_header(value)
decoded_list = []
for v, enc in valuelist:
v = string_decode(v, enc)
decoded_list.append(string_sanitize(v))
value = ''.join(decoded_list)
if normalize:
value = re.sub(r'\n\s+', r' ', value)
return value | def decode_header(header, normalize=False) | decode a header value to a unicode string
values are usually a mixture of different substrings
encoded in quoted printable using different encodings.
This turns it into a single unicode string
:param header: the header value
:type header: str
:param normalize: replace trailing spaces after newlines
:type normalize: bool
:rtype: str | 7.348948 | 7.428117 | 0.989342 |
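A hypothetical example with an RFC 2047 encoded word; the address and the import path are made up:

```python
# Hypothetical usage; address and import path are made up.
from alot.db.utils import decode_header

raw = '=?utf-8?q?R=C3=A9sum=C3=A9?= <someone@example.com>'
print(decode_header(raw))  # 'Résumé <someone@example.com>'
```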
self.set_position_collapsed(
self.root, self._message.matches(querystring)) | def collapse_if_matches(self, querystring) | collapse (and show summary only) if the :class:`alot.db.Message`
matches given `querystring` | 25.183577 | 18.858906 | 1.335368 |
if self.ro:
raise DatabaseROError()
if self.writequeue:
# read notmuch's config regarding imap flag synchronization
sync = settings.get_notmuch_setting('maildir', 'synchronize_flags')
# go through writequeue entries
while self.writequeue:
current_item = self.writequeue.popleft()
logging.debug('write-out item: %s', str(current_item))
# watch out for notmuch errors and re-insert current_item
# into the queue on error
try:
# the first two items are the command name and the post-callback
cmd, afterwards = current_item[:2]
logging.debug('cmd created')
# acquire a writeable db handler
try:
mode = Database.MODE.READ_WRITE
db = Database(path=self.path, mode=mode)
except NotmuchError:
raise DatabaseLockedError()
logging.debug('got write lock')
# make this a transaction
db.begin_atomic()
logging.debug('got atomic')
if cmd == 'add':
logging.debug('add')
path, tags = current_item[2:]
msg, _ = db.add_message(path, sync_maildir_flags=sync)
logging.debug('added msg')
msg.freeze()
logging.debug('freeze')
for tag in tags:
msg.add_tag(tag, sync_maildir_flags=sync)
logging.debug('added tags ')
msg.thaw()
logging.debug('thaw')
elif cmd == 'remove':
path = current_item[2]
db.remove_message(path)
elif cmd == 'setconfig':
key = current_item[2]
value = current_item[3]
db.set_config(key, value)
else: # tag/set/untag
querystring, tags = current_item[2:]
query = db.create_query(querystring)
for msg in query.search_messages():
msg.freeze()
if cmd == 'tag':
strategy = msg.add_tag
if cmd == 'set':
msg.remove_all_tags()
strategy = msg.add_tag
elif cmd == 'untag':
strategy = msg.remove_tag
for tag in tags:
strategy(tag, sync_maildir_flags=sync)
msg.thaw()
logging.debug('ended atomic')
# end transaction and reinsert queue item on error
if db.end_atomic() != notmuch.STATUS.SUCCESS:
raise DatabaseError('end_atomic failed')
logging.debug('ended atomic')
# close db
db.close()
logging.debug('closed db')
# call post-callback
if callable(afterwards):
logging.debug(str(afterwards))
afterwards()
logging.debug('called callback')
# re-insert item to the queue upon Xapian/NotmuchErrors
except (XapianError, NotmuchError) as e:
logging.exception(e)
self.writequeue.appendleft(current_item)
raise DatabaseError(str(e))
except DatabaseLockedError as e:
logging.debug('index temporarily locked')
self.writequeue.appendleft(current_item)
raise e
logging.debug('flush finished') | def flush(self) | write out all queued write-commands in order, each one in a separate
:meth:`atomic <notmuch.Database.begin_atomic>` transaction.
If this fails the current action is rolled back, stays in the write
queue and an exception is raised.
You are responsible to retry flushing at a later time if you want to
ensure that the cached changes are applied to the database.
:exception: :exc:`~errors.DatabaseROError` if db is opened read-only
:exception: :exc:`~errors.DatabaseLockedError` if db is locked | 4.519431 | 4.223277 | 1.070124 |
if self.ro:
raise DatabaseROError()
if remove_rest:
self.writequeue.append(('set', afterwards, querystring, tags))
else:
self.writequeue.append(('tag', afterwards, querystring, tags)) | def tag(self, querystring, tags, afterwards=None, remove_rest=False) | add tags to messages matching `querystring`.
This appends a tag operation to the write queue and raises
:exc:`~errors.DatabaseROError` if in read only mode.
:param querystring: notmuch search string
:type querystring: str
:param tags: a list of tags to be added
:type tags: list of str
:param afterwards: callback that gets called after successful
application of this tagging operation
:type afterwards: callable
:param remove_rest: remove tags from matching messages before tagging
:type remove_rest: bool
:exception: :exc:`~errors.DatabaseROError`
.. note::
This only adds the requested operation to the write queue.
You need to call :meth:`DBManager.flush` to actually write out. | 6.538084 | 4.668082 | 1.400593 |
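A hypothetical sketch of the queue-then-flush pattern described above; `manager`, the query, the tag, and the error import path are illustrative assumptions:

```python
# Hypothetical usage; `manager` is assumed to be an open DBManager instance.
from alot.db.errors import DatabaseLockedError  # import path assumed

manager.tag('tag:unread and to:me@example.com', ['important'])  # only queued
try:
    manager.flush()                 # writes the queued operation atomically
except DatabaseLockedError:
    pass                            # still queued -- retry flushing later
```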
if self.ro:
raise DatabaseROError()
self.writequeue.append(('untag', afterwards, querystring, tags)) | def untag(self, querystring, tags, afterwards=None) | removes tags from messages that match `querystring`.
This appends an untag operation to the write queue and raises
:exc:`~errors.DatabaseROError` if in read only mode.
:param querystring: notmuch search string
:type querystring: str
:param tags: a list of tags to be removed
:type tags: list of str
:param afterwards: callback that gets called after successful
application of this tagging operation
:type afterwards: callable
:exception: :exc:`~errors.DatabaseROError`
.. note::
This only adds the requested operation to the write queue.
You need to call :meth:`DBManager.flush` to actually write out. | 17.836195 | 7.816918 | 2.281743 |
query = self.query('thread:' + tid)
try:
return next(query.search_threads())
except StopIteration:
errmsg = 'no thread with id %s exists!' % tid
raise NonexistantObjectError(errmsg) | def _get_notmuch_thread(self, tid) | returns :class:`notmuch.database.Thread` with given id | 5.770431 | 5.439461 | 1.060846 |
mode = Database.MODE.READ_ONLY
db = Database(path=self.path, mode=mode)
try:
return db.find_message(mid)
except:
errmsg = 'no message with id %s exists!' % mid
raise NonexistantObjectError(errmsg) | def _get_notmuch_message(self, mid) | returns :class:`notmuch.database.Message` with given id | 6.081974 | 5.309783 | 1.145428 |
db = Database(path=self.path)
return [t for t in db.get_all_tags()] | def get_all_tags(self) | returns all tag strings used in the database
:rtype: list of str | 6.242723 | 4.943669 | 1.262771 |
db = Database(path=self.path)
return {k[6:]: v for k, v in db.get_configs('query.')} | def get_named_queries(self) | returns the named queries stored in the database.
:rtype: dict (str -> str) mapping alias to full query string | 10.641587 | 10.629247 | 1.001161 |
# create two unix pipes to redirect the workers stdout and
# stderr
stdout = os.pipe()
stderr = os.pipe()
# create a multiprocessing pipe for the results
pipe = multiprocessing.Pipe(False)
receiver, sender = pipe
process = FillPipeProcess(cbl(), stdout[1], stderr[1], pipe, fun)
process.start()
self.processes.append(process)
logging.debug('Worker process %s spawned', process.pid)
def threaded_wait():
# wait(2) for the process to die
process.join()
if process.exitcode < 0:
msg = 'received signal {0}'.format(-process.exitcode)
elif process.exitcode > 0:
msg = 'returned error code {0}'.format(process.exitcode)
else:
msg = 'exited successfully'
logging.debug('Worker process %s %s', process.pid, msg)
self.processes.remove(process)
# XXX: it would be much nicer to run this as a coroutine than a thread,
# except that this code is executed before the eventloop is started.
#
# spawn a thread to collect the worker process once it dies
# preventing it from hanging around as zombie
threading.Thread(target=threaded_wait).start()
# TODO: avoid this if logging level > debug
def threaded_reader(prefix, fd):
with os.fdopen(fd) as handle:
for line in handle:
logging.debug('Worker process %s said on %s: %s',
process.pid, prefix, line.rstrip())
# spawn two threads that read from the stdout and stderr pipes
# and write anything that appears there to the log
threading.Thread(target=threaded_reader,
args=('stdout', stdout[0])).start()
os.close(stdout[1])
threading.Thread(target=threaded_reader,
args=('stderr', stderr[0])).start()
os.close(stderr[1])
# closing the sending end in this (receiving) process guarantees
# that here the appropriate EOFError is raised upon .recv in the walker
sender.close()
return receiver, process | def async_(self, cbl, fun) | return a pair (pipe, process) so that the process writes
`fun(a)` to the pipe for each element `a` in the iterable returned
by the callable `cbl`.
:param cbl: a function returning something iterable
:type cbl: callable
:param fun: an unary translation function
:type fun: callable
:rtype: (:class:`multiprocessing.Pipe`,
:class:`multiprocessing.Process`) | 4.969511 | 4.87671 | 1.019029 |
assert sort in self._sort_orders
q = self.query(querystring)
q.set_sort(self._sort_orders[sort])
if exclude_tags:
for tag in exclude_tags:
q.exclude_tag(tag)
return self.async_(q.search_threads, (lambda a: a.get_thread_id())) | def get_threads(self, querystring, sort='newest_first', exclude_tags=None) | asynchronously look up thread ids matching `querystring`.
:param querystring: The query string to use for the lookup
:type querystring: str.
:param sort: Sort order. one of ['oldest_first', 'newest_first',
'message_id', 'unsorted']
:type query: str
:param exclude_tags: Tags to exclude by default unless included in the
search
:type exclude_tags: list of str
:returns: a pipe together with the process that asynchronously
writes to it.
:rtype: (:class:`multiprocessing.Pipe`,
:class:`multiprocessing.Process`) | 4.500284 | 4.572863 | 0.984128 |
mode = Database.MODE.READ_ONLY
db = Database(path=self.path, mode=mode)
q = db.create_query(querystring)
# add configured exclude tags
for tag in settings.get('exclude_tags'):
q.exclude_tag(tag)
return q | def query(self, querystring) | creates :class:`notmuch.Query` objects on demand
:param querystring: The query string to use for the lookup
:type query: str.
:returns: :class:`notmuch.Query` -- the query object. | 6.181333 | 6.966818 | 0.887253 |
tags = tags or []
if self.ro:
raise DatabaseROError()
if not is_subdir_of(path, self.path):
msg = 'message path %s ' % path
msg += "is not below notmuch's "
msg += 'root path (%s)' % self.path
raise DatabaseError(msg)
else:
self.writequeue.append(('add', afterwards, path, tags)) | def add_message(self, path, tags=None, afterwards=None) | Adds a file to the notmuch index.
:param path: path to the file
:type path: str
:param tags: tagstrings to add
:type tags: list of str
:param afterwards: callback to trigger after adding
:type afterwards: callable or None | 8.760017 | 8.855889 | 0.989174 |
if self.ro:
raise DatabaseROError()
path = message.get_filename()
self.writequeue.append(('remove', afterwards, path)) | def remove_message(self, message, afterwards=None) | Remove a message from the notmuch index
:param message: message to remove
:type message: :class:`Message`
:param afterwards: callback to trigger after removing
:type afterwards: callable or None | 22.037094 | 25.788704 | 0.854525 |
if self.ro:
raise DatabaseROError()
self.writequeue.append(('setconfig', afterwards, 'query.' + alias,
querystring)) | def save_named_query(self, alias, querystring, afterwards=None) | add an alias for a query string.
These are stored in the notmuch database and can be used as part of
more complex queries using the syntax "query:alias".
See :manpage:`notmuch-search-terms(7)` for more info.
:param alias: name of shortcut
:type alias: str
:param querystring: value, i.e., the full query string
:type querystring: str
:param afterwards: callback to trigger after adding the alias
:type afterwards: callable or None | 28.314201 | 34.276421 | 0.826055 |
if self.ro:
raise DatabaseROError()
self.writequeue.append(('setconfig', afterwards, 'query.' + alias, '')) | def remove_named_query(self, alias, afterwards=None) | remove a named query from the notmuch database.
:param alias: name of shortcut
:type alias: str
:param afterwards: callback to trigger after removing the alias
:type afterwards: callable or None | 33.518967 | 33.869354 | 0.989655 |
# hash_algo will be something like SHA256, but we need pgp-sha256.
algo = gpg.core.hash_algo_name(hash_algo)
if algo is None:
raise GPGProblem('Unknown hash algorithm {}'.format(hash_algo),
code=GPGCode.INVALID_HASH_ALGORITHM)
return 'pgp-' + algo.lower() | def RFC3156_micalg_from_algo(hash_algo) | Converts a GPGME hash algorithm name to one conforming to RFC3156.
GPGME returns hash algorithm names such as "SHA256", but RFC3156 says that
programs need to use names such as "pgp-sha256" instead.
:param str hash_algo: GPGME hash_algo
:returns: the lowercase name of the algorithm with "pgp-" prepended
:rtype: str | 6.095676 | 5.71057 | 1.067437 |
ctx = gpg.core.Context()
try:
key = ctx.get_key(keyid)
if validate:
validate_key(key, encrypt=encrypt, sign=sign)
except gpg.errors.KeyNotFound:
raise GPGProblem('Cannot find key for "{}".'.format(keyid),
code=GPGCode.NOT_FOUND)
except gpg.errors.GPGMEError as e:
if e.getcode() == gpg.errors.AMBIGUOUS_NAME:
# When we get here it means there were multiple keys returned by
# gpg for given keyid. Unfortunately gpgme returns invalid and
# expired keys together with valid keys. If only one key is valid
# for given operation maybe we can still return it instead of
# raising exception
valid_key = None
for k in list_keys(hint=keyid):
try:
validate_key(k, encrypt=encrypt, sign=sign)
except GPGProblem:
# if the key is invalid for given action skip it
continue
if valid_key:
# we have already found one valid key and now we find
# another? We really received an ambiguous keyid
raise GPGProblem(
"More than one key found matching this filter. "
"Please be more specific "
"(use a key ID like 4AC8EE1D).",
code=GPGCode.AMBIGUOUS_NAME)
valid_key = k
if not valid_key:
# there were multiple keys found but none of them are valid for
# given action (we don't have private key, they are expired
# etc), or there was no key at all
raise GPGProblem(
'Can not find usable key for "{}".'.format(keyid),
code=GPGCode.NOT_FOUND)
return valid_key
elif e.getcode() == gpg.errors.INV_VALUE:
raise GPGProblem(
'Can not find usable key for "{}".'.format(keyid),
code=GPGCode.NOT_FOUND)
else:
raise e # pragma: nocover
if signed_only and not check_uid_validity(key, keyid):
raise GPGProblem(
'Cannot find a trustworthy key for "{}".'.format(keyid),
code=GPGCode.NOT_FOUND)
return key | def get_key(keyid, validate=False, encrypt=False, sign=False,
signed_only=False) | Gets a key from the keyring by filtering for the specified keyid, but
only if the given keyid is specific enough (if it matches multiple
keys, an exception will be thrown).
If validate is True also make sure that returned key is not invalid,
revoked or expired. In addition if encrypt or sign is True also validate
that key is valid for that action. For example only keys with private key
can sign. If signed_only is True make sure that the user id can be trusted
to belong to the key (is signed). This last check will only work if the
keyid is part of the user id associated with the key, not if it is part of
the key fingerprint.
:param keyid: filter term for the keyring (usually a key ID)
:type keyid: str
:param validate: validate that returned keyid is valid
:type validate: bool
:param encrypt: when validating confirm that returned key can encrypt
:type encrypt: bool
:param sign: when validating confirm that returned key can sign
:type sign: bool
:param signed_only: only return keys whose uid is signed (trusted to
belong to the key)
:type signed_only: bool
:returns: A gpg key matching the given parameters
:rtype: gpg.gpgme._gpgme_key
:raises ~alot.errors.GPGProblem: if the keyid is ambiguous
:raises ~alot.errors.GPGProblem: if there is no key that matches the
parameters
:raises ~alot.errors.GPGProblem: if a key is found, but signed_only is true
and the key's user id is not signed | 4.189654 | 3.841222 | 1.090709 |
ctx = gpg.core.Context()
return ctx.keylist(hint, private) | def list_keys(hint=None, private=False) | Returns a generator of all keys containing the fingerprint, or all keys if
hint is None.
The generator may raise exceptions of :class:gpg.errors.GPGMEError, and it
is the caller's responsibility to handle them.
:param hint: Part of a fingerprint to use to search
:type hint: str or None
:param private: Whether to return public keys or secret keys
:type private: bool
:returns: A generator that yields keys.
:rtype: Generator[gpg.gpgme.gpgme_key_t, None, None] | 15.027194 | 15.319258 | 0.980935 |
ctx = gpg.core.Context(armor=True)
ctx.signers = keys
(sigblob, sign_result) = ctx.sign(plaintext_str,
mode=gpg.constants.SIG_MODE_DETACH)
return sign_result.signatures, sigblob | def detached_signature_for(plaintext_str, keys) | Signs the given plaintext string and returns the detached signature.
A detached signature in GPG speak is a separate blob of data containing
a signature for the specified plaintext.
:param bytes plaintext_str: bytestring to sign
:param keys: list of one or more key to sign with.
:type keys: list[gpg.gpgme._gpgme_key]
:returns: A list of signature and the signed blob of data
:rtype: tuple[list[gpg.results.NewSignature], str] | 8.010996 | 7.520598 | 1.065207 |
assert keys, 'Must provide at least one key to encrypt with'
ctx = gpg.core.Context(armor=True)
out = ctx.encrypt(plaintext_str, recipients=keys, sign=False,
always_trust=True)[0]
return out | def encrypt(plaintext_str, keys) | Encrypt data and return the encrypted form.
:param bytes plaintext_str: the mail to encrypt
:param keys: a list of one or more keys to encrypt with
:type keys: list[gpg.gpgme.gpgme_key_t]
:returns: encrypted mail
:rtype: str | 6.125678 | 6.257675 | 0.978906 |
return ", ".join("{}: {}".format(s.fpr,
"Bad signature for key(s)")
for s in error.result.signatures
if s.status != NO_ERROR) | def bad_signatures_to_str(error) | Convert a bad signature exception to a text message.
This is a workaround for gpg not handling non-ascii data correctly.
:param BadSignatures error: BadSignatures exception | 14.534396 | 18.359579 | 0.791652 |
ctx = gpg.core.Context()
try:
verify_results = ctx.verify(message, signature)[1]
return verify_results.signatures
except gpg.errors.BadSignatures as e:
raise GPGProblem(bad_signatures_to_str(e), code=GPGCode.BAD_SIGNATURE)
except gpg.errors.GPGMEError as e:
raise GPGProblem(str(e), code=e.getcode()) | def verify_detached(message, signature) | Verifies whether the message is authentic by checking the signature.
:param bytes message: The message to be verified, in canonical form.
:param bytes signature: the OpenPGP signature to verify
:returns: a list of signatures
:rtype: list[gpg.results.Signature]
:raises alot.errors.GPGProblem: if the verification fails | 5.113465 | 4.217702 | 1.212382 |
if session_keys is not None:
try:
return _decrypt_verify_session_keys(encrypted, session_keys)
except GPGProblem:
pass
ctx = gpg.core.Context()
return _decrypt_verify_with_context(ctx, encrypted) | def decrypt_verify(encrypted, session_keys=None) | Decrypts the given ciphertext string and returns both the
signatures (if any) and the plaintext.
:param bytes encrypted: the mail to decrypt
:param list[str] session_keys: a list of OpenPGP session keys
:returns: the signatures and decrypted plaintext data
:rtype: tuple[list[gpg.results.Signature], str]
:raises alot.errors.GPGProblem: if the decryption fails | 4.268324 | 3.94799 | 1.081138 |
for key in session_keys:
ctx = gpg.core.Context()
ctx.set_ctx_flag("override-session-key", key)
try:
return _decrypt_verify_with_context(ctx, encrypted)
except GPGProblem:
continue
raise GPGProblem("No valid session key", code=GPGCode.NOT_FOUND) | def _decrypt_verify_session_keys(encrypted, session_keys) | Decrypts the given ciphertext string using the session_keys
and returns both the signatures (if any) and the plaintext.
:param bytes encrypted: the mail to decrypt
:param list[str] session_keys: a list of OpenPGP session keys
:returns: the signatures and decrypted plaintext data
:rtype: tuple[list[gpg.results.Signature], str]
:raises alot.errors.GPGProblem: if the decryption fails | 6.917803 | 6.283036 | 1.101029 |
try:
(plaintext, _, verify_result) = ctx.decrypt(
encrypted, verify=True)
sigs = verify_result.signatures
except gpg.errors.GPGMEError as e:
raise GPGProblem(str(e), code=e.getcode())
except gpg.errors.BadSignatures as e:
(plaintext, _, _) = ctx.decrypt(encrypted, verify=False)
sigs = e.result.signatures
return sigs, plaintext | def _decrypt_verify_with_context(ctx, encrypted) | Decrypts the given ciphertext string using the gpg context
and returns both the signatures (if any) and the plaintext.
:param gpg.Context ctx: the gpg context
:param bytes encrypted: the mail to decrypt
:returns: the signatures and decrypted plaintext data
:rtype: tuple[list[gpg.results.Signature], str]
:raises alot.errors.GPGProblem: if the decryption fails | 4.910778 | 4.149518 | 1.183457 |
if key.revoked:
raise GPGProblem('The key "{}" is revoked.'.format(key.uids[0].uid),
code=GPGCode.KEY_REVOKED)
elif key.expired:
raise GPGProblem('The key "{}" is expired.'.format(key.uids[0].uid),
code=GPGCode.KEY_EXPIRED)
elif key.invalid:
raise GPGProblem('The key "{}" is invalid.'.format(key.uids[0].uid),
code=GPGCode.KEY_INVALID)
if encrypt and not key.can_encrypt:
raise GPGProblem(
'The key "{}" cannot be used to encrypt'.format(key.uids[0].uid),
code=GPGCode.KEY_CANNOT_ENCRYPT)
if sign and not key.can_sign:
raise GPGProblem(
'The key "{}" cannot be used to sign'.format(key.uids[0].uid),
code=GPGCode.KEY_CANNOT_SIGN) | def validate_key(key, sign=False, encrypt=False) | Assert that a key is valid and optionally that it can be used for
signing or encrypting. Raise GPGProblem otherwise.
:param key: the GPG key to check
:type key: gpg.gpgme._gpgme_key
:param sign: whether the key should be able to sign
:type sign: bool
:param encrypt: whether the key should be able to encrypt
:type encrypt: bool
:raises ~alot.errors.GPGProblem: If the key is revoked, expired, or invalid
:raises ~alot.errors.GPGProblem: If encrypt is true and the key cannot be
used to encrypt
:raises ~alot.errors.GPGProblem: If sign is true and the key cannot be used
to sign | 1.715753 | 1.518355 | 1.130008 |
def check(key_uid):
return (email == key_uid.email and
not key_uid.revoked and
not key_uid.invalid and
key_uid.validity >= gpg.constants.validity.FULL)
return any(check(u) for u in key.uids) | def check_uid_validity(key, email) | Check that the email belongs to the given key. Also check the trust
level of this connection. Only if the trust level is high enough (>=4) the
email is assumed to belong to the key.
:param key: the GPG key to which the email should belong
:type key: gpg.gpgme._gpgme_key
:param email: the email address that should belong to the key
:type email: str
:returns: whether the key can be assumed to belong to the given email
:rtype: bool | 6.528046 | 7.017139 | 0.9303 |
if self.proc:
if self.proc.is_alive():
self.proc.terminate() | def kill_filler_process(self) | terminates the process that fills this buffer's
:class:`~alot.walker.PipeWalker`. | 4.119905 | 4.680089 | 0.880305 |
threadlinewidget = self.get_selected_threadline()
thread = None
if threadlinewidget:
thread = threadlinewidget.get_thread()
return thread | def get_selected_thread(self) | returns currently selected :class:`~alot.db.Thread` | 4.064562 | 3.916286 | 1.037861 |
keys = ['dfg', 'dbg', '1fg', '1bg', '16fg', '16bg', '256fg', '256bg']
acc = {}
if not isinstance(value, (list, tuple)):
value = value,
if len(value) > 6:
raise VdtValueTooLongError(value)
# ensure we have exactly 6 attribute strings
attrstrings = (value + (6 - len(value)) * [None])[:6]
# add fallbacks for the empty list
attrstrings = (2 * ['default']) + attrstrings
for i, value in enumerate(attrstrings):
if value:
acc[keys[i]] = value
else:
acc[keys[i]] = acc[keys[i - 2]]
try:
mono = AttrSpec(acc['1fg'], acc['1bg'], 1)
normal = AttrSpec(acc['16fg'], acc['16bg'], 16)
high = AttrSpec(acc['256fg'], acc['256bg'], 256)
except AttrSpecError as e:
raise ValidateError(str(e))
return mono, normal, high | def attr_triple(value) | Check that interprets the value as `urwid.AttrSpec` triple for the colour
modes 1,16 and 256. It assumes a <6 tuple of attribute strings for
mono foreground, mono background, 16c fg, 16c bg, 256 fg and 256 bg
respectively. If any of these are missing, we downgrade to the next
lower available pair, defaulting to 'default'.
:raises: VdtValueTooLongError, VdtTypeError
:rtype: triple of `urwid.AttrSpec` | 3.899262 | 3.07546 | 1.267863 |
if value is None:
res = 'fit', 0, 0
elif not isinstance(value, (list, tuple)):
raise VdtTypeError(value)
elif value[0] not in ['fit', 'weight']:
raise VdtTypeError(value)
elif value[0] == 'fit':
if not isinstance(value[1], int) or not isinstance(value[2], int):
raise VdtTypeError(value)
res = 'fit', int(value[1]), int(value[2])
else:
if not isinstance(value[1], int):
raise VdtTypeError(value)
res = 'weight', int(value[1])
return res | def width_tuple(value) | test if value is a valid width indicator (for a sub-widget in a column).
This can either be
('fit', min, max): use the length actually needed for the content, padded
to use at least width min, and cut off at width max.
Here, min and max are positive integers or 0 to disable
the boundary.
('weight',n): have it relative weight of n compared to other columns.
Here, n is an int. | 2.476702 | 2.318466 | 1.068251 |
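A small sketch of accepted values (import path assumed):

```python
# Hypothetical usage; import path assumed.
from alot.settings.checks import width_tuple

print(width_tuple(['fit', 10, 30]))  # ('fit', 10, 30)
print(width_tuple(['weight', 2]))    # ('weight', 2)
```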
if not re.match(r'.*://.*', value):
raise VdtTypeError(value)
mburl = urlparse(value)
uri_scheme_to_mbclass = {
'mbox': mailbox.mbox,
'maildir': mailbox.Maildir,
'mh': mailbox.MH,
'babyl': mailbox.Babyl,
'mmdf': mailbox.MMDF,
}
klass = uri_scheme_to_mbclass.get(mburl.scheme)
if klass:
return klass(mburl.netloc + mburl.path)
raise VdtTypeError(value) | def mail_container(value) | Check that the value points to a valid mail container,
in URI-style, e.g.: `mbox:///home/username/mail/mail.box`.
`~`-expansion will work, e.g.: `mbox://~/mail/mail.box`.
The value is cast to a :class:`mailbox.Mailbox` object. | 3.775645 | 3.212096 | 1.175446 |
try:
return crypto.get_key(value)
except GPGProblem as e:
raise ValidateError(str(e)) | def gpg_key(value) | test if value points to a known gpg key
and return that key as a gpg key object. | 6.905661 | 7.318663 | 0.943569 |
checks = checks or {}
try:
config = ConfigObj(infile=configpath, configspec=specpath,
file_error=True, encoding='UTF8')
except ConfigObjError as e:
msg = 'Error when parsing `%s`:\n%s' % (configpath, e)
logging.error(msg)
raise ConfigError(msg)
except IOError:
raise ConfigError('Could not read %s and/or %s'
% (configpath, specpath))
except UnboundLocalError:
# this works around a bug in configobj
msg = '%s is malformed. Check for sections without parents..'
raise ConfigError(msg % configpath)
if specpath:
validator = Validator()
validator.functions.update(checks)
try:
results = config.validate(validator, preserve_errors=True)
except ConfigObjError as e:
raise ConfigError(str(e))
if results is not True:
error_msg = ''
for (section_list, key, res) in flatten_errors(config, results):
if key is not None:
if res is False:
msg = 'key "%s" in section "%s" is missing.'
msg = msg % (key, ', '.join(section_list))
else:
msg = 'key "%s" in section "%s" failed validation: %s'
msg = msg % (key, ', '.join(section_list), res)
else:
msg = 'section "%s" is missing' % '.'.join(section_list)
error_msg += msg + '\n'
raise ConfigError(error_msg)
extra_values = get_extra_values(config) if report_extra else None
if extra_values:
msg = ['Unknown values were found in `%s`. Please check for '
'typos if a specified setting does not seem to work:'
% configpath]
for sections, val in extra_values:
if sections:
msg.append('%s: %s' % ('->'.join(sections), val))
else:
msg.append(str(val))
logging.info('\n'.join(msg))
return config | def read_config(configpath=None, specpath=None, checks=None,
report_extra=False) | get a (validated) config object for given config file path.
:param configpath: path to config-file or a list of lines as its content
:type configpath: str or list(str)
:param specpath: path to spec-file
:type specpath: str
:param checks: custom checks to use for validator.
see `validate docs <http://www.voidspace.org.uk/python/validate.html>`_
:type checks: dict str->callable,
:param report_extra: log if a setting is not present in the spec file
:type report_extra: boolean
:raises: :class:`~alot.settings.errors.ConfigError`
:rtype: `configobj.ConfigObj` | 2.975133 | 3.051962 | 0.974826 |
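A minimal sketch; both the config and the spec are passed as lists of lines so no files are needed (import path assumed, requires the configobj package):

```python
# Hypothetical usage; import path assumed, requires configobj/validate.
from alot.settings.utils import read_config

spec = ['colourmode = integer(min=1, max=256, default=256)']
cfg = read_config(['colourmode = 16'], specpath=spec)
print(cfg['colourmode'])  # 16
```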
if a is None:
return fallback
if a.background in ['default', '']:
bg = fallback.background
else:
bg = a.background
if a.foreground in ['default', '']:
fg = fallback.foreground
else:
fg = a.foreground
return AttrSpec(fg, bg) | def resolve_att(a, fallback) | replace '' and 'default' by fallback values | 2.927616 | 2.67873 | 1.092912 |
start = original.rfind(sep, 0, pos) + 1
end = original.find(sep, pos - 1)
if end == -1:
end = len(original)
return original[start:end], start, end, pos - start | def relevant_part(self, original, pos, sep=' ') | calculates the subword in a `sep`-splitted list of substrings of
`original` that `pos` is in. | 2.48303 | 2.481885 | 1.000461 |
start = original.rfind(self._separator, 0, pos)
if start == -1:
start = 0
else:
start = start + len(self._separator)
end = original.find(self._separator, pos - 1)
if end == -1:
end = len(original)
return original[start:end], start, end, pos - start | def relevant_part(self, original, pos) | calculates the subword of `original` that `pos` is in | 2.17886 | 2.118887 | 1.028304 |
commands = split_commandline(line) + ['']
i = 0
start = 0
end = len(commands[i])
while pos > end:
i += 1
start = end + 1
end += 1 + len(commands[i])
return start, end | def get_context(line, pos) | computes start and end position of substring of line that is the
command string under given position | 4.08707 | 3.410265 | 1.198461 |
@functools.wraps(check)
def validator(paths):
if isinstance(paths, str):
check(paths)
elif isinstance(paths, collections.abc.Sequence):
for path in paths:
check(path)
else:
raise Exception('expected either a str or a sequence of str')
return validator | def _path_factory(check) | Create a function that checks paths. | 4.308422 | 4.150639 | 1.038014 |
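A sketch of the decorator factory in use; the wrapped check and the example paths are hypothetical:

```python
import os

@_path_factory
def require_regular_file(path):
    # hypothetical per-path check
    if not os.path.isfile(path):
        raise Exception('%s is not a regular file' % path)

require_regular_file('/etc/hostname')                   # single path
require_regular_file(['/etc/hostname', '/etc/hosts'])   # sequence of paths
```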
if (os.path.exists(path) and not (os.path.isfile(path) or
stat.S_ISFIFO(os.stat(path).st_mode) or
stat.S_ISCHR(os.stat(path).st_mode))):
raise ValidationFailed(
'{} is not a valid file, character device, or fifo.'.format(path)) | def optional_file_like(path) | Validator that ensures that if a file exists it is regular, a fifo, or a
character device. The file is not required to exist.
This includes character special devices like /dev/null. | 3.13504 | 2.394823 | 1.30909 |
fname = self.part.get_filename()
if fname:
extracted_name = decode_header(fname)
if extracted_name:
return os.path.basename(extracted_name)
return None | def get_filename(self) | return name of attached file.
If the content-disposition header contains no file name,
this returns `None` | 4.881993 | 3.894724 | 1.253489 |
ctype = self.part.get_content_type()
# replace underspecified mime description by a better guess
if ctype in ['octet/stream', 'application/octet-stream',
'application/octetstream']:
ctype = guess_mimetype(self.get_data())
return ctype | def get_content_type(self) | mime type of the attachment part | 6.587218 | 6.082038 | 1.083061 |
filename = self.get_filename()
path = os.path.expanduser(path)
if os.path.isdir(path):
if filename:
basename = os.path.basename(filename)
file_ = open(os.path.join(path, basename), "wb")
else:
file_ = tempfile.NamedTemporaryFile(delete=False, dir=path)
else:
file_ = open(path, "wb") # this throws IOErrors for invalid path
self.write(file_)
file_.close()
return file_.name | def save(self, path) | save the attachment to disk. Uses :meth:`~get_filename` in case path
is a directory | 2.844077 | 2.577393 | 1.10347 |
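A usage sketch, assuming `attachment` is an instance of the attachment class this method belongs to and the target paths exist:

```python
# saving into a directory keeps the original filename (or falls back to a tempfile)
written = attachment.save('~/Downloads')
print('wrote', written)

# an explicit file path is used verbatim
attachment.save('~/Downloads/report.pdf')
```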
part = deepcopy(self.part)
part.set_param('maxlinelen', '78', header='Content-Disposition')
return part | def get_mime_representation(self) | returns mime part that constitutes this attachment | 15.263766 | 10.735332 | 1.421825 |
thmble = self._config[mode][name]
if part is not None:
thmble = thmble[part]
thmble = thmble or DUMMYDEFAULT
return thmble[self._colours.index(colourmode)] | def get_attribute(self, colourmode, mode, name, part=None) | returns requested attribute
:param mode: ui-mode (e.g. `search`,`thread`...)
:type mode: str
:param name: name of the attribute
:type name: str
:param colourmode: colour mode; in [1, 16, 256]
:type colourmode: int
:rtype: urwid.AttrSpec | 6.338542 | 6.718626 | 0.943428 |
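A lookup sketch, assuming `theme` is a loaded theme object and a 256-colour terminal; the 'global' attributes 'footer' and 'prompt' are the same ones the UI code in later entries requests:

```python
footer_att = theme.get_attribute(256, 'global', 'footer')
prompt_att = theme.get_attribute(256, 'global', 'prompt')
```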
def pickcolour(triple):
return triple[self._colours.index(colourmode)]
def matches(sec, thread):
if sec.get('tagged_with') is not None:
if not set(sec['tagged_with']).issubset(thread.get_tags()):
return False
if sec.get('query') is not None:
if not thread.matches(sec['query']):
return False
return True
default = self._config['search']['threadline']
match = default
candidates = self._config['search'].sections
for candidatename in candidates:
candidate = self._config['search'][candidatename]
if (candidatename.startswith('threadline') and
(not candidatename == 'threadline') and
matches(candidate, thread)):
match = candidate
break
# fill in values
res = {}
res['normal'] = pickcolour(match.get('normal') or default['normal'])
res['focus'] = pickcolour(match.get('focus') or default['focus'])
res['parts'] = match.get('parts') or default['parts']
for part in res['parts']:
defaultsec = default.get(part)
partsec = match.get(part) or {}
def fill(key, fallback=None):
pvalue = partsec.get(key) or defaultsec.get(key)
return pvalue or fallback
res[part] = {}
res[part]['width'] = fill('width', ('fit', 0, 0))
res[part]['alignment'] = fill('alignment', 'right')
res[part]['normal'] = pickcolour(fill('normal'))
res[part]['focus'] = pickcolour(fill('focus'))
return res | def get_threadline_theming(self, thread, colourmode) | look up how to display a Threadline widget in search mode
for a given thread.
:param thread: Thread to theme Threadline for
:type thread: alot.db.thread.Thread
:param colourmode: colourmode to use, one of 1,16,256.
:type colourmode: int
This will return a dict mapping
:normal: to `urwid.AttrSpec`,
:focus: to `urwid.AttrSpec`,
:parts: to a list of strings identifying subwidgets
to be displayed in this order.
Moreover, for every part listed this will map 'part' to a dict mapping
:normal: to `urwid.AttrSpec`,
:focus: to `urwid.AttrSpec`,
:width: to a tuple indicating the width of the subpart.
This is either `('fit', min, max)` to force the widget
to be at least `min` and at most `max` characters wide,
or `('weight', n)` which makes it share remaining space
with other 'weight' parts.
:alignment: where to place the content if shorter than the widget.
This is either 'right', 'left' or 'center'. | 3.416273 | 2.885528 | 1.183933 |
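A sketch of reading the returned structure, assuming `theme` and an alot `thread` object are available; a part key such as 'subject' is only present if it appears in `parts`:

```python
theming = theme.get_threadline_theming(thread, 256)
print(theming['parts'])                  # display order of subwidgets
print(theming['normal'], theming['focus'])
subject = theming.get('subject', {})
print(subject.get('width'))              # ('fit', min, max) or ('weight', n)
```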
logging.debug("Got key (%s, %s)", keys, raw)
# work around: escape triggers this twice, with keys = raw = []
# the first time..
if not keys:
return
# let widgets handle input if key is virtual window resize keypress
# or we are in "passall" mode
elif 'window resize' in keys or self._passall:
return keys
# end "lockdown" mode if the right key was pressed
elif self._locked and keys[0] == self._unlock_key:
self._locked = False
self.mainloop.widget = self.root_widget
if callable(self._unlock_callback):
self._unlock_callback()
# otherwise interpret keybinding
else:
def clear(*_):
if self._alarm is not None:
self.mainloop.remove_alarm(self._alarm)
self.input_queue = []
async def _apply_fire(cmdline):
try:
await self.apply_commandline(cmdline)
except CommandParseError as e:
self.notify(str(e), priority='error')
def fire(_, cmdline):
clear()
logging.debug("cmdline: '%s'", cmdline)
if not self._locked:
loop = asyncio.get_event_loop()
loop.create_task(_apply_fire(cmdline))
# move keys are always passed
elif cmdline in ['move up', 'move down', 'move page up',
'move page down']:
return [cmdline[5:]]
key = keys[0]
if key and 'mouse' in key[0]:
key = key[0] + ' %i' % key[1]
self.input_queue.append(key)
keyseq = ' '.join(self.input_queue)
candidates = settings.get_mapped_input_keysequences(self.mode,
prefix=keyseq)
if keyseq in candidates:
# case: current input queue is a mapped keysequence
# get binding and interpret it if non-null
cmdline = settings.get_keybinding(self.mode, keyseq)
if cmdline:
if len(candidates) > 1:
timeout = float(settings.get('input_timeout'))
if self._alarm is not None:
self.mainloop.remove_alarm(self._alarm)
self._alarm = self.mainloop.set_alarm_in(
timeout, fire, cmdline)
else:
return fire(self.mainloop, cmdline)
elif not candidates:
# case: no sequence with prefix keyseq is mapped
# just clear the input queue
clear()
else:
# case: some sequences with proper prefix keyseq is mapped
timeout = float(settings.get('input_timeout'))
if self._alarm is not None:
self.mainloop.remove_alarm(self._alarm)
self._alarm = self.mainloop.set_alarm_in(timeout, clear)
# update statusbar
self.update() | def _input_filter(self, keys, raw) | handles keypresses.
This function gets triggered directly by :class:`urwid.MainLoop`
upon user input and is supposed to pass on its `keys` parameter
to let the root widget handle keys. We intercept the input here
to trigger custom commands as defined in our keybindings. | 4.973988 | 4.694045 | 1.059638 |
# remove initial spaces
cmdline = cmdline.lstrip()
# we pass Commands one by one to `self.apply_command`.
# To apply them strictly in sequence, even if a command triggers
# asynchronous code, each application is awaited before the next
# command string is interpreted.
def apply_this_command(cmdstring):
logging.debug('%s command string: "%s"', self.mode, str(cmdstring))
# translate cmdstring into :class:`Command`
cmd = commandfactory(cmdstring, self.mode)
# store cmdline for use with 'repeat' command
if cmd.repeatable:
self.last_commandline = cmdline
return self.apply_command(cmd)
try:
for c in split_commandline(cmdline):
await apply_this_command(c)
except Exception as e:
self._error_handler(e) | async def apply_commandline(self, cmdline) | interprets a command line string
i.e., splits it into separate command strings,
instanciates :class:`Commands <alot.commands.Command>`
accordingly and applies then in sequence.
:param cmdline: command line to interpret
:type cmdline: str | 9.557766 | 9.401036 | 1.016672 |
self.mainloop.widget = w
self._unlock_key = key
self._unlock_callback = afterwards
self._locked = True | def show_as_root_until_keypress(self, w, key, afterwards=None) | Replaces root widget by given :class:`urwid.Widget` and makes the UI
ignore all further commands apart from cursor movement.
If later on `key` is pressed, the old root widget is reset, callable
`afterwards` is called and normal behaviour is resumed. | 7.942673 | 8.421474 | 0.943145 |
history = history or []
fut = asyncio.get_event_loop().create_future()
oldroot = self.mainloop.widget
def select_or_cancel(text):
self.mainloop.widget = oldroot
self._passall = False
fut.set_result(text)
def cerror(e):
logging.error(e)
self.notify('completion error: %s' % str(e),
priority='error')
self.update()
prefix = prefix + settings.get('prompt_suffix')
# set up widgets
leftpart = urwid.Text(prefix, align='left')
editpart = CompleteEdit(completer, on_exit=select_or_cancel,
edit_text=text, history=history,
on_error=cerror)
for _ in range(tab): # hit some tabs
editpart.keypress((0,), 'tab')
# build promptwidget
both = urwid.Columns(
[
('fixed', len(prefix), leftpart),
('weight', 1, editpart),
])
att = settings.get_theming_attribute('global', 'prompt')
both = urwid.AttrMap(both, att)
# put promptwidget as overlay on main widget
overlay = urwid.Overlay(both, oldroot,
('fixed left', 0),
('fixed right', 0),
('fixed bottom', 1),
None)
self.mainloop.widget = overlay
self._passall = True
return fut | def prompt(self, prefix, text=u'', completer=None, tab=0, history=None) | prompt for text input.
This returns a :class:`asyncio.Future`, which will have a string value
:param prefix: text to print before the input field
:type prefix: str
:param text: initial content of the input field
:type text: str
:param completer: completion object to use
:type completer: :meth:`alot.completion.Completer`
:param tab: number of tabs to press initially
(to select completion results)
:type tab: int
:param history: history to be used for up/down keys
:type history: list of str
:rtype: asyncio.Future | 5.009398 | 5.182745 | 0.966553 |
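A sketch of awaiting the prompt from asynchronous command code; it assumes `ui` is the UI instance and that cancelling the prompt resolves the future to `None`:

```python
async def ask_for_query(ui):
    query = await ui.prompt('search', text='tag:unread', history=[])
    if query is None:        # assumed cancel value
        return
    ui.notify('searching for: %s' % query)
```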
exit_msg = None
try:
loop = asyncio.get_event_loop()
loop.stop()
except Exception as e:
logging.error('Could not stop loop: %s\nShutting down anyway..',
str(e)) | def exit() | shuts down user interface without cleaning up.
Use a :class:`alot.commands.globals.ExitCommand` for a clean shutdown. | 4.731138 | 4.852278 | 0.975034 |
self.mainloop.stop()
try:
yield
finally:
self.mainloop.start()
# make sure urwid renders its canvas at the correct size
self.mainloop.screen_size = None
self.mainloop.draw_screen() | def paused(self) | context manager that pauses the UI to allow running external commands.
If an exception occurs, the UI will be started before the exception is
re-raised. | 7.5495 | 6.624144 | 1.139694 |
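A sketch of running an external program under this context manager; the editor command and path are hypothetical and `ui` is the running UI instance:

```python
import subprocess

with ui.paused():
    subprocess.call(['vim', '/tmp/draft.eml'])
```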
# call pre_buffer_open hook
prehook = settings.get_hook('pre_buffer_open')
if prehook is not None:
prehook(ui=self, dbm=self.dbman, buf=buf)
if self.current_buffer is not None:
offset = settings.get('bufferclose_focus_offset') * -1
currentindex = self.buffers.index(self.current_buffer)
self.buffers.insert(currentindex + offset, buf)
else:
self.buffers.append(buf)
self.buffer_focus(buf)
# call post_buffer_open hook
posthook = settings.get_hook('post_buffer_open')
if posthook is not None:
posthook(ui=self, dbm=self.dbman, buf=buf) | def buffer_open(self, buf) | register and focus new :class:`~alot.buffers.Buffer`. | 3.00503 | 2.760538 | 1.088567 |
# call pre_buffer_close hook
prehook = settings.get_hook('pre_buffer_close')
if prehook is not None:
prehook(ui=self, dbm=self.dbman, buf=buf)
buffers = self.buffers
success = False
if buf not in buffers:
logging.error('tried to close unknown buffer: %s. \n\ni have:%s',
buf, self.buffers)
elif self.current_buffer == buf:
logging.info('closing current buffer %s', buf)
index = buffers.index(buf)
buffers.remove(buf)
offset = settings.get('bufferclose_focus_offset')
nextbuffer = buffers[(index + offset) % len(buffers)]
self.buffer_focus(nextbuffer, redraw)
buf.cleanup()
success = True
else:
buffers.remove(buf)
buf.cleanup()
success = True
# call post_buffer_closed hook
posthook = settings.get_hook('post_buffer_closed')
if posthook is not None:
posthook(ui=self, dbm=self.dbman, buf=buf, success=success) | def buffer_close(self, buf, redraw=True) | closes given :class:`~alot.buffers.Buffer`.
This removes it from the bufferlist and calls its cleanup() method. | 3.277684 | 3.152257 | 1.039789 |
# call pre_buffer_focus hook
prehook = settings.get_hook('pre_buffer_focus')
if prehook is not None:
prehook(ui=self, dbm=self.dbman, buf=buf)
success = False
if buf not in self.buffers:
logging.error('tried to focus unknown buffer')
else:
if self.current_buffer != buf:
self.current_buffer = buf
self.mode = buf.modename
if isinstance(self.current_buffer, BufferlistBuffer):
self.current_buffer.rebuild()
self.update()
success = True
# call post_buffer_focus hook
posthook = settings.get_hook('post_buffer_focus')
if posthook is not None:
posthook(ui=self, dbm=self.dbman, buf=buf, success=success) | def buffer_focus(self, buf, redraw=True) | focus given :class:`~alot.buffers.Buffer`. | 3.26822 | 3.103633 | 1.05303 |
if not startfrom:
startfrom = self.current_buffer
if 'get_focus' in dir(startfrom):
focus = startfrom.get_focus()
if isinstance(focus, tuple):
focus = focus[0]
if isinstance(focus, urwid.Widget):
return self.get_deep_focus(startfrom=focus)
return startfrom | def get_deep_focus(self, startfrom=None) | return the bottommost focussed widget of the widget tree | 2.834379 | 2.654734 | 1.06767 |
return [x for x in self.buffers if isinstance(x, t)] | def get_buffers_of_type(self, t) | returns currently open buffers for a given subclass of
:class:`~alot.buffers.Buffer`.
:param t: Buffer class
:type t: alot.buffers.Buffer
:rtype: list | 3.782524 | 6.487998 | 0.583003 |
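A sketch, assuming `ui` is the UI instance and that `SearchBuffer` (from alot.buffers) offers a `rebuild()` method as other entries here suggest:

```python
from alot.buffers import SearchBuffer

for buf in ui.get_buffers_of_type(SearchBuffer):
    buf.rebuild()
ui.update()
```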
newpile = self._notificationbar.widget_list
for l in messages:
if l in newpile:
newpile.remove(l)
if newpile:
self._notificationbar = urwid.Pile(newpile)
else:
self._notificationbar = None
self.update() | def clear_notify(self, messages) | Clears notification popups. Call this to ged rid of messages that don't
time out.
:param messages: The popups to remove. This should be exactly
what :meth:`notify` returned when creating the popup | 4.372223 | 5.047545 | 0.866208 |
choices = choices or {'y': 'yes', 'n': 'no'}
assert select is None or select in choices.values()
assert cancel is None or cancel in choices.values()
assert msg_position in ['left', 'above']
fut = asyncio.get_event_loop().create_future() # Create a returned future
oldroot = self.mainloop.widget
def select_or_cancel(text):
self.mainloop.widget = oldroot
self._passall = False
fut.set_result(text)
# set up widgets
msgpart = urwid.Text(message)
choicespart = ChoiceWidget(choices,
choices_to_return=choices_to_return,
callback=select_or_cancel, select=select,
cancel=cancel)
# build widget
if msg_position == 'left':
both = urwid.Columns(
[
('fixed', len(message), msgpart),
('weight', 1, choicespart),
], dividechars=1)
else: # above
both = urwid.Pile([msgpart, choicespart])
att = settings.get_theming_attribute('global', 'prompt')
both = urwid.AttrMap(both, att, att)
# put promptwidget as overlay on main widget
overlay = urwid.Overlay(both, oldroot,
('fixed left', 0),
('fixed right', 0),
('fixed bottom', 1),
None)
self.mainloop.widget = overlay
self._passall = True
return fut | def choice(self, message, choices=None, select=None, cancel=None,
msg_position='above', choices_to_return=None) | prompt user to make a choice.
:param message: string to display before list of choices
:type message: unicode
:param choices: dict of possible choices
:type choices: dict: keymap->choice (both str)
:param choices_to_return: dict of objects to return instead of the values
of the `choices` parameter, keyed by the same keymap
:type choices_to_return: dict: keymap->object (key is str, value is any obj)
:param select: choice to return if enter/return is hit. Ignored if set
to `None`.
:type select: str
:param cancel: choice to return if escape is hit. Ignored if set to
`None`.
:type cancel: str
:param msg_position: determines if `message` is above or left of the
prompt. Must be `above` or `left`.
:type msg_position: str
:rtype: asyncio.Future | 3.835784 | 3.92585 | 0.977058 |
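A confirmation sketch from asynchronous command code, assuming `ui` is the UI instance; the default yes/no `choices` dict is used:

```python
answer = await ui.choice('really quit?', select='yes', cancel='no',
                         msg_position='left')
if answer == 'yes':
    ui.exit()
```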
def build_line(msg, prio):
cols = urwid.Columns([urwid.Text(msg)])
att = settings.get_theming_attribute('global', 'notify_' + prio)
return urwid.AttrMap(cols, att)
msgs = [build_line(message, priority)]
if not self._notificationbar:
self._notificationbar = urwid.Pile(msgs)
else:
newpile = self._notificationbar.widget_list + msgs
self._notificationbar = urwid.Pile(newpile)
self.update()
def clear(*_):
self.clear_notify(msgs)
if block:
# put "cancel to continue" widget as overlay on main widget
txt = build_line('(escape continues)', priority)
overlay = urwid.Overlay(txt, self.root_widget,
('fixed left', 0),
('fixed right', 0),
('fixed bottom', 0),
None)
self.show_as_root_until_keypress(overlay, 'esc',
afterwards=clear)
else:
if timeout >= 0:
if timeout == 0:
timeout = settings.get('notify_timeout')
self.mainloop.set_alarm_in(timeout, clear)
return msgs[0] | def notify(self, message, priority='normal', timeout=0, block=False) | opens notification popup.
:param message: message to print
:type message: str
:param priority: priority string, used to format the popup: currently,
'normal' and 'error' are defined. If you use 'X' here,
the attribute 'global_notify_X' is used to format the
popup.
:type priority: str
:param timeout: seconds until message disappears. Defaults to the value
of 'notify_timeout' in the general config section.
A negative value means never time out.
:type timeout: int
:param block: this notification blocks until a keypress is made
:type block: bool
:returns: an urwid widget (this notification) that can be handed to
:meth:`clear_notify` for removal | 5.344664 | 4.908041 | 1.088961 |
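Two short sketches, assuming `ui` is the running UI instance:

```python
ui.notify('mail sent')                                       # times out after notify_timeout
ui.notify('sending failed', priority='error', block=True)    # stays until escape is pressed
```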
# get the main urwid.Frame widget
mainframe = self.root_widget.original_widget
# body
if self.current_buffer:
mainframe.set_body(self.current_buffer)
# footer
lines = []
if self._notificationbar: # .get_text()[0] != ' ':
lines.append(self._notificationbar)
if self._show_statusbar:
lines.append(self.build_statusbar())
if lines:
mainframe.set_footer(urwid.Pile(lines))
else:
mainframe.set_footer(None)
# force a screen redraw
if self.mainloop.screen.started and redraw:
self.mainloop.draw_screen() | def update(self, redraw=True) | redraw interface | 4.242778 | 4.153759 | 1.021431 |
info = {}
cb = self.current_buffer
btype = None
if cb is not None:
info = cb.get_info()
btype = cb.modename
info['buffer_no'] = self.buffers.index(cb)
info['buffer_type'] = btype
info['total_messages'] = self.dbman.count_messages('*')
info['pending_writes'] = len(self.dbman.writequeue)
info['input_queue'] = ' '.join(self.input_queue)
lefttxt = righttxt = u''
if cb is not None:
lefttxt, righttxt = settings.get(btype + '_statusbar', (u'', u''))
lefttxt = string_decode(lefttxt, 'UTF-8')
lefttxt = lefttxt.format(**info)
righttxt = string_decode(righttxt, 'UTF-8')
righttxt = righttxt.format(**info)
footerleft = urwid.Text(lefttxt, align='left')
pending_writes = len(self.dbman.writequeue)
if pending_writes > 0:
righttxt = ('|' * pending_writes) + ' ' + righttxt
footerright = urwid.Text(righttxt, align='right')
columns = urwid.Columns([
footerleft,
('pack', footerright)])
footer_att = settings.get_theming_attribute('global', 'footer')
return urwid.AttrMap(columns, footer_att) | def build_statusbar(self) | construct and return statusbar widget | 3.865729 | 3.826734 | 1.01019 |