file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
i18n.py
|
import re
from django.template import Node, Variable, VariableNode, _render_value_in_context
from django.template import TemplateSyntaxError, TokenParser, Library
from django.template import TOKEN_TEXT, TOKEN_VAR
from django.utils import translation
from django.utils.encoding import force_unicode
register = Library()
class GetAvailableLanguagesNode(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
from django.conf import settings
context[self.variable] = [(k, translation.ugettext(v)) for k, v in settings.LANGUAGES]
return ''
class GetCurrentLanguageNode(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
context[self.variable] = translation.get_language()
return ''
class GetCurrentLanguageBidiNode(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
context[self.variable] = translation.get_language_bidi()
return ''
class TranslateNode(Node):
def __init__(self, value, noop):
self.value = Variable(value)
self.noop = noop
def render(self, context):
value = self.value.resolve(context)
if self.noop:
return value
else:
return _render_value_in_context(translation.ugettext(value), context)
class BlockTranslateNode(Node):
def __init__(self, extra_context, singular, plural=None, countervar=None,
counter=None):
self.extra_context = extra_context
self.singular = singular
self.plural = plural
self.countervar = countervar
self.counter = counter
def render_token_list(self, tokens):
result = []
vars = []
for token in tokens:
if token.token_type == TOKEN_TEXT:
result.append(token.contents)
elif token.token_type == TOKEN_VAR:
result.append(u'%%(%s)s' % token.contents)
vars.append(token.contents)
return ''.join(result), vars
def render(self, context):
tmp_context = {}
for var, val in self.extra_context.items():
tmp_context[var] = val.render(context)
# Update() works like a push(), so corresponding context.pop() is at
# the end of function
context.update(tmp_context)
singular, vars = self.render_token_list(self.singular)
if self.plural and self.countervar and self.counter:
count = self.counter.resolve(context)
context[self.countervar] = count
plural, vars = self.render_token_list(self.plural)
result = translation.ungettext(singular, plural, count)
else:
result = translation.ugettext(singular)
# Escape all isolated '%' before substituting in the context.
result = re.sub(u'%(?!\()', u'%%', result)
data = dict([(v, _render_value_in_context(context[v], context)) for v in vars])
context.pop()
return result % data
def do_get_available_languages(parser, token):
"""
This will store a list of available languages
in the context.
Usage::
{% get_available_languages as languages %}
{% for language in languages %}
...
{% endfor %}
This will just pull the LANGUAGES setting from
your setting file (or the default settings) and
put it into the named variable.
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_available_languages' requires 'as variable' (got %r)" % args
return GetAvailableLanguagesNode(args[2])
def do_get_current_language(parser, token):
"""
This will store the current language in the context.
Usage::
{% get_current_language as language %}
This will fetch the currently active language and
put it's value into the ``language`` context
variable.
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_current_language' requires 'as variable' (got %r)" % args
return GetCurrentLanguageNode(args[2])
|
"""
This will store the current language layout in the context.
Usage::
{% get_current_language_bidi as bidi %}
This will fetch the currently active language's layout and
put it's value into the ``bidi`` context variable.
True indicates right-to-left layout, otherwise left-to-right
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_current_language_bidi' requires 'as variable' (got %r)" % args
return GetCurrentLanguageBidiNode(args[2])
def do_translate(parser, token):
"""
This will mark a string for translation and will
translate the string for the current language.
Usage::
{% trans "this is a test" %}
This will mark the string for translation so it will
be pulled out by mark-messages.py into the .po files
and will run the string through the translation engine.
There is a second form::
{% trans "this is a test" noop %}
This will only mark for translation, but will return
the string unchanged. Use it when you need to store
values into forms that should be translated later on.
You can use variables instead of constant strings
to translate stuff you marked somewhere else::
{% trans variable %}
This will just try to translate the contents of
the variable ``variable``. Make sure that the string
in there is something that is in the .po file.
"""
class TranslateParser(TokenParser):
def top(self):
value = self.value()
if self.more():
if self.tag() == 'noop':
noop = True
else:
raise TemplateSyntaxError, "only option for 'trans' is 'noop'"
else:
noop = False
return (value, noop)
value, noop = TranslateParser(token.contents).top()
return TranslateNode(value, noop)
def do_block_translate(parser, token):
"""
This will translate a block of text with parameters.
Usage::
{% blocktrans with foo|filter as bar and baz|filter as boo %}
This is {{ bar }} and {{ boo }}.
{% endblocktrans %}
Additionally, this supports pluralization::
{% blocktrans count var|length as count %}
There is {{ count }} object.
{% plural %}
There are {{ count }} objects.
{% endblocktrans %}
This is much like ngettext, only in template syntax.
"""
class BlockTranslateParser(TokenParser):
def top(self):
countervar = None
counter = None
extra_context = {}
while self.more():
tag = self.tag()
if tag == 'with' or tag == 'and':
value = self.value()
if self.tag() != 'as':
raise TemplateSyntaxError, "variable bindings in 'blocktrans' must be 'with value as variable'"
extra_context[self.tag()] = VariableNode(
parser.compile_filter(value))
elif tag == 'count':
counter = parser.compile_filter(self.value())
if self.tag() != 'as':
raise TemplateSyntaxError, "counter specification in 'blocktrans' must be 'count value as variable'"
countervar = self.tag()
else:
raise TemplateSyntaxError, "unknown subtag %s for 'blocktrans' found" % tag
return (countervar, counter, extra_context)
countervar, counter, extra_context = BlockTranslateParser(token.contents).top()
singular = []
plural = []
while parser.tokens:
token = parser.next_token()
if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
singular.append(token)
else:
break
if countervar and counter:
if token.contents.strip() != 'plural':
raise TemplateSyntaxError, "'blocktrans' doesn't allow other block tags inside it"
while parser.tokens:
token = parser.next_token()
if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
plural.append(token)
else:
break
if token.contents.strip() != 'endblocktrans':
raise TemplateSyntaxError, "'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents
return BlockTranslateNode(extra_context, singular, plural, countervar,
counter)
register.tag('get_available_languages', do_get_available_languages)
register.tag('get_current_language', do_get_current_language)
register.tag('get_current_language_bidi', do_get_current_language_bidi)
register.tag('trans', do_translate)
register.tag('blocktrans', do_block_translate)
|
def do_get_current_language_bidi(parser, token):
|
random_line_split
|
i18n.py
|
import re
from django.template import Node, Variable, VariableNode, _render_value_in_context
from django.template import TemplateSyntaxError, TokenParser, Library
from django.template import TOKEN_TEXT, TOKEN_VAR
from django.utils import translation
from django.utils.encoding import force_unicode
register = Library()
class GetAvailableLanguagesNode(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
from django.conf import settings
context[self.variable] = [(k, translation.ugettext(v)) for k, v in settings.LANGUAGES]
return ''
class GetCurrentLanguageNode(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
context[self.variable] = translation.get_language()
return ''
class GetCurrentLanguageBidiNode(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
context[self.variable] = translation.get_language_bidi()
return ''
class TranslateNode(Node):
|
class BlockTranslateNode(Node):
def __init__(self, extra_context, singular, plural=None, countervar=None,
counter=None):
self.extra_context = extra_context
self.singular = singular
self.plural = plural
self.countervar = countervar
self.counter = counter
def render_token_list(self, tokens):
result = []
vars = []
for token in tokens:
if token.token_type == TOKEN_TEXT:
result.append(token.contents)
elif token.token_type == TOKEN_VAR:
result.append(u'%%(%s)s' % token.contents)
vars.append(token.contents)
return ''.join(result), vars
def render(self, context):
tmp_context = {}
for var, val in self.extra_context.items():
tmp_context[var] = val.render(context)
# Update() works like a push(), so corresponding context.pop() is at
# the end of function
context.update(tmp_context)
singular, vars = self.render_token_list(self.singular)
if self.plural and self.countervar and self.counter:
count = self.counter.resolve(context)
context[self.countervar] = count
plural, vars = self.render_token_list(self.plural)
result = translation.ungettext(singular, plural, count)
else:
result = translation.ugettext(singular)
# Escape all isolated '%' before substituting in the context.
result = re.sub(u'%(?!\()', u'%%', result)
data = dict([(v, _render_value_in_context(context[v], context)) for v in vars])
context.pop()
return result % data
def do_get_available_languages(parser, token):
"""
This will store a list of available languages
in the context.
Usage::
{% get_available_languages as languages %}
{% for language in languages %}
...
{% endfor %}
This will just pull the LANGUAGES setting from
your setting file (or the default settings) and
put it into the named variable.
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_available_languages' requires 'as variable' (got %r)" % args
return GetAvailableLanguagesNode(args[2])
def do_get_current_language(parser, token):
"""
This will store the current language in the context.
Usage::
{% get_current_language as language %}
This will fetch the currently active language and
put it's value into the ``language`` context
variable.
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_current_language' requires 'as variable' (got %r)" % args
return GetCurrentLanguageNode(args[2])
def do_get_current_language_bidi(parser, token):
"""
This will store the current language layout in the context.
Usage::
{% get_current_language_bidi as bidi %}
This will fetch the currently active language's layout and
put it's value into the ``bidi`` context variable.
True indicates right-to-left layout, otherwise left-to-right
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_current_language_bidi' requires 'as variable' (got %r)" % args
return GetCurrentLanguageBidiNode(args[2])
def do_translate(parser, token):
"""
This will mark a string for translation and will
translate the string for the current language.
Usage::
{% trans "this is a test" %}
This will mark the string for translation so it will
be pulled out by mark-messages.py into the .po files
and will run the string through the translation engine.
There is a second form::
{% trans "this is a test" noop %}
This will only mark for translation, but will return
the string unchanged. Use it when you need to store
values into forms that should be translated later on.
You can use variables instead of constant strings
to translate stuff you marked somewhere else::
{% trans variable %}
This will just try to translate the contents of
the variable ``variable``. Make sure that the string
in there is something that is in the .po file.
"""
class TranslateParser(TokenParser):
def top(self):
value = self.value()
if self.more():
if self.tag() == 'noop':
noop = True
else:
raise TemplateSyntaxError, "only option for 'trans' is 'noop'"
else:
noop = False
return (value, noop)
value, noop = TranslateParser(token.contents).top()
return TranslateNode(value, noop)
def do_block_translate(parser, token):
"""
This will translate a block of text with parameters.
Usage::
{% blocktrans with foo|filter as bar and baz|filter as boo %}
This is {{ bar }} and {{ boo }}.
{% endblocktrans %}
Additionally, this supports pluralization::
{% blocktrans count var|length as count %}
There is {{ count }} object.
{% plural %}
There are {{ count }} objects.
{% endblocktrans %}
This is much like ngettext, only in template syntax.
"""
class BlockTranslateParser(TokenParser):
def top(self):
countervar = None
counter = None
extra_context = {}
while self.more():
tag = self.tag()
if tag == 'with' or tag == 'and':
value = self.value()
if self.tag() != 'as':
raise TemplateSyntaxError, "variable bindings in 'blocktrans' must be 'with value as variable'"
extra_context[self.tag()] = VariableNode(
parser.compile_filter(value))
elif tag == 'count':
counter = parser.compile_filter(self.value())
if self.tag() != 'as':
raise TemplateSyntaxError, "counter specification in 'blocktrans' must be 'count value as variable'"
countervar = self.tag()
else:
raise TemplateSyntaxError, "unknown subtag %s for 'blocktrans' found" % tag
return (countervar, counter, extra_context)
countervar, counter, extra_context = BlockTranslateParser(token.contents).top()
singular = []
plural = []
while parser.tokens:
token = parser.next_token()
if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
singular.append(token)
else:
break
if countervar and counter:
if token.contents.strip() != 'plural':
raise TemplateSyntaxError, "'blocktrans' doesn't allow other block tags inside it"
while parser.tokens:
token = parser.next_token()
if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
plural.append(token)
else:
break
if token.contents.strip() != 'endblocktrans':
raise TemplateSyntaxError, "'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents
return BlockTranslateNode(extra_context, singular, plural, countervar,
counter)
register.tag('get_available_languages', do_get_available_languages)
register.tag('get_current_language', do_get_current_language)
register.tag('get_current_language_bidi', do_get_current_language_bidi)
register.tag('trans', do_translate)
register.tag('blocktrans', do_block_translate)
|
def __init__(self, value, noop):
self.value = Variable(value)
self.noop = noop
def render(self, context):
value = self.value.resolve(context)
if self.noop:
return value
else:
return _render_value_in_context(translation.ugettext(value), context)
|
identifier_body
|
i18n.py
|
import re
from django.template import Node, Variable, VariableNode, _render_value_in_context
from django.template import TemplateSyntaxError, TokenParser, Library
from django.template import TOKEN_TEXT, TOKEN_VAR
from django.utils import translation
from django.utils.encoding import force_unicode
register = Library()
class GetAvailableLanguagesNode(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
from django.conf import settings
context[self.variable] = [(k, translation.ugettext(v)) for k, v in settings.LANGUAGES]
return ''
class
|
(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
context[self.variable] = translation.get_language()
return ''
class GetCurrentLanguageBidiNode(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
context[self.variable] = translation.get_language_bidi()
return ''
class TranslateNode(Node):
def __init__(self, value, noop):
self.value = Variable(value)
self.noop = noop
def render(self, context):
value = self.value.resolve(context)
if self.noop:
return value
else:
return _render_value_in_context(translation.ugettext(value), context)
class BlockTranslateNode(Node):
def __init__(self, extra_context, singular, plural=None, countervar=None,
counter=None):
self.extra_context = extra_context
self.singular = singular
self.plural = plural
self.countervar = countervar
self.counter = counter
def render_token_list(self, tokens):
result = []
vars = []
for token in tokens:
if token.token_type == TOKEN_TEXT:
result.append(token.contents)
elif token.token_type == TOKEN_VAR:
result.append(u'%%(%s)s' % token.contents)
vars.append(token.contents)
return ''.join(result), vars
def render(self, context):
tmp_context = {}
for var, val in self.extra_context.items():
tmp_context[var] = val.render(context)
# Update() works like a push(), so corresponding context.pop() is at
# the end of function
context.update(tmp_context)
singular, vars = self.render_token_list(self.singular)
if self.plural and self.countervar and self.counter:
count = self.counter.resolve(context)
context[self.countervar] = count
plural, vars = self.render_token_list(self.plural)
result = translation.ungettext(singular, plural, count)
else:
result = translation.ugettext(singular)
# Escape all isolated '%' before substituting in the context.
result = re.sub(u'%(?!\()', u'%%', result)
data = dict([(v, _render_value_in_context(context[v], context)) for v in vars])
context.pop()
return result % data
def do_get_available_languages(parser, token):
"""
This will store a list of available languages
in the context.
Usage::
{% get_available_languages as languages %}
{% for language in languages %}
...
{% endfor %}
This will just pull the LANGUAGES setting from
your setting file (or the default settings) and
put it into the named variable.
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_available_languages' requires 'as variable' (got %r)" % args
return GetAvailableLanguagesNode(args[2])
def do_get_current_language(parser, token):
"""
This will store the current language in the context.
Usage::
{% get_current_language as language %}
This will fetch the currently active language and
put it's value into the ``language`` context
variable.
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_current_language' requires 'as variable' (got %r)" % args
return GetCurrentLanguageNode(args[2])
def do_get_current_language_bidi(parser, token):
"""
This will store the current language layout in the context.
Usage::
{% get_current_language_bidi as bidi %}
This will fetch the currently active language's layout and
put it's value into the ``bidi`` context variable.
True indicates right-to-left layout, otherwise left-to-right
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_current_language_bidi' requires 'as variable' (got %r)" % args
return GetCurrentLanguageBidiNode(args[2])
def do_translate(parser, token):
"""
This will mark a string for translation and will
translate the string for the current language.
Usage::
{% trans "this is a test" %}
This will mark the string for translation so it will
be pulled out by mark-messages.py into the .po files
and will run the string through the translation engine.
There is a second form::
{% trans "this is a test" noop %}
This will only mark for translation, but will return
the string unchanged. Use it when you need to store
values into forms that should be translated later on.
You can use variables instead of constant strings
to translate stuff you marked somewhere else::
{% trans variable %}
This will just try to translate the contents of
the variable ``variable``. Make sure that the string
in there is something that is in the .po file.
"""
class TranslateParser(TokenParser):
def top(self):
value = self.value()
if self.more():
if self.tag() == 'noop':
noop = True
else:
raise TemplateSyntaxError, "only option for 'trans' is 'noop'"
else:
noop = False
return (value, noop)
value, noop = TranslateParser(token.contents).top()
return TranslateNode(value, noop)
def do_block_translate(parser, token):
"""
This will translate a block of text with parameters.
Usage::
{% blocktrans with foo|filter as bar and baz|filter as boo %}
This is {{ bar }} and {{ boo }}.
{% endblocktrans %}
Additionally, this supports pluralization::
{% blocktrans count var|length as count %}
There is {{ count }} object.
{% plural %}
There are {{ count }} objects.
{% endblocktrans %}
This is much like ngettext, only in template syntax.
"""
class BlockTranslateParser(TokenParser):
def top(self):
countervar = None
counter = None
extra_context = {}
while self.more():
tag = self.tag()
if tag == 'with' or tag == 'and':
value = self.value()
if self.tag() != 'as':
raise TemplateSyntaxError, "variable bindings in 'blocktrans' must be 'with value as variable'"
extra_context[self.tag()] = VariableNode(
parser.compile_filter(value))
elif tag == 'count':
counter = parser.compile_filter(self.value())
if self.tag() != 'as':
raise TemplateSyntaxError, "counter specification in 'blocktrans' must be 'count value as variable'"
countervar = self.tag()
else:
raise TemplateSyntaxError, "unknown subtag %s for 'blocktrans' found" % tag
return (countervar, counter, extra_context)
countervar, counter, extra_context = BlockTranslateParser(token.contents).top()
singular = []
plural = []
while parser.tokens:
token = parser.next_token()
if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
singular.append(token)
else:
break
if countervar and counter:
if token.contents.strip() != 'plural':
raise TemplateSyntaxError, "'blocktrans' doesn't allow other block tags inside it"
while parser.tokens:
token = parser.next_token()
if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
plural.append(token)
else:
break
if token.contents.strip() != 'endblocktrans':
raise TemplateSyntaxError, "'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents
return BlockTranslateNode(extra_context, singular, plural, countervar,
counter)
register.tag('get_available_languages', do_get_available_languages)
register.tag('get_current_language', do_get_current_language)
register.tag('get_current_language_bidi', do_get_current_language_bidi)
register.tag('trans', do_translate)
register.tag('blocktrans', do_block_translate)
|
GetCurrentLanguageNode
|
identifier_name
|
i18n.py
|
import re
from django.template import Node, Variable, VariableNode, _render_value_in_context
from django.template import TemplateSyntaxError, TokenParser, Library
from django.template import TOKEN_TEXT, TOKEN_VAR
from django.utils import translation
from django.utils.encoding import force_unicode
register = Library()
class GetAvailableLanguagesNode(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
from django.conf import settings
context[self.variable] = [(k, translation.ugettext(v)) for k, v in settings.LANGUAGES]
return ''
class GetCurrentLanguageNode(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
context[self.variable] = translation.get_language()
return ''
class GetCurrentLanguageBidiNode(Node):
def __init__(self, variable):
self.variable = variable
def render(self, context):
context[self.variable] = translation.get_language_bidi()
return ''
class TranslateNode(Node):
def __init__(self, value, noop):
self.value = Variable(value)
self.noop = noop
def render(self, context):
value = self.value.resolve(context)
if self.noop:
return value
else:
return _render_value_in_context(translation.ugettext(value), context)
class BlockTranslateNode(Node):
def __init__(self, extra_context, singular, plural=None, countervar=None,
counter=None):
self.extra_context = extra_context
self.singular = singular
self.plural = plural
self.countervar = countervar
self.counter = counter
def render_token_list(self, tokens):
result = []
vars = []
for token in tokens:
|
return ''.join(result), vars
def render(self, context):
tmp_context = {}
for var, val in self.extra_context.items():
tmp_context[var] = val.render(context)
# Update() works like a push(), so corresponding context.pop() is at
# the end of function
context.update(tmp_context)
singular, vars = self.render_token_list(self.singular)
if self.plural and self.countervar and self.counter:
count = self.counter.resolve(context)
context[self.countervar] = count
plural, vars = self.render_token_list(self.plural)
result = translation.ungettext(singular, plural, count)
else:
result = translation.ugettext(singular)
# Escape all isolated '%' before substituting in the context.
result = re.sub(u'%(?!\()', u'%%', result)
data = dict([(v, _render_value_in_context(context[v], context)) for v in vars])
context.pop()
return result % data
def do_get_available_languages(parser, token):
"""
This will store a list of available languages
in the context.
Usage::
{% get_available_languages as languages %}
{% for language in languages %}
...
{% endfor %}
This will just pull the LANGUAGES setting from
your setting file (or the default settings) and
put it into the named variable.
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_available_languages' requires 'as variable' (got %r)" % args
return GetAvailableLanguagesNode(args[2])
def do_get_current_language(parser, token):
"""
This will store the current language in the context.
Usage::
{% get_current_language as language %}
This will fetch the currently active language and
put it's value into the ``language`` context
variable.
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_current_language' requires 'as variable' (got %r)" % args
return GetCurrentLanguageNode(args[2])
def do_get_current_language_bidi(parser, token):
"""
This will store the current language layout in the context.
Usage::
{% get_current_language_bidi as bidi %}
This will fetch the currently active language's layout and
put it's value into the ``bidi`` context variable.
True indicates right-to-left layout, otherwise left-to-right
"""
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError, "'get_current_language_bidi' requires 'as variable' (got %r)" % args
return GetCurrentLanguageBidiNode(args[2])
def do_translate(parser, token):
"""
This will mark a string for translation and will
translate the string for the current language.
Usage::
{% trans "this is a test" %}
This will mark the string for translation so it will
be pulled out by mark-messages.py into the .po files
and will run the string through the translation engine.
There is a second form::
{% trans "this is a test" noop %}
This will only mark for translation, but will return
the string unchanged. Use it when you need to store
values into forms that should be translated later on.
You can use variables instead of constant strings
to translate stuff you marked somewhere else::
{% trans variable %}
This will just try to translate the contents of
the variable ``variable``. Make sure that the string
in there is something that is in the .po file.
"""
class TranslateParser(TokenParser):
def top(self):
value = self.value()
if self.more():
if self.tag() == 'noop':
noop = True
else:
raise TemplateSyntaxError, "only option for 'trans' is 'noop'"
else:
noop = False
return (value, noop)
value, noop = TranslateParser(token.contents).top()
return TranslateNode(value, noop)
def do_block_translate(parser, token):
"""
This will translate a block of text with parameters.
Usage::
{% blocktrans with foo|filter as bar and baz|filter as boo %}
This is {{ bar }} and {{ boo }}.
{% endblocktrans %}
Additionally, this supports pluralization::
{% blocktrans count var|length as count %}
There is {{ count }} object.
{% plural %}
There are {{ count }} objects.
{% endblocktrans %}
This is much like ngettext, only in template syntax.
"""
class BlockTranslateParser(TokenParser):
def top(self):
countervar = None
counter = None
extra_context = {}
while self.more():
tag = self.tag()
if tag == 'with' or tag == 'and':
value = self.value()
if self.tag() != 'as':
raise TemplateSyntaxError, "variable bindings in 'blocktrans' must be 'with value as variable'"
extra_context[self.tag()] = VariableNode(
parser.compile_filter(value))
elif tag == 'count':
counter = parser.compile_filter(self.value())
if self.tag() != 'as':
raise TemplateSyntaxError, "counter specification in 'blocktrans' must be 'count value as variable'"
countervar = self.tag()
else:
raise TemplateSyntaxError, "unknown subtag %s for 'blocktrans' found" % tag
return (countervar, counter, extra_context)
countervar, counter, extra_context = BlockTranslateParser(token.contents).top()
singular = []
plural = []
while parser.tokens:
token = parser.next_token()
if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
singular.append(token)
else:
break
if countervar and counter:
if token.contents.strip() != 'plural':
raise TemplateSyntaxError, "'blocktrans' doesn't allow other block tags inside it"
while parser.tokens:
token = parser.next_token()
if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
plural.append(token)
else:
break
if token.contents.strip() != 'endblocktrans':
raise TemplateSyntaxError, "'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents
return BlockTranslateNode(extra_context, singular, plural, countervar,
counter)
register.tag('get_available_languages', do_get_available_languages)
register.tag('get_current_language', do_get_current_language)
register.tag('get_current_language_bidi', do_get_current_language_bidi)
register.tag('trans', do_translate)
register.tag('blocktrans', do_block_translate)
|
if token.token_type == TOKEN_TEXT:
result.append(token.contents)
elif token.token_type == TOKEN_VAR:
result.append(u'%%(%s)s' % token.contents)
vars.append(token.contents)
|
conditional_block
|
mut_mut.rs
|
use clippy_utils::diagnostics::span_lint;
use clippy_utils::higher;
use rustc_hir as hir;
use rustc_hir::intravisit;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::map::Map;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// ### What it does
/// Checks for instances of `mut mut` references.
///
/// ### Why is this bad?
/// Multiple `mut`s don't add anything meaningful to the
/// source. This is either a copy'n'paste error, or it shows a fundamental
/// misunderstanding of references.
///
/// ### Example
/// ```rust
/// # let mut y = 1;
/// let x = &mut &mut y;
/// ```
pub MUT_MUT,
pedantic,
"usage of double-mut refs, e.g., `&mut &mut ...`"
}
declare_lint_pass!(MutMut => [MUT_MUT]);
impl<'tcx> LateLintPass<'tcx> for MutMut {
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) {
intravisit::walk_block(&mut MutVisitor { cx }, block);
}
fn check_ty(&mut self, cx: &LateContext<'tcx>, ty: &'tcx hir::Ty<'_>) {
use rustc_hir::intravisit::Visitor;
MutVisitor { cx }.visit_ty(ty);
}
}
pub struct MutVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for MutVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
if in_external_macro(self.cx.sess(), expr.span) {
return;
}
if let Some(higher::ForLoop { arg, body, .. }) = higher::ForLoop::hir(expr) {
// A `for` loop lowers to:
// ```rust
// match ::std::iter::Iterator::next(&mut iter) {
// // ^^^^
// ```
// Let's ignore the generated code.
intravisit::walk_expr(self, arg);
intravisit::walk_expr(self, body);
} else if let hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e) = expr.kind {
if let hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, _) = e.kind {
span_lint(
self.cx,
MUT_MUT,
expr.span,
"generally you want to avoid `&mut &mut _` if possible",
);
} else if let ty::Ref(_, _, hir::Mutability::Mut) = self.cx.typeck_results().expr_ty(e).kind() {
span_lint(
self.cx,
MUT_MUT,
expr.span,
"this expression mutably borrows a mutable reference. Consider reborrowing",
);
}
}
}
fn visit_ty(&mut self, ty: &'tcx hir::Ty<'_>)
|
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::None
}
}
|
{
if in_external_macro(self.cx.sess(), ty.span) {
return;
}
if let hir::TyKind::Rptr(
_,
hir::MutTy {
ty: pty,
mutbl: hir::Mutability::Mut,
},
) = ty.kind
{
if let hir::TyKind::Rptr(
_,
hir::MutTy {
mutbl: hir::Mutability::Mut,
..
},
) = pty.kind
{
span_lint(
self.cx,
MUT_MUT,
ty.span,
"generally you want to avoid `&mut &mut _` if possible",
);
}
}
intravisit::walk_ty(self, ty);
}
|
identifier_body
|
mut_mut.rs
|
use clippy_utils::diagnostics::span_lint;
use clippy_utils::higher;
use rustc_hir as hir;
use rustc_hir::intravisit;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::map::Map;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// ### What it does
/// Checks for instances of `mut mut` references.
///
/// ### Why is this bad?
/// Multiple `mut`s don't add anything meaningful to the
/// source. This is either a copy'n'paste error, or it shows a fundamental
/// misunderstanding of references.
///
/// ### Example
/// ```rust
/// # let mut y = 1;
/// let x = &mut &mut y;
/// ```
pub MUT_MUT,
pedantic,
"usage of double-mut refs, e.g., `&mut &mut ...`"
}
declare_lint_pass!(MutMut => [MUT_MUT]);
impl<'tcx> LateLintPass<'tcx> for MutMut {
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) {
intravisit::walk_block(&mut MutVisitor { cx }, block);
}
fn check_ty(&mut self, cx: &LateContext<'tcx>, ty: &'tcx hir::Ty<'_>) {
use rustc_hir::intravisit::Visitor;
MutVisitor { cx }.visit_ty(ty);
}
}
pub struct MutVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for MutVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
if in_external_macro(self.cx.sess(), expr.span) {
return;
}
if let Some(higher::ForLoop { arg, body, .. }) = higher::ForLoop::hir(expr) {
// A `for` loop lowers to:
// ```rust
// match ::std::iter::Iterator::next(&mut iter) {
// // ^^^^
// ```
// Let's ignore the generated code.
intravisit::walk_expr(self, arg);
intravisit::walk_expr(self, body);
} else if let hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e) = expr.kind {
if let hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, _) = e.kind {
span_lint(
self.cx,
MUT_MUT,
expr.span,
"generally you want to avoid `&mut &mut _` if possible",
);
} else if let ty::Ref(_, _, hir::Mutability::Mut) = self.cx.typeck_results().expr_ty(e).kind() {
span_lint(
self.cx,
MUT_MUT,
expr.span,
"this expression mutably borrows a mutable reference. Consider reborrowing",
);
}
}
}
fn visit_ty(&mut self, ty: &'tcx hir::Ty<'_>) {
if in_external_macro(self.cx.sess(), ty.span) {
return;
}
if let hir::TyKind::Rptr(
_,
hir::MutTy {
ty: pty,
mutbl: hir::Mutability::Mut,
},
) = ty.kind
{
if let hir::TyKind::Rptr(
_,
hir::MutTy {
mutbl: hir::Mutability::Mut,
..
},
) = pty.kind
{
span_lint(
self.cx,
MUT_MUT,
ty.span,
"generally you want to avoid `&mut &mut _` if possible",
);
}
|
}
intravisit::walk_ty(self, ty);
}
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::None
}
}
|
random_line_split
|
|
mut_mut.rs
|
use clippy_utils::diagnostics::span_lint;
use clippy_utils::higher;
use rustc_hir as hir;
use rustc_hir::intravisit;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::map::Map;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// ### What it does
/// Checks for instances of `mut mut` references.
///
/// ### Why is this bad?
/// Multiple `mut`s don't add anything meaningful to the
/// source. This is either a copy'n'paste error, or it shows a fundamental
/// misunderstanding of references.
///
/// ### Example
/// ```rust
/// # let mut y = 1;
/// let x = &mut &mut y;
/// ```
pub MUT_MUT,
pedantic,
"usage of double-mut refs, e.g., `&mut &mut ...`"
}
declare_lint_pass!(MutMut => [MUT_MUT]);
impl<'tcx> LateLintPass<'tcx> for MutMut {
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) {
intravisit::walk_block(&mut MutVisitor { cx }, block);
}
fn check_ty(&mut self, cx: &LateContext<'tcx>, ty: &'tcx hir::Ty<'_>) {
use rustc_hir::intravisit::Visitor;
MutVisitor { cx }.visit_ty(ty);
}
}
pub struct
|
<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for MutVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
if in_external_macro(self.cx.sess(), expr.span) {
return;
}
if let Some(higher::ForLoop { arg, body, .. }) = higher::ForLoop::hir(expr) {
// A `for` loop lowers to:
// ```rust
// match ::std::iter::Iterator::next(&mut iter) {
// // ^^^^
// ```
// Let's ignore the generated code.
intravisit::walk_expr(self, arg);
intravisit::walk_expr(self, body);
} else if let hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e) = expr.kind {
if let hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, _) = e.kind {
span_lint(
self.cx,
MUT_MUT,
expr.span,
"generally you want to avoid `&mut &mut _` if possible",
);
} else if let ty::Ref(_, _, hir::Mutability::Mut) = self.cx.typeck_results().expr_ty(e).kind() {
span_lint(
self.cx,
MUT_MUT,
expr.span,
"this expression mutably borrows a mutable reference. Consider reborrowing",
);
}
}
}
fn visit_ty(&mut self, ty: &'tcx hir::Ty<'_>) {
if in_external_macro(self.cx.sess(), ty.span) {
return;
}
if let hir::TyKind::Rptr(
_,
hir::MutTy {
ty: pty,
mutbl: hir::Mutability::Mut,
},
) = ty.kind
{
if let hir::TyKind::Rptr(
_,
hir::MutTy {
mutbl: hir::Mutability::Mut,
..
},
) = pty.kind
{
span_lint(
self.cx,
MUT_MUT,
ty.span,
"generally you want to avoid `&mut &mut _` if possible",
);
}
}
intravisit::walk_ty(self, ty);
}
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::None
}
}
|
MutVisitor
|
identifier_name
|
event_details_converter.py
|
from collections import defaultdict
from typing import cast, Dict, List, NewType
from backend.common.consts.api_version import ApiMajorVersion
from backend.common.models.event_details import EventDetails
from backend.common.models.keys import TeamKey
from backend.common.queries.dict_converters.converter_base import ConverterBase
EventDetailsDict = NewType("EventDetailsDict", Dict)
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
ApiMajorVersion.API_V3: 3,
}
@classmethod
def _convert_list(cls, model_list: List[EventDetails], version: ApiMajorVersion):
CONVERTERS = {
3: cls.eventsDetailsConverter_v3,
}
return CONVERTERS[version](model_list)
@classmethod
def eventsDetailsConverter_v3(cls, event_details: List[EventDetails]):
return list(map(cls.eventDetailsConverter_v3, event_details))
@classmethod
def eventDetailsConverter_v3(cls, event_details: EventDetails) -> EventDetailsDict:
normalized_oprs = defaultdict(dict)
if event_details and event_details.matchstats:
for stat_type, stats in event_details.matchstats.items():
if stat_type in {"oprs", "dprs", "ccwms"}:
|
rankings = {}
if event_details:
rankings = event_details.renderable_rankings
else:
rankings = {
"extra_stats_info": [],
"rankings": [],
"sort_order_info": None,
}
event_details_dict = {
"alliances": event_details.alliance_selections if event_details else [],
"district_points": event_details.district_points if event_details else {},
"insights": event_details.insights
if event_details
else {"qual": {}, "playoff": {}},
"oprs": normalized_oprs if normalized_oprs else {}, # OPRs, DPRs, CCWMs
"predictions": event_details.predictions if event_details else {},
"rankings": rankings,
}
return EventDetailsDict(event_details_dict)
|
for team, value in cast(Dict[TeamKey, float], stats).items():
if "frc" not in team: # Normalize output
team = "frc{}".format(team)
normalized_oprs[stat_type][team] = value
|
conditional_block
|
event_details_converter.py
|
from collections import defaultdict
from typing import cast, Dict, List, NewType
from backend.common.consts.api_version import ApiMajorVersion
from backend.common.models.event_details import EventDetails
from backend.common.models.keys import TeamKey
from backend.common.queries.dict_converters.converter_base import ConverterBase
EventDetailsDict = NewType("EventDetailsDict", Dict)
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
ApiMajorVersion.API_V3: 3,
}
@classmethod
def
|
(cls, model_list: List[EventDetails], version: ApiMajorVersion):
CONVERTERS = {
3: cls.eventsDetailsConverter_v3,
}
return CONVERTERS[version](model_list)
@classmethod
def eventsDetailsConverter_v3(cls, event_details: List[EventDetails]):
return list(map(cls.eventDetailsConverter_v3, event_details))
@classmethod
def eventDetailsConverter_v3(cls, event_details: EventDetails) -> EventDetailsDict:
normalized_oprs = defaultdict(dict)
if event_details and event_details.matchstats:
for stat_type, stats in event_details.matchstats.items():
if stat_type in {"oprs", "dprs", "ccwms"}:
for team, value in cast(Dict[TeamKey, float], stats).items():
if "frc" not in team: # Normalize output
team = "frc{}".format(team)
normalized_oprs[stat_type][team] = value
rankings = {}
if event_details:
rankings = event_details.renderable_rankings
else:
rankings = {
"extra_stats_info": [],
"rankings": [],
"sort_order_info": None,
}
event_details_dict = {
"alliances": event_details.alliance_selections if event_details else [],
"district_points": event_details.district_points if event_details else {},
"insights": event_details.insights
if event_details
else {"qual": {}, "playoff": {}},
"oprs": normalized_oprs if normalized_oprs else {}, # OPRs, DPRs, CCWMs
"predictions": event_details.predictions if event_details else {},
"rankings": rankings,
}
return EventDetailsDict(event_details_dict)
|
_convert_list
|
identifier_name
|
event_details_converter.py
|
from collections import defaultdict
from typing import cast, Dict, List, NewType
from backend.common.consts.api_version import ApiMajorVersion
from backend.common.models.event_details import EventDetails
from backend.common.models.keys import TeamKey
from backend.common.queries.dict_converters.converter_base import ConverterBase
EventDetailsDict = NewType("EventDetailsDict", Dict)
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
ApiMajorVersion.API_V3: 3,
}
@classmethod
def _convert_list(cls, model_list: List[EventDetails], version: ApiMajorVersion):
CONVERTERS = {
3: cls.eventsDetailsConverter_v3,
}
return CONVERTERS[version](model_list)
@classmethod
def eventsDetailsConverter_v3(cls, event_details: List[EventDetails]):
return list(map(cls.eventDetailsConverter_v3, event_details))
@classmethod
def eventDetailsConverter_v3(cls, event_details: EventDetails) -> EventDetailsDict:
normalized_oprs = defaultdict(dict)
if event_details and event_details.matchstats:
for stat_type, stats in event_details.matchstats.items():
if stat_type in {"oprs", "dprs", "ccwms"}:
for team, value in cast(Dict[TeamKey, float], stats).items():
if "frc" not in team: # Normalize output
team = "frc{}".format(team)
normalized_oprs[stat_type][team] = value
rankings = {}
if event_details:
rankings = event_details.renderable_rankings
else:
rankings = {
"extra_stats_info": [],
"rankings": [],
|
"alliances": event_details.alliance_selections if event_details else [],
"district_points": event_details.district_points if event_details else {},
"insights": event_details.insights
if event_details
else {"qual": {}, "playoff": {}},
"oprs": normalized_oprs if normalized_oprs else {}, # OPRs, DPRs, CCWMs
"predictions": event_details.predictions if event_details else {},
"rankings": rankings,
}
return EventDetailsDict(event_details_dict)
|
"sort_order_info": None,
}
event_details_dict = {
|
random_line_split
|
event_details_converter.py
|
from collections import defaultdict
from typing import cast, Dict, List, NewType
from backend.common.consts.api_version import ApiMajorVersion
from backend.common.models.event_details import EventDetails
from backend.common.models.keys import TeamKey
from backend.common.queries.dict_converters.converter_base import ConverterBase
EventDetailsDict = NewType("EventDetailsDict", Dict)
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
ApiMajorVersion.API_V3: 3,
}
@classmethod
def _convert_list(cls, model_list: List[EventDetails], version: ApiMajorVersion):
CONVERTERS = {
3: cls.eventsDetailsConverter_v3,
}
return CONVERTERS[version](model_list)
@classmethod
def eventsDetailsConverter_v3(cls, event_details: List[EventDetails]):
return list(map(cls.eventDetailsConverter_v3, event_details))
@classmethod
def eventDetailsConverter_v3(cls, event_details: EventDetails) -> EventDetailsDict:
|
normalized_oprs = defaultdict(dict)
if event_details and event_details.matchstats:
for stat_type, stats in event_details.matchstats.items():
if stat_type in {"oprs", "dprs", "ccwms"}:
for team, value in cast(Dict[TeamKey, float], stats).items():
if "frc" not in team: # Normalize output
team = "frc{}".format(team)
normalized_oprs[stat_type][team] = value
rankings = {}
if event_details:
rankings = event_details.renderable_rankings
else:
rankings = {
"extra_stats_info": [],
"rankings": [],
"sort_order_info": None,
}
event_details_dict = {
"alliances": event_details.alliance_selections if event_details else [],
"district_points": event_details.district_points if event_details else {},
"insights": event_details.insights
if event_details
else {"qual": {}, "playoff": {}},
"oprs": normalized_oprs if normalized_oprs else {}, # OPRs, DPRs, CCWMs
"predictions": event_details.predictions if event_details else {},
"rankings": rankings,
}
return EventDetailsDict(event_details_dict)
|
identifier_body
|
|
1.js
|
/*
--- Day 15: Science for Hungry People ---
Today, you set out on the task of perfecting your milk-dunking cookie recipe.
All you have to do is find the right balance of ingredients.
Your recipe leaves room for exactly 100 teaspoons of ingredients. You make a
list of the remaining ingredients you could use to finish the recipe (your
puzzle input) and their properties per teaspoon:
capacity (how well it helps the cookie absorb milk)
durability (how well it keeps the cookie intact when full of milk)
flavor (how tasty it makes the cookie)
texture (how it improves the feel of the cookie)
calories (how many calories it adds to the cookie)
You can only measure ingredients in whole-teaspoon amounts accurately, and you
have to be accurate so you can reproduce your results in the future. The total
score of a cookie can be found by adding up each of the properties (negative
totals become 0) and then multiplying together everything except calories.
For instance, suppose you have these two ingredients:
Butterscotch: capacity -1, durability -2, flavor 6, texture 3, calories 8
Cinnamon: capacity 2, durability 3, flavor -2, texture -1, calories 3
Then, choosing to use 44 teaspoons of butterscotch and 56 teaspoons of cinnamon
(because the amounts of each ingredient must add up to 100) would result in a
cookie with the following properties:
- A capacity of 44*-1 + 56*2 = 68
- A durability of 44*-2 + 56*3 = 80
- A flavor of 44*6 + 56*-2 = 152
- A texture of 44*3 + 56*-1 = 76
Multiplying these together (68 * 80 * 152 * 76, ignoring calories for now)
results in a total score of 62842880, which happens to be the best score
possible given these ingredients. If any properties had produced a negative
total, it would have instead become zero, causing the whole score to multiply
to zero.
Given the ingredients in your kitchen and their properties, what is the total
score of the highest-scoring cookie you can make?
*/
const AMOUNT = 100;
export default function solution(input) {
const ingredients = input.trim()
.split('\n')
.map(line => line.split(':').map(i => i.trim()))
.map(([title, params]) => {
return params.split(',')
.map(i => i.trim().split(' '))
.reduce((a, i) => {
a[i[0]] = parseInt(i[1]);
return a;
}, {});
});
function score(ingredients, amounts) {
const all = ingredients
.reduce((a, item, i) => {
Object.keys(item).forEach(key => {
a[key] = a[key] || 0;
a[key] += item[key] * amounts[i];
});
return a;
}, {});
return Object.keys(all)
.reduce((a, key) => {
if (key === 'calories') {
return a;
}
return a * Math.max(0, all[key]);
}, 1);
}
const amounts = [];
for (let i = 0; i < ingredients.length; i++) {
amounts.push(0);
}
let answer = 0;
function
|
(id = 0, amount = AMOUNT) {
if (amount === 0) {
answer = Math.max(answer, score(ingredients, amounts));
} else if (id < ingredients.length) {
for (let i = 0; i <= amount; i++) {
amounts[id] = i;
calc(id + 1, amount - i);
}
}
return answer;
}
return calc();
};
|
calc
|
identifier_name
|
1.js
|
/*
--- Day 15: Science for Hungry People ---
Today, you set out on the task of perfecting your milk-dunking cookie recipe.
All you have to do is find the right balance of ingredients.
Your recipe leaves room for exactly 100 teaspoons of ingredients. You make a
list of the remaining ingredients you could use to finish the recipe (your
puzzle input) and their properties per teaspoon:
capacity (how well it helps the cookie absorb milk)
durability (how well it keeps the cookie intact when full of milk)
flavor (how tasty it makes the cookie)
texture (how it improves the feel of the cookie)
calories (how many calories it adds to the cookie)
You can only measure ingredients in whole-teaspoon amounts accurately, and you
have to be accurate so you can reproduce your results in the future. The total
score of a cookie can be found by adding up each of the properties (negative
totals become 0) and then multiplying together everything except calories.
For instance, suppose you have these two ingredients:
Butterscotch: capacity -1, durability -2, flavor 6, texture 3, calories 8
Cinnamon: capacity 2, durability 3, flavor -2, texture -1, calories 3
Then, choosing to use 44 teaspoons of butterscotch and 56 teaspoons of cinnamon
(because the amounts of each ingredient must add up to 100) would result in a
cookie with the following properties:
- A capacity of 44*-1 + 56*2 = 68
- A durability of 44*-2 + 56*3 = 80
- A flavor of 44*6 + 56*-2 = 152
- A texture of 44*3 + 56*-1 = 76
Multiplying these together (68 * 80 * 152 * 76, ignoring calories for now)
results in a total score of 62842880, which happens to be the best score
possible given these ingredients. If any properties had produced a negative
total, it would have instead become zero, causing the whole score to multiply
to zero.
Given the ingredients in your kitchen and their properties, what is the total
score of the highest-scoring cookie you can make?
*/
const AMOUNT = 100;
export default function solution(input) {
const ingredients = input.trim()
.split('\n')
.map(line => line.split(':').map(i => i.trim()))
.map(([title, params]) => {
return params.split(',')
.map(i => i.trim().split(' '))
.reduce((a, i) => {
a[i[0]] = parseInt(i[1]);
return a;
}, {});
});
function score(ingredients, amounts) {
const all = ingredients
.reduce((a, item, i) => {
Object.keys(item).forEach(key => {
a[key] = a[key] || 0;
a[key] += item[key] * amounts[i];
});
return a;
}, {});
return Object.keys(all)
.reduce((a, key) => {
if (key === 'calories') {
return a;
}
return a * Math.max(0, all[key]);
}, 1);
}
const amounts = [];
for (let i = 0; i < ingredients.length; i++) {
amounts.push(0);
}
|
answer = Math.max(answer, score(ingredients, amounts));
} else if (id < ingredients.length) {
for (let i = 0; i <= amount; i++) {
amounts[id] = i;
calc(id + 1, amount - i);
}
}
return answer;
}
return calc();
};
|
let answer = 0;
function calc(id = 0, amount = AMOUNT) {
if (amount === 0) {
|
random_line_split
|
1.js
|
/*
--- Day 15: Science for Hungry People ---
Today, you set out on the task of perfecting your milk-dunking cookie recipe.
All you have to do is find the right balance of ingredients.
Your recipe leaves room for exactly 100 teaspoons of ingredients. You make a
list of the remaining ingredients you could use to finish the recipe (your
puzzle input) and their properties per teaspoon:
capacity (how well it helps the cookie absorb milk)
durability (how well it keeps the cookie intact when full of milk)
flavor (how tasty it makes the cookie)
texture (how it improves the feel of the cookie)
calories (how many calories it adds to the cookie)
You can only measure ingredients in whole-teaspoon amounts accurately, and you
have to be accurate so you can reproduce your results in the future. The total
score of a cookie can be found by adding up each of the properties (negative
totals become 0) and then multiplying together everything except calories.
For instance, suppose you have these two ingredients:
Butterscotch: capacity -1, durability -2, flavor 6, texture 3, calories 8
Cinnamon: capacity 2, durability 3, flavor -2, texture -1, calories 3
Then, choosing to use 44 teaspoons of butterscotch and 56 teaspoons of cinnamon
(because the amounts of each ingredient must add up to 100) would result in a
cookie with the following properties:
- A capacity of 44*-1 + 56*2 = 68
- A durability of 44*-2 + 56*3 = 80
- A flavor of 44*6 + 56*-2 = 152
- A texture of 44*3 + 56*-1 = 76
Multiplying these together (68 * 80 * 152 * 76, ignoring calories for now)
results in a total score of 62842880, which happens to be the best score
possible given these ingredients. If any properties had produced a negative
total, it would have instead become zero, causing the whole score to multiply
to zero.
Given the ingredients in your kitchen and their properties, what is the total
score of the highest-scoring cookie you can make?
*/
const AMOUNT = 100;
export default function solution(input)
|
;
|
{
const ingredients = input.trim()
.split('\n')
.map(line => line.split(':').map(i => i.trim()))
.map(([title, params]) => {
return params.split(',')
.map(i => i.trim().split(' '))
.reduce((a, i) => {
a[i[0]] = parseInt(i[1]);
return a;
}, {});
});
function score(ingredients, amounts) {
const all = ingredients
.reduce((a, item, i) => {
Object.keys(item).forEach(key => {
a[key] = a[key] || 0;
a[key] += item[key] * amounts[i];
});
return a;
}, {});
return Object.keys(all)
.reduce((a, key) => {
if (key === 'calories') {
return a;
}
return a * Math.max(0, all[key]);
}, 1);
}
const amounts = [];
for (let i = 0; i < ingredients.length; i++) {
amounts.push(0);
}
let answer = 0;
function calc(id = 0, amount = AMOUNT) {
if (amount === 0) {
answer = Math.max(answer, score(ingredients, amounts));
} else if (id < ingredients.length) {
for (let i = 0; i <= amount; i++) {
amounts[id] = i;
calc(id + 1, amount - i);
}
}
return answer;
}
return calc();
}
|
identifier_body
|
_output.py
|
"""
Implementation of hooks and APIs for outputting log messages.
"""
import sys
import traceback
import inspect
import json as pyjson
from threading import Lock
from functools import wraps
from io import IOBase
from pyrsistent import PClass, field
from . import _bytesjson as bytesjson
from zope.interface import Interface, implementer
from ._traceback import write_traceback, TRACEBACK_MESSAGE
from ._message import EXCEPTION_FIELD, MESSAGE_TYPE_FIELD, REASON_FIELD
from ._util import saferepr, safeunicode
from .json import EliotJSONEncoder
from ._validation import ValidationError
class _DestinationsSendError(Exception):
"""
An error occured sending to one or more destinations.
@ivar errors: A list of tuples output from C{sys.exc_info()}.
"""
def __init__(self, errors):
self.errors = errors
Exception.__init__(self, errors)
class BufferingDestination(object):
"""
Buffer messages in memory.
"""
|
def __call__(self, message):
self.messages.append(message)
while len(self.messages) > 1000:
self.messages.pop(0)
class Destinations(object):
"""
Manage a list of destinations for message dictionaries.
The global instance of this class is where L{Logger} instances will
send written messages.
"""
def __init__(self):
self._destinations = [BufferingDestination()]
self._any_added = False
self._globalFields = {}
def addGlobalFields(self, **fields):
"""
Add fields that will be included in all messages sent through this
destination.
@param fields: Keyword arguments mapping field names to values.
"""
self._globalFields.update(fields)
def send(self, message):
"""
Deliver a message to all destinations.
The passed in message might be mutated.
@param message: A message dictionary that can be serialized to JSON.
@type message: L{dict}
"""
message.update(self._globalFields)
errors = []
for dest in self._destinations:
try:
dest(message)
except:
errors.append(sys.exc_info())
if errors:
raise _DestinationsSendError(errors)
def add(self, *destinations):
"""
Adds new destinations.
A destination should never ever throw an exception. Seriously.
A destination should not mutate the dictionary it is given.
@param destinations: A list of callables that takes message
dictionaries.
"""
buffered_messages = None
if not self._any_added:
# These are first set of messages added, so we need to clear
# BufferingDestination:
self._any_added = True
buffered_messages = self._destinations[0].messages
self._destinations = []
self._destinations.extend(destinations)
if buffered_messages:
# Re-deliver buffered messages:
for message in buffered_messages:
self.send(message)
def remove(self, destination):
"""
Remove an existing destination.
@param destination: A destination previously added with C{self.add}.
@raises ValueError: If the destination is unknown.
"""
self._destinations.remove(destination)
class ILogger(Interface):
"""
Write out message dictionaries to some destination.
"""
def write(dictionary, serializer=None):
"""
Write a dictionary to the appropriate destination.
@note: This method is thread-safe.
@param serializer: Either C{None}, or a
L{eliot._validation._MessageSerializer} which can be used to
validate this message.
@param dictionary: The message to write out. The given dictionary
will not be mutated.
@type dictionary: C{dict}
"""
@implementer(ILogger)
class Logger(object):
"""
Write out messages to the globally configured destination(s).
You will typically want to create one of these for every chunk of code
whose messages you want to unit test in isolation, e.g. a class. The tests
can then replace a specific L{Logger} with a L{MemoryLogger}.
"""
_destinations = Destinations()
_log_tracebacks = True
def _safeUnicodeDictionary(self, dictionary):
"""
Serialize a dictionary to a unicode string no matter what it contains.
The resulting dictionary will loosely follow Python syntax but it is
not expected to actually be a lossless encoding in all cases.
@param dictionary: A L{dict} to serialize.
@return: A L{unicode} string representing the input dictionary as
faithfully as can be done without putting in too much effort.
"""
try:
return str(
dict(
(saferepr(key), saferepr(value))
for (key, value) in dictionary.items()
)
)
except:
return saferepr(dictionary)
def write(self, dictionary, serializer=None):
"""
Serialize the dictionary, and write it to C{self._destinations}.
"""
dictionary = dictionary.copy()
try:
if serializer is not None:
serializer.serialize(dictionary)
except:
write_traceback(self)
from ._action import log_message
log_message(
"eliot:serialization_failure",
message=self._safeUnicodeDictionary(dictionary),
__eliot_logger__=self,
)
return
try:
self._destinations.send(dictionary)
except _DestinationsSendError as e:
from ._action import log_message
if self._log_tracebacks:
for (exc_type, exception, exc_traceback) in e.errors:
# Can't use same Logger as serialization errors because
# if destination continues to error out we will get
# infinite recursion. So instead we have to manually
# construct a Logger that won't retry.
logger = Logger()
logger._log_tracebacks = False
logger._destinations = self._destinations
msg = {
MESSAGE_TYPE_FIELD: "eliot:destination_failure",
REASON_FIELD: safeunicode(exception),
EXCEPTION_FIELD: exc_type.__module__ + "." + exc_type.__name__,
"message": self._safeUnicodeDictionary(dictionary),
"__eliot_logger__": logger,
}
log_message(**msg)
else:
# Nothing we can do here, raising exception to caller will
# break business logic, better to have that continue to
# work even if logging isn't.
pass
def exclusively(f):
"""
Decorate a function to make it thread-safe by serializing invocations
using a per-instance lock.
"""
@wraps(f)
def exclusively_f(self, *a, **kw):
with self._lock:
return f(self, *a, **kw)
return exclusively_f
@implementer(ILogger)
class MemoryLogger(object):
"""
Store written messages in memory.
When unit testing you don't want to create this directly but rather use
the L{eliot.testing.validateLogging} decorator on a test method, which
will provide additional testing integration.
@ivar messages: A C{list} of the dictionaries passed to
L{MemoryLogger.write}. Do not mutate this list.
@ivar serializers: A C{list} of the serializers passed to
L{MemoryLogger.write}, each corresponding to a message
L{MemoryLogger.messages}. Do not mutate this list.
@ivar tracebackMessages: A C{list} of messages written to this logger for
tracebacks using L{eliot.write_traceback} or L{eliot.writeFailure}. Do
not mutate this list.
"""
def __init__(self, encoder=EliotJSONEncoder):
"""
@param encoder: A JSONEncoder subclass to use when encoding JSON.
"""
self._lock = Lock()
self._encoder = encoder
self.reset()
@exclusively
def flushTracebacks(self, exceptionType):
"""
Flush all logged tracebacks whose exception is of the given type.
This means they are expected tracebacks and should not cause the test
to fail.
@param exceptionType: A subclass of L{Exception}.
@return: C{list} of flushed messages.
"""
result = []
remaining = []
for message in self.tracebackMessages:
if isinstance(message[REASON_FIELD], exceptionType):
result.append(message)
else:
remaining.append(message)
self.tracebackMessages = remaining
return result
# PEP 8 variant:
flush_tracebacks = flushTracebacks
@exclusively
def write(self, dictionary, serializer=None):
"""
Add the dictionary to list of messages.
"""
# Validate copy of the dictionary, to ensure what we store isn't
# mutated.
try:
self._validate_message(dictionary.copy(), serializer)
except Exception as e:
# Skip irrelevant frames that don't help pinpoint the problem:
from . import _output, _message, _action
skip_filenames = [_output.__file__, _message.__file__, _action.__file__]
for frame in inspect.stack():
if frame[1] not in skip_filenames:
break
self._failed_validations.append(
"{}: {}".format(e, "".join(traceback.format_stack(frame[0])))
)
self.messages.append(dictionary)
self.serializers.append(serializer)
if serializer is TRACEBACK_MESSAGE._serializer:
self.tracebackMessages.append(dictionary)
def _validate_message(self, dictionary, serializer):
"""Validate an individual message.
As a side-effect, the message is replaced with its serialized contents.
@param dictionary: A message C{dict} to be validated. Might be mutated
by the serializer!
@param serializer: C{None} or a serializer.
@raises TypeError: If a field name is not unicode, or the dictionary
fails to serialize to JSON.
@raises eliot.ValidationError: If serializer was given and validation
failed.
"""
if serializer is not None:
serializer.validate(dictionary)
for key in dictionary:
if not isinstance(key, str):
if isinstance(key, bytes):
key.decode("utf-8")
else:
raise TypeError(dictionary, "%r is not unicode" % (key,))
if serializer is not None:
serializer.serialize(dictionary)
try:
pyjson.dumps(dictionary, cls=self._encoder)
except Exception as e:
raise TypeError("Message %s doesn't encode to JSON: %s" % (dictionary, e))
@exclusively
def validate(self):
"""
Validate all written messages.
Does minimal validation of types, and for messages with corresponding
serializers use those to do additional validation.
As a side-effect, the messages are replaced with their serialized
contents.
@raises TypeError: If a field name is not unicode, or the dictionary
fails to serialize to JSON.
@raises eliot.ValidationError: If serializer was given and validation
failed.
"""
for dictionary, serializer in zip(self.messages, self.serializers):
try:
self._validate_message(dictionary, serializer)
except (TypeError, ValidationError) as e:
# We already figured out which messages failed validation
# earlier. This just lets us figure out which exception type to
# raise.
raise e.__class__("\n\n".join(self._failed_validations))
@exclusively
def serialize(self):
"""
Serialize all written messages.
This is the Field-based serialization, not JSON.
@return: A C{list} of C{dict}, the serialized messages.
"""
result = []
for dictionary, serializer in zip(self.messages, self.serializers):
dictionary = dictionary.copy()
serializer.serialize(dictionary)
result.append(dictionary)
return result
@exclusively
def reset(self):
"""
Clear all logged messages.
Any logged tracebacks will also be cleared, and will therefore not
cause a test failure.
This is useful to ensure a logger is in a known state before testing
logging of a specific code path.
"""
self.messages = []
self.serializers = []
self.tracebackMessages = []
self._failed_validations = []
class FileDestination(PClass):
"""
Callable that writes JSON messages to a file.
On Python 3 the file may support either C{bytes} or C{unicode}. On
Python 2 only C{bytes} are supported since that is what all files expect
in practice.
@ivar file: The file to which messages will be written.
@ivar _dumps: Function that serializes an object to JSON.
@ivar _linebreak: C{"\n"} as either bytes or unicode.
"""
file = field(mandatory=True)
encoder = field(mandatory=True)
_dumps = field(mandatory=True)
_linebreak = field(mandatory=True)
def __new__(cls, file, encoder=EliotJSONEncoder):
if isinstance(file, IOBase) and not file.writable():
raise RuntimeError("Given file {} is not writeable.")
unicodeFile = False
try:
file.write(b"")
except TypeError:
unicodeFile = True
if unicodeFile:
# On Python 3 native json module outputs unicode:
_dumps = pyjson.dumps
_linebreak = "\n"
else:
_dumps = bytesjson.dumps
_linebreak = b"\n"
return PClass.__new__(
cls, file=file, _dumps=_dumps, _linebreak=_linebreak, encoder=encoder
)
def __call__(self, message):
"""
@param message: A message dictionary.
"""
self.file.write(self._dumps(message, cls=self.encoder) + self._linebreak)
self.file.flush()
def to_file(output_file, encoder=EliotJSONEncoder):
"""
Add a destination that writes a JSON message per line to the given file.
@param output_file: A file-like object.
@param encoder: A JSONEncoder subclass to use when encoding JSON.
"""
Logger._destinations.add(FileDestination(file=output_file, encoder=encoder))
# The default Logger, used when none is specified:
_DEFAULT_LOGGER = Logger()
|
def __init__(self):
self.messages = []
|
random_line_split
|
_output.py
|
"""
Implementation of hooks and APIs for outputting log messages.
"""
import sys
import traceback
import inspect
import json as pyjson
from threading import Lock
from functools import wraps
from io import IOBase
from pyrsistent import PClass, field
from . import _bytesjson as bytesjson
from zope.interface import Interface, implementer
from ._traceback import write_traceback, TRACEBACK_MESSAGE
from ._message import EXCEPTION_FIELD, MESSAGE_TYPE_FIELD, REASON_FIELD
from ._util import saferepr, safeunicode
from .json import EliotJSONEncoder
from ._validation import ValidationError
class _DestinationsSendError(Exception):
"""
An error occured sending to one or more destinations.
@ivar errors: A list of tuples output from C{sys.exc_info()}.
"""
def __init__(self, errors):
self.errors = errors
Exception.__init__(self, errors)
class BufferingDestination(object):
    """
    Destination that accumulates messages in an in-memory list.

    At most 1000 messages are retained; once the limit is exceeded the
    oldest entries are discarded first.
    """
    def __init__(self):
        # Newest messages are appended at the end of the list.
        self.messages = []

    def __call__(self, message):
        buffered = self.messages
        buffered.append(message)
        # Drop oldest entries until we are back under the cap.
        while len(buffered) > 1000:
            buffered.pop(0)
class Destinations(object):
"""
Manage a list of destinations for message dictionaries.
The global instance of this class is where L{Logger} instances will
send written messages.
"""
def __init__(self):
self._destinations = [BufferingDestination()]
self._any_added = False
self._globalFields = {}
def addGlobalFields(self, **fields):
"""
Add fields that will be included in all messages sent through this
destination.
@param fields: Keyword arguments mapping field names to values.
"""
self._globalFields.update(fields)
def send(self, message):
"""
Deliver a message to all destinations.
The passed in message might be mutated.
@param message: A message dictionary that can be serialized to JSON.
@type message: L{dict}
"""
message.update(self._globalFields)
errors = []
for dest in self._destinations:
try:
dest(message)
except:
errors.append(sys.exc_info())
if errors:
raise _DestinationsSendError(errors)
def add(self, *destinations):
"""
Adds new destinations.
A destination should never ever throw an exception. Seriously.
A destination should not mutate the dictionary it is given.
@param destinations: A list of callables that takes message
dictionaries.
"""
buffered_messages = None
if not self._any_added:
# These are first set of messages added, so we need to clear
# BufferingDestination:
self._any_added = True
buffered_messages = self._destinations[0].messages
self._destinations = []
self._destinations.extend(destinations)
if buffered_messages:
# Re-deliver buffered messages:
for message in buffered_messages:
self.send(message)
def remove(self, destination):
"""
Remove an existing destination.
@param destination: A destination previously added with C{self.add}.
@raises ValueError: If the destination is unknown.
"""
self._destinations.remove(destination)
class ILogger(Interface):
"""
Write out message dictionaries to some destination.
"""
def write(dictionary, serializer=None):
"""
Write a dictionary to the appropriate destination.
@note: This method is thread-safe.
@param serializer: Either C{None}, or a
L{eliot._validation._MessageSerializer} which can be used to
validate this message.
@param dictionary: The message to write out. The given dictionary
will not be mutated.
@type dictionary: C{dict}
"""
@implementer(ILogger)
class Logger(object):
"""
Write out messages to the globally configured destination(s).
You will typically want to create one of these for every chunk of code
whose messages you want to unit test in isolation, e.g. a class. The tests
can then replace a specific L{Logger} with a L{MemoryLogger}.
"""
_destinations = Destinations()
_log_tracebacks = True
def _safeUnicodeDictionary(self, dictionary):
"""
Serialize a dictionary to a unicode string no matter what it contains.
The resulting dictionary will loosely follow Python syntax but it is
not expected to actually be a lossless encoding in all cases.
@param dictionary: A L{dict} to serialize.
@return: A L{unicode} string representing the input dictionary as
faithfully as can be done without putting in too much effort.
"""
try:
return str(
dict(
(saferepr(key), saferepr(value))
for (key, value) in dictionary.items()
)
)
except:
return saferepr(dictionary)
def write(self, dictionary, serializer=None):
"""
Serialize the dictionary, and write it to C{self._destinations}.
"""
dictionary = dictionary.copy()
try:
if serializer is not None:
serializer.serialize(dictionary)
except:
write_traceback(self)
from ._action import log_message
log_message(
"eliot:serialization_failure",
message=self._safeUnicodeDictionary(dictionary),
__eliot_logger__=self,
)
return
try:
self._destinations.send(dictionary)
except _DestinationsSendError as e:
from ._action import log_message
if self._log_tracebacks:
for (exc_type, exception, exc_traceback) in e.errors:
# Can't use same Logger as serialization errors because
# if destination continues to error out we will get
# infinite recursion. So instead we have to manually
# construct a Logger that won't retry.
logger = Logger()
logger._log_tracebacks = False
logger._destinations = self._destinations
msg = {
MESSAGE_TYPE_FIELD: "eliot:destination_failure",
REASON_FIELD: safeunicode(exception),
EXCEPTION_FIELD: exc_type.__module__ + "." + exc_type.__name__,
"message": self._safeUnicodeDictionary(dictionary),
"__eliot_logger__": logger,
}
log_message(**msg)
else:
# Nothing we can do here, raising exception to caller will
# break business logic, better to have that continue to
# work even if logging isn't.
pass
def exclusively(f):
"""
Decorate a function to make it thread-safe by serializing invocations
using a per-instance lock.
"""
@wraps(f)
def exclusively_f(self, *a, **kw):
with self._lock:
return f(self, *a, **kw)
return exclusively_f
@implementer(ILogger)
class MemoryLogger(object):
"""
Store written messages in memory.
When unit testing you don't want to create this directly but rather use
the L{eliot.testing.validateLogging} decorator on a test method, which
will provide additional testing integration.
@ivar messages: A C{list} of the dictionaries passed to
L{MemoryLogger.write}. Do not mutate this list.
@ivar serializers: A C{list} of the serializers passed to
L{MemoryLogger.write}, each corresponding to a message
L{MemoryLogger.messages}. Do not mutate this list.
@ivar tracebackMessages: A C{list} of messages written to this logger for
tracebacks using L{eliot.write_traceback} or L{eliot.writeFailure}. Do
not mutate this list.
"""
def __init__(self, encoder=EliotJSONEncoder):
"""
@param encoder: A JSONEncoder subclass to use when encoding JSON.
"""
self._lock = Lock()
self._encoder = encoder
self.reset()
@exclusively
def flushTracebacks(self, exceptionType):
"""
Flush all logged tracebacks whose exception is of the given type.
This means they are expected tracebacks and should not cause the test
to fail.
@param exceptionType: A subclass of L{Exception}.
@return: C{list} of flushed messages.
"""
result = []
remaining = []
for message in self.tracebackMessages:
if isinstance(message[REASON_FIELD], exceptionType):
result.append(message)
else:
remaining.append(message)
self.tracebackMessages = remaining
return result
# PEP 8 variant:
flush_tracebacks = flushTracebacks
@exclusively
def write(self, dictionary, serializer=None):
"""
Add the dictionary to list of messages.
"""
# Validate copy of the dictionary, to ensure what we store isn't
# mutated.
try:
self._validate_message(dictionary.copy(), serializer)
except Exception as e:
# Skip irrelevant frames that don't help pinpoint the problem:
from . import _output, _message, _action
skip_filenames = [_output.__file__, _message.__file__, _action.__file__]
for frame in inspect.stack():
if frame[1] not in skip_filenames:
break
self._failed_validations.append(
"{}: {}".format(e, "".join(traceback.format_stack(frame[0])))
)
self.messages.append(dictionary)
self.serializers.append(serializer)
if serializer is TRACEBACK_MESSAGE._serializer:
self.tracebackMessages.append(dictionary)
def _validate_message(self, dictionary, serializer):
"""Validate an individual message.
As a side-effect, the message is replaced with its serialized contents.
@param dictionary: A message C{dict} to be validated. Might be mutated
by the serializer!
@param serializer: C{None} or a serializer.
@raises TypeError: If a field name is not unicode, or the dictionary
fails to serialize to JSON.
@raises eliot.ValidationError: If serializer was given and validation
failed.
"""
if serializer is not None:
serializer.validate(dictionary)
for key in dictionary:
if not isinstance(key, str):
if isinstance(key, bytes):
key.decode("utf-8")
else:
raise TypeError(dictionary, "%r is not unicode" % (key,))
if serializer is not None:
serializer.serialize(dictionary)
try:
pyjson.dumps(dictionary, cls=self._encoder)
except Exception as e:
raise TypeError("Message %s doesn't encode to JSON: %s" % (dictionary, e))
@exclusively
def validate(self):
"""
Validate all written messages.
Does minimal validation of types, and for messages with corresponding
serializers use those to do additional validation.
As a side-effect, the messages are replaced with their serialized
contents.
@raises TypeError: If a field name is not unicode, or the dictionary
fails to serialize to JSON.
@raises eliot.ValidationError: If serializer was given and validation
failed.
"""
for dictionary, serializer in zip(self.messages, self.serializers):
try:
self._validate_message(dictionary, serializer)
except (TypeError, ValidationError) as e:
# We already figured out which messages failed validation
# earlier. This just lets us figure out which exception type to
# raise.
raise e.__class__("\n\n".join(self._failed_validations))
@exclusively
def serialize(self):
"""
Serialize all written messages.
This is the Field-based serialization, not JSON.
@return: A C{list} of C{dict}, the serialized messages.
"""
result = []
for dictionary, serializer in zip(self.messages, self.serializers):
dictionary = dictionary.copy()
serializer.serialize(dictionary)
result.append(dictionary)
return result
@exclusively
def
|
(self):
"""
Clear all logged messages.
Any logged tracebacks will also be cleared, and will therefore not
cause a test failure.
This is useful to ensure a logger is in a known state before testing
logging of a specific code path.
"""
self.messages = []
self.serializers = []
self.tracebackMessages = []
self._failed_validations = []
class FileDestination(PClass):
"""
Callable that writes JSON messages to a file.
On Python 3 the file may support either C{bytes} or C{unicode}. On
Python 2 only C{bytes} are supported since that is what all files expect
in practice.
@ivar file: The file to which messages will be written.
@ivar _dumps: Function that serializes an object to JSON.
@ivar _linebreak: C{"\n"} as either bytes or unicode.
"""
file = field(mandatory=True)
encoder = field(mandatory=True)
_dumps = field(mandatory=True)
_linebreak = field(mandatory=True)
def __new__(cls, file, encoder=EliotJSONEncoder):
if isinstance(file, IOBase) and not file.writable():
raise RuntimeError("Given file {} is not writeable.")
unicodeFile = False
try:
file.write(b"")
except TypeError:
unicodeFile = True
if unicodeFile:
# On Python 3 native json module outputs unicode:
_dumps = pyjson.dumps
_linebreak = "\n"
else:
_dumps = bytesjson.dumps
_linebreak = b"\n"
return PClass.__new__(
cls, file=file, _dumps=_dumps, _linebreak=_linebreak, encoder=encoder
)
def __call__(self, message):
"""
@param message: A message dictionary.
"""
self.file.write(self._dumps(message, cls=self.encoder) + self._linebreak)
self.file.flush()
def to_file(output_file, encoder=EliotJSONEncoder):
"""
Add a destination that writes a JSON message per line to the given file.
@param output_file: A file-like object.
@param encoder: A JSONEncoder subclass to use when encoding JSON.
"""
Logger._destinations.add(FileDestination(file=output_file, encoder=encoder))
# The default Logger, used when none is specified:
_DEFAULT_LOGGER = Logger()
|
reset
|
identifier_name
|
_output.py
|
"""
Implementation of hooks and APIs for outputting log messages.
"""
import sys
import traceback
import inspect
import json as pyjson
from threading import Lock
from functools import wraps
from io import IOBase
from pyrsistent import PClass, field
from . import _bytesjson as bytesjson
from zope.interface import Interface, implementer
from ._traceback import write_traceback, TRACEBACK_MESSAGE
from ._message import EXCEPTION_FIELD, MESSAGE_TYPE_FIELD, REASON_FIELD
from ._util import saferepr, safeunicode
from .json import EliotJSONEncoder
from ._validation import ValidationError
class _DestinationsSendError(Exception):
"""
An error occured sending to one or more destinations.
@ivar errors: A list of tuples output from C{sys.exc_info()}.
"""
def __init__(self, errors):
self.errors = errors
Exception.__init__(self, errors)
class BufferingDestination(object):
|
class Destinations(object):
    """
    Registry of callables that receive written message dictionaries.

    The global instance of this class is where L{Logger} instances
    deliver their messages.
    """
    def __init__(self):
        # Until real destinations are registered, messages accumulate in a
        # BufferingDestination so early logging is not lost.
        self._destinations = [BufferingDestination()]
        self._any_added = False
        self._globalFields = {}

    def addGlobalFields(self, **fields):
        """
        Add fields that will be included in all messages sent through this
        destination.

        @param fields: Keyword arguments mapping field names to values.
        """
        self._globalFields.update(fields)

    def send(self, message):
        """
        Deliver a message to every registered destination.

        The passed-in message might be mutated.

        @param message: A message dictionary that can be serialized to JSON.
        @type message: L{dict}

        @raises _DestinationsSendError: If any destination raised; the
            exception aggregates all per-destination failures.
        """
        message.update(self._globalFields)
        failures = []
        for destination in self._destinations:
            try:
                destination(message)
            except:
                failures.append(sys.exc_info())
        if failures:
            raise _DestinationsSendError(failures)

    def add(self, *destinations):
        """
        Register new destinations.

        A destination should never ever throw an exception. Seriously.
        A destination should not mutate the dictionary it is given.

        @param destinations: A list of callables that take message
            dictionaries.
        """
        backlog = None
        if not self._any_added:
            # First real destinations: capture whatever the initial
            # BufferingDestination collected, then drop the buffer.
            self._any_added = True
            backlog = self._destinations[0].messages
            self._destinations = []
        self._destinations.extend(destinations)
        if backlog:
            # Re-deliver messages that were logged before any real
            # destination existed.
            for message in backlog:
                self.send(message)

    def remove(self, destination):
        """
        Remove an existing destination.

        @param destination: A destination previously added with C{self.add}.

        @raises ValueError: If the destination is unknown.
        """
        self._destinations.remove(destination)
class ILogger(Interface):
    """
    Write out message dictionaries to some destination.

    Implemented in this module by L{Logger} (delivers to the globally
    configured destinations) and L{MemoryLogger} (stores in memory for
    tests).
    """
    def write(dictionary, serializer=None):
        """
        Write a dictionary to the appropriate destination.

        @note: This method is thread-safe.

        @param serializer: Either C{None}, or a
            L{eliot._validation._MessageSerializer} which can be used to
            validate this message.

        @param dictionary: The message to write out. The given dictionary
            will not be mutated.
        @type dictionary: C{dict}
        """
@implementer(ILogger)
class Logger(object):
    """
    Write out messages to the globally configured destination(s).

    You will typically want to create one of these for every chunk of code
    whose messages you want to unit test in isolation, e.g. a class. The tests
    can then replace a specific L{Logger} with a L{MemoryLogger}.
    """
    _destinations = Destinations()
    # When True, a failure to deliver to a destination is itself logged
    # (via a non-retrying Logger, to avoid infinite recursion).
    _log_tracebacks = True

    def _safeUnicodeDictionary(self, dictionary):
        """
        Serialize a dictionary to a unicode string no matter what it contains.

        The resulting dictionary will loosely follow Python syntax but it is
        not expected to actually be a lossless encoding in all cases.

        @param dictionary: A L{dict} to serialize.

        @return: A L{unicode} string representing the input dictionary as
            faithfully as can be done without putting in too much effort.
        """
        try:
            return str(
                dict(
                    (saferepr(key), saferepr(value))
                    for (key, value) in dictionary.items()
                )
            )
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # are no longer swallowed; fall back to a best-effort repr.
            return saferepr(dictionary)

    def write(self, dictionary, serializer=None):
        """
        Serialize the dictionary, and write it to C{self._destinations}.

        @param dictionary: The message to write; a copy is made so the
            caller's dictionary is never mutated.
        @param serializer: C{None}, or a serializer used to transform the
            message before delivery.
        """
        dictionary = dictionary.copy()
        try:
            if serializer is not None:
                serializer.serialize(dictionary)
        except Exception:
            # Narrowed from a bare except: serialization failed, so log the
            # traceback plus a best-effort rendering of the message instead
            # of propagating into business logic.
            write_traceback(self)
            from ._action import log_message
            log_message(
                "eliot:serialization_failure",
                message=self._safeUnicodeDictionary(dictionary),
                __eliot_logger__=self,
            )
            return
        try:
            self._destinations.send(dictionary)
        except _DestinationsSendError as e:
            from ._action import log_message
            if self._log_tracebacks:
                for (exc_type, exception, exc_traceback) in e.errors:
                    # Can't use same Logger as serialization errors because
                    # if destination continues to error out we will get
                    # infinite recursion. So instead we have to manually
                    # construct a Logger that won't retry.
                    logger = Logger()
                    logger._log_tracebacks = False
                    logger._destinations = self._destinations
                    msg = {
                        MESSAGE_TYPE_FIELD: "eliot:destination_failure",
                        REASON_FIELD: safeunicode(exception),
                        EXCEPTION_FIELD: exc_type.__module__ + "." + exc_type.__name__,
                        "message": self._safeUnicodeDictionary(dictionary),
                        "__eliot_logger__": logger,
                    }
                    log_message(**msg)
            else:
                # Nothing we can do here, raising exception to caller will
                # break business logic, better to have that continue to
                # work even if logging isn't.
                pass
def exclusively(f):
    """
    Decorate a method so concurrent invocations are serialized via the
    instance's C{self._lock}.

    @param f: The method to wrap; instances it is called on must expose a
        C{_lock} context manager (e.g. a C{threading.Lock}).

    @return: The wrapped method.
    """
    @wraps(f)
    def _locked(self, *args, **kwargs):
        # Hold the per-instance lock for the duration of the call.
        with self._lock:
            return f(self, *args, **kwargs)
    return _locked
@implementer(ILogger)
class MemoryLogger(object):
    """
    Store written messages in memory.

    When unit testing you don't want to create this directly but rather use
    the L{eliot.testing.validateLogging} decorator on a test method, which
    will provide additional testing integration.

    @ivar messages: A C{list} of the dictionaries passed to
        L{MemoryLogger.write}. Do not mutate this list.
    @ivar serializers: A C{list} of the serializers passed to
        L{MemoryLogger.write}, each corresponding to a message
        L{MemoryLogger.messages}. Do not mutate this list.
    @ivar tracebackMessages: A C{list} of messages written to this logger for
        tracebacks using L{eliot.write_traceback} or L{eliot.writeFailure}. Do
        not mutate this list.
    """
    def __init__(self, encoder=EliotJSONEncoder):
        """
        @param encoder: A JSONEncoder subclass to use when encoding JSON.
        """
        # Lock used by the @exclusively decorator to serialize access from
        # multiple threads.
        self._lock = Lock()
        self._encoder = encoder
        self.reset()

    @exclusively
    def flushTracebacks(self, exceptionType):
        """
        Flush all logged tracebacks whose exception is of the given type.

        This means they are expected tracebacks and should not cause the test
        to fail.

        @param exceptionType: A subclass of L{Exception}.

        @return: C{list} of flushed messages.
        """
        result = []
        remaining = []
        for message in self.tracebackMessages:
            if isinstance(message[REASON_FIELD], exceptionType):
                result.append(message)
            else:
                remaining.append(message)
        self.tracebackMessages = remaining
        return result

    # PEP 8 variant:
    flush_tracebacks = flushTracebacks

    @exclusively
    def write(self, dictionary, serializer=None):
        """
        Add the dictionary to list of messages.
        """
        # Validate copy of the dictionary, to ensure what we store isn't
        # mutated.
        try:
            self._validate_message(dictionary.copy(), serializer)
        except Exception as e:
            # Skip irrelevant frames that don't help pinpoint the problem:
            from . import _output, _message, _action
            skip_filenames = [_output.__file__, _message.__file__, _action.__file__]
            for frame in inspect.stack():
                if frame[1] not in skip_filenames:
                    break
            self._failed_validations.append(
                "{}: {}".format(e, "".join(traceback.format_stack(frame[0])))
            )
        # The message is stored even when validation failed; validate()
        # reports the accumulated failures later.
        self.messages.append(dictionary)
        self.serializers.append(serializer)
        if serializer is TRACEBACK_MESSAGE._serializer:
            self.tracebackMessages.append(dictionary)

    def _validate_message(self, dictionary, serializer):
        """Validate an individual message.

        As a side-effect, the message is replaced with its serialized
        contents.

        @param dictionary: A message C{dict} to be validated. Might be mutated
            by the serializer!

        @param serializer: C{None} or a serializer.

        @raises TypeError: If a field name is not unicode, or the dictionary
            fails to serialize to JSON.

        @raises eliot.ValidationError: If serializer was given and validation
            failed.
        """
        if serializer is not None:
            serializer.validate(dictionary)
        for key in dictionary:
            if not isinstance(key, str):
                if isinstance(key, bytes):
                    # Result is discarded: this only checks that the bytes
                    # decode as UTF-8 (a decode failure propagates).
                    key.decode("utf-8")
                else:
                    raise TypeError(dictionary, "%r is not unicode" % (key,))
        if serializer is not None:
            serializer.serialize(dictionary)
        try:
            # Ensure the message is JSON-encodable with the configured
            # encoder; the encoded output itself is thrown away.
            pyjson.dumps(dictionary, cls=self._encoder)
        except Exception as e:
            raise TypeError("Message %s doesn't encode to JSON: %s" % (dictionary, e))

    @exclusively
    def validate(self):
        """
        Validate all written messages.

        Does minimal validation of types, and for messages with corresponding
        serializers use those to do additional validation.

        As a side-effect, the messages are replaced with their serialized
        contents.

        @raises TypeError: If a field name is not unicode, or the dictionary
            fails to serialize to JSON.

        @raises eliot.ValidationError: If serializer was given and validation
            failed.
        """
        for dictionary, serializer in zip(self.messages, self.serializers):
            try:
                self._validate_message(dictionary, serializer)
            except (TypeError, ValidationError) as e:
                # We already figured out which messages failed validation
                # earlier. This just lets us figure out which exception type to
                # raise.
                raise e.__class__("\n\n".join(self._failed_validations))

    @exclusively
    def serialize(self):
        """
        Serialize all written messages.

        This is the Field-based serialization, not JSON.

        @return: A C{list} of C{dict}, the serialized messages.
        """
        result = []
        for dictionary, serializer in zip(self.messages, self.serializers):
            # Serialize a copy so self.messages is left untouched.
            dictionary = dictionary.copy()
            serializer.serialize(dictionary)
            result.append(dictionary)
        return result

    @exclusively
    def reset(self):
        """
        Clear all logged messages.

        Any logged tracebacks will also be cleared, and will therefore not
        cause a test failure.

        This is useful to ensure a logger is in a known state before testing
        logging of a specific code path.
        """
        self.messages = []
        self.serializers = []
        self.tracebackMessages = []
        self._failed_validations = []
class FileDestination(PClass):
    """
    Callable that writes JSON messages to a file.

    On Python 3 the file may support either C{bytes} or C{unicode}. On
    Python 2 only C{bytes} are supported since that is what all files expect
    in practice.

    @ivar file: The file to which messages will be written.
    @ivar encoder: The JSONEncoder subclass used when encoding JSON.
    @ivar _dumps: Function that serializes an object to JSON.
    @ivar _linebreak: C{"\n"} as either bytes or unicode.
    """
    file = field(mandatory=True)
    encoder = field(mandatory=True)
    _dumps = field(mandatory=True)
    _linebreak = field(mandatory=True)

    def __new__(cls, file, encoder=EliotJSONEncoder):
        if isinstance(file, IOBase) and not file.writable():
            # Bug fix: the "{}" placeholder was previously never filled in,
            # so the error message did not name the offending file.
            raise RuntimeError("Given file {} is not writeable.".format(file))
        # Probe whether the file accepts bytes; a unicode-only file raises
        # TypeError on a bytes write.
        unicodeFile = False
        try:
            file.write(b"")
        except TypeError:
            unicodeFile = True
        if unicodeFile:
            # On Python 3 native json module outputs unicode:
            _dumps = pyjson.dumps
            _linebreak = "\n"
        else:
            _dumps = bytesjson.dumps
            _linebreak = b"\n"
        return PClass.__new__(
            cls, file=file, _dumps=_dumps, _linebreak=_linebreak, encoder=encoder
        )

    def __call__(self, message):
        """
        Write a message to the file as one JSON line and flush immediately.

        @param message: A message dictionary.
        """
        self.file.write(self._dumps(message, cls=self.encoder) + self._linebreak)
        self.file.flush()
def to_file(output_file, encoder=EliotJSONEncoder):
    """
    Add a destination that writes a JSON message per line to the given file.

    @param output_file: A file-like object.
    @param encoder: A JSONEncoder subclass to use when encoding JSON.
    """
    destination = FileDestination(file=output_file, encoder=encoder)
    Logger._destinations.add(destination)
# The default Logger, used when none is specified:
_DEFAULT_LOGGER = Logger()
|
"""
Buffer messages in memory.
"""
def __init__(self):
self.messages = []
def __call__(self, message):
self.messages.append(message)
while len(self.messages) > 1000:
self.messages.pop(0)
|
identifier_body
|
_output.py
|
"""
Implementation of hooks and APIs for outputting log messages.
"""
import sys
import traceback
import inspect
import json as pyjson
from threading import Lock
from functools import wraps
from io import IOBase
from pyrsistent import PClass, field
from . import _bytesjson as bytesjson
from zope.interface import Interface, implementer
from ._traceback import write_traceback, TRACEBACK_MESSAGE
from ._message import EXCEPTION_FIELD, MESSAGE_TYPE_FIELD, REASON_FIELD
from ._util import saferepr, safeunicode
from .json import EliotJSONEncoder
from ._validation import ValidationError
class _DestinationsSendError(Exception):
"""
An error occured sending to one or more destinations.
@ivar errors: A list of tuples output from C{sys.exc_info()}.
"""
def __init__(self, errors):
self.errors = errors
Exception.__init__(self, errors)
class BufferingDestination(object):
"""
Buffer messages in memory.
"""
def __init__(self):
self.messages = []
def __call__(self, message):
self.messages.append(message)
while len(self.messages) > 1000:
self.messages.pop(0)
class Destinations(object):
"""
Manage a list of destinations for message dictionaries.
The global instance of this class is where L{Logger} instances will
send written messages.
"""
def __init__(self):
self._destinations = [BufferingDestination()]
self._any_added = False
self._globalFields = {}
def addGlobalFields(self, **fields):
"""
Add fields that will be included in all messages sent through this
destination.
@param fields: Keyword arguments mapping field names to values.
"""
self._globalFields.update(fields)
def send(self, message):
"""
Deliver a message to all destinations.
The passed in message might be mutated.
@param message: A message dictionary that can be serialized to JSON.
@type message: L{dict}
"""
message.update(self._globalFields)
errors = []
for dest in self._destinations:
try:
dest(message)
except:
errors.append(sys.exc_info())
if errors:
raise _DestinationsSendError(errors)
def add(self, *destinations):
"""
Adds new destinations.
A destination should never ever throw an exception. Seriously.
A destination should not mutate the dictionary it is given.
@param destinations: A list of callables that takes message
dictionaries.
"""
buffered_messages = None
if not self._any_added:
# These are first set of messages added, so we need to clear
# BufferingDestination:
self._any_added = True
buffered_messages = self._destinations[0].messages
self._destinations = []
self._destinations.extend(destinations)
if buffered_messages:
# Re-deliver buffered messages:
for message in buffered_messages:
self.send(message)
def remove(self, destination):
"""
Remove an existing destination.
@param destination: A destination previously added with C{self.add}.
@raises ValueError: If the destination is unknown.
"""
self._destinations.remove(destination)
class ILogger(Interface):
"""
Write out message dictionaries to some destination.
"""
def write(dictionary, serializer=None):
"""
Write a dictionary to the appropriate destination.
@note: This method is thread-safe.
@param serializer: Either C{None}, or a
L{eliot._validation._MessageSerializer} which can be used to
validate this message.
@param dictionary: The message to write out. The given dictionary
will not be mutated.
@type dictionary: C{dict}
"""
@implementer(ILogger)
class Logger(object):
"""
Write out messages to the globally configured destination(s).
You will typically want to create one of these for every chunk of code
whose messages you want to unit test in isolation, e.g. a class. The tests
can then replace a specific L{Logger} with a L{MemoryLogger}.
"""
_destinations = Destinations()
_log_tracebacks = True
def _safeUnicodeDictionary(self, dictionary):
"""
Serialize a dictionary to a unicode string no matter what it contains.
The resulting dictionary will loosely follow Python syntax but it is
not expected to actually be a lossless encoding in all cases.
@param dictionary: A L{dict} to serialize.
@return: A L{unicode} string representing the input dictionary as
faithfully as can be done without putting in too much effort.
"""
try:
return str(
dict(
(saferepr(key), saferepr(value))
for (key, value) in dictionary.items()
)
)
except:
return saferepr(dictionary)
def write(self, dictionary, serializer=None):
"""
Serialize the dictionary, and write it to C{self._destinations}.
"""
dictionary = dictionary.copy()
try:
if serializer is not None:
serializer.serialize(dictionary)
except:
write_traceback(self)
from ._action import log_message
log_message(
"eliot:serialization_failure",
message=self._safeUnicodeDictionary(dictionary),
__eliot_logger__=self,
)
return
try:
self._destinations.send(dictionary)
except _DestinationsSendError as e:
from ._action import log_message
if self._log_tracebacks:
|
else:
# Nothing we can do here, raising exception to caller will
# break business logic, better to have that continue to
# work even if logging isn't.
pass
def exclusively(f):
"""
Decorate a function to make it thread-safe by serializing invocations
using a per-instance lock.
"""
@wraps(f)
def exclusively_f(self, *a, **kw):
with self._lock:
return f(self, *a, **kw)
return exclusively_f
@implementer(ILogger)
class MemoryLogger(object):
"""
Store written messages in memory.
When unit testing you don't want to create this directly but rather use
the L{eliot.testing.validateLogging} decorator on a test method, which
will provide additional testing integration.
@ivar messages: A C{list} of the dictionaries passed to
L{MemoryLogger.write}. Do not mutate this list.
@ivar serializers: A C{list} of the serializers passed to
L{MemoryLogger.write}, each corresponding to a message
L{MemoryLogger.messages}. Do not mutate this list.
@ivar tracebackMessages: A C{list} of messages written to this logger for
tracebacks using L{eliot.write_traceback} or L{eliot.writeFailure}. Do
not mutate this list.
"""
def __init__(self, encoder=EliotJSONEncoder):
"""
@param encoder: A JSONEncoder subclass to use when encoding JSON.
"""
self._lock = Lock()
self._encoder = encoder
self.reset()
@exclusively
def flushTracebacks(self, exceptionType):
"""
Flush all logged tracebacks whose exception is of the given type.
This means they are expected tracebacks and should not cause the test
to fail.
@param exceptionType: A subclass of L{Exception}.
@return: C{list} of flushed messages.
"""
result = []
remaining = []
for message in self.tracebackMessages:
if isinstance(message[REASON_FIELD], exceptionType):
result.append(message)
else:
remaining.append(message)
self.tracebackMessages = remaining
return result
# PEP 8 variant:
flush_tracebacks = flushTracebacks
@exclusively
def write(self, dictionary, serializer=None):
"""
Add the dictionary to list of messages.
"""
# Validate copy of the dictionary, to ensure what we store isn't
# mutated.
try:
self._validate_message(dictionary.copy(), serializer)
except Exception as e:
# Skip irrelevant frames that don't help pinpoint the problem:
from . import _output, _message, _action
skip_filenames = [_output.__file__, _message.__file__, _action.__file__]
for frame in inspect.stack():
if frame[1] not in skip_filenames:
break
self._failed_validations.append(
"{}: {}".format(e, "".join(traceback.format_stack(frame[0])))
)
self.messages.append(dictionary)
self.serializers.append(serializer)
if serializer is TRACEBACK_MESSAGE._serializer:
self.tracebackMessages.append(dictionary)
def _validate_message(self, dictionary, serializer):
"""Validate an individual message.
As a side-effect, the message is replaced with its serialized contents.
@param dictionary: A message C{dict} to be validated. Might be mutated
by the serializer!
@param serializer: C{None} or a serializer.
@raises TypeError: If a field name is not unicode, or the dictionary
fails to serialize to JSON.
@raises eliot.ValidationError: If serializer was given and validation
failed.
"""
if serializer is not None:
serializer.validate(dictionary)
for key in dictionary:
if not isinstance(key, str):
if isinstance(key, bytes):
key.decode("utf-8")
else:
raise TypeError(dictionary, "%r is not unicode" % (key,))
if serializer is not None:
serializer.serialize(dictionary)
try:
pyjson.dumps(dictionary, cls=self._encoder)
except Exception as e:
raise TypeError("Message %s doesn't encode to JSON: %s" % (dictionary, e))
@exclusively
def validate(self):
"""
Validate all written messages.
Does minimal validation of types, and for messages with corresponding
serializers use those to do additional validation.
As a side-effect, the messages are replaced with their serialized
contents.
@raises TypeError: If a field name is not unicode, or the dictionary
fails to serialize to JSON.
@raises eliot.ValidationError: If serializer was given and validation
failed.
"""
for dictionary, serializer in zip(self.messages, self.serializers):
try:
self._validate_message(dictionary, serializer)
except (TypeError, ValidationError) as e:
# We already figured out which messages failed validation
# earlier. This just lets us figure out which exception type to
# raise.
raise e.__class__("\n\n".join(self._failed_validations))
@exclusively
def serialize(self):
"""
Serialize all written messages.
This is the Field-based serialization, not JSON.
@return: A C{list} of C{dict}, the serialized messages.
"""
result = []
for dictionary, serializer in zip(self.messages, self.serializers):
dictionary = dictionary.copy()
serializer.serialize(dictionary)
result.append(dictionary)
return result
@exclusively
def reset(self):
"""
Clear all logged messages.
Any logged tracebacks will also be cleared, and will therefore not
cause a test failure.
This is useful to ensure a logger is in a known state before testing
logging of a specific code path.
"""
self.messages = []
self.serializers = []
self.tracebackMessages = []
self._failed_validations = []
class FileDestination(PClass):
"""
Callable that writes JSON messages to a file.
On Python 3 the file may support either C{bytes} or C{unicode}. On
Python 2 only C{bytes} are supported since that is what all files expect
in practice.
@ivar file: The file to which messages will be written.
@ivar _dumps: Function that serializes an object to JSON.
@ivar _linebreak: C{"\n"} as either bytes or unicode.
"""
file = field(mandatory=True)
encoder = field(mandatory=True)
_dumps = field(mandatory=True)
_linebreak = field(mandatory=True)
def __new__(cls, file, encoder=EliotJSONEncoder):
if isinstance(file, IOBase) and not file.writable():
raise RuntimeError("Given file {} is not writeable.")
unicodeFile = False
try:
file.write(b"")
except TypeError:
unicodeFile = True
if unicodeFile:
# On Python 3 native json module outputs unicode:
_dumps = pyjson.dumps
_linebreak = "\n"
else:
_dumps = bytesjson.dumps
_linebreak = b"\n"
return PClass.__new__(
cls, file=file, _dumps=_dumps, _linebreak=_linebreak, encoder=encoder
)
def __call__(self, message):
"""
@param message: A message dictionary.
"""
self.file.write(self._dumps(message, cls=self.encoder) + self._linebreak)
self.file.flush()
def to_file(output_file, encoder=EliotJSONEncoder):
"""
Add a destination that writes a JSON message per line to the given file.
@param output_file: A file-like object.
@param encoder: A JSONEncoder subclass to use when encoding JSON.
"""
Logger._destinations.add(FileDestination(file=output_file, encoder=encoder))
# The default Logger, used when none is specified:
_DEFAULT_LOGGER = Logger()
|
for (exc_type, exception, exc_traceback) in e.errors:
# Can't use same Logger as serialization errors because
# if destination continues to error out we will get
# infinite recursion. So instead we have to manually
# construct a Logger that won't retry.
logger = Logger()
logger._log_tracebacks = False
logger._destinations = self._destinations
msg = {
MESSAGE_TYPE_FIELD: "eliot:destination_failure",
REASON_FIELD: safeunicode(exception),
EXCEPTION_FIELD: exc_type.__module__ + "." + exc_type.__name__,
"message": self._safeUnicodeDictionary(dictionary),
"__eliot_logger__": logger,
}
log_message(**msg)
|
conditional_block
|
es-419.js
|
import regionSettingsMessages from 'ringcentral-integration/modules/RegionSettings/regionSettingsMessages';
export default {
region: "Región",
[regionSettingsMessages.saveSuccess]: "La configuración se guardó correctamente.",
[regionSettingsMessages.dialingPlansChanged]: "Su cuenta ya no se admite para su cuenta.\n Verifique su nueva {regionSettingsLink}.",
regionSettings: "configuración de región",
[regionSettingsMessages.areaCodeInvalid]: "Ingrese un código de área válido."
};
|
// @key: @#@"[regionSettingsMessages.saveSuccess]"@#@ @source: @#@"Settings saved successfully."@#@
// @key: @#@"[regionSettingsMessages.dialingPlansChanged]"@#@ @source: @#@"The previous region is no longer supported for your account.\n Please verify your new {regionSettingsLink}."@#@
// @key: @#@"regionSettings"@#@ @source: @#@"region settings"@#@
// @key: @#@"[regionSettingsMessages.areaCodeInvalid]"@#@ @source: @#@"Please enter a valid area code."@#@
|
// @key: @#@"region"@#@ @source: @#@"Region"@#@
|
random_line_split
|
trafficlogger.py
|
##
# Copyright (c) 2011-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##
"""
This module implements a reactor wrapper which will cause all traffic on
connections set up using that reactor to be logged.
"""
__all__ = ['loggedReactor']
from weakref import ref
from StringIO import StringIO
from collections import namedtuple
from zope.interface import providedBy
from twisted.python.components import proxyForInterface
from twisted.internet.interfaces import IReactorTCP
from twisted.protocols.policies import WrappingFactory, TrafficLoggingProtocol
logstate = namedtuple('logstate', 'active finished')
def loggedReactor(reactor):
"""
Construct and return a wrapper around the given C{reactor} which provides
all of the same interfaces, but which will log all traffic over outgoing
TCP connections it establishes.
"""
bases = []
for iface in providedBy(reactor):
if iface is IReactorTCP:
bases.append(_TCPTrafficLoggingReactor)
else:
bases.append(proxyForInterface(iface, '_reactor'))
if bases:
return type('(Logged Reactor)', tuple(bases), {})(reactor)
return reactor
class _TCPTrafficLoggingReactor(proxyForInterface(IReactorTCP, '_reactor')):
"""
A mixin for a reactor wrapper which defines C{connectTCP} so as to cause
traffic to be logged.
"""
_factories = None
@property
def factories(self):
if self._factories is None:
|
return self._factories
def getLogFiles(self):
active = []
finished = []
for factoryref in self.factories:
factory = factoryref()
active.extend(factory.logs)
finished.extend(factory.finishedLogs)
return logstate(active, finished)
def connectTCP(self, host, port, factory, *args, **kwargs):
wrapper = _TrafficLoggingFactory(factory)
self.factories.append(ref(wrapper, self.factories.remove))
return self._reactor.connectTCP(
host, port, wrapper, *args, **kwargs)
class _TrafficLoggingFactory(WrappingFactory):
"""
A wrapping factory which applies L{TrafficLoggingProtocolWrapper}.
"""
LOGFILE_LIMIT = 20
protocol = TrafficLoggingProtocol
noisy = False
def __init__(self, wrappedFactory):
WrappingFactory.__init__(self, wrappedFactory)
self.logs = []
self.finishedLogs = []
def unregisterProtocol(self, protocol):
WrappingFactory.unregisterProtocol(self, protocol)
self.logs.remove(protocol.logfile)
self.finishedLogs.append(protocol.logfile)
del self.finishedLogs[:-self.LOGFILE_LIMIT]
def buildProtocol(self, addr):
logfile = StringIO()
self.logs.append(logfile)
return self.protocol(
self, self.wrappedFactory.buildProtocol(addr), logfile, None, 0)
|
self._factories = []
|
conditional_block
|
trafficlogger.py
|
##
# Copyright (c) 2011-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##
"""
This module implements a reactor wrapper which will cause all traffic on
connections set up using that reactor to be logged.
"""
__all__ = ['loggedReactor']
from weakref import ref
from StringIO import StringIO
from collections import namedtuple
from zope.interface import providedBy
from twisted.python.components import proxyForInterface
from twisted.internet.interfaces import IReactorTCP
from twisted.protocols.policies import WrappingFactory, TrafficLoggingProtocol
logstate = namedtuple('logstate', 'active finished')
def loggedReactor(reactor):
"""
Construct and return a wrapper around the given C{reactor} which provides
all of the same interfaces, but which will log all traffic over outgoing
TCP connections it establishes.
"""
bases = []
for iface in providedBy(reactor):
if iface is IReactorTCP:
bases.append(_TCPTrafficLoggingReactor)
else:
bases.append(proxyForInterface(iface, '_reactor'))
if bases:
return type('(Logged Reactor)', tuple(bases), {})(reactor)
return reactor
class _TCPTrafficLoggingReactor(proxyForInterface(IReactorTCP, '_reactor')):
"""
A mixin for a reactor wrapper which defines C{connectTCP} so as to cause
traffic to be logged.
"""
_factories = None
@property
def
|
(self):
if self._factories is None:
self._factories = []
return self._factories
def getLogFiles(self):
active = []
finished = []
for factoryref in self.factories:
factory = factoryref()
active.extend(factory.logs)
finished.extend(factory.finishedLogs)
return logstate(active, finished)
def connectTCP(self, host, port, factory, *args, **kwargs):
wrapper = _TrafficLoggingFactory(factory)
self.factories.append(ref(wrapper, self.factories.remove))
return self._reactor.connectTCP(
host, port, wrapper, *args, **kwargs)
class _TrafficLoggingFactory(WrappingFactory):
"""
A wrapping factory which applies L{TrafficLoggingProtocolWrapper}.
"""
LOGFILE_LIMIT = 20
protocol = TrafficLoggingProtocol
noisy = False
def __init__(self, wrappedFactory):
WrappingFactory.__init__(self, wrappedFactory)
self.logs = []
self.finishedLogs = []
def unregisterProtocol(self, protocol):
WrappingFactory.unregisterProtocol(self, protocol)
self.logs.remove(protocol.logfile)
self.finishedLogs.append(protocol.logfile)
del self.finishedLogs[:-self.LOGFILE_LIMIT]
def buildProtocol(self, addr):
logfile = StringIO()
self.logs.append(logfile)
return self.protocol(
self, self.wrappedFactory.buildProtocol(addr), logfile, None, 0)
|
factories
|
identifier_name
|
trafficlogger.py
|
##
# Copyright (c) 2011-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##
"""
This module implements a reactor wrapper which will cause all traffic on
connections set up using that reactor to be logged.
"""
__all__ = ['loggedReactor']
from weakref import ref
from StringIO import StringIO
from collections import namedtuple
from zope.interface import providedBy
from twisted.python.components import proxyForInterface
from twisted.internet.interfaces import IReactorTCP
from twisted.protocols.policies import WrappingFactory, TrafficLoggingProtocol
logstate = namedtuple('logstate', 'active finished')
def loggedReactor(reactor):
"""
Construct and return a wrapper around the given C{reactor} which provides
all of the same interfaces, but which will log all traffic over outgoing
TCP connections it establishes.
"""
bases = []
for iface in providedBy(reactor):
if iface is IReactorTCP:
bases.append(_TCPTrafficLoggingReactor)
else:
bases.append(proxyForInterface(iface, '_reactor'))
if bases:
return type('(Logged Reactor)', tuple(bases), {})(reactor)
return reactor
class _TCPTrafficLoggingReactor(proxyForInterface(IReactorTCP, '_reactor')):
"""
A mixin for a reactor wrapper which defines C{connectTCP} so as to cause
traffic to be logged.
"""
_factories = None
@property
def factories(self):
if self._factories is None:
self._factories = []
return self._factories
def getLogFiles(self):
active = []
finished = []
for factoryref in self.factories:
factory = factoryref()
active.extend(factory.logs)
finished.extend(factory.finishedLogs)
return logstate(active, finished)
def connectTCP(self, host, port, factory, *args, **kwargs):
wrapper = _TrafficLoggingFactory(factory)
self.factories.append(ref(wrapper, self.factories.remove))
return self._reactor.connectTCP(
host, port, wrapper, *args, **kwargs)
class _TrafficLoggingFactory(WrappingFactory):
"""
A wrapping factory which applies L{TrafficLoggingProtocolWrapper}.
"""
LOGFILE_LIMIT = 20
protocol = TrafficLoggingProtocol
noisy = False
|
self.finishedLogs = []
def unregisterProtocol(self, protocol):
WrappingFactory.unregisterProtocol(self, protocol)
self.logs.remove(protocol.logfile)
self.finishedLogs.append(protocol.logfile)
del self.finishedLogs[:-self.LOGFILE_LIMIT]
def buildProtocol(self, addr):
logfile = StringIO()
self.logs.append(logfile)
return self.protocol(
self, self.wrappedFactory.buildProtocol(addr), logfile, None, 0)
|
def __init__(self, wrappedFactory):
WrappingFactory.__init__(self, wrappedFactory)
self.logs = []
|
random_line_split
|
trafficlogger.py
|
##
# Copyright (c) 2011-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##
"""
This module implements a reactor wrapper which will cause all traffic on
connections set up using that reactor to be logged.
"""
__all__ = ['loggedReactor']
from weakref import ref
from StringIO import StringIO
from collections import namedtuple
from zope.interface import providedBy
from twisted.python.components import proxyForInterface
from twisted.internet.interfaces import IReactorTCP
from twisted.protocols.policies import WrappingFactory, TrafficLoggingProtocol
logstate = namedtuple('logstate', 'active finished')
def loggedReactor(reactor):
"""
Construct and return a wrapper around the given C{reactor} which provides
all of the same interfaces, but which will log all traffic over outgoing
TCP connections it establishes.
"""
bases = []
for iface in providedBy(reactor):
if iface is IReactorTCP:
bases.append(_TCPTrafficLoggingReactor)
else:
bases.append(proxyForInterface(iface, '_reactor'))
if bases:
return type('(Logged Reactor)', tuple(bases), {})(reactor)
return reactor
class _TCPTrafficLoggingReactor(proxyForInterface(IReactorTCP, '_reactor')):
"""
A mixin for a reactor wrapper which defines C{connectTCP} so as to cause
traffic to be logged.
"""
_factories = None
@property
def factories(self):
if self._factories is None:
self._factories = []
return self._factories
def getLogFiles(self):
active = []
finished = []
for factoryref in self.factories:
factory = factoryref()
active.extend(factory.logs)
finished.extend(factory.finishedLogs)
return logstate(active, finished)
def connectTCP(self, host, port, factory, *args, **kwargs):
wrapper = _TrafficLoggingFactory(factory)
self.factories.append(ref(wrapper, self.factories.remove))
return self._reactor.connectTCP(
host, port, wrapper, *args, **kwargs)
class _TrafficLoggingFactory(WrappingFactory):
"""
A wrapping factory which applies L{TrafficLoggingProtocolWrapper}.
"""
LOGFILE_LIMIT = 20
protocol = TrafficLoggingProtocol
noisy = False
def __init__(self, wrappedFactory):
WrappingFactory.__init__(self, wrappedFactory)
self.logs = []
self.finishedLogs = []
def unregisterProtocol(self, protocol):
|
def buildProtocol(self, addr):
logfile = StringIO()
self.logs.append(logfile)
return self.protocol(
self, self.wrappedFactory.buildProtocol(addr), logfile, None, 0)
|
WrappingFactory.unregisterProtocol(self, protocol)
self.logs.remove(protocol.logfile)
self.finishedLogs.append(protocol.logfile)
del self.finishedLogs[:-self.LOGFILE_LIMIT]
|
identifier_body
|
borrowed-unique-basic.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// min-lldb-version: 310
// Gdb doesn't know about UTF-32 character encoding and will print a rust char as only
// its numerical value.
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// gdb-command:print *bool_ref
// gdb-check:$1 = true
// gdb-command:print *int_ref
// gdb-check:$2 = -1
// gdb-command:print *char_ref
// gdb-check:$3 = 97
// gdb-command:print/d *i8_ref
// gdb-check:$4 = 68
// gdb-command:print *i16_ref
// gdb-check:$5 = -16
// gdb-command:print *i32_ref
// gdb-check:$6 = -32
// gdb-command:print *i64_ref
// gdb-check:$7 = -64
// gdb-command:print *uint_ref
// gdb-check:$8 = 1
// gdb-command:print/d *u8_ref
// gdb-check:$9 = 100
// gdb-command:print *u16_ref
// gdb-check:$10 = 16
// gdb-command:print *u32_ref
// gdb-check:$11 = 32
// gdb-command:print *u64_ref
// gdb-check:$12 = 64
// gdb-command:print *f32_ref
// gdb-check:$13 = 2.5
// gdb-command:print *f64_ref
// gdb-check:$14 = 3.5
// === LLDB TESTS ==================================================================================
// lldb-command:type format add -f decimal char
// lldb-command:type format add -f decimal 'unsigned char'
// lldb-command:run
// lldb-command:print *bool_ref
// lldb-check:[...]$0 = true
// lldb-command:print *int_ref
// lldb-check:[...]$1 = -1
// d ebugger:print *char_ref
// c heck:[...]$3 = 97
// lldb-command:print *i8_ref
// lldb-check:[...]$2 = 68
// lldb-command:print *i16_ref
// lldb-check:[...]$3 = -16
// lldb-command:print *i32_ref
// lldb-check:[...]$4 = -32
// lldb-command:print *i64_ref
// lldb-check:[...]$5 = -64
// lldb-command:print *uint_ref
// lldb-check:[...]$6 = 1
// lldb-command:print *u8_ref
// lldb-check:[...]$7 = 100
// lldb-command:print *u16_ref
// lldb-check:[...]$8 = 16
// lldb-command:print *u32_ref
// lldb-check:[...]$9 = 32
// lldb-command:print *u64_ref
// lldb-check:[...]$10 = 64
// lldb-command:print *f32_ref
// lldb-check:[...]$11 = 2.5
// lldb-command:print *f64_ref
// lldb-check:[...]$12 = 3.5
#![allow(unused_variables)]
#![feature(box_syntax)]
#![omit_gdb_pretty_printer_section]
fn main() {
let bool_box: Box<bool> = box true;
let bool_ref: &bool = &*bool_box;
let int_box: Box<int> = box -1;
let int_ref: &int = &*int_box;
let char_box: Box<char> = box 'a';
let char_ref: &char = &*char_box;
let i8_box: Box<i8> = box 68;
let i8_ref: &i8 = &*i8_box;
let i16_box: Box<i16> = box -16;
let i16_ref: &i16 = &*i16_box;
let i32_box: Box<i32> = box -32;
let i32_ref: &i32 = &*i32_box;
let i64_box: Box<i64> = box -64;
let i64_ref: &i64 = &*i64_box;
let uint_box: Box<uint> = box 1;
let uint_ref: &uint = &*uint_box;
let u8_box: Box<u8> = box 100;
let u8_ref: &u8 = &*u8_box;
let u16_box: Box<u16> = box 16;
let u16_ref: &u16 = &*u16_box;
let u32_box: Box<u32> = box 32;
let u32_ref: &u32 = &*u32_box;
let u64_box: Box<u64> = box 64;
let u64_ref: &u64 = &*u64_box;
let f32_box: Box<f32> = box 2.5;
let f32_ref: &f32 = &*f32_box;
let f64_box: Box<f64> = box 3.5;
let f64_ref: &f64 = &*f64_box;
zzz(); // #break
}
fn
|
() {()}
|
zzz
|
identifier_name
|
borrowed-unique-basic.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// min-lldb-version: 310
// Gdb doesn't know about UTF-32 character encoding and will print a rust char as only
// its numerical value.
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// gdb-command:print *bool_ref
// gdb-check:$1 = true
// gdb-command:print *int_ref
// gdb-check:$2 = -1
|
// gdb-command:print/d *i8_ref
// gdb-check:$4 = 68
// gdb-command:print *i16_ref
// gdb-check:$5 = -16
// gdb-command:print *i32_ref
// gdb-check:$6 = -32
// gdb-command:print *i64_ref
// gdb-check:$7 = -64
// gdb-command:print *uint_ref
// gdb-check:$8 = 1
// gdb-command:print/d *u8_ref
// gdb-check:$9 = 100
// gdb-command:print *u16_ref
// gdb-check:$10 = 16
// gdb-command:print *u32_ref
// gdb-check:$11 = 32
// gdb-command:print *u64_ref
// gdb-check:$12 = 64
// gdb-command:print *f32_ref
// gdb-check:$13 = 2.5
// gdb-command:print *f64_ref
// gdb-check:$14 = 3.5
// === LLDB TESTS ==================================================================================
// lldb-command:type format add -f decimal char
// lldb-command:type format add -f decimal 'unsigned char'
// lldb-command:run
// lldb-command:print *bool_ref
// lldb-check:[...]$0 = true
// lldb-command:print *int_ref
// lldb-check:[...]$1 = -1
// d ebugger:print *char_ref
// c heck:[...]$3 = 97
// lldb-command:print *i8_ref
// lldb-check:[...]$2 = 68
// lldb-command:print *i16_ref
// lldb-check:[...]$3 = -16
// lldb-command:print *i32_ref
// lldb-check:[...]$4 = -32
// lldb-command:print *i64_ref
// lldb-check:[...]$5 = -64
// lldb-command:print *uint_ref
// lldb-check:[...]$6 = 1
// lldb-command:print *u8_ref
// lldb-check:[...]$7 = 100
// lldb-command:print *u16_ref
// lldb-check:[...]$8 = 16
// lldb-command:print *u32_ref
// lldb-check:[...]$9 = 32
// lldb-command:print *u64_ref
// lldb-check:[...]$10 = 64
// lldb-command:print *f32_ref
// lldb-check:[...]$11 = 2.5
// lldb-command:print *f64_ref
// lldb-check:[...]$12 = 3.5
#![allow(unused_variables)]
#![feature(box_syntax)]
#![omit_gdb_pretty_printer_section]
fn main() {
let bool_box: Box<bool> = box true;
let bool_ref: &bool = &*bool_box;
let int_box: Box<int> = box -1;
let int_ref: &int = &*int_box;
let char_box: Box<char> = box 'a';
let char_ref: &char = &*char_box;
let i8_box: Box<i8> = box 68;
let i8_ref: &i8 = &*i8_box;
let i16_box: Box<i16> = box -16;
let i16_ref: &i16 = &*i16_box;
let i32_box: Box<i32> = box -32;
let i32_ref: &i32 = &*i32_box;
let i64_box: Box<i64> = box -64;
let i64_ref: &i64 = &*i64_box;
let uint_box: Box<uint> = box 1;
let uint_ref: &uint = &*uint_box;
let u8_box: Box<u8> = box 100;
let u8_ref: &u8 = &*u8_box;
let u16_box: Box<u16> = box 16;
let u16_ref: &u16 = &*u16_box;
let u32_box: Box<u32> = box 32;
let u32_ref: &u32 = &*u32_box;
let u64_box: Box<u64> = box 64;
let u64_ref: &u64 = &*u64_box;
let f32_box: Box<f32> = box 2.5;
let f32_ref: &f32 = &*f32_box;
let f64_box: Box<f64> = box 3.5;
let f64_ref: &f64 = &*f64_box;
zzz(); // #break
}
fn zzz() {()}
|
// gdb-command:print *char_ref
// gdb-check:$3 = 97
|
random_line_split
|
borrowed-unique-basic.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// min-lldb-version: 310
// Gdb doesn't know about UTF-32 character encoding and will print a rust char as only
// its numerical value.
// compile-flags:-g
// === GDB TESTS ===================================================================================
// gdb-command:run
// gdb-command:print *bool_ref
// gdb-check:$1 = true
// gdb-command:print *int_ref
// gdb-check:$2 = -1
// gdb-command:print *char_ref
// gdb-check:$3 = 97
// gdb-command:print/d *i8_ref
// gdb-check:$4 = 68
// gdb-command:print *i16_ref
// gdb-check:$5 = -16
// gdb-command:print *i32_ref
// gdb-check:$6 = -32
// gdb-command:print *i64_ref
// gdb-check:$7 = -64
// gdb-command:print *uint_ref
// gdb-check:$8 = 1
// gdb-command:print/d *u8_ref
// gdb-check:$9 = 100
// gdb-command:print *u16_ref
// gdb-check:$10 = 16
// gdb-command:print *u32_ref
// gdb-check:$11 = 32
// gdb-command:print *u64_ref
// gdb-check:$12 = 64
// gdb-command:print *f32_ref
// gdb-check:$13 = 2.5
// gdb-command:print *f64_ref
// gdb-check:$14 = 3.5
// === LLDB TESTS ==================================================================================
// lldb-command:type format add -f decimal char
// lldb-command:type format add -f decimal 'unsigned char'
// lldb-command:run
// lldb-command:print *bool_ref
// lldb-check:[...]$0 = true
// lldb-command:print *int_ref
// lldb-check:[...]$1 = -1
// d ebugger:print *char_ref
// c heck:[...]$3 = 97
// lldb-command:print *i8_ref
// lldb-check:[...]$2 = 68
// lldb-command:print *i16_ref
// lldb-check:[...]$3 = -16
// lldb-command:print *i32_ref
// lldb-check:[...]$4 = -32
// lldb-command:print *i64_ref
// lldb-check:[...]$5 = -64
// lldb-command:print *uint_ref
// lldb-check:[...]$6 = 1
// lldb-command:print *u8_ref
// lldb-check:[...]$7 = 100
// lldb-command:print *u16_ref
// lldb-check:[...]$8 = 16
// lldb-command:print *u32_ref
// lldb-check:[...]$9 = 32
// lldb-command:print *u64_ref
// lldb-check:[...]$10 = 64
// lldb-command:print *f32_ref
// lldb-check:[...]$11 = 2.5
// lldb-command:print *f64_ref
// lldb-check:[...]$12 = 3.5
#![allow(unused_variables)]
#![feature(box_syntax)]
#![omit_gdb_pretty_printer_section]
fn main() {
let bool_box: Box<bool> = box true;
let bool_ref: &bool = &*bool_box;
let int_box: Box<int> = box -1;
let int_ref: &int = &*int_box;
let char_box: Box<char> = box 'a';
let char_ref: &char = &*char_box;
let i8_box: Box<i8> = box 68;
let i8_ref: &i8 = &*i8_box;
let i16_box: Box<i16> = box -16;
let i16_ref: &i16 = &*i16_box;
let i32_box: Box<i32> = box -32;
let i32_ref: &i32 = &*i32_box;
let i64_box: Box<i64> = box -64;
let i64_ref: &i64 = &*i64_box;
let uint_box: Box<uint> = box 1;
let uint_ref: &uint = &*uint_box;
let u8_box: Box<u8> = box 100;
let u8_ref: &u8 = &*u8_box;
let u16_box: Box<u16> = box 16;
let u16_ref: &u16 = &*u16_box;
let u32_box: Box<u32> = box 32;
let u32_ref: &u32 = &*u32_box;
let u64_box: Box<u64> = box 64;
let u64_ref: &u64 = &*u64_box;
let f32_box: Box<f32> = box 2.5;
let f32_ref: &f32 = &*f32_box;
let f64_box: Box<f64> = box 3.5;
let f64_ref: &f64 = &*f64_box;
zzz(); // #break
}
fn zzz()
|
{()}
|
identifier_body
|
|
conf.py
|
# -*- coding: utf-8 -*-
#
# Blend documentation build configuration file, created by
# sphinx-quickstart on Fri Feb 24 14:11:43 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Blend'
copyright = u'2012, Azavea'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Blenddoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Blend.tex', u'Blend Documentation',
u'Justin Walgran', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
|
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'blend', u'Blend Documentation',
[u'Justin Walgran'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Blend', u'Blend Documentation',
u'Justin Walgran', 'Blend', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
# If true, show URL addresses after external links.
|
random_line_split
|
runtest.js
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
var error;
function testGetDescriptor() {
if (error !== undefined) {
chrome.test.sendMessage('fail');
chrome.test.fail(error);
}
chrome.test.assertTrue(descriptor != null, '\'descriptor\' is null');
chrome.test.assertEq('desc_id0', descriptor.instanceId);
chrome.test.assertEq('00001221-0000-1000-8000-00805f9b34fb', descriptor.uuid);
chrome.test.assertEq(false, descriptor.isLocal);
chrome.test.assertEq(charId, descriptor.characteristic.instanceId);
var valueBytes = new Uint8Array(descriptor.value);
chrome.test.assertEq(3, descriptor.value.byteLength);
chrome.test.assertEq(0x01, valueBytes[0]);
chrome.test.assertEq(0x02, valueBytes[1]);
chrome.test.assertEq(0x03, valueBytes[2]);
chrome.test.succeed();
}
var getDescriptor = chrome.bluetoothLowEnergy.getDescriptor;
var charId = 'char_id0';
var descId = 'desc_id0';
var badDescId = 'desc_id1';
var descriptor = null;
function earlyError(message) {
error = message;
chrome.test.runTests([testGetDescriptor]);
}
function expectError(result) {
if (result || !chrome.runtime.lastError) {
earlyError('getDescriptor should have failed');
}
return error !== undefined;
}
// 1. Unknown descriptor instanceId.
getDescriptor(badDescId, function (result) {
if (result || !chrome.runtime.lastError) {
earlyError('getDescriptor should have failed for \'badDescId\'');
return;
}
|
// 3. Known descriptor instanceId, but the mapped service is unknown.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
// 4. Known descriptor instanceId, but the mapped characteristic is
// unknown.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
// 5. Known descriptor instanceId, but the mapped the characteristic
// does not know about the descriptor.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
// 6. Success.
getDescriptor(descId, function (result) {
if (chrome.runtime.lastError) {
earlyError(chrome.runtime.lastError.message);
return;
}
descriptor = result;
chrome.test.sendMessage('ready', function (message) {
chrome.test.runTests([testGetDescriptor]);
});
});
});
});
});
});
});
|
// 2. Known descriptor instanceId, but the mapped device is unknown.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
|
random_line_split
|
runtest.js
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
var error;
function testGetDescriptor()
|
var getDescriptor = chrome.bluetoothLowEnergy.getDescriptor;
var charId = 'char_id0';
var descId = 'desc_id0';
var badDescId = 'desc_id1';
var descriptor = null;
function earlyError(message) {
error = message;
chrome.test.runTests([testGetDescriptor]);
}
function expectError(result) {
if (result || !chrome.runtime.lastError) {
earlyError('getDescriptor should have failed');
}
return error !== undefined;
}
// 1. Unknown descriptor instanceId.
getDescriptor(badDescId, function (result) {
if (result || !chrome.runtime.lastError) {
earlyError('getDescriptor should have failed for \'badDescId\'');
return;
}
// 2. Known descriptor instanceId, but the mapped device is unknown.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
// 3. Known descriptor instanceId, but the mapped service is unknown.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
// 4. Known descriptor instanceId, but the mapped characteristic is
// unknown.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
// 5. Known descriptor instanceId, but the mapped the characteristic
// does not know about the descriptor.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
// 6. Success.
getDescriptor(descId, function (result) {
if (chrome.runtime.lastError) {
earlyError(chrome.runtime.lastError.message);
return;
}
descriptor = result;
chrome.test.sendMessage('ready', function (message) {
chrome.test.runTests([testGetDescriptor]);
});
});
});
});
});
});
});
|
{
if (error !== undefined) {
chrome.test.sendMessage('fail');
chrome.test.fail(error);
}
chrome.test.assertTrue(descriptor != null, '\'descriptor\' is null');
chrome.test.assertEq('desc_id0', descriptor.instanceId);
chrome.test.assertEq('00001221-0000-1000-8000-00805f9b34fb', descriptor.uuid);
chrome.test.assertEq(false, descriptor.isLocal);
chrome.test.assertEq(charId, descriptor.characteristic.instanceId);
var valueBytes = new Uint8Array(descriptor.value);
chrome.test.assertEq(3, descriptor.value.byteLength);
chrome.test.assertEq(0x01, valueBytes[0]);
chrome.test.assertEq(0x02, valueBytes[1]);
chrome.test.assertEq(0x03, valueBytes[2]);
chrome.test.succeed();
}
|
identifier_body
|
runtest.js
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
var error;
function
|
() {
if (error !== undefined) {
chrome.test.sendMessage('fail');
chrome.test.fail(error);
}
chrome.test.assertTrue(descriptor != null, '\'descriptor\' is null');
chrome.test.assertEq('desc_id0', descriptor.instanceId);
chrome.test.assertEq('00001221-0000-1000-8000-00805f9b34fb', descriptor.uuid);
chrome.test.assertEq(false, descriptor.isLocal);
chrome.test.assertEq(charId, descriptor.characteristic.instanceId);
var valueBytes = new Uint8Array(descriptor.value);
chrome.test.assertEq(3, descriptor.value.byteLength);
chrome.test.assertEq(0x01, valueBytes[0]);
chrome.test.assertEq(0x02, valueBytes[1]);
chrome.test.assertEq(0x03, valueBytes[2]);
chrome.test.succeed();
}
var getDescriptor = chrome.bluetoothLowEnergy.getDescriptor;
var charId = 'char_id0';
var descId = 'desc_id0';
var badDescId = 'desc_id1';
var descriptor = null;
function earlyError(message) {
error = message;
chrome.test.runTests([testGetDescriptor]);
}
function expectError(result) {
if (result || !chrome.runtime.lastError) {
earlyError('getDescriptor should have failed');
}
return error !== undefined;
}
// 1. Unknown descriptor instanceId.
getDescriptor(badDescId, function (result) {
if (result || !chrome.runtime.lastError) {
earlyError('getDescriptor should have failed for \'badDescId\'');
return;
}
// 2. Known descriptor instanceId, but the mapped device is unknown.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
// 3. Known descriptor instanceId, but the mapped service is unknown.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
// 4. Known descriptor instanceId, but the mapped characteristic is
// unknown.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
// 5. Known descriptor instanceId, but the mapped the characteristic
// does not know about the descriptor.
getDescriptor(descId, function (result) {
if (expectError(result))
return;
// 6. Success.
getDescriptor(descId, function (result) {
if (chrome.runtime.lastError) {
earlyError(chrome.runtime.lastError.message);
return;
}
descriptor = result;
chrome.test.sendMessage('ready', function (message) {
chrome.test.runTests([testGetDescriptor]);
});
});
});
});
});
});
});
|
testGetDescriptor
|
identifier_name
|
coherence-blanket-conflicts-with-specific-cross-crate.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:go_trait.rs
extern crate go_trait;
use go_trait::{Go,GoMut};
use std::fmt::Debug;
use std::default::Default;
struct MyThingy;
impl Go for MyThingy {
fn
|
(&self, arg: isize) { }
}
impl GoMut for MyThingy { //~ ERROR conflicting implementations
fn go_mut(&mut self, arg: isize) { }
}
fn main() { }
|
go
|
identifier_name
|
coherence-blanket-conflicts-with-specific-cross-crate.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:go_trait.rs
extern crate go_trait;
use go_trait::{Go,GoMut};
use std::fmt::Debug;
use std::default::Default;
struct MyThingy;
impl Go for MyThingy {
fn go(&self, arg: isize) { }
}
impl GoMut for MyThingy { //~ ERROR conflicting implementations
|
fn go_mut(&mut self, arg: isize) { }
}
fn main() { }
|
random_line_split
|
|
coherence-blanket-conflicts-with-specific-cross-crate.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:go_trait.rs
extern crate go_trait;
use go_trait::{Go,GoMut};
use std::fmt::Debug;
use std::default::Default;
struct MyThingy;
impl Go for MyThingy {
fn go(&self, arg: isize)
|
}
impl GoMut for MyThingy { //~ ERROR conflicting implementations
fn go_mut(&mut self, arg: isize) { }
}
fn main() { }
|
{ }
|
identifier_body
|
can_read_log_from_rosout.rs
|
use crossbeam::channel::unbounded;
use std::collections::BTreeSet;
mod util;
mod msg {
rosrust::rosmsg_include!(rosgraph_msgs / Log);
}
#[test]
fn
|
() {
let _roscore = util::run_roscore_for(util::Language::None, util::Feature::Log);
rosrust::init("rosout_agg_listener");
let (tx, rx) = unbounded();
let _subscriber =
rosrust::subscribe::<msg::rosgraph_msgs::Log, _>("/rosout_agg", 100, move |data| {
tx.send((data.level, data.msg)).unwrap();
})
.unwrap();
let rate = rosrust::rate(1.0);
let mut expected_messages = BTreeSet::new();
expected_messages.insert((1, "debug message".to_owned()));
expected_messages.insert((2, "info message".to_owned()));
expected_messages.insert((4, "warn message".to_owned()));
expected_messages.insert((8, "err message".to_owned()));
expected_messages.insert((16, "fatal message".to_owned()));
for _ in 0..10 {
for item in rx.try_iter() {
println!("Received message at level {}: {}", item.0, item.1);
expected_messages.remove(&item);
}
if expected_messages.is_empty() {
return;
}
rosrust::ros_debug!("debug message");
rosrust::ros_info!("info message");
rosrust::ros_warn!("warn message");
rosrust::ros_err!("err message");
rosrust::ros_fatal!("fatal message");
rate.sleep();
}
panic!("Failed to receive data on /rosout_agg");
}
|
can_read_log_from_rosout
|
identifier_name
|
can_read_log_from_rosout.rs
|
use crossbeam::channel::unbounded;
use std::collections::BTreeSet;
mod util;
mod msg {
rosrust::rosmsg_include!(rosgraph_msgs / Log);
}
#[test]
fn can_read_log_from_rosout() {
let _roscore = util::run_roscore_for(util::Language::None, util::Feature::Log);
rosrust::init("rosout_agg_listener");
let (tx, rx) = unbounded();
let _subscriber =
rosrust::subscribe::<msg::rosgraph_msgs::Log, _>("/rosout_agg", 100, move |data| {
tx.send((data.level, data.msg)).unwrap();
})
.unwrap();
let rate = rosrust::rate(1.0);
let mut expected_messages = BTreeSet::new();
expected_messages.insert((1, "debug message".to_owned()));
expected_messages.insert((2, "info message".to_owned()));
expected_messages.insert((4, "warn message".to_owned()));
expected_messages.insert((8, "err message".to_owned()));
expected_messages.insert((16, "fatal message".to_owned()));
for _ in 0..10 {
for item in rx.try_iter() {
println!("Received message at level {}: {}", item.0, item.1);
expected_messages.remove(&item);
}
if expected_messages.is_empty()
|
rosrust::ros_debug!("debug message");
rosrust::ros_info!("info message");
rosrust::ros_warn!("warn message");
rosrust::ros_err!("err message");
rosrust::ros_fatal!("fatal message");
rate.sleep();
}
panic!("Failed to receive data on /rosout_agg");
}
|
{
return;
}
|
conditional_block
|
can_read_log_from_rosout.rs
|
use crossbeam::channel::unbounded;
use std::collections::BTreeSet;
mod util;
mod msg {
rosrust::rosmsg_include!(rosgraph_msgs / Log);
}
#[test]
fn can_read_log_from_rosout() {
let _roscore = util::run_roscore_for(util::Language::None, util::Feature::Log);
rosrust::init("rosout_agg_listener");
let (tx, rx) = unbounded();
let _subscriber =
rosrust::subscribe::<msg::rosgraph_msgs::Log, _>("/rosout_agg", 100, move |data| {
tx.send((data.level, data.msg)).unwrap();
})
.unwrap();
let rate = rosrust::rate(1.0);
let mut expected_messages = BTreeSet::new();
expected_messages.insert((1, "debug message".to_owned()));
expected_messages.insert((2, "info message".to_owned()));
expected_messages.insert((4, "warn message".to_owned()));
expected_messages.insert((8, "err message".to_owned()));
|
expected_messages.insert((16, "fatal message".to_owned()));
for _ in 0..10 {
for item in rx.try_iter() {
println!("Received message at level {}: {}", item.0, item.1);
expected_messages.remove(&item);
}
if expected_messages.is_empty() {
return;
}
rosrust::ros_debug!("debug message");
rosrust::ros_info!("info message");
rosrust::ros_warn!("warn message");
rosrust::ros_err!("err message");
rosrust::ros_fatal!("fatal message");
rate.sleep();
}
panic!("Failed to receive data on /rosout_agg");
}
|
random_line_split
|
|
can_read_log_from_rosout.rs
|
use crossbeam::channel::unbounded;
use std::collections::BTreeSet;
mod util;
mod msg {
rosrust::rosmsg_include!(rosgraph_msgs / Log);
}
#[test]
fn can_read_log_from_rosout()
|
{
let _roscore = util::run_roscore_for(util::Language::None, util::Feature::Log);
rosrust::init("rosout_agg_listener");
let (tx, rx) = unbounded();
let _subscriber =
rosrust::subscribe::<msg::rosgraph_msgs::Log, _>("/rosout_agg", 100, move |data| {
tx.send((data.level, data.msg)).unwrap();
})
.unwrap();
let rate = rosrust::rate(1.0);
let mut expected_messages = BTreeSet::new();
expected_messages.insert((1, "debug message".to_owned()));
expected_messages.insert((2, "info message".to_owned()));
expected_messages.insert((4, "warn message".to_owned()));
expected_messages.insert((8, "err message".to_owned()));
expected_messages.insert((16, "fatal message".to_owned()));
for _ in 0..10 {
for item in rx.try_iter() {
println!("Received message at level {}: {}", item.0, item.1);
expected_messages.remove(&item);
}
if expected_messages.is_empty() {
return;
}
rosrust::ros_debug!("debug message");
rosrust::ros_info!("info message");
rosrust::ros_warn!("warn message");
rosrust::ros_err!("err message");
rosrust::ros_fatal!("fatal message");
rate.sleep();
}
panic!("Failed to receive data on /rosout_agg");
}
|
identifier_body
|
|
origin.rs
|
extern crate regex;
use std::fmt;
use std::str::FromStr;
use std::string::ToString;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use regex::Regex;
use super::{ NetType, AddrType };
use super::{ ProtocolVersion, SessionVersion };
use error::Error;
// o=<username> <sess-id> <sess-version> <nettype> <addrtype> <unicast-address>
// o=mozilla...THIS_IS_SDPARTA-46.0.1 5381835512098962904 0 IN IP4 0.0.0.0
#[derive(Clone, Debug)]
pub struct Origin {
pub username : String, // username MUST NOT contain spaces
pub session_id: String,
pub session_version: SessionVersion,
pub nettype : NetType, // IN( IANA Registered, Meas `Internet` )
pub addrtype: AddrType, // IP4 | IP6
pub address : IpAddr,
}
impl ToString for Origin {
fn to_string(&self) -> String {
let origin = "o=".to_string()
+ self.username.as_ref() + " "
+ self.session_id.as_ref() + " "
+ self.session_version.to_string().as_ref() + " "
+ self.nettype.to_string().as_ref() + " "
+ self.addrtype.to_string().as_ref() + " "
+ self.address.to_string().as_ref();
origin
}
}
impl FromStr for Origin {
type Err = Error;
fn from_str(s: &str) -> Result<Origin, Error>
|
}
|
{
let re = match Regex::new(r"(\S+)\s(\S+)\s(\d+)\s(IN)\s(IP\d)\s(\d+\.\d+\.\d+\.\d+)") {
Ok(re) => re,
Err(e) => {
println!("[Regex] {:?}", e);
return Err(Error::Origin);
}
};
let cap = re.captures(s).unwrap();
let username = match cap.at(1) {
Some(username) => username.to_string(),
None => return Err(Error::SessionName)
};
let session_id = match cap.at(2) {
Some(session_id) => session_id.to_string(),
None => return Err(Error::SessionId)
};
let session_version = match cap.at(3) {
Some(session_version) => match SessionVersion::from_str(session_version) {
Ok(session_version) => session_version,
Err(_) => return Err(Error::SessionVersion)
},
None => return Err(Error::SessionVersion)
};
let nettype = match cap.at(4) {
Some(nettype) => {
match NetType::from_str(nettype) {
Ok(nettype) => nettype,
Err(_) => return Err(Error::NetType)
}
},
None => return Err(Error::NetType)
};
let addrtype = match cap.at(5) {
Some(addrtype) => match AddrType::from_str(addrtype) {
Ok(addrtype) => addrtype,
Err(_) => return Err(Error::AddrType)
},
None => return Err(Error::AddrType)
};
let address = match cap.at(6) {
Some(address) => {
match IpAddr::from_str(address) {
Ok(address) => address,
Err(e) => return Err(Error::IpAddress)
}
},
None => return Err(Error::IpAddress)
};
// check addrtype <-> address
match addrtype {
AddrType::Ip4 => {
match address {
IpAddr::V6(_) => return Err(Error::AddrType),
IpAddr::V4(_) => { }
};
},
AddrType::Ip6 => {
match address {
IpAddr::V4(_) => return Err(Error::AddrType),
IpAddr::V6(_) => { }
};
}
}
Ok(Origin {
username: username,
session_id : session_id,
session_version: session_version,
nettype : nettype,
addrtype: addrtype,
address : address
})
}
|
identifier_body
|
origin.rs
|
extern crate regex;
use std::fmt;
use std::str::FromStr;
use std::string::ToString;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use regex::Regex;
use super::{ NetType, AddrType };
use super::{ ProtocolVersion, SessionVersion };
use error::Error;
// o=<username> <sess-id> <sess-version> <nettype> <addrtype> <unicast-address>
// o=mozilla...THIS_IS_SDPARTA-46.0.1 5381835512098962904 0 IN IP4 0.0.0.0
#[derive(Clone, Debug)]
pub struct Origin {
pub username : String, // username MUST NOT contain spaces
pub session_id: String,
pub session_version: SessionVersion,
pub nettype : NetType, // IN( IANA Registered, Meas `Internet` )
pub addrtype: AddrType, // IP4 | IP6
pub address : IpAddr,
}
impl ToString for Origin {
fn to_string(&self) -> String {
let origin = "o=".to_string()
+ self.username.as_ref() + " "
+ self.session_id.as_ref() + " "
+ self.session_version.to_string().as_ref() + " "
+ self.nettype.to_string().as_ref() + " "
+ self.addrtype.to_string().as_ref() + " "
+ self.address.to_string().as_ref();
origin
}
}
impl FromStr for Origin {
type Err = Error;
fn from_str(s: &str) -> Result<Origin, Error> {
let re = match Regex::new(r"(\S+)\s(\S+)\s(\d+)\s(IN)\s(IP\d)\s(\d+\.\d+\.\d+\.\d+)") {
|
};
let cap = re.captures(s).unwrap();
let username = match cap.at(1) {
Some(username) => username.to_string(),
None => return Err(Error::SessionName)
};
let session_id = match cap.at(2) {
Some(session_id) => session_id.to_string(),
None => return Err(Error::SessionId)
};
let session_version = match cap.at(3) {
Some(session_version) => match SessionVersion::from_str(session_version) {
Ok(session_version) => session_version,
Err(_) => return Err(Error::SessionVersion)
},
None => return Err(Error::SessionVersion)
};
let nettype = match cap.at(4) {
Some(nettype) => {
match NetType::from_str(nettype) {
Ok(nettype) => nettype,
Err(_) => return Err(Error::NetType)
}
},
None => return Err(Error::NetType)
};
let addrtype = match cap.at(5) {
Some(addrtype) => match AddrType::from_str(addrtype) {
Ok(addrtype) => addrtype,
Err(_) => return Err(Error::AddrType)
},
None => return Err(Error::AddrType)
};
let address = match cap.at(6) {
Some(address) => {
match IpAddr::from_str(address) {
Ok(address) => address,
Err(e) => return Err(Error::IpAddress)
}
},
None => return Err(Error::IpAddress)
};
// check addrtype <-> address
match addrtype {
AddrType::Ip4 => {
match address {
IpAddr::V6(_) => return Err(Error::AddrType),
IpAddr::V4(_) => { }
};
},
AddrType::Ip6 => {
match address {
IpAddr::V4(_) => return Err(Error::AddrType),
IpAddr::V6(_) => { }
};
}
}
Ok(Origin {
username: username,
session_id : session_id,
session_version: session_version,
nettype : nettype,
addrtype: addrtype,
address : address
})
}
}
|
Ok(re) => re,
Err(e) => {
println!("[Regex] {:?}", e);
return Err(Error::Origin);
}
|
random_line_split
|
origin.rs
|
extern crate regex;
use std::fmt;
use std::str::FromStr;
use std::string::ToString;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use regex::Regex;
use super::{ NetType, AddrType };
use super::{ ProtocolVersion, SessionVersion };
use error::Error;
// o=<username> <sess-id> <sess-version> <nettype> <addrtype> <unicast-address>
// o=mozilla...THIS_IS_SDPARTA-46.0.1 5381835512098962904 0 IN IP4 0.0.0.0
#[derive(Clone, Debug)]
pub struct Origin {
pub username : String, // username MUST NOT contain spaces
pub session_id: String,
pub session_version: SessionVersion,
pub nettype : NetType, // IN( IANA Registered, Meas `Internet` )
pub addrtype: AddrType, // IP4 | IP6
pub address : IpAddr,
}
impl ToString for Origin {
fn
|
(&self) -> String {
let origin = "o=".to_string()
+ self.username.as_ref() + " "
+ self.session_id.as_ref() + " "
+ self.session_version.to_string().as_ref() + " "
+ self.nettype.to_string().as_ref() + " "
+ self.addrtype.to_string().as_ref() + " "
+ self.address.to_string().as_ref();
origin
}
}
impl FromStr for Origin {
type Err = Error;
fn from_str(s: &str) -> Result<Origin, Error> {
let re = match Regex::new(r"(\S+)\s(\S+)\s(\d+)\s(IN)\s(IP\d)\s(\d+\.\d+\.\d+\.\d+)") {
Ok(re) => re,
Err(e) => {
println!("[Regex] {:?}", e);
return Err(Error::Origin);
}
};
let cap = re.captures(s).unwrap();
let username = match cap.at(1) {
Some(username) => username.to_string(),
None => return Err(Error::SessionName)
};
let session_id = match cap.at(2) {
Some(session_id) => session_id.to_string(),
None => return Err(Error::SessionId)
};
let session_version = match cap.at(3) {
Some(session_version) => match SessionVersion::from_str(session_version) {
Ok(session_version) => session_version,
Err(_) => return Err(Error::SessionVersion)
},
None => return Err(Error::SessionVersion)
};
let nettype = match cap.at(4) {
Some(nettype) => {
match NetType::from_str(nettype) {
Ok(nettype) => nettype,
Err(_) => return Err(Error::NetType)
}
},
None => return Err(Error::NetType)
};
let addrtype = match cap.at(5) {
Some(addrtype) => match AddrType::from_str(addrtype) {
Ok(addrtype) => addrtype,
Err(_) => return Err(Error::AddrType)
},
None => return Err(Error::AddrType)
};
let address = match cap.at(6) {
Some(address) => {
match IpAddr::from_str(address) {
Ok(address) => address,
Err(e) => return Err(Error::IpAddress)
}
},
None => return Err(Error::IpAddress)
};
// check addrtype <-> address
match addrtype {
AddrType::Ip4 => {
match address {
IpAddr::V6(_) => return Err(Error::AddrType),
IpAddr::V4(_) => { }
};
},
AddrType::Ip6 => {
match address {
IpAddr::V4(_) => return Err(Error::AddrType),
IpAddr::V6(_) => { }
};
}
}
Ok(Origin {
username: username,
session_id : session_id,
session_version: session_version,
nettype : nettype,
addrtype: addrtype,
address : address
})
}
}
|
to_string
|
identifier_name
|
regie.py
|
from .base import BaseInterface
import eventlet
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
from flask import Flask, render_template, session, request, send_from_directory
from flask_socketio import SocketIO, emit, join_room, leave_room, close_room, rooms, disconnect
from werkzeug.utils import secure_filename
import threading, os, time, queue
import logging, sys, json
from ..engine.network import get_allip, get_hostname
import socket
from zeroconf import ServiceInfo, Zeroconf
thread = None
thread_lock = threading.Lock()
REGIE_PATH1 = '/opt/RPi-Regie'
REGIE_PATH2 = '/data/RPi-Regie'
class RegieInterface (BaseInterface):
def __init__(self, hplayer, port, datapath):
super(RegieInterface, self).__init__(hplayer, "Regie")
self._port = port
self._datapath = datapath
self._server = None
# HTTP receiver THREAD
def listen(self):
# Advertize on ZeroConf
zeroconf = Zeroconf()
info = ServiceInfo(
"_http._tcp.local.",
"Regie._"+get_hostname()+"._http._tcp.local.",
addresses=[socket.inet_aton(ip) for ip in get_allip()],
port=self._port,
properties={},
server=get_hostname()+".local.",
)
zeroconf.register_service(info)
# Start server
self.log( "regie interface on port", self._port)
with ThreadedHTTPServer(self, self._port) as server:
self._server = server
self.stopped.wait()
self._server = None
# Unregister ZeroConf
zeroconf.unregister_service(info)
zeroconf.close()
def projectPath(self):
return os.path.join(self._datapath, 'project.json')
def projectRaw(self):
project = '{"pool":[], "project":[[]]}'
if os.path.isfile(self.projectPath()):
with open( self.projectPath(), 'r') as file:
project = file.read()
return project
# parse locally for programatic execution
def reload(self):
try:
self._project = json.loads(self.projectRaw())
except:
self._project = None
self.log("Error while parsing project..")
# print(self._project)
return self._project
# play sequence
def playseq(self, sceneIndex, seqIndex):
self.log("PLAYSEQ")
try:
# self.log('PLAYSEQ', seqIndex, sceneIndex, boxes)
orderz = []
boxes = [b for b in self._project["project"][0][sceneIndex]["allMedias"] if b["y"] == seqIndex]
for b in boxes:
peerName = self._project["pool"][ b["x"] ]["name"]
# MEDIA
order = { 'peer': peerName, 'synchro': True}
if b["media"] in ['stop', 'pause', 'unfade'] :
order["event"] = b["media"]
elif b["media"] == '...':
order["event"] = 'continue'
elif b["media"].startswith('fade'):
order["event"] = 'fade'
order["data"] = b["media"].split('fade ')[1]
else:
order["event"] = 'playthen'
order["data"] = [ self._project["project"][0][sceneIndex]["name"] + '/' + b["media"] ]
# ON MEDIA END
if 'onend' in b:
if b['onend'] == 'next':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex+1] } )
elif b['onend'] == 'prev':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex-1] } )
elif b['onend'] == 'replay':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex] } )
orderz.append(order)
# LOOP
if b["loop"] == 'loop':
orderz.append( { 'peer': peerName, 'event': 'loop', 'data': 1} )
elif b["loop"] == 'unloop':
orderz.append( { 'peer': peerName, 'event': 'unloop'} )
|
order["data"] = {
'topic': 'leds/all',
'data': b["light"].split('light ')[1]
}
elif b["light"].startswith('preset'):
order["data"] = {
'topic': 'leds/mem',
'data': b["light"].split('preset ')[1]
}
elif b["light"].startswith('off'):
order["data"] = {
'topic': 'leds/stop',
'data': ''
}
orderz.append(order)
self.emit('playingseq', sceneIndex, seqIndex)
self.emit('peers.triggers', orderz, 437)
except:
self.log('Error playing Scene', sceneIndex, 'Seq', seqIndex)
#
# Threaded HTTP Server
#
class ThreadedHTTPServer(object):
def __init__(self, regieinterface, port):
self.regieinterface = regieinterface
interface_path = os.path.dirname(os.path.realpath(__file__))
if os.path.isdir(REGIE_PATH1):
www_path = os.path.join(REGIE_PATH1, 'web')
elif os.path.isdir(REGIE_PATH2):
www_path = os.path.join(REGIE_PATH2, 'web')
else:
www_path = os.path.join(interface_path, 'regie')
app = Flask(__name__, template_folder=www_path)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, cors_allowed_origins="*")
#
# FLASK Routing Static
#
@app.route('/')
def index():
# self.regieinterface.log('requesting index')
return send_from_directory(www_path, 'index.html')
@app.route('/<path:path>')
def send_static(path):
# self.regieinterface.log('requesting '+path)
return send_from_directory(www_path, path)
#
# FLASK Routing API
#
# @app.route('/<path:path>')
# def send_static(path):
# # self.regieinterface.log('requesting '+path)
# return send_from_directory(www_path, path)
#
# SOCKETIO Routing
#
self.sendBuffer = queue.Queue()
def background_thread():
while True:
try:
task = self.sendBuffer.get_nowait()
if len(task) > 1: socketio.emit(task[0], task[1])
else: socketio.emit(task[0], None)
self.sendBuffer.task_done()
except queue.Empty:
socketio.sleep(0.1)
@self.regieinterface.hplayer.on('files.dirlist-updated')
def filetree_send(ev, *args):
self.sendBuffer.put( ('data', {'fileTree': self.regieinterface.hplayer.files()}) )
@self.regieinterface.hplayer.on('files.activedir-updated')
def activedir_send(ev, *args):
self.sendBuffer.put( ('data', {'scene': args[1]}) )
@self.regieinterface.hplayer.on('*.peer.*')
def peer_send(ev, *args):
event = ev.split('.')[-1]
if event == 'playingseq':
print(ev, args[0]['data'][1])
self.sendBuffer.put( ('data', {'sequence': args[0]['data'][1]}) )
else:
args[0].update({'type': event})
self.sendBuffer.put( ('peer', args[0]) )
# !!! TODO: stop zyre monitoring when every client are disconnected
@socketio.on('connect')
def client_connect():
self.regieinterface.log('New Remote Regie connected')
@socketio.on('save')
def save(data):
try:
json.loads(data)
with open( os.path.join(self.regieinterface._datapath, 'project.json'), 'w') as file:
file.write(data)
except:
e = str(sys.exc_info()[0])
self.regieinterface.log('fail to save project: '+e+' '+data)
@socketio.on('init')
def init(data):
# send project
emit('data', self.projectData())
# Start update broadcaster
global thread
with thread_lock:
if thread is None:
thread = socketio.start_background_task(target=background_thread)
@socketio.on('register')
def register(data):
# enable peer monitoring
self.regieinterface.emit('peers.getlink')
self.regieinterface.emit('peers.subscribe', ['status', 'settings', 'playingseq'])
@socketio.on('event')
def event(data):
self.regieinterface.emit('peers.triggers', data, 437)
# prepare sub-thread
self.server_thread = threading.Thread(target=lambda:socketio.run(app, host='0.0.0.0', port=port))
self.server_thread.daemon = True
# watchdog project.json
self.watcher()
# internal load project
self.regieinterface.reload()
def start(self):
self.server_thread.start()
def stop(self):
#self.server.stop()
pass
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
def projectData(self):
data={
'fullproject': self.regieinterface.projectRaw(),
'fileTree': self.regieinterface.hplayer.files()
}
return data
def watcher(self):
def onchange(e):
self.regieinterface.log('project updated ! pushing it...')
self.regieinterface.reload()
self.sendBuffer.put( ('data', self.projectData()) )
handler = PatternMatchingEventHandler("*/project.json", None, False, True)
handler.on_any_event = onchange
self.projectObserver = Observer()
self.projectObserver.schedule(handler, os.path.dirname(self.regieinterface.projectPath()))
try:
self.projectObserver.start()
except:
self.regieinterface.log('project.json not found')
|
# LIGHT
if b["light"] and b["light"] != '...':
order = { 'peer': peerName, 'synchro': True, 'event': 'esp'}
if b["light"].startswith('light'):
|
random_line_split
|
regie.py
|
from .base import BaseInterface
import eventlet
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
from flask import Flask, render_template, session, request, send_from_directory
from flask_socketio import SocketIO, emit, join_room, leave_room, close_room, rooms, disconnect
from werkzeug.utils import secure_filename
import threading, os, time, queue
import logging, sys, json
from ..engine.network import get_allip, get_hostname
import socket
from zeroconf import ServiceInfo, Zeroconf
thread = None
thread_lock = threading.Lock()
REGIE_PATH1 = '/opt/RPi-Regie'
REGIE_PATH2 = '/data/RPi-Regie'
class RegieInterface (BaseInterface):
def
|
(self, hplayer, port, datapath):
super(RegieInterface, self).__init__(hplayer, "Regie")
self._port = port
self._datapath = datapath
self._server = None
# HTTP receiver THREAD
def listen(self):
# Advertize on ZeroConf
zeroconf = Zeroconf()
info = ServiceInfo(
"_http._tcp.local.",
"Regie._"+get_hostname()+"._http._tcp.local.",
addresses=[socket.inet_aton(ip) for ip in get_allip()],
port=self._port,
properties={},
server=get_hostname()+".local.",
)
zeroconf.register_service(info)
# Start server
self.log( "regie interface on port", self._port)
with ThreadedHTTPServer(self, self._port) as server:
self._server = server
self.stopped.wait()
self._server = None
# Unregister ZeroConf
zeroconf.unregister_service(info)
zeroconf.close()
def projectPath(self):
return os.path.join(self._datapath, 'project.json')
def projectRaw(self):
project = '{"pool":[], "project":[[]]}'
if os.path.isfile(self.projectPath()):
with open( self.projectPath(), 'r') as file:
project = file.read()
return project
# parse locally for programatic execution
def reload(self):
try:
self._project = json.loads(self.projectRaw())
except:
self._project = None
self.log("Error while parsing project..")
# print(self._project)
return self._project
# play sequence
def playseq(self, sceneIndex, seqIndex):
self.log("PLAYSEQ")
try:
# self.log('PLAYSEQ', seqIndex, sceneIndex, boxes)
orderz = []
boxes = [b for b in self._project["project"][0][sceneIndex]["allMedias"] if b["y"] == seqIndex]
for b in boxes:
peerName = self._project["pool"][ b["x"] ]["name"]
# MEDIA
order = { 'peer': peerName, 'synchro': True}
if b["media"] in ['stop', 'pause', 'unfade'] :
order["event"] = b["media"]
elif b["media"] == '...':
order["event"] = 'continue'
elif b["media"].startswith('fade'):
order["event"] = 'fade'
order["data"] = b["media"].split('fade ')[1]
else:
order["event"] = 'playthen'
order["data"] = [ self._project["project"][0][sceneIndex]["name"] + '/' + b["media"] ]
# ON MEDIA END
if 'onend' in b:
if b['onend'] == 'next':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex+1] } )
elif b['onend'] == 'prev':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex-1] } )
elif b['onend'] == 'replay':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex] } )
orderz.append(order)
# LOOP
if b["loop"] == 'loop':
orderz.append( { 'peer': peerName, 'event': 'loop', 'data': 1} )
elif b["loop"] == 'unloop':
orderz.append( { 'peer': peerName, 'event': 'unloop'} )
# LIGHT
if b["light"] and b["light"] != '...':
order = { 'peer': peerName, 'synchro': True, 'event': 'esp'}
if b["light"].startswith('light'):
order["data"] = {
'topic': 'leds/all',
'data': b["light"].split('light ')[1]
}
elif b["light"].startswith('preset'):
order["data"] = {
'topic': 'leds/mem',
'data': b["light"].split('preset ')[1]
}
elif b["light"].startswith('off'):
order["data"] = {
'topic': 'leds/stop',
'data': ''
}
orderz.append(order)
self.emit('playingseq', sceneIndex, seqIndex)
self.emit('peers.triggers', orderz, 437)
except:
self.log('Error playing Scene', sceneIndex, 'Seq', seqIndex)
#
# Threaded HTTP Server
#
class ThreadedHTTPServer(object):
def __init__(self, regieinterface, port):
self.regieinterface = regieinterface
interface_path = os.path.dirname(os.path.realpath(__file__))
if os.path.isdir(REGIE_PATH1):
www_path = os.path.join(REGIE_PATH1, 'web')
elif os.path.isdir(REGIE_PATH2):
www_path = os.path.join(REGIE_PATH2, 'web')
else:
www_path = os.path.join(interface_path, 'regie')
app = Flask(__name__, template_folder=www_path)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, cors_allowed_origins="*")
#
# FLASK Routing Static
#
@app.route('/')
def index():
# self.regieinterface.log('requesting index')
return send_from_directory(www_path, 'index.html')
@app.route('/<path:path>')
def send_static(path):
# self.regieinterface.log('requesting '+path)
return send_from_directory(www_path, path)
#
# FLASK Routing API
#
# @app.route('/<path:path>')
# def send_static(path):
# # self.regieinterface.log('requesting '+path)
# return send_from_directory(www_path, path)
#
# SOCKETIO Routing
#
self.sendBuffer = queue.Queue()
def background_thread():
while True:
try:
task = self.sendBuffer.get_nowait()
if len(task) > 1: socketio.emit(task[0], task[1])
else: socketio.emit(task[0], None)
self.sendBuffer.task_done()
except queue.Empty:
socketio.sleep(0.1)
@self.regieinterface.hplayer.on('files.dirlist-updated')
def filetree_send(ev, *args):
self.sendBuffer.put( ('data', {'fileTree': self.regieinterface.hplayer.files()}) )
@self.regieinterface.hplayer.on('files.activedir-updated')
def activedir_send(ev, *args):
self.sendBuffer.put( ('data', {'scene': args[1]}) )
@self.regieinterface.hplayer.on('*.peer.*')
def peer_send(ev, *args):
event = ev.split('.')[-1]
if event == 'playingseq':
print(ev, args[0]['data'][1])
self.sendBuffer.put( ('data', {'sequence': args[0]['data'][1]}) )
else:
args[0].update({'type': event})
self.sendBuffer.put( ('peer', args[0]) )
# !!! TODO: stop zyre monitoring when every client are disconnected
@socketio.on('connect')
def client_connect():
self.regieinterface.log('New Remote Regie connected')
@socketio.on('save')
def save(data):
try:
json.loads(data)
with open( os.path.join(self.regieinterface._datapath, 'project.json'), 'w') as file:
file.write(data)
except:
e = str(sys.exc_info()[0])
self.regieinterface.log('fail to save project: '+e+' '+data)
@socketio.on('init')
def init(data):
# send project
emit('data', self.projectData())
# Start update broadcaster
global thread
with thread_lock:
if thread is None:
thread = socketio.start_background_task(target=background_thread)
@socketio.on('register')
def register(data):
# enable peer monitoring
self.regieinterface.emit('peers.getlink')
self.regieinterface.emit('peers.subscribe', ['status', 'settings', 'playingseq'])
@socketio.on('event')
def event(data):
self.regieinterface.emit('peers.triggers', data, 437)
# prepare sub-thread
self.server_thread = threading.Thread(target=lambda:socketio.run(app, host='0.0.0.0', port=port))
self.server_thread.daemon = True
# watchdog project.json
self.watcher()
# internal load project
self.regieinterface.reload()
def start(self):
self.server_thread.start()
def stop(self):
#self.server.stop()
pass
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
def projectData(self):
data={
'fullproject': self.regieinterface.projectRaw(),
'fileTree': self.regieinterface.hplayer.files()
}
return data
def watcher(self):
def onchange(e):
self.regieinterface.log('project updated ! pushing it...')
self.regieinterface.reload()
self.sendBuffer.put( ('data', self.projectData()) )
handler = PatternMatchingEventHandler("*/project.json", None, False, True)
handler.on_any_event = onchange
self.projectObserver = Observer()
self.projectObserver.schedule(handler, os.path.dirname(self.regieinterface.projectPath()))
try:
self.projectObserver.start()
except:
self.regieinterface.log('project.json not found')
|
__init__
|
identifier_name
|
regie.py
|
from .base import BaseInterface
import eventlet
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
from flask import Flask, render_template, session, request, send_from_directory
from flask_socketio import SocketIO, emit, join_room, leave_room, close_room, rooms, disconnect
from werkzeug.utils import secure_filename
import threading, os, time, queue
import logging, sys, json
from ..engine.network import get_allip, get_hostname
import socket
from zeroconf import ServiceInfo, Zeroconf
thread = None
thread_lock = threading.Lock()
REGIE_PATH1 = '/opt/RPi-Regie'
REGIE_PATH2 = '/data/RPi-Regie'
class RegieInterface (BaseInterface):
def __init__(self, hplayer, port, datapath):
super(RegieInterface, self).__init__(hplayer, "Regie")
self._port = port
self._datapath = datapath
self._server = None
# HTTP receiver THREAD
def listen(self):
# Advertize on ZeroConf
zeroconf = Zeroconf()
info = ServiceInfo(
"_http._tcp.local.",
"Regie._"+get_hostname()+"._http._tcp.local.",
addresses=[socket.inet_aton(ip) for ip in get_allip()],
port=self._port,
properties={},
server=get_hostname()+".local.",
)
zeroconf.register_service(info)
# Start server
self.log( "regie interface on port", self._port)
with ThreadedHTTPServer(self, self._port) as server:
self._server = server
self.stopped.wait()
self._server = None
# Unregister ZeroConf
zeroconf.unregister_service(info)
zeroconf.close()
def projectPath(self):
return os.path.join(self._datapath, 'project.json')
def projectRaw(self):
project = '{"pool":[], "project":[[]]}'
if os.path.isfile(self.projectPath()):
with open( self.projectPath(), 'r') as file:
project = file.read()
return project
# parse locally for programatic execution
def reload(self):
try:
self._project = json.loads(self.projectRaw())
except:
self._project = None
self.log("Error while parsing project..")
# print(self._project)
return self._project
# play sequence
def playseq(self, sceneIndex, seqIndex):
self.log("PLAYSEQ")
try:
# self.log('PLAYSEQ', seqIndex, sceneIndex, boxes)
orderz = []
boxes = [b for b in self._project["project"][0][sceneIndex]["allMedias"] if b["y"] == seqIndex]
for b in boxes:
peerName = self._project["pool"][ b["x"] ]["name"]
# MEDIA
order = { 'peer': peerName, 'synchro': True}
if b["media"] in ['stop', 'pause', 'unfade'] :
order["event"] = b["media"]
elif b["media"] == '...':
order["event"] = 'continue'
elif b["media"].startswith('fade'):
order["event"] = 'fade'
order["data"] = b["media"].split('fade ')[1]
else:
order["event"] = 'playthen'
order["data"] = [ self._project["project"][0][sceneIndex]["name"] + '/' + b["media"] ]
# ON MEDIA END
if 'onend' in b:
if b['onend'] == 'next':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex+1] } )
elif b['onend'] == 'prev':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex-1] } )
elif b['onend'] == 'replay':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex] } )
orderz.append(order)
# LOOP
if b["loop"] == 'loop':
orderz.append( { 'peer': peerName, 'event': 'loop', 'data': 1} )
elif b["loop"] == 'unloop':
orderz.append( { 'peer': peerName, 'event': 'unloop'} )
# LIGHT
if b["light"] and b["light"] != '...':
order = { 'peer': peerName, 'synchro': True, 'event': 'esp'}
if b["light"].startswith('light'):
order["data"] = {
'topic': 'leds/all',
'data': b["light"].split('light ')[1]
}
elif b["light"].startswith('preset'):
order["data"] = {
'topic': 'leds/mem',
'data': b["light"].split('preset ')[1]
}
elif b["light"].startswith('off'):
order["data"] = {
'topic': 'leds/stop',
'data': ''
}
orderz.append(order)
self.emit('playingseq', sceneIndex, seqIndex)
self.emit('peers.triggers', orderz, 437)
except:
self.log('Error playing Scene', sceneIndex, 'Seq', seqIndex)
#
# Threaded HTTP Server
#
class ThreadedHTTPServer(object):
def __init__(self, regieinterface, port):
self.regieinterface = regieinterface
interface_path = os.path.dirname(os.path.realpath(__file__))
if os.path.isdir(REGIE_PATH1):
www_path = os.path.join(REGIE_PATH1, 'web')
elif os.path.isdir(REGIE_PATH2):
www_path = os.path.join(REGIE_PATH2, 'web')
else:
www_path = os.path.join(interface_path, 'regie')
app = Flask(__name__, template_folder=www_path)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, cors_allowed_origins="*")
#
# FLASK Routing Static
#
@app.route('/')
def index():
# self.regieinterface.log('requesting index')
return send_from_directory(www_path, 'index.html')
@app.route('/<path:path>')
def send_static(path):
# self.regieinterface.log('requesting '+path)
return send_from_directory(www_path, path)
#
# FLASK Routing API
#
# @app.route('/<path:path>')
# def send_static(path):
# # self.regieinterface.log('requesting '+path)
# return send_from_directory(www_path, path)
#
# SOCKETIO Routing
#
self.sendBuffer = queue.Queue()
def background_thread():
while True:
try:
task = self.sendBuffer.get_nowait()
if len(task) > 1: socketio.emit(task[0], task[1])
else: socketio.emit(task[0], None)
self.sendBuffer.task_done()
except queue.Empty:
socketio.sleep(0.1)
@self.regieinterface.hplayer.on('files.dirlist-updated')
def filetree_send(ev, *args):
self.sendBuffer.put( ('data', {'fileTree': self.regieinterface.hplayer.files()}) )
@self.regieinterface.hplayer.on('files.activedir-updated')
def activedir_send(ev, *args):
self.sendBuffer.put( ('data', {'scene': args[1]}) )
@self.regieinterface.hplayer.on('*.peer.*')
def peer_send(ev, *args):
event = ev.split('.')[-1]
if event == 'playingseq':
|
else:
args[0].update({'type': event})
self.sendBuffer.put( ('peer', args[0]) )
# !!! TODO: stop zyre monitoring when every client are disconnected
@socketio.on('connect')
def client_connect():
self.regieinterface.log('New Remote Regie connected')
@socketio.on('save')
def save(data):
try:
json.loads(data)
with open( os.path.join(self.regieinterface._datapath, 'project.json'), 'w') as file:
file.write(data)
except:
e = str(sys.exc_info()[0])
self.regieinterface.log('fail to save project: '+e+' '+data)
@socketio.on('init')
def init(data):
# send project
emit('data', self.projectData())
# Start update broadcaster
global thread
with thread_lock:
if thread is None:
thread = socketio.start_background_task(target=background_thread)
@socketio.on('register')
def register(data):
# enable peer monitoring
self.regieinterface.emit('peers.getlink')
self.regieinterface.emit('peers.subscribe', ['status', 'settings', 'playingseq'])
@socketio.on('event')
def event(data):
self.regieinterface.emit('peers.triggers', data, 437)
# prepare sub-thread
self.server_thread = threading.Thread(target=lambda:socketio.run(app, host='0.0.0.0', port=port))
self.server_thread.daemon = True
# watchdog project.json
self.watcher()
# internal load project
self.regieinterface.reload()
def start(self):
self.server_thread.start()
def stop(self):
#self.server.stop()
pass
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
def projectData(self):
data={
'fullproject': self.regieinterface.projectRaw(),
'fileTree': self.regieinterface.hplayer.files()
}
return data
def watcher(self):
def onchange(e):
self.regieinterface.log('project updated ! pushing it...')
self.regieinterface.reload()
self.sendBuffer.put( ('data', self.projectData()) )
handler = PatternMatchingEventHandler("*/project.json", None, False, True)
handler.on_any_event = onchange
self.projectObserver = Observer()
self.projectObserver.schedule(handler, os.path.dirname(self.regieinterface.projectPath()))
try:
self.projectObserver.start()
except:
self.regieinterface.log('project.json not found')
|
print(ev, args[0]['data'][1])
self.sendBuffer.put( ('data', {'sequence': args[0]['data'][1]}) )
|
conditional_block
|
regie.py
|
from .base import BaseInterface
import eventlet
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
from flask import Flask, render_template, session, request, send_from_directory
from flask_socketio import SocketIO, emit, join_room, leave_room, close_room, rooms, disconnect
from werkzeug.utils import secure_filename
import threading, os, time, queue
import logging, sys, json
from ..engine.network import get_allip, get_hostname
import socket
from zeroconf import ServiceInfo, Zeroconf
thread = None
thread_lock = threading.Lock()
REGIE_PATH1 = '/opt/RPi-Regie'
REGIE_PATH2 = '/data/RPi-Regie'
class RegieInterface (BaseInterface):
def __init__(self, hplayer, port, datapath):
super(RegieInterface, self).__init__(hplayer, "Regie")
self._port = port
self._datapath = datapath
self._server = None
# HTTP receiver THREAD
def listen(self):
# Advertize on ZeroConf
zeroconf = Zeroconf()
info = ServiceInfo(
"_http._tcp.local.",
"Regie._"+get_hostname()+"._http._tcp.local.",
addresses=[socket.inet_aton(ip) for ip in get_allip()],
port=self._port,
properties={},
server=get_hostname()+".local.",
)
zeroconf.register_service(info)
# Start server
self.log( "regie interface on port", self._port)
with ThreadedHTTPServer(self, self._port) as server:
self._server = server
self.stopped.wait()
self._server = None
# Unregister ZeroConf
zeroconf.unregister_service(info)
zeroconf.close()
def projectPath(self):
return os.path.join(self._datapath, 'project.json')
def projectRaw(self):
project = '{"pool":[], "project":[[]]}'
if os.path.isfile(self.projectPath()):
with open( self.projectPath(), 'r') as file:
project = file.read()
return project
# parse locally for programatic execution
def reload(self):
try:
self._project = json.loads(self.projectRaw())
except:
self._project = None
self.log("Error while parsing project..")
# print(self._project)
return self._project
# play sequence
def playseq(self, sceneIndex, seqIndex):
|
#
# Threaded HTTP Server
#
class ThreadedHTTPServer(object):
def __init__(self, regieinterface, port):
self.regieinterface = regieinterface
interface_path = os.path.dirname(os.path.realpath(__file__))
if os.path.isdir(REGIE_PATH1):
www_path = os.path.join(REGIE_PATH1, 'web')
elif os.path.isdir(REGIE_PATH2):
www_path = os.path.join(REGIE_PATH2, 'web')
else:
www_path = os.path.join(interface_path, 'regie')
app = Flask(__name__, template_folder=www_path)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, cors_allowed_origins="*")
#
# FLASK Routing Static
#
@app.route('/')
def index():
# self.regieinterface.log('requesting index')
return send_from_directory(www_path, 'index.html')
@app.route('/<path:path>')
def send_static(path):
# self.regieinterface.log('requesting '+path)
return send_from_directory(www_path, path)
#
# FLASK Routing API
#
# @app.route('/<path:path>')
# def send_static(path):
# # self.regieinterface.log('requesting '+path)
# return send_from_directory(www_path, path)
#
# SOCKETIO Routing
#
self.sendBuffer = queue.Queue()
def background_thread():
while True:
try:
task = self.sendBuffer.get_nowait()
if len(task) > 1: socketio.emit(task[0], task[1])
else: socketio.emit(task[0], None)
self.sendBuffer.task_done()
except queue.Empty:
socketio.sleep(0.1)
@self.regieinterface.hplayer.on('files.dirlist-updated')
def filetree_send(ev, *args):
self.sendBuffer.put( ('data', {'fileTree': self.regieinterface.hplayer.files()}) )
@self.regieinterface.hplayer.on('files.activedir-updated')
def activedir_send(ev, *args):
self.sendBuffer.put( ('data', {'scene': args[1]}) )
@self.regieinterface.hplayer.on('*.peer.*')
def peer_send(ev, *args):
event = ev.split('.')[-1]
if event == 'playingseq':
print(ev, args[0]['data'][1])
self.sendBuffer.put( ('data', {'sequence': args[0]['data'][1]}) )
else:
args[0].update({'type': event})
self.sendBuffer.put( ('peer', args[0]) )
# !!! TODO: stop zyre monitoring when every client are disconnected
@socketio.on('connect')
def client_connect():
self.regieinterface.log('New Remote Regie connected')
@socketio.on('save')
def save(data):
try:
json.loads(data)
with open( os.path.join(self.regieinterface._datapath, 'project.json'), 'w') as file:
file.write(data)
except:
e = str(sys.exc_info()[0])
self.regieinterface.log('fail to save project: '+e+' '+data)
@socketio.on('init')
def init(data):
# send project
emit('data', self.projectData())
# Start update broadcaster
global thread
with thread_lock:
if thread is None:
thread = socketio.start_background_task(target=background_thread)
@socketio.on('register')
def register(data):
# enable peer monitoring
self.regieinterface.emit('peers.getlink')
self.regieinterface.emit('peers.subscribe', ['status', 'settings', 'playingseq'])
@socketio.on('event')
def event(data):
self.regieinterface.emit('peers.triggers', data, 437)
# prepare sub-thread
self.server_thread = threading.Thread(target=lambda:socketio.run(app, host='0.0.0.0', port=port))
self.server_thread.daemon = True
# watchdog project.json
self.watcher()
# internal load project
self.regieinterface.reload()
def start(self):
self.server_thread.start()
def stop(self):
#self.server.stop()
pass
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
def projectData(self):
data={
'fullproject': self.regieinterface.projectRaw(),
'fileTree': self.regieinterface.hplayer.files()
}
return data
def watcher(self):
def onchange(e):
self.regieinterface.log('project updated ! pushing it...')
self.regieinterface.reload()
self.sendBuffer.put( ('data', self.projectData()) )
handler = PatternMatchingEventHandler("*/project.json", None, False, True)
handler.on_any_event = onchange
self.projectObserver = Observer()
self.projectObserver.schedule(handler, os.path.dirname(self.regieinterface.projectPath()))
try:
self.projectObserver.start()
except:
self.regieinterface.log('project.json not found')
|
self.log("PLAYSEQ")
try:
# self.log('PLAYSEQ', seqIndex, sceneIndex, boxes)
orderz = []
boxes = [b for b in self._project["project"][0][sceneIndex]["allMedias"] if b["y"] == seqIndex]
for b in boxes:
peerName = self._project["pool"][ b["x"] ]["name"]
# MEDIA
order = { 'peer': peerName, 'synchro': True}
if b["media"] in ['stop', 'pause', 'unfade'] :
order["event"] = b["media"]
elif b["media"] == '...':
order["event"] = 'continue'
elif b["media"].startswith('fade'):
order["event"] = 'fade'
order["data"] = b["media"].split('fade ')[1]
else:
order["event"] = 'playthen'
order["data"] = [ self._project["project"][0][sceneIndex]["name"] + '/' + b["media"] ]
# ON MEDIA END
if 'onend' in b:
if b['onend'] == 'next':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex+1] } )
elif b['onend'] == 'prev':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex-1] } )
elif b['onend'] == 'replay':
order["data"].append( {'event': 'do-playseq', 'data': [sceneIndex, seqIndex] } )
orderz.append(order)
# LOOP
if b["loop"] == 'loop':
orderz.append( { 'peer': peerName, 'event': 'loop', 'data': 1} )
elif b["loop"] == 'unloop':
orderz.append( { 'peer': peerName, 'event': 'unloop'} )
# LIGHT
if b["light"] and b["light"] != '...':
order = { 'peer': peerName, 'synchro': True, 'event': 'esp'}
if b["light"].startswith('light'):
order["data"] = {
'topic': 'leds/all',
'data': b["light"].split('light ')[1]
}
elif b["light"].startswith('preset'):
order["data"] = {
'topic': 'leds/mem',
'data': b["light"].split('preset ')[1]
}
elif b["light"].startswith('off'):
order["data"] = {
'topic': 'leds/stop',
'data': ''
}
orderz.append(order)
self.emit('playingseq', sceneIndex, seqIndex)
self.emit('peers.triggers', orderz, 437)
except:
self.log('Error playing Scene', sceneIndex, 'Seq', seqIndex)
|
identifier_body
|
purchase_invoice.js
|
// Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
// License: GNU General Public License v3. See license.txt
frappe.provide("erpnext.accounts");
{% include 'erpnext/public/js/controllers/buying.js' %};
erpnext.accounts.PurchaseInvoice = erpnext.buying.BuyingController.extend({
setup: function(doc) {
this.setup_posting_date_time_check();
this._super(doc);
// formatter for purchase invoice item
if(this.frm.doc.update_stock) {
this.frm.set_indicator_formatter('item_code', function(doc) {
return (doc.qty<=doc.received_qty) ? "green" : "orange";
});
}
},
onload: function() {
this._super();
if(!this.frm.doc.__islocal) {
// show credit_to in print format
if(!this.frm.doc.supplier && this.frm.doc.credit_to) {
this.frm.set_df_property("credit_to", "print_hide", 0);
}
}
},
refresh: function(doc) {
const me = this;
this._super();
hide_fields(this.frm.doc);
// Show / Hide button
this.show_general_ledger();
if(doc.update_stock==1 && doc.docstatus==1) {
this.show_stock_ledger();
}
this.frm.add_custom_button("Télécharger Fiche Franchise",
() => {
if(doc.items != null)
{
var data = [];
var docfields = [];
data.push(["Ref Facture",'"'+doc.bill_no+'"',"","DATE",'"'+doc.posting_date+'"',"","","","","",""]);
data.push(["#","Code Article","Designation","Ref Article","Qts","Poids","Poids Total","Prix Unitaire","Montant","pays d'origine","pays de provenance"]);
$.each(doc.items || [], (i, d) => {
var row = [];
row.push(['"'+i+'"','"'+d["item_code"]+'"','"'+d["item_name"]+'"','"'+d["ref_fabricant"]+'"','"'+d["qty"]+'"','"'+d["weight_per_unit"]+'"','"'+d["total_weight"]+'"','"'+d["rate"]+'"','"'+d["amount"]+'"','"'+d["pays"] +'"','"'+doc.pays_de_provenance+'"']);
data.push(row);
});
frappe.tools.downloadify(data, null, "FICHE FRANCHISE "+doc.name+" "+doc.supplier_name);
} });
if(!doc.is_return && doc.docstatus == 1 && doc.outstanding_amount != 0){
if(doc.on_hold) {
this.frm.add_custom_button(
__('Change Release Date'),
function() {me.change_release_date()},
__('Hold Invoice')
);
this.frm.add_custom_button(
__('Unblock Invoice'),
function() {me.unblock_invoice()},
__('Make')
);
} else if (!doc.on_hold) {
this.frm.add_custom_button(
__('Block Invoice'),
function() {me.block_invoice()},
__('Make')
);
}
}
if(doc.docstatus == 1 && doc.outstanding_amount != 0
&& !(doc.is_return && doc.return_against)) {
this.frm.add_custom_button(__('Payment'), this.make_payment_entry, __("Make"));
cur_frm.page.set_inner_btn_group_as_primary(__("Make"));
}
if(!doc.is_return && doc.docstatus==1) {
if(doc.outstanding_amount >= 0 || Math.abs(flt(doc.outstanding_amount)) < flt(doc.grand_total)) {
cur_frm.add_custom_button(__('Return / Debit Note'),
this.make_debit_note, __("Make"));
}
cur_frm.add_custom_button("Recu d'achat",this.make_pr,__("Make"));
if(!doc.auto_repeat) {
cur_frm.add_custom_button(__('Subscription'), function() {
erpnext.utils.make_subscription(doc.doctype, doc.name)
}, __("Make"))
}
}
if (doc.outstanding_amount > 0 && !cint(doc.is_return)) {
cur_frm.add_custom_button(__('Payment Request'), function() {
me.make_payment_request()
}, __("Make"));
}
if(doc.docstatus===0) {
this.frm.add_custom_button(__('Purchase Order'), function() {
erpnext.utils.map_current_doc({
method: "erpnext.buying.doctype.purchase_order.purchase_order.make_purchase_invoice",
source_doctype: "Purchase Order",
target: me.frm,
setters: {
supplier: me.frm.doc.supplier || undefined,
},
get_query_filters: {
docstatus: 1,
status: ["!=", "Closed"],
per_billed: ["<", 99.99],
company: me.frm.doc.company
}
})
}, __("Get items from"));
this.frm.add_custom_button(__('Purchase Receipt'), function() {
erpnext.utils.map_current_doc({
method: "erpnext.stock.doctype.purchase_receipt.purchase_receipt.make_purchase_invoice",
source_doctype: "Purchase Receipt",
|
setters: {
supplier: me.frm.doc.supplier || undefined,
},
get_query_filters: {
docstatus: 1,
status: ["not in", ["Closed", "Completed"]],
company: me.frm.doc.company,
is_return: 0
}
})
}, __("Get items from"));
}
this.frm.toggle_reqd("supplier_warehouse", this.frm.doc.is_subcontracted==="Yes");
if (doc.docstatus == 1 && !doc.inter_company_invoice_reference) {
frappe.model.with_doc("Supplier", me.frm.doc.supplier, function() {
var supplier = frappe.model.get_doc("Supplier", me.frm.doc.supplier);
var internal = supplier.is_internal_supplier;
var disabled = supplier.disabled;
if (internal == 1 && disabled == 0) {
me.frm.add_custom_button("Inter Company Invoice", function() {
me.make_inter_company_invoice(me.frm);
}, __("Make"));
}
});
}
},
unblock_invoice: function() {
const me = this;
frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.unblock_invoice',
'args': {'name': me.frm.doc.name},
'callback': (r) => me.frm.reload_doc()
});
},
block_invoice: function() {
this.make_comment_dialog_and_block_invoice();
},
change_release_date: function() {
this.make_dialog_and_set_release_date();
},
can_change_release_date: function(date) {
const diff = frappe.datetime.get_diff(date, frappe.datetime.nowdate());
if (diff < 0) {
frappe.throw('New release date should be in the future');
return false;
} else {
return true;
}
},
make_comment_dialog_and_block_invoice: function(){
const me = this;
const title = __('Add Comment');
const fields = [
{
fieldname: 'hold_comment',
read_only: 0,
fieldtype:'Small Text',
label: __('Reason For Putting On Hold'),
default: ""
},
];
this.dialog = new frappe.ui.Dialog({
title: title,
fields: fields
});
this.dialog.set_primary_action(__('Save'), function() {
const dialog_data = me.dialog.get_values();
frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.block_invoice',
'args': {'name': me.frm.doc.name, 'hold_comment': dialog_data.hold_comment},
'callback': (r) => me.frm.reload_doc()
});
me.dialog.hide();
});
this.dialog.show();
},
make_dialog_and_set_release_date: function() {
const me = this;
const title = __('Set New Release Date');
const fields = [
{
fieldname: 'release_date',
read_only: 0,
fieldtype:'Date',
label: __('Release Date'),
default: me.frm.doc.release_date
},
];
this.dialog = new frappe.ui.Dialog({
title: title,
fields: fields
});
this.dialog.set_primary_action(__('Save'), function() {
me.dialog_data = me.dialog.get_values();
if(me.can_change_release_date(me.dialog_data.release_date)) {
me.dialog_data.name = me.frm.doc.name;
me.set_release_date(me.dialog_data);
me.dialog.hide();
}
});
this.dialog.show();
},
set_release_date: function(data) {
return frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.change_release_date',
'args': data,
'callback': (r) => this.frm.reload_doc()
});
},
supplier: function() {
var me = this;
if(this.frm.updating_party_details)
return;
erpnext.utils.get_party_details(this.frm, "erpnext.accounts.party.get_party_details",
{
posting_date: this.frm.doc.posting_date,
bill_date: this.frm.doc.bill_date,
party: this.frm.doc.supplier,
party_type: "Supplier",
account: this.frm.doc.credit_to,
price_list: this.frm.doc.buying_price_list
}, function() {
me.apply_pricing_rule();
me.frm.doc.apply_tds = me.frm.supplier_tds ? 1 : 0;
me.frm.set_df_property("apply_tds", "read_only", me.frm.supplier_tds ? 0 : 1);
})
},
credit_to: function() {
var me = this;
if(this.frm.doc.credit_to) {
me.frm.call({
method: "frappe.client.get_value",
args: {
doctype: "Account",
fieldname: "account_currency",
filters: { name: me.frm.doc.credit_to },
},
callback: function(r, rt) {
if(r.message) {
me.frm.set_value("party_account_currency", r.message.account_currency);
me.set_dynamic_labels();
}
}
});
}
},
make_inter_company_invoice: function(frm) {
frappe.model.open_mapped_doc({
method: "erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_inter_company_sales_invoice",
frm: frm
});
},
is_paid: function() {
hide_fields(this.frm.doc);
if(cint(this.frm.doc.is_paid)) {
this.frm.set_value("allocate_advances_automatically", 0);
if(!this.frm.doc.company) {
this.frm.set_value("is_paid", 0)
frappe.msgprint(__("Please specify Company to proceed"));
}
}
this.calculate_outstanding_amount();
this.frm.refresh_fields();
},
write_off_amount: function() {
this.set_in_company_currency(this.frm.doc, ["write_off_amount"]);
this.calculate_outstanding_amount();
this.frm.refresh_fields();
},
paid_amount: function() {
this.set_in_company_currency(this.frm.doc, ["paid_amount"]);
this.write_off_amount();
this.frm.refresh_fields();
},
allocated_amount: function() {
this.calculate_total_advance();
this.frm.refresh_fields();
},
items_add: function(doc, cdt, cdn) {
var row = frappe.get_doc(cdt, cdn);
this.frm.script_manager.copy_from_first_row("items", row,
["expense_account", "cost_center", "project"]);
},
on_submit: function() {
$.each(this.frm.doc["items"] || [], function(i, row) {
if(row.purchase_receipt) frappe.model.clear_doc("Purchase Receipt", row.purchase_receipt)
})
},
make_pr: function() {
frappe.model.open_mapped_doc({
method:"erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_pr",
frm: cur_frm
});
},
make_debit_note: function() {
frappe.model.open_mapped_doc({
method: "erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_debit_note",
frm: cur_frm
})
},
asset: function(frm, cdt, cdn) {
var row = locals[cdt][cdn];
if(row.asset) {
frappe.call({
method: "erpnext.assets.doctype.asset_category.asset_category.get_asset_category_account",
args: {
"asset": row.asset,
"fieldname": "fixed_asset_account",
"account": row.expense_account
},
callback: function(r, rt) {
frappe.model.set_value(cdt, cdn, "expense_account", r.message);
}
})
}
}
});
cur_frm.script_manager.make(erpnext.accounts.PurchaseInvoice);
// Hide Fields
// ------------
function hide_fields(doc) {
var parent_fields = ['due_date', 'is_opening', 'advances_section', 'from_date', 'to_date'];
if(cint(doc.is_paid) == 1) {
hide_field(parent_fields);
} else {
for (var i in parent_fields) {
var docfield = frappe.meta.docfield_map[doc.doctype][parent_fields[i]];
if(!docfield.hidden) unhide_field(parent_fields[i]);
}
}
var item_fields_stock = ['warehouse_section', 'received_qty', 'rejected_qty'];
cur_frm.fields_dict['items'].grid.set_column_disp(item_fields_stock,
(cint(doc.update_stock)==1 || cint(doc.is_return)==1 ? true : false));
cur_frm.refresh_fields();
}
cur_frm.cscript.update_stock = function(doc, dt, dn) {
hide_fields(doc, dt, dn);
this.frm.fields_dict.items.grid.toggle_reqd("item_code", doc.update_stock? true: false)
}
cur_frm.fields_dict.cash_bank_account.get_query = function(doc) {
return {
filters: [
["Account", "account_type", "in", ["Cash", "Bank"]],
["Account", "is_group", "=",0],
["Account", "company", "=", doc.company],
["Account", "report_type", "=", "Balance Sheet"]
]
}
}
cur_frm.fields_dict['items'].grid.get_field("item_code").get_query = function(doc, cdt, cdn) {
return {
query: "erpnext.controllers.queries.item_query",
filters: {'is_purchase_item': 1}
}
}
cur_frm.fields_dict['credit_to'].get_query = function(doc) {
// filter on Account
if (doc.supplier) {
return {
filters: {
'account_type': 'Payable',
'is_group': 0,
'company': doc.company
}
}
} else {
return {
filters: {
'report_type': 'Balance Sheet',
'is_group': 0,
'company': doc.company
}
}
}
}
// Get Print Heading
cur_frm.fields_dict['select_print_heading'].get_query = function(doc, cdt, cdn) {
return {
filters:[
['Print Heading', 'docstatus', '!=', 2]
]
}
}
cur_frm.set_query("expense_account", "items", function(doc) {
return {
query: "erpnext.controllers.queries.get_expense_account",
filters: {'company': doc.company}
}
});
cur_frm.set_query("asset", "items", function(doc, cdt, cdn) {
var d = locals[cdt][cdn];
return {
filters: {
'item_code': d.item_code,
'docstatus': 1,
'company': doc.company,
'status': 'Submitted'
}
}
});
cur_frm.cscript.expense_account = function(doc, cdt, cdn){
var d = locals[cdt][cdn];
if(d.idx == 1 && d.expense_account){
var cl = doc.items || [];
for(var i = 0; i < cl.length; i++){
if(!cl[i].expense_account) cl[i].expense_account = d.expense_account;
}
}
refresh_field('items');
}
cur_frm.fields_dict["items"].grid.get_field("cost_center").get_query = function(doc) {
return {
filters: {
'company': doc.company,
'is_group': 0
}
}
}
cur_frm.cscript.cost_center = function(doc, cdt, cdn){
var d = locals[cdt][cdn];
if(d.cost_center){
var cl = doc.items || [];
for(var i = 0; i < cl.length; i++){
if(!cl[i].cost_center) cl[i].cost_center = d.cost_center;
}
}
refresh_field('items');
}
cur_frm.fields_dict['items'].grid.get_field('project').get_query = function(doc, cdt, cdn) {
return{
filters:[
['Project', 'status', 'not in', 'Completed, Cancelled']
]
}
}
cur_frm.cscript.select_print_heading = function(doc,cdt,cdn){
if(doc.select_print_heading){
// print heading
cur_frm.pformat.print_heading = doc.select_print_heading;
}
else
cur_frm.pformat.print_heading = __("Purchase Invoice");
}
frappe.ui.form.on("Purchase Invoice", {
setup: function(frm) {
frm.custom_make_buttons = {
'Purchase Invoice': 'Debit Note',
'Payment Entry': 'Payment'
}
frm.fields_dict['items'].grid.get_field('deferred_expense_account').get_query = function(doc) {
return {
filters: {
'root_type': 'Asset',
'company': doc.company,
"is_group": 0
}
}
}
frm.set_query("cost_center", function() {
return {
filters: {
company: frm.doc.company,
is_group: 0
}
};
});
},
onload: function(frm) {
if(frm.doc.__onload) {
if(frm.doc.supplier) {
frm.doc.apply_tds = frm.doc.__onload.supplier_tds ? 1 : 0;
}
if(!frm.doc.__onload.supplier_tds) {
frm.set_df_property("apply_tds", "read_only", 1);
}
}
erpnext.queries.setup_queries(frm, "Warehouse", function() {
return erpnext.queries.warehouse(frm.doc);
});
},
is_subcontracted: function(frm) {
if (frm.doc.is_subcontracted === "Yes") {
erpnext.buying.get_default_bom(frm);
}
frm.toggle_reqd("supplier_warehouse", frm.doc.is_subcontracted==="Yes");
}
})
|
target: me.frm,
date_field: "posting_date",
|
random_line_split
|
purchase_invoice.js
|
// Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
// License: GNU General Public License v3. See license.txt
frappe.provide("erpnext.accounts");
{% include 'erpnext/public/js/controllers/buying.js' %};
erpnext.accounts.PurchaseInvoice = erpnext.buying.BuyingController.extend({
setup: function(doc) {
this.setup_posting_date_time_check();
this._super(doc);
// formatter for purchase invoice item
if(this.frm.doc.update_stock) {
this.frm.set_indicator_formatter('item_code', function(doc) {
return (doc.qty<=doc.received_qty) ? "green" : "orange";
});
}
},
onload: function() {
this._super();
if(!this.frm.doc.__islocal) {
// show credit_to in print format
if(!this.frm.doc.supplier && this.frm.doc.credit_to) {
this.frm.set_df_property("credit_to", "print_hide", 0);
}
}
},
refresh: function(doc) {
const me = this;
this._super();
hide_fields(this.frm.doc);
// Show / Hide button
this.show_general_ledger();
if(doc.update_stock==1 && doc.docstatus==1) {
this.show_stock_ledger();
}
this.frm.add_custom_button("Télécharger Fiche Franchise",
() => {
if(doc.items != null)
{
var data = [];
var docfields = [];
data.push(["Ref Facture",'"'+doc.bill_no+'"',"","DATE",'"'+doc.posting_date+'"',"","","","","",""]);
data.push(["#","Code Article","Designation","Ref Article","Qts","Poids","Poids Total","Prix Unitaire","Montant","pays d'origine","pays de provenance"]);
$.each(doc.items || [], (i, d) => {
var row = [];
row.push(['"'+i+'"','"'+d["item_code"]+'"','"'+d["item_name"]+'"','"'+d["ref_fabricant"]+'"','"'+d["qty"]+'"','"'+d["weight_per_unit"]+'"','"'+d["total_weight"]+'"','"'+d["rate"]+'"','"'+d["amount"]+'"','"'+d["pays"] +'"','"'+doc.pays_de_provenance+'"']);
data.push(row);
});
frappe.tools.downloadify(data, null, "FICHE FRANCHISE "+doc.name+" "+doc.supplier_name);
} });
if(!doc.is_return && doc.docstatus == 1 && doc.outstanding_amount != 0){
if(doc.on_hold) {
this.frm.add_custom_button(
__('Change Release Date'),
function() {me.change_release_date()},
__('Hold Invoice')
);
this.frm.add_custom_button(
__('Unblock Invoice'),
function() {me.unblock_invoice()},
__('Make')
);
} else if (!doc.on_hold) {
this.frm.add_custom_button(
__('Block Invoice'),
function() {me.block_invoice()},
__('Make')
);
}
}
if(doc.docstatus == 1 && doc.outstanding_amount != 0
&& !(doc.is_return && doc.return_against)) {
this.frm.add_custom_button(__('Payment'), this.make_payment_entry, __("Make"));
cur_frm.page.set_inner_btn_group_as_primary(__("Make"));
}
if(!doc.is_return && doc.docstatus==1) {
if(doc.outstanding_amount >= 0 || Math.abs(flt(doc.outstanding_amount)) < flt(doc.grand_total)) {
cur_frm.add_custom_button(__('Return / Debit Note'),
this.make_debit_note, __("Make"));
}
cur_frm.add_custom_button("Recu d'achat",this.make_pr,__("Make"));
if(!doc.auto_repeat) {
cur_frm.add_custom_button(__('Subscription'), function() {
erpnext.utils.make_subscription(doc.doctype, doc.name)
}, __("Make"))
}
}
if (doc.outstanding_amount > 0 && !cint(doc.is_return)) {
cur_frm.add_custom_button(__('Payment Request'), function() {
me.make_payment_request()
}, __("Make"));
}
if(doc.docstatus===0) {
this.frm.add_custom_button(__('Purchase Order'), function() {
erpnext.utils.map_current_doc({
method: "erpnext.buying.doctype.purchase_order.purchase_order.make_purchase_invoice",
source_doctype: "Purchase Order",
target: me.frm,
setters: {
supplier: me.frm.doc.supplier || undefined,
},
get_query_filters: {
docstatus: 1,
status: ["!=", "Closed"],
per_billed: ["<", 99.99],
company: me.frm.doc.company
}
})
}, __("Get items from"));
this.frm.add_custom_button(__('Purchase Receipt'), function() {
erpnext.utils.map_current_doc({
method: "erpnext.stock.doctype.purchase_receipt.purchase_receipt.make_purchase_invoice",
source_doctype: "Purchase Receipt",
target: me.frm,
date_field: "posting_date",
setters: {
supplier: me.frm.doc.supplier || undefined,
},
get_query_filters: {
docstatus: 1,
status: ["not in", ["Closed", "Completed"]],
company: me.frm.doc.company,
is_return: 0
}
})
}, __("Get items from"));
}
this.frm.toggle_reqd("supplier_warehouse", this.frm.doc.is_subcontracted==="Yes");
if (doc.docstatus == 1 && !doc.inter_company_invoice_reference) {
frappe.model.with_doc("Supplier", me.frm.doc.supplier, function() {
var supplier = frappe.model.get_doc("Supplier", me.frm.doc.supplier);
var internal = supplier.is_internal_supplier;
var disabled = supplier.disabled;
if (internal == 1 && disabled == 0) {
me.frm.add_custom_button("Inter Company Invoice", function() {
me.make_inter_company_invoice(me.frm);
}, __("Make"));
}
});
}
},
unblock_invoice: function() {
const me = this;
frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.unblock_invoice',
'args': {'name': me.frm.doc.name},
'callback': (r) => me.frm.reload_doc()
});
},
block_invoice: function() {
this.make_comment_dialog_and_block_invoice();
},
change_release_date: function() {
this.make_dialog_and_set_release_date();
},
can_change_release_date: function(date) {
const diff = frappe.datetime.get_diff(date, frappe.datetime.nowdate());
if (diff < 0) {
frappe.throw('New release date should be in the future');
return false;
} else {
return true;
}
},
make_comment_dialog_and_block_invoice: function(){
const me = this;
const title = __('Add Comment');
const fields = [
{
fieldname: 'hold_comment',
read_only: 0,
fieldtype:'Small Text',
label: __('Reason For Putting On Hold'),
default: ""
},
];
this.dialog = new frappe.ui.Dialog({
title: title,
fields: fields
});
this.dialog.set_primary_action(__('Save'), function() {
const dialog_data = me.dialog.get_values();
frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.block_invoice',
'args': {'name': me.frm.doc.name, 'hold_comment': dialog_data.hold_comment},
'callback': (r) => me.frm.reload_doc()
});
me.dialog.hide();
});
this.dialog.show();
},
make_dialog_and_set_release_date: function() {
const me = this;
const title = __('Set New Release Date');
const fields = [
{
fieldname: 'release_date',
read_only: 0,
fieldtype:'Date',
label: __('Release Date'),
default: me.frm.doc.release_date
},
];
this.dialog = new frappe.ui.Dialog({
title: title,
fields: fields
});
this.dialog.set_primary_action(__('Save'), function() {
me.dialog_data = me.dialog.get_values();
if(me.can_change_release_date(me.dialog_data.release_date)) {
me.dialog_data.name = me.frm.doc.name;
me.set_release_date(me.dialog_data);
me.dialog.hide();
}
});
this.dialog.show();
},
set_release_date: function(data) {
return frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.change_release_date',
'args': data,
'callback': (r) => this.frm.reload_doc()
});
},
supplier: function() {
var me = this;
if(this.frm.updating_party_details)
return;
erpnext.utils.get_party_details(this.frm, "erpnext.accounts.party.get_party_details",
{
posting_date: this.frm.doc.posting_date,
bill_date: this.frm.doc.bill_date,
party: this.frm.doc.supplier,
party_type: "Supplier",
account: this.frm.doc.credit_to,
price_list: this.frm.doc.buying_price_list
}, function() {
me.apply_pricing_rule();
me.frm.doc.apply_tds = me.frm.supplier_tds ? 1 : 0;
me.frm.set_df_property("apply_tds", "read_only", me.frm.supplier_tds ? 0 : 1);
})
},
credit_to: function() {
var me = this;
if(this.frm.doc.credit_to) {
me.frm.call({
method: "frappe.client.get_value",
args: {
doctype: "Account",
fieldname: "account_currency",
filters: { name: me.frm.doc.credit_to },
},
callback: function(r, rt) {
if(r.message) {
me.frm.set_value("party_account_currency", r.message.account_currency);
me.set_dynamic_labels();
}
}
});
}
},
make_inter_company_invoice: function(frm) {
frappe.model.open_mapped_doc({
method: "erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_inter_company_sales_invoice",
frm: frm
});
},
is_paid: function() {
hide_fields(this.frm.doc);
if(cint(this.frm.doc.is_paid)) {
this.frm.set_value("allocate_advances_automatically", 0);
if(!this.frm.doc.company) {
this.frm.set_value("is_paid", 0)
frappe.msgprint(__("Please specify Company to proceed"));
}
}
this.calculate_outstanding_amount();
this.frm.refresh_fields();
},
write_off_amount: function() {
this.set_in_company_currency(this.frm.doc, ["write_off_amount"]);
this.calculate_outstanding_amount();
this.frm.refresh_fields();
},
paid_amount: function() {
this.set_in_company_currency(this.frm.doc, ["paid_amount"]);
this.write_off_amount();
this.frm.refresh_fields();
},
allocated_amount: function() {
this.calculate_total_advance();
this.frm.refresh_fields();
},
items_add: function(doc, cdt, cdn) {
var row = frappe.get_doc(cdt, cdn);
this.frm.script_manager.copy_from_first_row("items", row,
["expense_account", "cost_center", "project"]);
},
on_submit: function() {
$.each(this.frm.doc["items"] || [], function(i, row) {
if(row.purchase_receipt) frappe.model.clear_doc("Purchase Receipt", row.purchase_receipt)
})
},
make_pr: function() {
frappe.model.open_mapped_doc({
method:"erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_pr",
frm: cur_frm
});
},
make_debit_note: function() {
frappe.model.open_mapped_doc({
method: "erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_debit_note",
frm: cur_frm
})
},
asset: function(frm, cdt, cdn) {
var row = locals[cdt][cdn];
if(row.asset) {
frappe.call({
method: "erpnext.assets.doctype.asset_category.asset_category.get_asset_category_account",
args: {
"asset": row.asset,
"fieldname": "fixed_asset_account",
"account": row.expense_account
},
callback: function(r, rt) {
frappe.model.set_value(cdt, cdn, "expense_account", r.message);
}
})
}
}
});
cur_frm.script_manager.make(erpnext.accounts.PurchaseInvoice);
// Hide Fields
// ------------
function hi
|
oc) {
var parent_fields = ['due_date', 'is_opening', 'advances_section', 'from_date', 'to_date'];
if(cint(doc.is_paid) == 1) {
hide_field(parent_fields);
} else {
for (var i in parent_fields) {
var docfield = frappe.meta.docfield_map[doc.doctype][parent_fields[i]];
if(!docfield.hidden) unhide_field(parent_fields[i]);
}
}
var item_fields_stock = ['warehouse_section', 'received_qty', 'rejected_qty'];
cur_frm.fields_dict['items'].grid.set_column_disp(item_fields_stock,
(cint(doc.update_stock)==1 || cint(doc.is_return)==1 ? true : false));
cur_frm.refresh_fields();
}
cur_frm.cscript.update_stock = function(doc, dt, dn) {
hide_fields(doc, dt, dn);
this.frm.fields_dict.items.grid.toggle_reqd("item_code", doc.update_stock? true: false)
}
cur_frm.fields_dict.cash_bank_account.get_query = function(doc) {
return {
filters: [
["Account", "account_type", "in", ["Cash", "Bank"]],
["Account", "is_group", "=",0],
["Account", "company", "=", doc.company],
["Account", "report_type", "=", "Balance Sheet"]
]
}
}
cur_frm.fields_dict['items'].grid.get_field("item_code").get_query = function(doc, cdt, cdn) {
return {
query: "erpnext.controllers.queries.item_query",
filters: {'is_purchase_item': 1}
}
}
cur_frm.fields_dict['credit_to'].get_query = function(doc) {
// filter on Account
if (doc.supplier) {
return {
filters: {
'account_type': 'Payable',
'is_group': 0,
'company': doc.company
}
}
} else {
return {
filters: {
'report_type': 'Balance Sheet',
'is_group': 0,
'company': doc.company
}
}
}
}
// Get Print Heading
cur_frm.fields_dict['select_print_heading'].get_query = function(doc, cdt, cdn) {
return {
filters:[
['Print Heading', 'docstatus', '!=', 2]
]
}
}
cur_frm.set_query("expense_account", "items", function(doc) {
return {
query: "erpnext.controllers.queries.get_expense_account",
filters: {'company': doc.company}
}
});
cur_frm.set_query("asset", "items", function(doc, cdt, cdn) {
var d = locals[cdt][cdn];
return {
filters: {
'item_code': d.item_code,
'docstatus': 1,
'company': doc.company,
'status': 'Submitted'
}
}
});
cur_frm.cscript.expense_account = function(doc, cdt, cdn){
var d = locals[cdt][cdn];
if(d.idx == 1 && d.expense_account){
var cl = doc.items || [];
for(var i = 0; i < cl.length; i++){
if(!cl[i].expense_account) cl[i].expense_account = d.expense_account;
}
}
refresh_field('items');
}
cur_frm.fields_dict["items"].grid.get_field("cost_center").get_query = function(doc) {
return {
filters: {
'company': doc.company,
'is_group': 0
}
}
}
cur_frm.cscript.cost_center = function(doc, cdt, cdn){
var d = locals[cdt][cdn];
if(d.cost_center){
var cl = doc.items || [];
for(var i = 0; i < cl.length; i++){
if(!cl[i].cost_center) cl[i].cost_center = d.cost_center;
}
}
refresh_field('items');
}
cur_frm.fields_dict['items'].grid.get_field('project').get_query = function(doc, cdt, cdn) {
return{
filters:[
['Project', 'status', 'not in', 'Completed, Cancelled']
]
}
}
cur_frm.cscript.select_print_heading = function(doc,cdt,cdn){
if(doc.select_print_heading){
// print heading
cur_frm.pformat.print_heading = doc.select_print_heading;
}
else
cur_frm.pformat.print_heading = __("Purchase Invoice");
}
frappe.ui.form.on("Purchase Invoice", {
setup: function(frm) {
frm.custom_make_buttons = {
'Purchase Invoice': 'Debit Note',
'Payment Entry': 'Payment'
}
frm.fields_dict['items'].grid.get_field('deferred_expense_account').get_query = function(doc) {
return {
filters: {
'root_type': 'Asset',
'company': doc.company,
"is_group": 0
}
}
}
frm.set_query("cost_center", function() {
return {
filters: {
company: frm.doc.company,
is_group: 0
}
};
});
},
onload: function(frm) {
if(frm.doc.__onload) {
if(frm.doc.supplier) {
frm.doc.apply_tds = frm.doc.__onload.supplier_tds ? 1 : 0;
}
if(!frm.doc.__onload.supplier_tds) {
frm.set_df_property("apply_tds", "read_only", 1);
}
}
erpnext.queries.setup_queries(frm, "Warehouse", function() {
return erpnext.queries.warehouse(frm.doc);
});
},
is_subcontracted: function(frm) {
if (frm.doc.is_subcontracted === "Yes") {
erpnext.buying.get_default_bom(frm);
}
frm.toggle_reqd("supplier_warehouse", frm.doc.is_subcontracted==="Yes");
}
})
|
de_fields(d
|
identifier_name
|
purchase_invoice.js
|
// Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
// License: GNU General Public License v3. See license.txt
frappe.provide("erpnext.accounts");
{% include 'erpnext/public/js/controllers/buying.js' %};
erpnext.accounts.PurchaseInvoice = erpnext.buying.BuyingController.extend({
setup: function(doc) {
this.setup_posting_date_time_check();
this._super(doc);
// formatter for purchase invoice item
if(this.frm.doc.update_stock) {
this.frm.set_indicator_formatter('item_code', function(doc) {
return (doc.qty<=doc.received_qty) ? "green" : "orange";
});
}
},
onload: function() {
this._super();
if(!this.frm.doc.__islocal) {
// show credit_to in print format
if(!this.frm.doc.supplier && this.frm.doc.credit_to) {
this.frm.set_df_property("credit_to", "print_hide", 0);
}
}
},
refresh: function(doc) {
const me = this;
this._super();
hide_fields(this.frm.doc);
// Show / Hide button
this.show_general_ledger();
if(doc.update_stock==1 && doc.docstatus==1) {
this.show_stock_ledger();
}
this.frm.add_custom_button("Télécharger Fiche Franchise",
() => {
if(doc.items != null)
{
var data = [];
var docfields = [];
data.push(["Ref Facture",'"'+doc.bill_no+'"',"","DATE",'"'+doc.posting_date+'"',"","","","","",""]);
data.push(["#","Code Article","Designation","Ref Article","Qts","Poids","Poids Total","Prix Unitaire","Montant","pays d'origine","pays de provenance"]);
$.each(doc.items || [], (i, d) => {
var row = [];
row.push(['"'+i+'"','"'+d["item_code"]+'"','"'+d["item_name"]+'"','"'+d["ref_fabricant"]+'"','"'+d["qty"]+'"','"'+d["weight_per_unit"]+'"','"'+d["total_weight"]+'"','"'+d["rate"]+'"','"'+d["amount"]+'"','"'+d["pays"] +'"','"'+doc.pays_de_provenance+'"']);
data.push(row);
});
frappe.tools.downloadify(data, null, "FICHE FRANCHISE "+doc.name+" "+doc.supplier_name);
} });
if(!doc.is_return && doc.docstatus == 1 && doc.outstanding_amount != 0){
if(doc.on_hold) {
this.frm.add_custom_button(
__('Change Release Date'),
function() {me.change_release_date()},
__('Hold Invoice')
);
this.frm.add_custom_button(
__('Unblock Invoice'),
function() {me.unblock_invoice()},
__('Make')
);
} else if (!doc.on_hold) {
this.frm.add_custom_button(
__('Block Invoice'),
function() {me.block_invoice()},
__('Make')
);
}
}
if(doc.docstatus == 1 && doc.outstanding_amount != 0
&& !(doc.is_return && doc.return_against)) {
this.frm.add_custom_button(__('Payment'), this.make_payment_entry, __("Make"));
cur_frm.page.set_inner_btn_group_as_primary(__("Make"));
}
if(!doc.is_return && doc.docstatus==1) {
if(doc.outstanding_amount >= 0 || Math.abs(flt(doc.outstanding_amount)) < flt(doc.grand_total)) {
cur_frm.add_custom_button(__('Return / Debit Note'),
this.make_debit_note, __("Make"));
}
cur_frm.add_custom_button("Recu d'achat",this.make_pr,__("Make"));
if(!doc.auto_repeat) {
cur_frm.add_custom_button(__('Subscription'), function() {
erpnext.utils.make_subscription(doc.doctype, doc.name)
}, __("Make"))
}
}
if (doc.outstanding_amount > 0 && !cint(doc.is_return)) {
cur_frm.add_custom_button(__('Payment Request'), function() {
me.make_payment_request()
}, __("Make"));
}
if(doc.docstatus===0) {
this.frm.add_custom_button(__('Purchase Order'), function() {
erpnext.utils.map_current_doc({
method: "erpnext.buying.doctype.purchase_order.purchase_order.make_purchase_invoice",
source_doctype: "Purchase Order",
target: me.frm,
setters: {
supplier: me.frm.doc.supplier || undefined,
},
get_query_filters: {
docstatus: 1,
status: ["!=", "Closed"],
per_billed: ["<", 99.99],
company: me.frm.doc.company
}
})
}, __("Get items from"));
this.frm.add_custom_button(__('Purchase Receipt'), function() {
erpnext.utils.map_current_doc({
method: "erpnext.stock.doctype.purchase_receipt.purchase_receipt.make_purchase_invoice",
source_doctype: "Purchase Receipt",
target: me.frm,
date_field: "posting_date",
setters: {
supplier: me.frm.doc.supplier || undefined,
},
get_query_filters: {
docstatus: 1,
status: ["not in", ["Closed", "Completed"]],
company: me.frm.doc.company,
is_return: 0
}
})
}, __("Get items from"));
}
this.frm.toggle_reqd("supplier_warehouse", this.frm.doc.is_subcontracted==="Yes");
if (doc.docstatus == 1 && !doc.inter_company_invoice_reference) {
frappe.model.with_doc("Supplier", me.frm.doc.supplier, function() {
var supplier = frappe.model.get_doc("Supplier", me.frm.doc.supplier);
var internal = supplier.is_internal_supplier;
var disabled = supplier.disabled;
if (internal == 1 && disabled == 0) {
me.frm.add_custom_button("Inter Company Invoice", function() {
me.make_inter_company_invoice(me.frm);
}, __("Make"));
}
});
}
},
unblock_invoice: function() {
const me = this;
frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.unblock_invoice',
'args': {'name': me.frm.doc.name},
'callback': (r) => me.frm.reload_doc()
});
},
block_invoice: function() {
this.make_comment_dialog_and_block_invoice();
},
change_release_date: function() {
this.make_dialog_and_set_release_date();
},
can_change_release_date: function(date) {
const diff = frappe.datetime.get_diff(date, frappe.datetime.nowdate());
if (diff < 0) {
frappe.throw('New release date should be in the future');
return false;
} else {
return true;
}
},
make_comment_dialog_and_block_invoice: function(){
const me = this;
const title = __('Add Comment');
const fields = [
{
fieldname: 'hold_comment',
read_only: 0,
fieldtype:'Small Text',
label: __('Reason For Putting On Hold'),
default: ""
},
];
this.dialog = new frappe.ui.Dialog({
title: title,
fields: fields
});
this.dialog.set_primary_action(__('Save'), function() {
const dialog_data = me.dialog.get_values();
frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.block_invoice',
'args': {'name': me.frm.doc.name, 'hold_comment': dialog_data.hold_comment},
'callback': (r) => me.frm.reload_doc()
});
me.dialog.hide();
});
this.dialog.show();
},
make_dialog_and_set_release_date: function() {
const me = this;
const title = __('Set New Release Date');
const fields = [
{
fieldname: 'release_date',
read_only: 0,
fieldtype:'Date',
label: __('Release Date'),
default: me.frm.doc.release_date
},
];
this.dialog = new frappe.ui.Dialog({
title: title,
fields: fields
});
this.dialog.set_primary_action(__('Save'), function() {
me.dialog_data = me.dialog.get_values();
if(me.can_change_release_date(me.dialog_data.release_date)) {
me.dialog_data.name = me.frm.doc.name;
me.set_release_date(me.dialog_data);
me.dialog.hide();
}
});
this.dialog.show();
},
set_release_date: function(data) {
return frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.change_release_date',
'args': data,
'callback': (r) => this.frm.reload_doc()
});
},
supplier: function() {
var me = this;
if(this.frm.updating_party_details)
return;
erpnext.utils.get_party_details(this.frm, "erpnext.accounts.party.get_party_details",
{
posting_date: this.frm.doc.posting_date,
bill_date: this.frm.doc.bill_date,
party: this.frm.doc.supplier,
party_type: "Supplier",
account: this.frm.doc.credit_to,
price_list: this.frm.doc.buying_price_list
}, function() {
me.apply_pricing_rule();
me.frm.doc.apply_tds = me.frm.supplier_tds ? 1 : 0;
me.frm.set_df_property("apply_tds", "read_only", me.frm.supplier_tds ? 0 : 1);
})
},
credit_to: function() {
var me = this;
if(this.frm.doc.credit_to) {
me.frm.call({
method: "frappe.client.get_value",
args: {
doctype: "Account",
fieldname: "account_currency",
filters: { name: me.frm.doc.credit_to },
},
callback: function(r, rt) {
if(r.message) {
me.frm.set_value("party_account_currency", r.message.account_currency);
me.set_dynamic_labels();
}
}
});
}
},
make_inter_company_invoice: function(frm) {
frappe.model.open_mapped_doc({
method: "erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_inter_company_sales_invoice",
frm: frm
});
},
is_paid: function() {
hide_fields(this.frm.doc);
if(cint(this.frm.doc.is_paid)) {
this.frm.set_value("allocate_advances_automatically", 0);
if(!this.frm.doc.company) {
this.frm.set_value("is_paid", 0)
frappe.msgprint(__("Please specify Company to proceed"));
}
}
this.calculate_outstanding_amount();
this.frm.refresh_fields();
},
write_off_amount: function() {
this.set_in_company_currency(this.frm.doc, ["write_off_amount"]);
this.calculate_outstanding_amount();
this.frm.refresh_fields();
},
paid_amount: function() {
this.set_in_company_currency(this.frm.doc, ["paid_amount"]);
this.write_off_amount();
this.frm.refresh_fields();
},
allocated_amount: function() {
this.calculate_total_advance();
this.frm.refresh_fields();
},
items_add: function(doc, cdt, cdn) {
var row = frappe.get_doc(cdt, cdn);
this.frm.script_manager.copy_from_first_row("items", row,
["expense_account", "cost_center", "project"]);
},
on_submit: function() {
$.each(this.frm.doc["items"] || [], function(i, row) {
if(row.purchase_receipt) frappe.model.clear_doc("Purchase Receipt", row.purchase_receipt)
})
},
make_pr: function() {
frappe.model.open_mapped_doc({
method:"erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_pr",
frm: cur_frm
});
},
make_debit_note: function() {
frappe.model.open_mapped_doc({
method: "erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_debit_note",
frm: cur_frm
})
},
asset: function(frm, cdt, cdn) {
var row = locals[cdt][cdn];
if(row.asset) {
frappe.call({
method: "erpnext.assets.doctype.asset_category.asset_category.get_asset_category_account",
args: {
"asset": row.asset,
"fieldname": "fixed_asset_account",
"account": row.expense_account
},
callback: function(r, rt) {
frappe.model.set_value(cdt, cdn, "expense_account", r.message);
}
})
}
}
});
cur_frm.script_manager.make(erpnext.accounts.PurchaseInvoice);
// Hide Fields
// ------------
function hide_fields(doc) {
|
cur_frm.cscript.update_stock = function(doc, dt, dn) {
hide_fields(doc, dt, dn);
this.frm.fields_dict.items.grid.toggle_reqd("item_code", doc.update_stock? true: false)
}
cur_frm.fields_dict.cash_bank_account.get_query = function(doc) {
return {
filters: [
["Account", "account_type", "in", ["Cash", "Bank"]],
["Account", "is_group", "=",0],
["Account", "company", "=", doc.company],
["Account", "report_type", "=", "Balance Sheet"]
]
}
}
cur_frm.fields_dict['items'].grid.get_field("item_code").get_query = function(doc, cdt, cdn) {
return {
query: "erpnext.controllers.queries.item_query",
filters: {'is_purchase_item': 1}
}
}
cur_frm.fields_dict['credit_to'].get_query = function(doc) {
// filter on Account
if (doc.supplier) {
return {
filters: {
'account_type': 'Payable',
'is_group': 0,
'company': doc.company
}
}
} else {
return {
filters: {
'report_type': 'Balance Sheet',
'is_group': 0,
'company': doc.company
}
}
}
}
// Get Print Heading
cur_frm.fields_dict['select_print_heading'].get_query = function(doc, cdt, cdn) {
return {
filters:[
['Print Heading', 'docstatus', '!=', 2]
]
}
}
cur_frm.set_query("expense_account", "items", function(doc) {
return {
query: "erpnext.controllers.queries.get_expense_account",
filters: {'company': doc.company}
}
});
cur_frm.set_query("asset", "items", function(doc, cdt, cdn) {
var d = locals[cdt][cdn];
return {
filters: {
'item_code': d.item_code,
'docstatus': 1,
'company': doc.company,
'status': 'Submitted'
}
}
});
cur_frm.cscript.expense_account = function(doc, cdt, cdn){
var d = locals[cdt][cdn];
if(d.idx == 1 && d.expense_account){
var cl = doc.items || [];
for(var i = 0; i < cl.length; i++){
if(!cl[i].expense_account) cl[i].expense_account = d.expense_account;
}
}
refresh_field('items');
}
cur_frm.fields_dict["items"].grid.get_field("cost_center").get_query = function(doc) {
return {
filters: {
'company': doc.company,
'is_group': 0
}
}
}
cur_frm.cscript.cost_center = function(doc, cdt, cdn){
var d = locals[cdt][cdn];
if(d.cost_center){
var cl = doc.items || [];
for(var i = 0; i < cl.length; i++){
if(!cl[i].cost_center) cl[i].cost_center = d.cost_center;
}
}
refresh_field('items');
}
cur_frm.fields_dict['items'].grid.get_field('project').get_query = function(doc, cdt, cdn) {
return{
filters:[
['Project', 'status', 'not in', 'Completed, Cancelled']
]
}
}
cur_frm.cscript.select_print_heading = function(doc,cdt,cdn){
if(doc.select_print_heading){
// print heading
cur_frm.pformat.print_heading = doc.select_print_heading;
}
else
cur_frm.pformat.print_heading = __("Purchase Invoice");
}
frappe.ui.form.on("Purchase Invoice", {
setup: function(frm) {
frm.custom_make_buttons = {
'Purchase Invoice': 'Debit Note',
'Payment Entry': 'Payment'
}
frm.fields_dict['items'].grid.get_field('deferred_expense_account').get_query = function(doc) {
return {
filters: {
'root_type': 'Asset',
'company': doc.company,
"is_group": 0
}
}
}
frm.set_query("cost_center", function() {
return {
filters: {
company: frm.doc.company,
is_group: 0
}
};
});
},
onload: function(frm) {
if(frm.doc.__onload) {
if(frm.doc.supplier) {
frm.doc.apply_tds = frm.doc.__onload.supplier_tds ? 1 : 0;
}
if(!frm.doc.__onload.supplier_tds) {
frm.set_df_property("apply_tds", "read_only", 1);
}
}
erpnext.queries.setup_queries(frm, "Warehouse", function() {
return erpnext.queries.warehouse(frm.doc);
});
},
is_subcontracted: function(frm) {
if (frm.doc.is_subcontracted === "Yes") {
erpnext.buying.get_default_bom(frm);
}
frm.toggle_reqd("supplier_warehouse", frm.doc.is_subcontracted==="Yes");
}
})
|
var parent_fields = ['due_date', 'is_opening', 'advances_section', 'from_date', 'to_date'];
if(cint(doc.is_paid) == 1) {
hide_field(parent_fields);
} else {
for (var i in parent_fields) {
var docfield = frappe.meta.docfield_map[doc.doctype][parent_fields[i]];
if(!docfield.hidden) unhide_field(parent_fields[i]);
}
}
var item_fields_stock = ['warehouse_section', 'received_qty', 'rejected_qty'];
cur_frm.fields_dict['items'].grid.set_column_disp(item_fields_stock,
(cint(doc.update_stock)==1 || cint(doc.is_return)==1 ? true : false));
cur_frm.refresh_fields();
}
|
identifier_body
|
purchase_invoice.js
|
// Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
// License: GNU General Public License v3. See license.txt
frappe.provide("erpnext.accounts");
{% include 'erpnext/public/js/controllers/buying.js' %};
erpnext.accounts.PurchaseInvoice = erpnext.buying.BuyingController.extend({
setup: function(doc) {
this.setup_posting_date_time_check();
this._super(doc);
// formatter for purchase invoice item
if(this.frm.doc.update_stock)
|
},
onload: function() {
this._super();
if(!this.frm.doc.__islocal) {
// show credit_to in print format
if(!this.frm.doc.supplier && this.frm.doc.credit_to) {
this.frm.set_df_property("credit_to", "print_hide", 0);
}
}
},
refresh: function(doc) {
const me = this;
this._super();
hide_fields(this.frm.doc);
// Show / Hide button
this.show_general_ledger();
if(doc.update_stock==1 && doc.docstatus==1) {
this.show_stock_ledger();
}
this.frm.add_custom_button("Télécharger Fiche Franchise",
() => {
if(doc.items != null)
{
var data = [];
var docfields = [];
data.push(["Ref Facture",'"'+doc.bill_no+'"',"","DATE",'"'+doc.posting_date+'"',"","","","","",""]);
data.push(["#","Code Article","Designation","Ref Article","Qts","Poids","Poids Total","Prix Unitaire","Montant","pays d'origine","pays de provenance"]);
$.each(doc.items || [], (i, d) => {
var row = [];
row.push(['"'+i+'"','"'+d["item_code"]+'"','"'+d["item_name"]+'"','"'+d["ref_fabricant"]+'"','"'+d["qty"]+'"','"'+d["weight_per_unit"]+'"','"'+d["total_weight"]+'"','"'+d["rate"]+'"','"'+d["amount"]+'"','"'+d["pays"] +'"','"'+doc.pays_de_provenance+'"']);
data.push(row);
});
frappe.tools.downloadify(data, null, "FICHE FRANCHISE "+doc.name+" "+doc.supplier_name);
} });
if(!doc.is_return && doc.docstatus == 1 && doc.outstanding_amount != 0){
if(doc.on_hold) {
this.frm.add_custom_button(
__('Change Release Date'),
function() {me.change_release_date()},
__('Hold Invoice')
);
this.frm.add_custom_button(
__('Unblock Invoice'),
function() {me.unblock_invoice()},
__('Make')
);
} else if (!doc.on_hold) {
this.frm.add_custom_button(
__('Block Invoice'),
function() {me.block_invoice()},
__('Make')
);
}
}
if(doc.docstatus == 1 && doc.outstanding_amount != 0
&& !(doc.is_return && doc.return_against)) {
this.frm.add_custom_button(__('Payment'), this.make_payment_entry, __("Make"));
cur_frm.page.set_inner_btn_group_as_primary(__("Make"));
}
if(!doc.is_return && doc.docstatus==1) {
if(doc.outstanding_amount >= 0 || Math.abs(flt(doc.outstanding_amount)) < flt(doc.grand_total)) {
cur_frm.add_custom_button(__('Return / Debit Note'),
this.make_debit_note, __("Make"));
}
cur_frm.add_custom_button("Recu d'achat",this.make_pr,__("Make"));
if(!doc.auto_repeat) {
cur_frm.add_custom_button(__('Subscription'), function() {
erpnext.utils.make_subscription(doc.doctype, doc.name)
}, __("Make"))
}
}
if (doc.outstanding_amount > 0 && !cint(doc.is_return)) {
cur_frm.add_custom_button(__('Payment Request'), function() {
me.make_payment_request()
}, __("Make"));
}
if(doc.docstatus===0) {
this.frm.add_custom_button(__('Purchase Order'), function() {
erpnext.utils.map_current_doc({
method: "erpnext.buying.doctype.purchase_order.purchase_order.make_purchase_invoice",
source_doctype: "Purchase Order",
target: me.frm,
setters: {
supplier: me.frm.doc.supplier || undefined,
},
get_query_filters: {
docstatus: 1,
status: ["!=", "Closed"],
per_billed: ["<", 99.99],
company: me.frm.doc.company
}
})
}, __("Get items from"));
this.frm.add_custom_button(__('Purchase Receipt'), function() {
erpnext.utils.map_current_doc({
method: "erpnext.stock.doctype.purchase_receipt.purchase_receipt.make_purchase_invoice",
source_doctype: "Purchase Receipt",
target: me.frm,
date_field: "posting_date",
setters: {
supplier: me.frm.doc.supplier || undefined,
},
get_query_filters: {
docstatus: 1,
status: ["not in", ["Closed", "Completed"]],
company: me.frm.doc.company,
is_return: 0
}
})
}, __("Get items from"));
}
this.frm.toggle_reqd("supplier_warehouse", this.frm.doc.is_subcontracted==="Yes");
if (doc.docstatus == 1 && !doc.inter_company_invoice_reference) {
frappe.model.with_doc("Supplier", me.frm.doc.supplier, function() {
var supplier = frappe.model.get_doc("Supplier", me.frm.doc.supplier);
var internal = supplier.is_internal_supplier;
var disabled = supplier.disabled;
if (internal == 1 && disabled == 0) {
me.frm.add_custom_button("Inter Company Invoice", function() {
me.make_inter_company_invoice(me.frm);
}, __("Make"));
}
});
}
},
unblock_invoice: function() {
const me = this;
frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.unblock_invoice',
'args': {'name': me.frm.doc.name},
'callback': (r) => me.frm.reload_doc()
});
},
block_invoice: function() {
this.make_comment_dialog_and_block_invoice();
},
change_release_date: function() {
this.make_dialog_and_set_release_date();
},
can_change_release_date: function(date) {
const diff = frappe.datetime.get_diff(date, frappe.datetime.nowdate());
if (diff < 0) {
frappe.throw('New release date should be in the future');
return false;
} else {
return true;
}
},
make_comment_dialog_and_block_invoice: function(){
const me = this;
const title = __('Add Comment');
const fields = [
{
fieldname: 'hold_comment',
read_only: 0,
fieldtype:'Small Text',
label: __('Reason For Putting On Hold'),
default: ""
},
];
this.dialog = new frappe.ui.Dialog({
title: title,
fields: fields
});
this.dialog.set_primary_action(__('Save'), function() {
const dialog_data = me.dialog.get_values();
frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.block_invoice',
'args': {'name': me.frm.doc.name, 'hold_comment': dialog_data.hold_comment},
'callback': (r) => me.frm.reload_doc()
});
me.dialog.hide();
});
this.dialog.show();
},
make_dialog_and_set_release_date: function() {
const me = this;
const title = __('Set New Release Date');
const fields = [
{
fieldname: 'release_date',
read_only: 0,
fieldtype:'Date',
label: __('Release Date'),
default: me.frm.doc.release_date
},
];
this.dialog = new frappe.ui.Dialog({
title: title,
fields: fields
});
this.dialog.set_primary_action(__('Save'), function() {
me.dialog_data = me.dialog.get_values();
if(me.can_change_release_date(me.dialog_data.release_date)) {
me.dialog_data.name = me.frm.doc.name;
me.set_release_date(me.dialog_data);
me.dialog.hide();
}
});
this.dialog.show();
},
set_release_date: function(data) {
return frappe.call({
'method': 'erpnext.accounts.doctype.purchase_invoice.purchase_invoice.change_release_date',
'args': data,
'callback': (r) => this.frm.reload_doc()
});
},
supplier: function() {
var me = this;
if(this.frm.updating_party_details)
return;
erpnext.utils.get_party_details(this.frm, "erpnext.accounts.party.get_party_details",
{
posting_date: this.frm.doc.posting_date,
bill_date: this.frm.doc.bill_date,
party: this.frm.doc.supplier,
party_type: "Supplier",
account: this.frm.doc.credit_to,
price_list: this.frm.doc.buying_price_list
}, function() {
me.apply_pricing_rule();
me.frm.doc.apply_tds = me.frm.supplier_tds ? 1 : 0;
me.frm.set_df_property("apply_tds", "read_only", me.frm.supplier_tds ? 0 : 1);
})
},
credit_to: function() {
var me = this;
if(this.frm.doc.credit_to) {
me.frm.call({
method: "frappe.client.get_value",
args: {
doctype: "Account",
fieldname: "account_currency",
filters: { name: me.frm.doc.credit_to },
},
callback: function(r, rt) {
if(r.message) {
me.frm.set_value("party_account_currency", r.message.account_currency);
me.set_dynamic_labels();
}
}
});
}
},
make_inter_company_invoice: function(frm) {
frappe.model.open_mapped_doc({
method: "erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_inter_company_sales_invoice",
frm: frm
});
},
is_paid: function() {
hide_fields(this.frm.doc);
if(cint(this.frm.doc.is_paid)) {
this.frm.set_value("allocate_advances_automatically", 0);
if(!this.frm.doc.company) {
this.frm.set_value("is_paid", 0)
frappe.msgprint(__("Please specify Company to proceed"));
}
}
this.calculate_outstanding_amount();
this.frm.refresh_fields();
},
write_off_amount: function() {
this.set_in_company_currency(this.frm.doc, ["write_off_amount"]);
this.calculate_outstanding_amount();
this.frm.refresh_fields();
},
paid_amount: function() {
this.set_in_company_currency(this.frm.doc, ["paid_amount"]);
this.write_off_amount();
this.frm.refresh_fields();
},
allocated_amount: function() {
this.calculate_total_advance();
this.frm.refresh_fields();
},
items_add: function(doc, cdt, cdn) {
var row = frappe.get_doc(cdt, cdn);
this.frm.script_manager.copy_from_first_row("items", row,
["expense_account", "cost_center", "project"]);
},
on_submit: function() {
$.each(this.frm.doc["items"] || [], function(i, row) {
if(row.purchase_receipt) frappe.model.clear_doc("Purchase Receipt", row.purchase_receipt)
})
},
make_pr: function() {
frappe.model.open_mapped_doc({
method:"erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_pr",
frm: cur_frm
});
},
make_debit_note: function() {
frappe.model.open_mapped_doc({
method: "erpnext.accounts.doctype.purchase_invoice.purchase_invoice.make_debit_note",
frm: cur_frm
})
},
asset: function(frm, cdt, cdn) {
var row = locals[cdt][cdn];
if(row.asset) {
frappe.call({
method: "erpnext.assets.doctype.asset_category.asset_category.get_asset_category_account",
args: {
"asset": row.asset,
"fieldname": "fixed_asset_account",
"account": row.expense_account
},
callback: function(r, rt) {
frappe.model.set_value(cdt, cdn, "expense_account", r.message);
}
})
}
}
});
cur_frm.script_manager.make(erpnext.accounts.PurchaseInvoice);
// Hide Fields
// ------------
function hide_fields(doc) {
var parent_fields = ['due_date', 'is_opening', 'advances_section', 'from_date', 'to_date'];
if(cint(doc.is_paid) == 1) {
hide_field(parent_fields);
} else {
for (var i in parent_fields) {
var docfield = frappe.meta.docfield_map[doc.doctype][parent_fields[i]];
if(!docfield.hidden) unhide_field(parent_fields[i]);
}
}
var item_fields_stock = ['warehouse_section', 'received_qty', 'rejected_qty'];
cur_frm.fields_dict['items'].grid.set_column_disp(item_fields_stock,
(cint(doc.update_stock)==1 || cint(doc.is_return)==1 ? true : false));
cur_frm.refresh_fields();
}
cur_frm.cscript.update_stock = function(doc, dt, dn) {
hide_fields(doc, dt, dn);
this.frm.fields_dict.items.grid.toggle_reqd("item_code", doc.update_stock? true: false)
}
cur_frm.fields_dict.cash_bank_account.get_query = function(doc) {
return {
filters: [
["Account", "account_type", "in", ["Cash", "Bank"]],
["Account", "is_group", "=",0],
["Account", "company", "=", doc.company],
["Account", "report_type", "=", "Balance Sheet"]
]
}
}
cur_frm.fields_dict['items'].grid.get_field("item_code").get_query = function(doc, cdt, cdn) {
return {
query: "erpnext.controllers.queries.item_query",
filters: {'is_purchase_item': 1}
}
}
cur_frm.fields_dict['credit_to'].get_query = function(doc) {
// filter on Account
if (doc.supplier) {
return {
filters: {
'account_type': 'Payable',
'is_group': 0,
'company': doc.company
}
}
} else {
return {
filters: {
'report_type': 'Balance Sheet',
'is_group': 0,
'company': doc.company
}
}
}
}
// Get Print Heading
cur_frm.fields_dict['select_print_heading'].get_query = function(doc, cdt, cdn) {
return {
filters:[
['Print Heading', 'docstatus', '!=', 2]
]
}
}
cur_frm.set_query("expense_account", "items", function(doc) {
return {
query: "erpnext.controllers.queries.get_expense_account",
filters: {'company': doc.company}
}
});
cur_frm.set_query("asset", "items", function(doc, cdt, cdn) {
var d = locals[cdt][cdn];
return {
filters: {
'item_code': d.item_code,
'docstatus': 1,
'company': doc.company,
'status': 'Submitted'
}
}
});
cur_frm.cscript.expense_account = function(doc, cdt, cdn){
var d = locals[cdt][cdn];
if(d.idx == 1 && d.expense_account){
var cl = doc.items || [];
for(var i = 0; i < cl.length; i++){
if(!cl[i].expense_account) cl[i].expense_account = d.expense_account;
}
}
refresh_field('items');
}
cur_frm.fields_dict["items"].grid.get_field("cost_center").get_query = function(doc) {
return {
filters: {
'company': doc.company,
'is_group': 0
}
}
}
cur_frm.cscript.cost_center = function(doc, cdt, cdn){
var d = locals[cdt][cdn];
if(d.cost_center){
var cl = doc.items || [];
for(var i = 0; i < cl.length; i++){
if(!cl[i].cost_center) cl[i].cost_center = d.cost_center;
}
}
refresh_field('items');
}
cur_frm.fields_dict['items'].grid.get_field('project').get_query = function(doc, cdt, cdn) {
return{
filters:[
['Project', 'status', 'not in', 'Completed, Cancelled']
]
}
}
cur_frm.cscript.select_print_heading = function(doc,cdt,cdn){
if(doc.select_print_heading){
// print heading
cur_frm.pformat.print_heading = doc.select_print_heading;
}
else
cur_frm.pformat.print_heading = __("Purchase Invoice");
}
frappe.ui.form.on("Purchase Invoice", {
setup: function(frm) {
frm.custom_make_buttons = {
'Purchase Invoice': 'Debit Note',
'Payment Entry': 'Payment'
}
frm.fields_dict['items'].grid.get_field('deferred_expense_account').get_query = function(doc) {
return {
filters: {
'root_type': 'Asset',
'company': doc.company,
"is_group": 0
}
}
}
frm.set_query("cost_center", function() {
return {
filters: {
company: frm.doc.company,
is_group: 0
}
};
});
},
onload: function(frm) {
if(frm.doc.__onload) {
if(frm.doc.supplier) {
frm.doc.apply_tds = frm.doc.__onload.supplier_tds ? 1 : 0;
}
if(!frm.doc.__onload.supplier_tds) {
frm.set_df_property("apply_tds", "read_only", 1);
}
}
erpnext.queries.setup_queries(frm, "Warehouse", function() {
return erpnext.queries.warehouse(frm.doc);
});
},
is_subcontracted: function(frm) {
if (frm.doc.is_subcontracted === "Yes") {
erpnext.buying.get_default_bom(frm);
}
frm.toggle_reqd("supplier_warehouse", frm.doc.is_subcontracted==="Yes");
}
})
|
{
this.frm.set_indicator_formatter('item_code', function(doc) {
return (doc.qty<=doc.received_qty) ? "green" : "orange";
});
}
|
conditional_block
|
app.ts
|
import {routes, ng, model, notify, http, BaseModel, Collection, Behaviours} from 'entcore'
import { forumExtensions } from './extensions';
import { forumController } from './controller'
routes.define(function($routeProvider){
$routeProvider
.when('/view/:categoryId', {
action: 'goToCategory'
})
.when('/view/:categoryId/:subjectId', {
action: 'goToSubject'
})
.when('/print/:categoryId', {
action: 'print'
})
.when('/print/:categoryId/subject/:subjectId', {
action: 'print'
})
.otherwise({
action: 'mainPage'
});
});
// TODO: Legacy implementation to migrate to toolkit style
model.build = function () {
console.log("builllld");
var forumModel = Behaviours.applicationsBehaviours.forum.namespace;
this.makeModels([
forumModel.Category,
forumModel.Subject,
forumModel.Message
]);
forumExtensions.extendEditor()
// Category prototype
Behaviours.applicationsBehaviours.forum.namespace.Category.prototype.open = function(cb){
this.subjects.one('sync', function(){
if(typeof cb === 'function'){
cb();
}
}.bind(this));
this.subjects.sync();
};
Behaviours.applicationsBehaviours.forum.namespace.Category.prototype.saveModifications = function(callback){
http().putJson('/forum/category/' + this._id, this).done(function(e){
notify.info('forum.subject.modification.saved');
if(typeof callback === 'function')
|
});
};
Behaviours.applicationsBehaviours.forum.namespace.Category.prototype.save = function(callback){
if(this._id){
this.saveModifications(callback);
}
else{
this.createCategory(function(){
(model as any).categories.sync();
if (typeof callback === 'function') {
callback();
}
});
}
};
Behaviours.applicationsBehaviours.forum.namespace.Category.prototype.toJSON = function(){
return {
name: this.name,
icon: this.icon
};
};
// Build
this.collection(Behaviours.applicationsBehaviours.forum.namespace.Category, {
sync: function(callback){
http().get('/forum/categories').done(function(categories){
this.load(categories);
this.forEach(function(category){
category.open();
});
if(typeof callback === 'function'){
callback();
}
}.bind(this));
},
removeSelection: function(callback){
var counter = this.selection().length;
this.selection().forEach(function(item){
http().delete('/forum/category/' + item._id).done(function(){
counter = counter - 1;
if (counter === 0) {
(model as any).categories.sync();
if(typeof callback === 'function'){
callback();
}
}
});
});
},
behaviours: 'forum'
})
};
ng.controllers.push(forumController);
|
{
callback();
}
|
conditional_block
|
app.ts
|
import {routes, ng, model, notify, http, BaseModel, Collection, Behaviours} from 'entcore'
import { forumExtensions } from './extensions';
import { forumController } from './controller'
routes.define(function($routeProvider){
$routeProvider
.when('/view/:categoryId', {
action: 'goToCategory'
})
.when('/view/:categoryId/:subjectId', {
action: 'goToSubject'
})
.when('/print/:categoryId', {
action: 'print'
})
.when('/print/:categoryId/subject/:subjectId', {
action: 'print'
})
.otherwise({
action: 'mainPage'
});
});
// TODO: Legacy implementation to migrate to toolkit style
model.build = function () {
console.log("builllld");
var forumModel = Behaviours.applicationsBehaviours.forum.namespace;
this.makeModels([
forumModel.Category,
forumModel.Subject,
forumModel.Message
]);
forumExtensions.extendEditor()
// Category prototype
Behaviours.applicationsBehaviours.forum.namespace.Category.prototype.open = function(cb){
this.subjects.one('sync', function(){
if(typeof cb === 'function'){
cb();
}
}.bind(this));
this.subjects.sync();
};
Behaviours.applicationsBehaviours.forum.namespace.Category.prototype.saveModifications = function(callback){
http().putJson('/forum/category/' + this._id, this).done(function(e){
notify.info('forum.subject.modification.saved');
if(typeof callback === 'function'){
callback();
}
});
};
Behaviours.applicationsBehaviours.forum.namespace.Category.prototype.save = function(callback){
if(this._id){
this.saveModifications(callback);
}
else{
this.createCategory(function(){
(model as any).categories.sync();
if (typeof callback === 'function') {
callback();
}
});
}
};
Behaviours.applicationsBehaviours.forum.namespace.Category.prototype.toJSON = function(){
return {
name: this.name,
icon: this.icon
};
};
// Build
this.collection(Behaviours.applicationsBehaviours.forum.namespace.Category, {
sync: function(callback){
http().get('/forum/categories').done(function(categories){
this.load(categories);
this.forEach(function(category){
category.open();
});
if(typeof callback === 'function'){
callback();
|
}
}.bind(this));
},
removeSelection: function(callback){
var counter = this.selection().length;
this.selection().forEach(function(item){
http().delete('/forum/category/' + item._id).done(function(){
counter = counter - 1;
if (counter === 0) {
(model as any).categories.sync();
if(typeof callback === 'function'){
callback();
}
}
});
});
},
behaviours: 'forum'
})
};
ng.controllers.push(forumController);
|
random_line_split
|
|
test_static.py
|
import unittest
from cStringIO import StringIO
from ..backends import static
# There aren't many tests here because it turns out to be way more convenient to
# use test_serializer for the majority of cases
class TestStatic(unittest.TestCase):
def compile(self, input_text, input_data):
return static.compile(input_text, input_data)
def test_get_0(self):
data = """
key: value
[Heading 1]
other_key:
if a == 1: value_1
if a == 2: value_2
value_3
"""
manifest = self.compile(data, {"a": 2})
self.assertEquals(manifest.get("key"), "value")
children = list(item for item in manifest.iterchildren())
self.assertEquals(len(children), 1)
section = children[0]
self.assertEquals(section.name, "Heading 1")
self.assertEquals(section.get("other_key"), "value_2")
self.assertEquals(section.get("key"), "value")
def test_get_1(self):
data = """
key: value
[Heading 1]
other_key:
if a == 1: value_1
if a == 2: value_2
value_3
"""
manifest = self.compile(data, {"a": 3})
children = list(item for item in manifest.iterchildren())
section = children[0]
self.assertEquals(section.get("other_key"), "value_3")
def test_get_3(self):
data = """key:
if a == "1": value_1
if a[0] == "ab"[0]: value_2
"""
manifest = self.compile(data, {"a": "1"})
self.assertEquals(manifest.get("key"), "value_1")
manifest = self.compile(data, {"a": "ac"})
self.assertEquals(manifest.get("key"), "value_2")
def test_get_4(self):
data = """key:
if not a: value_1
value_2
"""
manifest = self.compile(data, {"a": True})
self.assertEquals(manifest.get("key"), "value_2")
manifest = self.compile(data, {"a": False})
self.assertEquals(manifest.get("key"), "value_1")
def
|
(self):
data = """key:
if a == 1.5: value_1
value_2
key_1: other_value
"""
manifest = self.compile(data, {"a": 1.5})
self.assertFalse(manifest.is_empty)
self.assertEquals(manifest.root, manifest)
self.assertTrue(manifest.has_key("key_1"))
self.assertFalse(manifest.has_key("key_2"))
self.assertEquals(set(manifest.iterkeys()), set(["key", "key_1"]))
self.assertEquals(set(manifest.itervalues()), set(["value_1", "other_value"]))
def test_is_empty_1(self):
data = """
[Section]
[Subsection]
"""
manifest = self.compile(data, {})
self.assertTrue(manifest.is_empty)
|
test_api
|
identifier_name
|
test_static.py
|
import unittest
from cStringIO import StringIO
from ..backends import static
# There aren't many tests here because it turns out to be way more convenient to
# use test_serializer for the majority of cases
class TestStatic(unittest.TestCase):
def compile(self, input_text, input_data):
return static.compile(input_text, input_data)
def test_get_0(self):
data = """
key: value
[Heading 1]
other_key:
if a == 1: value_1
if a == 2: value_2
value_3
"""
manifest = self.compile(data, {"a": 2})
self.assertEquals(manifest.get("key"), "value")
children = list(item for item in manifest.iterchildren())
self.assertEquals(len(children), 1)
section = children[0]
self.assertEquals(section.name, "Heading 1")
self.assertEquals(section.get("other_key"), "value_2")
self.assertEquals(section.get("key"), "value")
def test_get_1(self):
data = """
key: value
[Heading 1]
other_key:
if a == 1: value_1
if a == 2: value_2
value_3
"""
manifest = self.compile(data, {"a": 3})
children = list(item for item in manifest.iterchildren())
section = children[0]
self.assertEquals(section.get("other_key"), "value_3")
def test_get_3(self):
|
def test_get_4(self):
data = """key:
if not a: value_1
value_2
"""
manifest = self.compile(data, {"a": True})
self.assertEquals(manifest.get("key"), "value_2")
manifest = self.compile(data, {"a": False})
self.assertEquals(manifest.get("key"), "value_1")
def test_api(self):
data = """key:
if a == 1.5: value_1
value_2
key_1: other_value
"""
manifest = self.compile(data, {"a": 1.5})
self.assertFalse(manifest.is_empty)
self.assertEquals(manifest.root, manifest)
self.assertTrue(manifest.has_key("key_1"))
self.assertFalse(manifest.has_key("key_2"))
self.assertEquals(set(manifest.iterkeys()), set(["key", "key_1"]))
self.assertEquals(set(manifest.itervalues()), set(["value_1", "other_value"]))
def test_is_empty_1(self):
data = """
[Section]
[Subsection]
"""
manifest = self.compile(data, {})
self.assertTrue(manifest.is_empty)
|
data = """key:
if a == "1": value_1
if a[0] == "ab"[0]: value_2
"""
manifest = self.compile(data, {"a": "1"})
self.assertEquals(manifest.get("key"), "value_1")
manifest = self.compile(data, {"a": "ac"})
self.assertEquals(manifest.get("key"), "value_2")
|
identifier_body
|
test_static.py
|
import unittest
from cStringIO import StringIO
from ..backends import static
# There aren't many tests here because it turns out to be way more convenient to
# use test_serializer for the majority of cases
class TestStatic(unittest.TestCase):
def compile(self, input_text, input_data):
return static.compile(input_text, input_data)
def test_get_0(self):
data = """
key: value
[Heading 1]
other_key:
if a == 1: value_1
if a == 2: value_2
value_3
"""
manifest = self.compile(data, {"a": 2})
self.assertEquals(manifest.get("key"), "value")
children = list(item for item in manifest.iterchildren())
self.assertEquals(len(children), 1)
section = children[0]
self.assertEquals(section.name, "Heading 1")
self.assertEquals(section.get("other_key"), "value_2")
self.assertEquals(section.get("key"), "value")
def test_get_1(self):
data = """
key: value
[Heading 1]
other_key:
if a == 1: value_1
if a == 2: value_2
value_3
"""
manifest = self.compile(data, {"a": 3})
children = list(item for item in manifest.iterchildren())
section = children[0]
self.assertEquals(section.get("other_key"), "value_3")
def test_get_3(self):
data = """key:
if a == "1": value_1
|
"""
manifest = self.compile(data, {"a": "1"})
self.assertEquals(manifest.get("key"), "value_1")
manifest = self.compile(data, {"a": "ac"})
self.assertEquals(manifest.get("key"), "value_2")
def test_get_4(self):
data = """key:
if not a: value_1
value_2
"""
manifest = self.compile(data, {"a": True})
self.assertEquals(manifest.get("key"), "value_2")
manifest = self.compile(data, {"a": False})
self.assertEquals(manifest.get("key"), "value_1")
def test_api(self):
data = """key:
if a == 1.5: value_1
value_2
key_1: other_value
"""
manifest = self.compile(data, {"a": 1.5})
self.assertFalse(manifest.is_empty)
self.assertEquals(manifest.root, manifest)
self.assertTrue(manifest.has_key("key_1"))
self.assertFalse(manifest.has_key("key_2"))
self.assertEquals(set(manifest.iterkeys()), set(["key", "key_1"]))
self.assertEquals(set(manifest.itervalues()), set(["value_1", "other_value"]))
def test_is_empty_1(self):
data = """
[Section]
[Subsection]
"""
manifest = self.compile(data, {})
self.assertTrue(manifest.is_empty)
|
if a[0] == "ab"[0]: value_2
|
random_line_split
|
070-E-ClimbingStairs.py
|
# You are climbing a stair case. It takes n steps to reach to the top.
#
# Each time you can either climb 1 or 2 steps. In how many distinct ways can you climb to the top?
#
# Note: Given n will be a positive integer.
#
# Example 1:
#
# Input: 2
# Output: 2
# Explanation: There are two ways to climb to the top.
# 1. 1 step + 1 step
# 2. 2 steps
#
# Example 2:
|
# Explanation: There are three ways to climb to the top.
# 1. 1 step + 1 step + 1 step
# 2. 1 step + 2 steps
# 3. 2 steps + 1 step
class Solution(object):
def climbStairs(self, n):
"""
:type n: int
:rtype: int
"""
table = [1, 2]
i = 2
while i < n:
table.append(table[i-1] + table[i-2])
i += 1
return table[n-1]
# Note:
# Generate two trees one with 1 step and other with 2 step and add both
|
#
# Input: 3
# Output: 3
|
random_line_split
|
070-E-ClimbingStairs.py
|
# You are climbing a stair case. It takes n steps to reach to the top.
#
# Each time you can either climb 1 or 2 steps. In how many distinct ways can you climb to the top?
#
# Note: Given n will be a positive integer.
#
# Example 1:
#
# Input: 2
# Output: 2
# Explanation: There are two ways to climb to the top.
# 1. 1 step + 1 step
# 2. 2 steps
#
# Example 2:
#
# Input: 3
# Output: 3
# Explanation: There are three ways to climb to the top.
# 1. 1 step + 1 step + 1 step
# 2. 1 step + 2 steps
# 3. 2 steps + 1 step
class Solution(object):
def climbStairs(self, n):
"""
:type n: int
:rtype: int
"""
table = [1, 2]
i = 2
while i < n:
|
return table[n-1]
# Note:
# Generate two trees one with 1 step and other with 2 step and add both
|
table.append(table[i-1] + table[i-2])
i += 1
|
conditional_block
|
070-E-ClimbingStairs.py
|
# You are climbing a stair case. It takes n steps to reach to the top.
#
# Each time you can either climb 1 or 2 steps. In how many distinct ways can you climb to the top?
#
# Note: Given n will be a positive integer.
#
# Example 1:
#
# Input: 2
# Output: 2
# Explanation: There are two ways to climb to the top.
# 1. 1 step + 1 step
# 2. 2 steps
#
# Example 2:
#
# Input: 3
# Output: 3
# Explanation: There are three ways to climb to the top.
# 1. 1 step + 1 step + 1 step
# 2. 1 step + 2 steps
# 3. 2 steps + 1 step
class Solution(object):
def
|
(self, n):
"""
:type n: int
:rtype: int
"""
table = [1, 2]
i = 2
while i < n:
table.append(table[i-1] + table[i-2])
i += 1
return table[n-1]
# Note:
# Generate two trees one with 1 step and other with 2 step and add both
|
climbStairs
|
identifier_name
|
070-E-ClimbingStairs.py
|
# You are climbing a stair case. It takes n steps to reach to the top.
#
# Each time you can either climb 1 or 2 steps. In how many distinct ways can you climb to the top?
#
# Note: Given n will be a positive integer.
#
# Example 1:
#
# Input: 2
# Output: 2
# Explanation: There are two ways to climb to the top.
# 1. 1 step + 1 step
# 2. 2 steps
#
# Example 2:
#
# Input: 3
# Output: 3
# Explanation: There are three ways to climb to the top.
# 1. 1 step + 1 step + 1 step
# 2. 1 step + 2 steps
# 3. 2 steps + 1 step
class Solution(object):
def climbStairs(self, n):
|
# Note:
# Generate two trees one with 1 step and other with 2 step and add both
|
"""
:type n: int
:rtype: int
"""
table = [1, 2]
i = 2
while i < n:
table.append(table[i-1] + table[i-2])
i += 1
return table[n-1]
|
identifier_body
|
functions.rs
|
/*
* MIT License
*
* Copyright (c) 2016 Johnathan Fercher
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
**/
// receives two u8 and returns the bigger
fn return_max(a: u8, b: u8) -> u8 {
if a > b {
a
}else{
b
}
}
// receives two u8 and prints the bigger, without any return
fn print_max(a: u8, b: u8) -> () {
let mut low;
if a > b {
low = a;
}else{
low = b;
}
println!("{}", low);
}
fn main() {
println!("{}", return_max(10, 50));
print_max(10, 50);
|
}
|
random_line_split
|
|
functions.rs
|
/*
* MIT License
*
* Copyright (c) 2016 Johnathan Fercher
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
**/
// receives two u8 and returns the bigger
fn return_max(a: u8, b: u8) -> u8
|
// receives two u8 and prints the bigger, without any return
fn print_max(a: u8, b: u8) -> () {
let mut low;
if a > b {
low = a;
}else{
low = b;
}
println!("{}", low);
}
fn main() {
println!("{}", return_max(10, 50));
print_max(10, 50);
}
|
{
if a > b {
a
}else{
b
}
}
|
identifier_body
|
functions.rs
|
/*
* MIT License
*
* Copyright (c) 2016 Johnathan Fercher
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
**/
// receives two u8 and returns the bigger
fn return_max(a: u8, b: u8) -> u8 {
if a > b {
a
}else{
b
}
}
// receives two u8 and prints the bigger, without any return
fn print_max(a: u8, b: u8) -> () {
let mut low;
if a > b
|
else{
low = b;
}
println!("{}", low);
}
fn main() {
println!("{}", return_max(10, 50));
print_max(10, 50);
}
|
{
low = a;
}
|
conditional_block
|
functions.rs
|
/*
* MIT License
*
* Copyright (c) 2016 Johnathan Fercher
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
**/
// receives two u8 and returns the bigger
fn
|
(a: u8, b: u8) -> u8 {
if a > b {
a
}else{
b
}
}
// receives two u8 and prints the bigger, without any return
fn print_max(a: u8, b: u8) -> () {
let mut low;
if a > b {
low = a;
}else{
low = b;
}
println!("{}", low);
}
fn main() {
println!("{}", return_max(10, 50));
print_max(10, 50);
}
|
return_max
|
identifier_name
|
memory.js
|
'use strict';
var client = require('../http-client');
var extend = require('extend');
var http = require('http');
module.exports = function(dependencies) {
var graceperiod = dependencies('graceperiod');
var logger = dependencies('logger');
return function(req, res, options) {
var target = options.endpoint + '/' + options.path + req.url;
var delay = options.graceperiod;
var context = {
user: req.user._id
};
function forwardRequest(callback) {
var requestOptions = {
method: req.method,
url: target,
headers: extend({}, req.headers, { ESNToken: req.token.token })
};
if (options.json) {
requestOptions.json = options.json;
}
if (req.body && req.method !== 'DELETE') {
requestOptions.body = req.body;
}
client(requestOptions, function(err, response, body) {
if (err) {
logger.error('Error while sending request', err);
if (options.onError) {
return options.onError(response, body, req, res, callback.bind(null, new Error('Error while sending request')));
}
return callback(new Error('Error while sending request'));
}
logger.info('Response from remote service: HTTP %s', response.statusCode);
var error;
if (response.statusCode >= 200 && response.statusCode < 300) {
if (options.onSuccess) {
return options.onSuccess(response, body, req, res, callback.bind(null, null, response));
}
} else
|
callback(error, response);
});
}
function onComplete(err, result) {
logger.debug('Task has been completed');
if (err) {
logger.error('Error while sending request to remote service', err);
}
if (result) {
logger.info('Remote service response status code', result.statusCode);
}
}
function onCancel() {
logger.info('Task has been aborted');
}
graceperiod.create(forwardRequest, delay, context, onComplete, onCancel).then(function(task) {
logger.info('Grace Task %s has been created for %s', task.id, target);
res.set('X-ESN-Task-Id', task.id);
return res.status(202).json({id: task.id});
}, function(err) {
logger.error('Error while creating deferred task', err);
return res.status(500).json({error: {code: 500, message: 'Server Error', details: 'Can not get create deferred task'}});
});
};
};
|
{
error = {error: {code: response.statusCode, message: http.STATUS_CODES[response.statusCode], details: response.statusMessage}};
logger.error('Error from remote service : ', response.body);
if (options.onError) {
return options.onError(response, body, req, res, callback.bind(null, error, response));
}
}
|
conditional_block
|
memory.js
|
'use strict';
var client = require('../http-client');
var extend = require('extend');
var http = require('http');
module.exports = function(dependencies) {
var graceperiod = dependencies('graceperiod');
var logger = dependencies('logger');
return function(req, res, options) {
var target = options.endpoint + '/' + options.path + req.url;
var delay = options.graceperiod;
var context = {
user: req.user._id
};
function forwardRequest(callback) {
var requestOptions = {
method: req.method,
url: target,
headers: extend({}, req.headers, { ESNToken: req.token.token })
};
if (options.json) {
requestOptions.json = options.json;
}
if (req.body && req.method !== 'DELETE') {
requestOptions.body = req.body;
}
client(requestOptions, function(err, response, body) {
if (err) {
logger.error('Error while sending request', err);
if (options.onError) {
return options.onError(response, body, req, res, callback.bind(null, new Error('Error while sending request')));
}
return callback(new Error('Error while sending request'));
}
logger.info('Response from remote service: HTTP %s', response.statusCode);
var error;
if (response.statusCode >= 200 && response.statusCode < 300) {
if (options.onSuccess) {
return options.onSuccess(response, body, req, res, callback.bind(null, null, response));
}
} else {
error = {error: {code: response.statusCode, message: http.STATUS_CODES[response.statusCode], details: response.statusMessage}};
logger.error('Error from remote service : ', response.body);
if (options.onError) {
return options.onError(response, body, req, res, callback.bind(null, error, response));
}
}
callback(error, response);
});
}
function
|
(err, result) {
logger.debug('Task has been completed');
if (err) {
logger.error('Error while sending request to remote service', err);
}
if (result) {
logger.info('Remote service response status code', result.statusCode);
}
}
function onCancel() {
logger.info('Task has been aborted');
}
graceperiod.create(forwardRequest, delay, context, onComplete, onCancel).then(function(task) {
logger.info('Grace Task %s has been created for %s', task.id, target);
res.set('X-ESN-Task-Id', task.id);
return res.status(202).json({id: task.id});
}, function(err) {
logger.error('Error while creating deferred task', err);
return res.status(500).json({error: {code: 500, message: 'Server Error', details: 'Can not get create deferred task'}});
});
};
};
|
onComplete
|
identifier_name
|
memory.js
|
'use strict';
var client = require('../http-client');
var extend = require('extend');
var http = require('http');
module.exports = function(dependencies) {
var graceperiod = dependencies('graceperiod');
var logger = dependencies('logger');
return function(req, res, options) {
var target = options.endpoint + '/' + options.path + req.url;
var delay = options.graceperiod;
var context = {
user: req.user._id
};
function forwardRequest(callback) {
var requestOptions = {
method: req.method,
url: target,
headers: extend({}, req.headers, { ESNToken: req.token.token })
};
if (options.json) {
requestOptions.json = options.json;
}
if (req.body && req.method !== 'DELETE') {
requestOptions.body = req.body;
}
client(requestOptions, function(err, response, body) {
if (err) {
logger.error('Error while sending request', err);
if (options.onError) {
return options.onError(response, body, req, res, callback.bind(null, new Error('Error while sending request')));
}
return callback(new Error('Error while sending request'));
}
logger.info('Response from remote service: HTTP %s', response.statusCode);
var error;
if (response.statusCode >= 200 && response.statusCode < 300) {
if (options.onSuccess) {
return options.onSuccess(response, body, req, res, callback.bind(null, null, response));
}
} else {
error = {error: {code: response.statusCode, message: http.STATUS_CODES[response.statusCode], details: response.statusMessage}};
logger.error('Error from remote service : ', response.body);
if (options.onError) {
return options.onError(response, body, req, res, callback.bind(null, error, response));
}
}
callback(error, response);
});
}
function onComplete(err, result) {
logger.debug('Task has been completed');
if (err) {
logger.error('Error while sending request to remote service', err);
}
if (result) {
logger.info('Remote service response status code', result.statusCode);
}
}
function onCancel()
|
graceperiod.create(forwardRequest, delay, context, onComplete, onCancel).then(function(task) {
logger.info('Grace Task %s has been created for %s', task.id, target);
res.set('X-ESN-Task-Id', task.id);
return res.status(202).json({id: task.id});
}, function(err) {
logger.error('Error while creating deferred task', err);
return res.status(500).json({error: {code: 500, message: 'Server Error', details: 'Can not get create deferred task'}});
});
};
};
|
{
logger.info('Task has been aborted');
}
|
identifier_body
|
memory.js
|
'use strict';
var client = require('../http-client');
var extend = require('extend');
var http = require('http');
module.exports = function(dependencies) {
var graceperiod = dependencies('graceperiod');
var logger = dependencies('logger');
|
var target = options.endpoint + '/' + options.path + req.url;
var delay = options.graceperiod;
var context = {
user: req.user._id
};
function forwardRequest(callback) {
var requestOptions = {
method: req.method,
url: target,
headers: extend({}, req.headers, { ESNToken: req.token.token })
};
if (options.json) {
requestOptions.json = options.json;
}
if (req.body && req.method !== 'DELETE') {
requestOptions.body = req.body;
}
client(requestOptions, function(err, response, body) {
if (err) {
logger.error('Error while sending request', err);
if (options.onError) {
return options.onError(response, body, req, res, callback.bind(null, new Error('Error while sending request')));
}
return callback(new Error('Error while sending request'));
}
logger.info('Response from remote service: HTTP %s', response.statusCode);
var error;
if (response.statusCode >= 200 && response.statusCode < 300) {
if (options.onSuccess) {
return options.onSuccess(response, body, req, res, callback.bind(null, null, response));
}
} else {
error = {error: {code: response.statusCode, message: http.STATUS_CODES[response.statusCode], details: response.statusMessage}};
logger.error('Error from remote service : ', response.body);
if (options.onError) {
return options.onError(response, body, req, res, callback.bind(null, error, response));
}
}
callback(error, response);
});
}
function onComplete(err, result) {
logger.debug('Task has been completed');
if (err) {
logger.error('Error while sending request to remote service', err);
}
if (result) {
logger.info('Remote service response status code', result.statusCode);
}
}
function onCancel() {
logger.info('Task has been aborted');
}
graceperiod.create(forwardRequest, delay, context, onComplete, onCancel).then(function(task) {
logger.info('Grace Task %s has been created for %s', task.id, target);
res.set('X-ESN-Task-Id', task.id);
return res.status(202).json({id: task.id});
}, function(err) {
logger.error('Error while creating deferred task', err);
return res.status(500).json({error: {code: 500, message: 'Server Error', details: 'Can not get create deferred task'}});
});
};
};
|
return function(req, res, options) {
|
random_line_split
|
issue-11881.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate rbml;
extern crate serialize;
use std::io;
use std::io::{IoError, IoResult, SeekStyle};
use std::slice;
use serialize::{Encodable, Encoder};
use serialize::json;
use rbml::writer;
use rbml::io::SeekableMemWriter;
#[deriving(Encodable)]
struct Foo {
baz: bool,
}
#[deriving(Encodable)]
struct Bar {
froboz: uint,
}
enum WireProtocol {
JSON,
RBML,
|
// ...
}
fn encode_json<'a,
T: Encodable<json::Encoder<'a>,
std::io::IoError>>(val: &T,
wr: &'a mut SeekableMemWriter) {
let mut encoder = json::Encoder::new(wr);
val.encode(&mut encoder);
}
fn encode_rbml<'a,
T: Encodable<writer::Encoder<'a, SeekableMemWriter>,
std::io::IoError>>(val: &T,
wr: &'a mut SeekableMemWriter) {
let mut encoder = writer::Encoder::new(wr);
val.encode(&mut encoder);
}
pub fn main() {
let target = Foo{baz: false,};
let mut wr = SeekableMemWriter::new();
let proto = WireProtocol::JSON;
match proto {
WireProtocol::JSON => encode_json(&target, &mut wr),
WireProtocol::RBML => encode_rbml(&target, &mut wr)
}
}
|
random_line_split
|
|
issue-11881.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate rbml;
extern crate serialize;
use std::io;
use std::io::{IoError, IoResult, SeekStyle};
use std::slice;
use serialize::{Encodable, Encoder};
use serialize::json;
use rbml::writer;
use rbml::io::SeekableMemWriter;
#[deriving(Encodable)]
struct
|
{
baz: bool,
}
#[deriving(Encodable)]
struct Bar {
froboz: uint,
}
enum WireProtocol {
JSON,
RBML,
// ...
}
fn encode_json<'a,
T: Encodable<json::Encoder<'a>,
std::io::IoError>>(val: &T,
wr: &'a mut SeekableMemWriter) {
let mut encoder = json::Encoder::new(wr);
val.encode(&mut encoder);
}
fn encode_rbml<'a,
T: Encodable<writer::Encoder<'a, SeekableMemWriter>,
std::io::IoError>>(val: &T,
wr: &'a mut SeekableMemWriter) {
let mut encoder = writer::Encoder::new(wr);
val.encode(&mut encoder);
}
pub fn main() {
let target = Foo{baz: false,};
let mut wr = SeekableMemWriter::new();
let proto = WireProtocol::JSON;
match proto {
WireProtocol::JSON => encode_json(&target, &mut wr),
WireProtocol::RBML => encode_rbml(&target, &mut wr)
}
}
|
Foo
|
identifier_name
|
deferred.py
|
# -*- coding: utf-8 -*-
import copy
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.db import models
from django.utils import six
from django.utils.functional import SimpleLazyObject, _super, empty
from shop import settings as shop_settings
class DeferredRelatedField(object):
def __init__(self, to, **kwargs):
try:
self.abstract_model = to._meta.object_name
except AttributeError:
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter must be either a model or a model name" % (self.__class__.__name__, to)
self.abstract_model = to
else:
assert to._meta.abstract, "%s can only define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
self.options = kwargs
class OneToOneField(DeferredRelatedField):
"""
Use this class to specify a one-to-one key in abstract classes. It will be converted into a real
``OneToOneField`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.OneToOneField
class ForeignKey(DeferredRelatedField):
"""
Use this class to specify foreign keys in abstract classes. It will be converted into a real
``ForeignKey`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.ForeignKey
class ManyToManyField(DeferredRelatedField):
"""
Use this class to specify many-to-many keys in abstract classes. They will be converted into a
real ``ManyToManyField`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.ManyToManyField
class ForeignKeyBuilder(ModelBase):
"""
Here the magic happens: All known and deferred foreign keys are mapped to their correct model's
counterpart.
If the main application stores its models in its own directory, add to settings.py:
SHOP_APP_LABEL = 'myshop'
so that the models are created inside your own shop instatiation.
"""
_materialized_models = {}
_pending_mappings = []
def __new__(cls, name, bases, attrs):
class Meta:
|
attrs.setdefault('Meta', Meta)
if not hasattr(attrs['Meta'], 'app_label') and not getattr(attrs['Meta'], 'abstract', False):
attrs['Meta'].app_label = Meta.app_label
attrs.setdefault('__module__', getattr(bases[-1], '__module__'))
Model = super(ForeignKeyBuilder, cls).__new__(cls, name, bases, attrs)
if Model._meta.abstract:
return Model
for baseclass in bases:
# classes which materialize an abstract model are added to a mapping dictionary
basename = baseclass.__name__
try:
if not issubclass(Model, baseclass) or not baseclass._meta.abstract:
raise ImproperlyConfigured("Base class %s is not abstract." % basename)
except (AttributeError, NotImplementedError):
pass
else:
if basename in cls._materialized_models:
if Model.__name__ != cls._materialized_models[basename]:
raise AssertionError("Both Model classes '%s' and '%s' inherited from abstract"
"base class %s, which is disallowed in this configuration." %
(Model.__name__, cls._materialized_models[basename], basename))
else:
cls._materialized_models[basename] = Model.__name__
# remember the materialized model mapping in the base class for further usage
baseclass._materialized_model = Model
ForeignKeyBuilder.process_pending_mappings(Model, basename)
# search for deferred foreign fields in our Model
for attrname in dir(Model):
try:
member = getattr(Model, attrname)
except AttributeError:
continue
if not isinstance(member, DeferredRelatedField):
continue
mapmodel = cls._materialized_models.get(member.abstract_model)
if mapmodel:
field = member.MaterializedField(mapmodel, **member.options)
field.contribute_to_class(Model, attrname)
else:
ForeignKeyBuilder._pending_mappings.append((Model, attrname, member,))
return Model
@staticmethod
def process_pending_mappings(Model, basename):
# check for pending mappings and in case, process them and remove them from the list
for mapping in ForeignKeyBuilder._pending_mappings[:]:
if mapping[2].abstract_model == basename:
field = mapping[2].MaterializedField(Model, **mapping[2].options)
field.contribute_to_class(mapping[0], mapping[1])
ForeignKeyBuilder._pending_mappings.remove(mapping)
def __getattr__(self, key):
if key == '_materialized_model':
msg = "No class implements abstract base model: `{}`."
raise ImproperlyConfigured(msg.format(self.__name__))
return object.__getattribute__(self, key)
class MaterializedModel(SimpleLazyObject):
"""
Wrap the base model into a lazy object, so that we can refer to members of its
materialized model using lazy evaluation.
"""
def __init__(self, base_model):
self.__dict__['_base_model'] = base_model
_super(SimpleLazyObject, self).__init__()
def _setup(self):
self._wrapped = getattr(self._base_model, '_materialized_model')
def __call__(self, *args, **kwargs):
# calls the constructor of the materialized model
if self._wrapped is empty:
self._setup()
return self._wrapped(*args, **kwargs)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use SimpleLazyObject, not self.__class__, because the latter is proxied.
result = MaterializedModel(self._base_model)
memo[id(self)] = result
return result
else:
return copy.deepcopy(self._wrapped, memo)
def __repr__(self):
if self._wrapped is empty:
repr_attr = self._base_model
else:
repr_attr = self._wrapped
return '<MaterializedModel: {}>'.format(repr_attr)
|
app_label = shop_settings.APP_LABEL
|
identifier_body
|
deferred.py
|
# -*- coding: utf-8 -*-
import copy
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.db import models
from django.utils import six
from django.utils.functional import SimpleLazyObject, _super, empty
from shop import settings as shop_settings
class DeferredRelatedField(object):
def __init__(self, to, **kwargs):
try:
self.abstract_model = to._meta.object_name
except AttributeError:
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter must be either a model or a model name" % (self.__class__.__name__, to)
self.abstract_model = to
else:
assert to._meta.abstract, "%s can only define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
self.options = kwargs
class OneToOneField(DeferredRelatedField):
"""
Use this class to specify a one-to-one key in abstract classes. It will be converted into a real
``OneToOneField`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.OneToOneField
class ForeignKey(DeferredRelatedField):
"""
Use this class to specify foreign keys in abstract classes. It will be converted into a real
``ForeignKey`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.ForeignKey
class
|
(DeferredRelatedField):
"""
Use this class to specify many-to-many keys in abstract classes. They will be converted into a
real ``ManyToManyField`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.ManyToManyField
class ForeignKeyBuilder(ModelBase):
"""
Here the magic happens: All known and deferred foreign keys are mapped to their correct model's
counterpart.
If the main application stores its models in its own directory, add to settings.py:
SHOP_APP_LABEL = 'myshop'
so that the models are created inside your own shop instatiation.
"""
_materialized_models = {}
_pending_mappings = []
def __new__(cls, name, bases, attrs):
class Meta:
app_label = shop_settings.APP_LABEL
attrs.setdefault('Meta', Meta)
if not hasattr(attrs['Meta'], 'app_label') and not getattr(attrs['Meta'], 'abstract', False):
attrs['Meta'].app_label = Meta.app_label
attrs.setdefault('__module__', getattr(bases[-1], '__module__'))
Model = super(ForeignKeyBuilder, cls).__new__(cls, name, bases, attrs)
if Model._meta.abstract:
return Model
for baseclass in bases:
# classes which materialize an abstract model are added to a mapping dictionary
basename = baseclass.__name__
try:
if not issubclass(Model, baseclass) or not baseclass._meta.abstract:
raise ImproperlyConfigured("Base class %s is not abstract." % basename)
except (AttributeError, NotImplementedError):
pass
else:
if basename in cls._materialized_models:
if Model.__name__ != cls._materialized_models[basename]:
raise AssertionError("Both Model classes '%s' and '%s' inherited from abstract"
"base class %s, which is disallowed in this configuration." %
(Model.__name__, cls._materialized_models[basename], basename))
else:
cls._materialized_models[basename] = Model.__name__
# remember the materialized model mapping in the base class for further usage
baseclass._materialized_model = Model
ForeignKeyBuilder.process_pending_mappings(Model, basename)
# search for deferred foreign fields in our Model
for attrname in dir(Model):
try:
member = getattr(Model, attrname)
except AttributeError:
continue
if not isinstance(member, DeferredRelatedField):
continue
mapmodel = cls._materialized_models.get(member.abstract_model)
if mapmodel:
field = member.MaterializedField(mapmodel, **member.options)
field.contribute_to_class(Model, attrname)
else:
ForeignKeyBuilder._pending_mappings.append((Model, attrname, member,))
return Model
@staticmethod
def process_pending_mappings(Model, basename):
# check for pending mappings and in case, process them and remove them from the list
for mapping in ForeignKeyBuilder._pending_mappings[:]:
if mapping[2].abstract_model == basename:
field = mapping[2].MaterializedField(Model, **mapping[2].options)
field.contribute_to_class(mapping[0], mapping[1])
ForeignKeyBuilder._pending_mappings.remove(mapping)
def __getattr__(self, key):
if key == '_materialized_model':
msg = "No class implements abstract base model: `{}`."
raise ImproperlyConfigured(msg.format(self.__name__))
return object.__getattribute__(self, key)
class MaterializedModel(SimpleLazyObject):
"""
Wrap the base model into a lazy object, so that we can refer to members of its
materialized model using lazy evaluation.
"""
def __init__(self, base_model):
self.__dict__['_base_model'] = base_model
_super(SimpleLazyObject, self).__init__()
def _setup(self):
self._wrapped = getattr(self._base_model, '_materialized_model')
def __call__(self, *args, **kwargs):
# calls the constructor of the materialized model
if self._wrapped is empty:
self._setup()
return self._wrapped(*args, **kwargs)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use SimpleLazyObject, not self.__class__, because the latter is proxied.
result = MaterializedModel(self._base_model)
memo[id(self)] = result
return result
else:
return copy.deepcopy(self._wrapped, memo)
def __repr__(self):
if self._wrapped is empty:
repr_attr = self._base_model
else:
repr_attr = self._wrapped
return '<MaterializedModel: {}>'.format(repr_attr)
|
ManyToManyField
|
identifier_name
|
deferred.py
|
# -*- coding: utf-8 -*-
import copy
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.db import models
from django.utils import six
from django.utils.functional import SimpleLazyObject, _super, empty
from shop import settings as shop_settings
class DeferredRelatedField(object):
def __init__(self, to, **kwargs):
try:
self.abstract_model = to._meta.object_name
except AttributeError:
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter must be either a model or a model name" % (self.__class__.__name__, to)
self.abstract_model = to
else:
assert to._meta.abstract, "%s can only define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
self.options = kwargs
class OneToOneField(DeferredRelatedField):
"""
Use this class to specify a one-to-one key in abstract classes. It will be converted into a real
``OneToOneField`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.OneToOneField
class ForeignKey(DeferredRelatedField):
"""
Use this class to specify foreign keys in abstract classes. It will be converted into a real
``ForeignKey`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.ForeignKey
class ManyToManyField(DeferredRelatedField):
"""
Use this class to specify many-to-many keys in abstract classes. They will be converted into a
real ``ManyToManyField`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.ManyToManyField
class ForeignKeyBuilder(ModelBase):
"""
Here the magic happens: All known and deferred foreign keys are mapped to their correct model's
counterpart.
If the main application stores its models in its own directory, add to settings.py:
SHOP_APP_LABEL = 'myshop'
so that the models are created inside your own shop instatiation.
"""
_materialized_models = {}
_pending_mappings = []
def __new__(cls, name, bases, attrs):
class Meta:
app_label = shop_settings.APP_LABEL
attrs.setdefault('Meta', Meta)
if not hasattr(attrs['Meta'], 'app_label') and not getattr(attrs['Meta'], 'abstract', False):
attrs['Meta'].app_label = Meta.app_label
attrs.setdefault('__module__', getattr(bases[-1], '__module__'))
Model = super(ForeignKeyBuilder, cls).__new__(cls, name, bases, attrs)
if Model._meta.abstract:
return Model
for baseclass in bases:
# classes which materialize an abstract model are added to a mapping dictionary
basename = baseclass.__name__
try:
if not issubclass(Model, baseclass) or not baseclass._meta.abstract:
raise ImproperlyConfigured("Base class %s is not abstract." % basename)
except (AttributeError, NotImplementedError):
pass
else:
if basename in cls._materialized_models:
if Model.__name__ != cls._materialized_models[basename]:
raise AssertionError("Both Model classes '%s' and '%s' inherited from abstract"
"base class %s, which is disallowed in this configuration." %
(Model.__name__, cls._materialized_models[basename], basename))
else:
cls._materialized_models[basename] = Model.__name__
# remember the materialized model mapping in the base class for further usage
baseclass._materialized_model = Model
ForeignKeyBuilder.process_pending_mappings(Model, basename)
# search for deferred foreign fields in our Model
for attrname in dir(Model):
try:
member = getattr(Model, attrname)
except AttributeError:
continue
if not isinstance(member, DeferredRelatedField):
continue
mapmodel = cls._materialized_models.get(member.abstract_model)
if mapmodel:
field = member.MaterializedField(mapmodel, **member.options)
field.contribute_to_class(Model, attrname)
else:
ForeignKeyBuilder._pending_mappings.append((Model, attrname, member,))
return Model
@staticmethod
def process_pending_mappings(Model, basename):
# check for pending mappings and in case, process them and remove them from the list
for mapping in ForeignKeyBuilder._pending_mappings[:]:
if mapping[2].abstract_model == basename:
field = mapping[2].MaterializedField(Model, **mapping[2].options)
field.contribute_to_class(mapping[0], mapping[1])
ForeignKeyBuilder._pending_mappings.remove(mapping)
def __getattr__(self, key):
if key == '_materialized_model':
msg = "No class implements abstract base model: `{}`."
raise ImproperlyConfigured(msg.format(self.__name__))
return object.__getattribute__(self, key)
class MaterializedModel(SimpleLazyObject):
"""
Wrap the base model into a lazy object, so that we can refer to members of its
materialized model using lazy evaluation.
"""
def __init__(self, base_model):
self.__dict__['_base_model'] = base_model
_super(SimpleLazyObject, self).__init__()
|
if self._wrapped is empty:
self._setup()
return self._wrapped(*args, **kwargs)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use SimpleLazyObject, not self.__class__, because the latter is proxied.
result = MaterializedModel(self._base_model)
memo[id(self)] = result
return result
else:
return copy.deepcopy(self._wrapped, memo)
def __repr__(self):
if self._wrapped is empty:
repr_attr = self._base_model
else:
repr_attr = self._wrapped
return '<MaterializedModel: {}>'.format(repr_attr)
|
def _setup(self):
self._wrapped = getattr(self._base_model, '_materialized_model')
def __call__(self, *args, **kwargs):
# calls the constructor of the materialized model
|
random_line_split
|
deferred.py
|
# -*- coding: utf-8 -*-
import copy
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.db import models
from django.utils import six
from django.utils.functional import SimpleLazyObject, _super, empty
from shop import settings as shop_settings
class DeferredRelatedField(object):
def __init__(self, to, **kwargs):
try:
self.abstract_model = to._meta.object_name
except AttributeError:
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter must be either a model or a model name" % (self.__class__.__name__, to)
self.abstract_model = to
else:
assert to._meta.abstract, "%s can only define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
self.options = kwargs
class OneToOneField(DeferredRelatedField):
"""
Use this class to specify a one-to-one key in abstract classes. It will be converted into a real
``OneToOneField`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.OneToOneField
class ForeignKey(DeferredRelatedField):
"""
Use this class to specify foreign keys in abstract classes. It will be converted into a real
``ForeignKey`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.ForeignKey
class ManyToManyField(DeferredRelatedField):
"""
Use this class to specify many-to-many keys in abstract classes. They will be converted into a
real ``ManyToManyField`` whenever a real model class is derived from a given abstract class.
"""
MaterializedField = models.ManyToManyField
class ForeignKeyBuilder(ModelBase):
"""
Here the magic happens: All known and deferred foreign keys are mapped to their correct model's
counterpart.
If the main application stores its models in its own directory, add to settings.py:
SHOP_APP_LABEL = 'myshop'
so that the models are created inside your own shop instatiation.
"""
_materialized_models = {}
_pending_mappings = []
def __new__(cls, name, bases, attrs):
class Meta:
app_label = shop_settings.APP_LABEL
attrs.setdefault('Meta', Meta)
if not hasattr(attrs['Meta'], 'app_label') and not getattr(attrs['Meta'], 'abstract', False):
attrs['Meta'].app_label = Meta.app_label
attrs.setdefault('__module__', getattr(bases[-1], '__module__'))
Model = super(ForeignKeyBuilder, cls).__new__(cls, name, bases, attrs)
if Model._meta.abstract:
return Model
for baseclass in bases:
# classes which materialize an abstract model are added to a mapping dictionary
basename = baseclass.__name__
try:
if not issubclass(Model, baseclass) or not baseclass._meta.abstract:
raise ImproperlyConfigured("Base class %s is not abstract." % basename)
except (AttributeError, NotImplementedError):
pass
else:
|
ForeignKeyBuilder.process_pending_mappings(Model, basename)
# search for deferred foreign fields in our Model
for attrname in dir(Model):
try:
member = getattr(Model, attrname)
except AttributeError:
continue
if not isinstance(member, DeferredRelatedField):
continue
mapmodel = cls._materialized_models.get(member.abstract_model)
if mapmodel:
field = member.MaterializedField(mapmodel, **member.options)
field.contribute_to_class(Model, attrname)
else:
ForeignKeyBuilder._pending_mappings.append((Model, attrname, member,))
return Model
@staticmethod
def process_pending_mappings(Model, basename):
# check for pending mappings and in case, process them and remove them from the list
for mapping in ForeignKeyBuilder._pending_mappings[:]:
if mapping[2].abstract_model == basename:
field = mapping[2].MaterializedField(Model, **mapping[2].options)
field.contribute_to_class(mapping[0], mapping[1])
ForeignKeyBuilder._pending_mappings.remove(mapping)
def __getattr__(self, key):
if key == '_materialized_model':
msg = "No class implements abstract base model: `{}`."
raise ImproperlyConfigured(msg.format(self.__name__))
return object.__getattribute__(self, key)
class MaterializedModel(SimpleLazyObject):
"""
Wrap the base model into a lazy object, so that we can refer to members of its
materialized model using lazy evaluation.
"""
def __init__(self, base_model):
self.__dict__['_base_model'] = base_model
_super(SimpleLazyObject, self).__init__()
def _setup(self):
self._wrapped = getattr(self._base_model, '_materialized_model')
def __call__(self, *args, **kwargs):
# calls the constructor of the materialized model
if self._wrapped is empty:
self._setup()
return self._wrapped(*args, **kwargs)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use SimpleLazyObject, not self.__class__, because the latter is proxied.
result = MaterializedModel(self._base_model)
memo[id(self)] = result
return result
else:
return copy.deepcopy(self._wrapped, memo)
def __repr__(self):
if self._wrapped is empty:
repr_attr = self._base_model
else:
repr_attr = self._wrapped
return '<MaterializedModel: {}>'.format(repr_attr)
|
if basename in cls._materialized_models:
if Model.__name__ != cls._materialized_models[basename]:
raise AssertionError("Both Model classes '%s' and '%s' inherited from abstract"
"base class %s, which is disallowed in this configuration." %
(Model.__name__, cls._materialized_models[basename], basename))
else:
cls._materialized_models[basename] = Model.__name__
# remember the materialized model mapping in the base class for further usage
baseclass._materialized_model = Model
|
conditional_block
|
macro-doc-escapes.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// When expanding a macro, documentation attributes (including documentation comments) must be
// passed "as is" without being parsed. Otherwise, some text will be incorrectly interpreted as
// escape sequences, leading to an ICE.
//
// Related issues: #25929, #25943
macro_rules! homura {
(#[$x:meta]) => ()
}
homura! {
/// \madoka \x41
}
|
fn main() { }
|
random_line_split
|
|
macro-doc-escapes.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// When expanding a macro, documentation attributes (including documentation comments) must be
// passed "as is" without being parsed. Otherwise, some text will be incorrectly interpreted as
// escape sequences, leading to an ICE.
//
// Related issues: #25929, #25943
macro_rules! homura {
(#[$x:meta]) => ()
}
homura! {
/// \madoka \x41
}
fn
|
() { }
|
main
|
identifier_name
|
macro-doc-escapes.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// When expanding a macro, documentation attributes (including documentation comments) must be
// passed "as is" without being parsed. Otherwise, some text will be incorrectly interpreted as
// escape sequences, leading to an ICE.
//
// Related issues: #25929, #25943
macro_rules! homura {
(#[$x:meta]) => ()
}
homura! {
/// \madoka \x41
}
fn main()
|
{ }
|
identifier_body
|
|
signpdf.py
|
#!/usr/bin/env python
import os
import time
import argparse
import tempfile
import PyPDF2
import datetime
from reportlab.pdfgen import canvas
parser = argparse.ArgumentParser("Add signatures to PDF files")
parser.add_argument("pdf", help="The pdf file to annotate")
parser.add_argument("signature", help="The signature file (png, jpg)")
parser.add_argument("--date", action='store_true')
parser.add_argument("--output", nargs='?',
help="Output file. Defaults to input filename plus '_signed'")
parser.add_argument("--coords", nargs='?', default='2x100x100x125x40',
help="Coordinates to place signature. Format: PAGExXxYxWIDTHxHEIGHT. 1x200x300x125x40 means page 1, 200 units horizontally from the bottom left, 300 units vertically from the bottom left, 125 units wide, 40 units tall. Pages count starts at 1 (1-based indexing). Units are pdf-standard units (1/72 inch).")
def _get_tmp_filename(suffix=".pdf"):
with tempfile.NamedTemporaryFile(suffix=".pdf") as fh:
return fh.name
def sign_pdf(args):
#TODO: use a gui or something.... for now, just trial-and-error the coords
page_num, x1, y1, width, height = [int(a) for a in args.coords.split("x")]
page_num -= 1
output_filename = args.output or "{}_signed{}".format(
*os.path.splitext(args.pdf)
)
pdf_fh = open(args.pdf, 'rb')
sig_tmp_fh = None
pdf = PyPDF2.PdfFileReader(pdf_fh)
writer = PyPDF2.PdfFileWriter()
sig_tmp_filename = None
for i in range(0, pdf.getNumPages()):
page = pdf.getPage(i)
if i == page_num:
# Create PDF for signature
sig_tmp_filename = _get_tmp_filename()
c = canvas.Canvas(sig_tmp_filename, pagesize=page.cropBox)
c.drawImage(args.signature, x1, y1, width, height, mask='auto')
if args.date:
c.drawString(x1 + width, y1, datetime.datetime.now().strftime("%Y-%m-%d"))
c.showPage()
c.save()
# Merge PDF in to original page
sig_tmp_fh = open(sig_tmp_filename, 'rb')
sig_tmp_pdf = PyPDF2.PdfFileReader(sig_tmp_fh)
sig_page = sig_tmp_pdf.getPage(0)
sig_page.mediaBox = page.mediaBox
page.mergePage(sig_page)
writer.addPage(page)
with open(output_filename, 'wb') as fh:
writer.write(fh)
for handle in [pdf_fh, sig_tmp_fh]:
if handle:
handle.close()
if sig_tmp_filename:
os.remove(sig_tmp_filename)
def main():
|
if __name__ == "__main__":
main()
|
sign_pdf(parser.parse_args())
|
identifier_body
|
signpdf.py
|
#!/usr/bin/env python
import os
import time
import argparse
import tempfile
import PyPDF2
import datetime
from reportlab.pdfgen import canvas
parser = argparse.ArgumentParser("Add signatures to PDF files")
parser.add_argument("pdf", help="The pdf file to annotate")
parser.add_argument("signature", help="The signature file (png, jpg)")
parser.add_argument("--date", action='store_true')
parser.add_argument("--output", nargs='?',
help="Output file. Defaults to input filename plus '_signed'")
parser.add_argument("--coords", nargs='?', default='2x100x100x125x40',
help="Coordinates to place signature. Format: PAGExXxYxWIDTHxHEIGHT. 1x200x300x125x40 means page 1, 200 units horizontally from the bottom left, 300 units vertically from the bottom left, 125 units wide, 40 units tall. Pages count starts at 1 (1-based indexing). Units are pdf-standard units (1/72 inch).")
def _get_tmp_filename(suffix=".pdf"):
with tempfile.NamedTemporaryFile(suffix=".pdf") as fh:
return fh.name
def sign_pdf(args):
#TODO: use a gui or something.... for now, just trial-and-error the coords
page_num, x1, y1, width, height = [int(a) for a in args.coords.split("x")]
page_num -= 1
output_filename = args.output or "{}_signed{}".format(
*os.path.splitext(args.pdf)
)
pdf_fh = open(args.pdf, 'rb')
sig_tmp_fh = None
pdf = PyPDF2.PdfFileReader(pdf_fh)
writer = PyPDF2.PdfFileWriter()
sig_tmp_filename = None
for i in range(0, pdf.getNumPages()):
page = pdf.getPage(i)
if i == page_num:
# Create PDF for signature
sig_tmp_filename = _get_tmp_filename()
c = canvas.Canvas(sig_tmp_filename, pagesize=page.cropBox)
c.drawImage(args.signature, x1, y1, width, height, mask='auto')
if args.date:
c.drawString(x1 + width, y1, datetime.datetime.now().strftime("%Y-%m-%d"))
c.showPage()
c.save()
# Merge PDF in to original page
sig_tmp_fh = open(sig_tmp_filename, 'rb')
sig_tmp_pdf = PyPDF2.PdfFileReader(sig_tmp_fh)
sig_page = sig_tmp_pdf.getPage(0)
sig_page.mediaBox = page.mediaBox
page.mergePage(sig_page)
writer.addPage(page)
with open(output_filename, 'wb') as fh:
writer.write(fh)
for handle in [pdf_fh, sig_tmp_fh]:
|
if sig_tmp_filename:
os.remove(sig_tmp_filename)
def main():
sign_pdf(parser.parse_args())
if __name__ == "__main__":
main()
|
if handle:
handle.close()
|
conditional_block
|
signpdf.py
|
#!/usr/bin/env python
import os
import time
import argparse
import tempfile
import PyPDF2
import datetime
from reportlab.pdfgen import canvas
parser = argparse.ArgumentParser("Add signatures to PDF files")
parser.add_argument("pdf", help="The pdf file to annotate")
parser.add_argument("signature", help="The signature file (png, jpg)")
parser.add_argument("--date", action='store_true')
parser.add_argument("--output", nargs='?',
help="Output file. Defaults to input filename plus '_signed'")
parser.add_argument("--coords", nargs='?', default='2x100x100x125x40',
help="Coordinates to place signature. Format: PAGExXxYxWIDTHxHEIGHT. 1x200x300x125x40 means page 1, 200 units horizontally from the bottom left, 300 units vertically from the bottom left, 125 units wide, 40 units tall. Pages count starts at 1 (1-based indexing). Units are pdf-standard units (1/72 inch).")
def _get_tmp_filename(suffix=".pdf"):
with tempfile.NamedTemporaryFile(suffix=".pdf") as fh:
return fh.name
def
|
(args):
#TODO: use a gui or something.... for now, just trial-and-error the coords
page_num, x1, y1, width, height = [int(a) for a in args.coords.split("x")]
page_num -= 1
output_filename = args.output or "{}_signed{}".format(
*os.path.splitext(args.pdf)
)
pdf_fh = open(args.pdf, 'rb')
sig_tmp_fh = None
pdf = PyPDF2.PdfFileReader(pdf_fh)
writer = PyPDF2.PdfFileWriter()
sig_tmp_filename = None
for i in range(0, pdf.getNumPages()):
page = pdf.getPage(i)
if i == page_num:
# Create PDF for signature
sig_tmp_filename = _get_tmp_filename()
c = canvas.Canvas(sig_tmp_filename, pagesize=page.cropBox)
c.drawImage(args.signature, x1, y1, width, height, mask='auto')
if args.date:
c.drawString(x1 + width, y1, datetime.datetime.now().strftime("%Y-%m-%d"))
c.showPage()
c.save()
# Merge PDF in to original page
sig_tmp_fh = open(sig_tmp_filename, 'rb')
sig_tmp_pdf = PyPDF2.PdfFileReader(sig_tmp_fh)
sig_page = sig_tmp_pdf.getPage(0)
sig_page.mediaBox = page.mediaBox
page.mergePage(sig_page)
writer.addPage(page)
with open(output_filename, 'wb') as fh:
writer.write(fh)
for handle in [pdf_fh, sig_tmp_fh]:
if handle:
handle.close()
if sig_tmp_filename:
os.remove(sig_tmp_filename)
def main():
sign_pdf(parser.parse_args())
if __name__ == "__main__":
main()
|
sign_pdf
|
identifier_name
|
signpdf.py
|
#!/usr/bin/env python
import os
import time
import argparse
import tempfile
import PyPDF2
import datetime
from reportlab.pdfgen import canvas
parser = argparse.ArgumentParser("Add signatures to PDF files")
parser.add_argument("pdf", help="The pdf file to annotate")
parser.add_argument("signature", help="The signature file (png, jpg)")
parser.add_argument("--date", action='store_true')
parser.add_argument("--output", nargs='?',
help="Output file. Defaults to input filename plus '_signed'")
parser.add_argument("--coords", nargs='?', default='2x100x100x125x40',
help="Coordinates to place signature. Format: PAGExXxYxWIDTHxHEIGHT. 1x200x300x125x40 means page 1, 200 units horizontally from the bottom left, 300 units vertically from the bottom left, 125 units wide, 40 units tall. Pages count starts at 1 (1-based indexing). Units are pdf-standard units (1/72 inch).")
def _get_tmp_filename(suffix=".pdf"):
with tempfile.NamedTemporaryFile(suffix=".pdf") as fh:
return fh.name
def sign_pdf(args):
#TODO: use a gui or something.... for now, just trial-and-error the coords
page_num, x1, y1, width, height = [int(a) for a in args.coords.split("x")]
page_num -= 1
output_filename = args.output or "{}_signed{}".format(
*os.path.splitext(args.pdf)
)
pdf_fh = open(args.pdf, 'rb')
sig_tmp_fh = None
pdf = PyPDF2.PdfFileReader(pdf_fh)
writer = PyPDF2.PdfFileWriter()
sig_tmp_filename = None
for i in range(0, pdf.getNumPages()):
page = pdf.getPage(i)
if i == page_num:
# Create PDF for signature
sig_tmp_filename = _get_tmp_filename()
c = canvas.Canvas(sig_tmp_filename, pagesize=page.cropBox)
c.drawImage(args.signature, x1, y1, width, height, mask='auto')
if args.date:
c.drawString(x1 + width, y1, datetime.datetime.now().strftime("%Y-%m-%d"))
c.showPage()
c.save()
# Merge PDF in to original page
sig_tmp_fh = open(sig_tmp_filename, 'rb')
sig_tmp_pdf = PyPDF2.PdfFileReader(sig_tmp_fh)
sig_page = sig_tmp_pdf.getPage(0)
sig_page.mediaBox = page.mediaBox
page.mergePage(sig_page)
writer.addPage(page)
with open(output_filename, 'wb') as fh:
writer.write(fh)
for handle in [pdf_fh, sig_tmp_fh]:
if handle:
handle.close()
if sig_tmp_filename:
os.remove(sig_tmp_filename)
|
if __name__ == "__main__":
main()
|
def main():
sign_pdf(parser.parse_args())
|
random_line_split
|
mod.rs
|
//! The SMTP transport sends emails using the SMTP protocol.
//!
//! This SMTP client follows [RFC
//! 5321](https://tools.ietf.org/html/rfc5321), and is designed to efficiently send emails from an
//! application to a relay email server, as it relies as much as possible on the relay server
//! for sanity and RFC compliance checks.
//!
//! It implements the following extensions:
//!
//! * 8BITMIME ([RFC 6152](https://tools.ietf.org/html/rfc6152))
//! * AUTH ([RFC 4954](https://tools.ietf.org/html/rfc4954)) with PLAIN, LOGIN and XOAUTH2 mechanisms
//! * STARTTLS ([RFC 2487](https://tools.ietf.org/html/rfc2487))
//!
//! #### SMTP Transport
//!
//! This transport uses the SMTP protocol to send emails over the network (locally or remotely).
//!
//! It is designed to be:
//!
//! * Secured: connections are encrypted by default
//! * Modern: unicode support for email contents and sender/recipient addresses when compatible
//! * Fast: supports connection reuse and pooling
//!
//! This client is designed to send emails to a relay server, and should *not* be used to send
//! emails directly to the destination server.
//!
//! The relay server can be the local email server, a specific host or a third-party service.
//!
//! #### Simple example
//!
//! This is the most basic example of usage:
//!
//! ```rust,no_run
//! # #[cfg(all(feature = "builder", any(feature = "native-tls", feature = "rustls-tls")))]
//! # fn test() -> Result<(), Box<dyn std::error::Error>> {
//! use lettre::{Message, Transport, SmtpTransport};
//!
//! let email = Message::builder()
//! .from("NoBody <[email protected]>".parse()?)
//! .reply_to("Yuin <[email protected]>".parse()?)
//! .to("Hei <[email protected]>".parse()?)
//! .subject("Happy new year")
//! .body(String::from("Be happy!"))?;
//!
//! // Create TLS transport on port 465
//! let sender = SmtpTransport::relay("smtp.example.com")?
//! .build();
//! // Send the email via remote relay
//! let result = sender.send(&email);
//! assert!(result.is_ok());
//! # Ok(())
//! # }
//! ```
//!
//! #### Authentication
//!
//! Example with authentication and connection pool:
//!
//! ```rust,no_run
//! # #[cfg(all(feature = "builder", any(feature = "native-tls", feature = "rustls-tls")))]
//! # fn test() -> Result<(), Box<dyn std::error::Error>> {
//! use lettre::{Message, Transport, SmtpTransport, transport::smtp::{PoolConfig, authentication::{Credentials, Mechanism}}};
//!
//! let email = Message::builder()
//! .from("NoBody <[email protected]>".parse()?)
//! .reply_to("Yuin <[email protected]>".parse()?)
//! .to("Hei <[email protected]>".parse()?)
//! .subject("Happy new year")
//! .body(String::from("Be happy!"))?;
//!
//! // Create TLS transport on port 587 with STARTTLS
//! let sender = SmtpTransport::starttls_relay("smtp.example.com")?
//! // Add credentials for authentication
//! .credentials(Credentials::new("username".to_string(), "password".to_string()))
//! // Configure expected authentication mechanism
//! .authentication(vec![Mechanism::Plain])
//! // Connection pool settings
//! .pool_config( PoolConfig::new().max_size(20))
//! .build();
//!
//! // Send the email via remote relay
//! let result = sender.send(&email);
//! assert!(result.is_ok());
//! # Ok(())
//! # }
//! ```
//!
//! You can specify custom TLS settings:
//!
//! ```rust,no_run
//! # #[cfg(all(feature = "builder", any(feature = "native-tls", feature = "rustls-tls")))]
//! # fn test() -> Result<(), Box<dyn std::error::Error>> {
//! use lettre::{Message, Transport, SmtpTransport, transport::smtp::client::{TlsParameters, Tls}};
//!
//! let email = Message::builder()
//! .from("NoBody <[email protected]>".parse()?)
//! .reply_to("Yuin <[email protected]>".parse()?)
//! .to("Hei <[email protected]>".parse()?)
//! .subject("Happy new year")
//! .body(String::from("Be happy!"))?;
//!
//! // Custom TLS configuration
//! let tls = TlsParameters::builder("smtp.example.com".to_string())
//! .dangerous_accept_invalid_certs(true).build()?;
//!
//! // Create TLS transport on port 465
//! let sender = SmtpTransport::relay("smtp.example.com")?
//! // Custom TLS configuration
//! .tls(Tls::Required(tls))
//! .build();
//!
//! // Send the email via remote relay
//! let result = sender.send(&email);
//! assert!(result.is_ok());
//! # Ok(())
//! # }
//! ```
#[cfg(any(feature = "tokio1", feature = "async-std1"))]
pub use self::async_transport::{AsyncSmtpTransport, AsyncSmtpTransportBuilder};
#[cfg(feature = "pool")]
pub use self::pool::PoolConfig;
pub use self::{
error::Error,
transport::{SmtpTransport, SmtpTransportBuilder},
};
#[cfg(any(feature = "native-tls", feature = "rustls-tls"))]
use crate::transport::smtp::client::TlsParameters;
use crate::transport::smtp::{
authentication::{Credentials, Mechanism, DEFAULT_MECHANISMS},
client::SmtpConnection,
extension::ClientId,
response::Response,
};
use client::Tls;
use std::time::Duration;
#[cfg(any(feature = "tokio1", feature = "async-std1"))]
mod async_transport;
pub mod authentication;
pub mod client;
pub mod commands;
mod error;
pub mod extension;
#[cfg(feature = "pool")]
mod pool;
pub mod response;
mod transport;
pub(super) mod util;
// Registered port numbers:
// https://www.iana.
// org/assignments/service-names-port-numbers/service-names-port-numbers.xhtml
/// Default smtp port
pub const SMTP_PORT: u16 = 25;
/// Default submission port
pub const SUBMISSION_PORT: u16 = 587;
/// Default submission over TLS port
///
/// Defined in [RFC8314](https://tools.ietf.org/html/rfc8314)
pub const SUBMISSIONS_PORT: u16 = 465;
/// Default timeout
const DEFAULT_TIMEOUT: Duration = Duration::from_secs(60);
#[derive(Debug, Clone)]
struct SmtpInfo {
/// Name sent during EHLO
hello_name: ClientId,
/// Server we are connecting to
server: String,
/// Port to connect to
port: u16,
/// TLS security configuration
tls: Tls,
/// Optional enforced authentication mechanism
authentication: Vec<Mechanism>,
/// Credentials
credentials: Option<Credentials>,
/// Define network timeout
/// It can be changed later for specific needs (like a different timeout for each SMTP command)
timeout: Option<Duration>,
}
impl Default for SmtpInfo {
fn
|
() -> Self {
Self {
server: "localhost".to_string(),
port: SMTP_PORT,
hello_name: ClientId::default(),
credentials: None,
authentication: DEFAULT_MECHANISMS.into(),
timeout: Some(DEFAULT_TIMEOUT),
tls: Tls::None,
}
}
}
|
default
|
identifier_name
|
mod.rs
|
//! The SMTP transport sends emails using the SMTP protocol.
//!
//! This SMTP client follows [RFC
//! 5321](https://tools.ietf.org/html/rfc5321), and is designed to efficiently send emails from an
//! application to a relay email server, as it relies as much as possible on the relay server
//! for sanity and RFC compliance checks.
//!
//! It implements the following extensions:
//!
//! * 8BITMIME ([RFC 6152](https://tools.ietf.org/html/rfc6152))
//! * AUTH ([RFC 4954](https://tools.ietf.org/html/rfc4954)) with PLAIN, LOGIN and XOAUTH2 mechanisms
//! * STARTTLS ([RFC 2487](https://tools.ietf.org/html/rfc2487))
//!
//! #### SMTP Transport
//!
//! This transport uses the SMTP protocol to send emails over the network (locally or remotely).
//!
|
//! * Fast: supports connection reuse and pooling
//!
//! This client is designed to send emails to a relay server, and should *not* be used to send
//! emails directly to the destination server.
//!
//! The relay server can be the local email server, a specific host or a third-party service.
//!
//! #### Simple example
//!
//! This is the most basic example of usage:
//!
//! ```rust,no_run
//! # #[cfg(all(feature = "builder", any(feature = "native-tls", feature = "rustls-tls")))]
//! # fn test() -> Result<(), Box<dyn std::error::Error>> {
//! use lettre::{Message, Transport, SmtpTransport};
//!
//! let email = Message::builder()
//! .from("NoBody <[email protected]>".parse()?)
//! .reply_to("Yuin <[email protected]>".parse()?)
//! .to("Hei <[email protected]>".parse()?)
//! .subject("Happy new year")
//! .body(String::from("Be happy!"))?;
//!
//! // Create TLS transport on port 465
//! let sender = SmtpTransport::relay("smtp.example.com")?
//! .build();
//! // Send the email via remote relay
//! let result = sender.send(&email);
//! assert!(result.is_ok());
//! # Ok(())
//! # }
//! ```
//!
//! #### Authentication
//!
//! Example with authentication and connection pool:
//!
//! ```rust,no_run
//! # #[cfg(all(feature = "builder", any(feature = "native-tls", feature = "rustls-tls")))]
//! # fn test() -> Result<(), Box<dyn std::error::Error>> {
//! use lettre::{Message, Transport, SmtpTransport, transport::smtp::{PoolConfig, authentication::{Credentials, Mechanism}}};
//!
//! let email = Message::builder()
//! .from("NoBody <[email protected]>".parse()?)
//! .reply_to("Yuin <[email protected]>".parse()?)
//! .to("Hei <[email protected]>".parse()?)
//! .subject("Happy new year")
//! .body(String::from("Be happy!"))?;
//!
//! // Create TLS transport on port 587 with STARTTLS
//! let sender = SmtpTransport::starttls_relay("smtp.example.com")?
//! // Add credentials for authentication
//! .credentials(Credentials::new("username".to_string(), "password".to_string()))
//! // Configure expected authentication mechanism
//! .authentication(vec![Mechanism::Plain])
//! // Connection pool settings
//! .pool_config( PoolConfig::new().max_size(20))
//! .build();
//!
//! // Send the email via remote relay
//! let result = sender.send(&email);
//! assert!(result.is_ok());
//! # Ok(())
//! # }
//! ```
//!
//! You can specify custom TLS settings:
//!
//! ```rust,no_run
//! # #[cfg(all(feature = "builder", any(feature = "native-tls", feature = "rustls-tls")))]
//! # fn test() -> Result<(), Box<dyn std::error::Error>> {
//! use lettre::{Message, Transport, SmtpTransport, transport::smtp::client::{TlsParameters, Tls}};
//!
//! let email = Message::builder()
//! .from("NoBody <[email protected]>".parse()?)
//! .reply_to("Yuin <[email protected]>".parse()?)
//! .to("Hei <[email protected]>".parse()?)
//! .subject("Happy new year")
//! .body(String::from("Be happy!"))?;
//!
//! // Custom TLS configuration
//! let tls = TlsParameters::builder("smtp.example.com".to_string())
//! .dangerous_accept_invalid_certs(true).build()?;
//!
//! // Create TLS transport on port 465
//! let sender = SmtpTransport::relay("smtp.example.com")?
//! // Custom TLS configuration
//! .tls(Tls::Required(tls))
//! .build();
//!
//! // Send the email via remote relay
//! let result = sender.send(&email);
//! assert!(result.is_ok());
//! # Ok(())
//! # }
//! ```
#[cfg(any(feature = "tokio1", feature = "async-std1"))]
pub use self::async_transport::{AsyncSmtpTransport, AsyncSmtpTransportBuilder};
#[cfg(feature = "pool")]
pub use self::pool::PoolConfig;
pub use self::{
error::Error,
transport::{SmtpTransport, SmtpTransportBuilder},
};
#[cfg(any(feature = "native-tls", feature = "rustls-tls"))]
use crate::transport::smtp::client::TlsParameters;
use crate::transport::smtp::{
authentication::{Credentials, Mechanism, DEFAULT_MECHANISMS},
client::SmtpConnection,
extension::ClientId,
response::Response,
};
use client::Tls;
use std::time::Duration;
#[cfg(any(feature = "tokio1", feature = "async-std1"))]
mod async_transport;
pub mod authentication;
pub mod client;
pub mod commands;
mod error;
pub mod extension;
#[cfg(feature = "pool")]
mod pool;
pub mod response;
mod transport;
pub(super) mod util;
// Registered port numbers:
// https://www.iana.
// org/assignments/service-names-port-numbers/service-names-port-numbers.xhtml
/// Default smtp port
pub const SMTP_PORT: u16 = 25;
/// Default submission port
pub const SUBMISSION_PORT: u16 = 587;
/// Default submission over TLS port
///
/// Defined in [RFC8314](https://tools.ietf.org/html/rfc8314)
pub const SUBMISSIONS_PORT: u16 = 465;
/// Default timeout
const DEFAULT_TIMEOUT: Duration = Duration::from_secs(60);
#[derive(Debug, Clone)]
struct SmtpInfo {
/// Name sent during EHLO
hello_name: ClientId,
/// Server we are connecting to
server: String,
/// Port to connect to
port: u16,
/// TLS security configuration
tls: Tls,
/// Optional enforced authentication mechanism
authentication: Vec<Mechanism>,
/// Credentials
credentials: Option<Credentials>,
/// Define network timeout
/// It can be changed later for specific needs (like a different timeout for each SMTP command)
timeout: Option<Duration>,
}
impl Default for SmtpInfo {
fn default() -> Self {
Self {
server: "localhost".to_string(),
port: SMTP_PORT,
hello_name: ClientId::default(),
credentials: None,
authentication: DEFAULT_MECHANISMS.into(),
timeout: Some(DEFAULT_TIMEOUT),
tls: Tls::None,
}
}
}
|
//! It is designed to be:
//!
//! * Secured: connections are encrypted by default
//! * Modern: unicode support for email contents and sender/recipient addresses when compatible
|
random_line_split
|
io.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
import sys
from terminaltables import SingleTable
def _to_utf8(message):
try:
return message.encode('utf-8')
except UnicodeDecodeError:
return message
def _print_message(stream, *components):
message = ' '.join(map(unicode, components))
return print(message, file=stream)
def err(message):
_print_message(sys.stderr, U_ERROR, message)
def warn(message):
_print_message(sys.stderr, U_WARNING, message)
def info(message):
_print_message(sys.stdout, U_INFO, message)
def ok(message):
_print_message(sys.stdout, U_OK, message)
def add_color(message, color):
color_map = {
'red': '[31m',
'green': '[32m',
'yellow': '[33m',
'blue': '[34m',
'purple': '[35m',
'cyan': '[36m',
'white': '[37m',
}
return '\033%s%s\033[0m' % (color_map[color], message)
def print_table(rows, title):
print(SingleTable(rows, title).table)
def collect_single_input(prompt):
message = _to_utf8('%s %s ' % (U_INFO, prompt,))
input_ = raw_input(message).strip()
return input_ if input_ else None
|
"""Prompts the user to select an option in the `selection` list."""
while True:
id_ = collect_single_input(prompt)
if id_ == 'q':
return None
try:
id_ = int(id_) - 1
if id_ < 0:
continue
return selection[id_]
except (ValueError, TypeError, IndexError):
continue
# @see: http://pueblo.sourceforge.net/doc/manual/ansi_color_codes.html
U_ERROR = add_color('[error]', 'red')
U_WARNING = add_color('[warn]', 'yellow')
U_INFO = add_color('==>', 'blue')
U_OK = add_color('OK ✓', 'green')
|
def collect_input(prompt, selection):
|
random_line_split
|
io.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
import sys
from terminaltables import SingleTable
def _to_utf8(message):
try:
return message.encode('utf-8')
except UnicodeDecodeError:
return message
def _print_message(stream, *components):
message = ' '.join(map(unicode, components))
return print(message, file=stream)
def err(message):
_print_message(sys.stderr, U_ERROR, message)
def warn(message):
_print_message(sys.stderr, U_WARNING, message)
def info(message):
_print_message(sys.stdout, U_INFO, message)
def ok(message):
_print_message(sys.stdout, U_OK, message)
def add_color(message, color):
color_map = {
'red': '[31m',
'green': '[32m',
'yellow': '[33m',
'blue': '[34m',
'purple': '[35m',
'cyan': '[36m',
'white': '[37m',
}
return '\033%s%s\033[0m' % (color_map[color], message)
def print_table(rows, title):
print(SingleTable(rows, title).table)
def collect_single_input(prompt):
message = _to_utf8('%s %s ' % (U_INFO, prompt,))
input_ = raw_input(message).strip()
return input_ if input_ else None
def collect_input(prompt, selection):
|
# @see: http://pueblo.sourceforge.net/doc/manual/ansi_color_codes.html
U_ERROR = add_color('[error]', 'red')
U_WARNING = add_color('[warn]', 'yellow')
U_INFO = add_color('==>', 'blue')
U_OK = add_color('OK ✓', 'green')
|
"""Prompts the user to select an option in the `selection` list."""
while True:
id_ = collect_single_input(prompt)
if id_ == 'q':
return None
try:
id_ = int(id_) - 1
if id_ < 0:
continue
return selection[id_]
except (ValueError, TypeError, IndexError):
continue
|
identifier_body
|
io.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
import sys
from terminaltables import SingleTable
def _to_utf8(message):
try:
return message.encode('utf-8')
except UnicodeDecodeError:
return message
def _print_message(stream, *components):
message = ' '.join(map(unicode, components))
return print(message, file=stream)
def err(message):
_print_message(sys.stderr, U_ERROR, message)
def warn(message):
_print_message(sys.stderr, U_WARNING, message)
def info(message):
_print_message(sys.stdout, U_INFO, message)
def ok(message):
_print_message(sys.stdout, U_OK, message)
def add_color(message, color):
color_map = {
'red': '[31m',
'green': '[32m',
'yellow': '[33m',
'blue': '[34m',
'purple': '[35m',
'cyan': '[36m',
'white': '[37m',
}
return '\033%s%s\033[0m' % (color_map[color], message)
def print_table(rows, title):
print(SingleTable(rows, title).table)
def collect_single_input(prompt):
message = _to_utf8('%s %s ' % (U_INFO, prompt,))
input_ = raw_input(message).strip()
return input_ if input_ else None
def collect_input(prompt, selection):
"""Prompts the user to select an option in the `selection` list."""
while True:
|
# @see: http://pueblo.sourceforge.net/doc/manual/ansi_color_codes.html
U_ERROR = add_color('[error]', 'red')
U_WARNING = add_color('[warn]', 'yellow')
U_INFO = add_color('==>', 'blue')
U_OK = add_color('OK ✓', 'green')
|
id_ = collect_single_input(prompt)
if id_ == 'q':
return None
try:
id_ = int(id_) - 1
if id_ < 0:
continue
return selection[id_]
except (ValueError, TypeError, IndexError):
continue
|
conditional_block
|
io.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
import sys
from terminaltables import SingleTable
def _to_utf8(message):
try:
return message.encode('utf-8')
except UnicodeDecodeError:
return message
def
|
(stream, *components):
message = ' '.join(map(unicode, components))
return print(message, file=stream)
def err(message):
_print_message(sys.stderr, U_ERROR, message)
def warn(message):
_print_message(sys.stderr, U_WARNING, message)
def info(message):
_print_message(sys.stdout, U_INFO, message)
def ok(message):
_print_message(sys.stdout, U_OK, message)
def add_color(message, color):
color_map = {
'red': '[31m',
'green': '[32m',
'yellow': '[33m',
'blue': '[34m',
'purple': '[35m',
'cyan': '[36m',
'white': '[37m',
}
return '\033%s%s\033[0m' % (color_map[color], message)
def print_table(rows, title):
print(SingleTable(rows, title).table)
def collect_single_input(prompt):
message = _to_utf8('%s %s ' % (U_INFO, prompt,))
input_ = raw_input(message).strip()
return input_ if input_ else None
def collect_input(prompt, selection):
"""Prompts the user to select an option in the `selection` list."""
while True:
id_ = collect_single_input(prompt)
if id_ == 'q':
return None
try:
id_ = int(id_) - 1
if id_ < 0:
continue
return selection[id_]
except (ValueError, TypeError, IndexError):
continue
# @see: http://pueblo.sourceforge.net/doc/manual/ansi_color_codes.html
U_ERROR = add_color('[error]', 'red')
U_WARNING = add_color('[warn]', 'yellow')
U_INFO = add_color('==>', 'blue')
U_OK = add_color('OK ✓', 'green')
|
_print_message
|
identifier_name
|
rec-align-u64.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #2303
#![feature(intrinsics)]
use std::mem;
mod rusti {
extern "rust-intrinsic" {
pub fn pref_align_of<T>() -> uint;
pub fn min_align_of<T>() -> uint;
}
}
// This is the type with the questionable alignment
#[derive(Debug)]
struct Inner {
c64: u64
}
// This is the type that contains the type with the
// questionable alignment, for testing
#[derive(Debug)]
struct Outer {
c8: u8,
t: Inner
}
#[cfg(any(target_os = "linux",
target_os = "macos",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "openbsd"))]
mod m {
#[cfg(target_arch = "x86")]
pub mod m {
pub fn align() -> uint { 4 }
pub fn size() -> uint { 12 }
}
#[cfg(any(target_arch = "x86_64", target_arch = "arm", target_arch = "aarch64"))]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
}
#[cfg(target_os = "bitrig")]
mod m {
#[cfg(target_arch = "x86_64")]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
}
#[cfg(target_os = "windows")]
mod m {
#[cfg(target_arch = "x86")]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
#[cfg(target_arch = "x86_64")]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
}
#[cfg(target_os = "android")]
mod m {
|
}
pub fn main() {
unsafe {
let x = Outer {c8: 22, t: Inner {c64: 44}};
let y = format!("{:?}", x);
println!("align inner = {:?}", rusti::min_align_of::<Inner>());
println!("size outer = {:?}", mem::size_of::<Outer>());
println!("y = {:?}", y);
// per clang/gcc the alignment of `Inner` is 4 on x86.
assert_eq!(rusti::min_align_of::<Inner>(), m::m::align());
// per clang/gcc the size of `Outer` should be 12
// because `Inner`s alignment was 4.
assert_eq!(mem::size_of::<Outer>(), m::m::size());
assert_eq!(y, "Outer { c8: 22, t: Inner { c64: 44 } }".to_string());
}
}
|
#[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
|
random_line_split
|
rec-align-u64.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #2303
#![feature(intrinsics)]
use std::mem;
mod rusti {
extern "rust-intrinsic" {
pub fn pref_align_of<T>() -> uint;
pub fn min_align_of<T>() -> uint;
}
}
// This is the type with the questionable alignment
#[derive(Debug)]
struct Inner {
c64: u64
}
// This is the type that contains the type with the
// questionable alignment, for testing
#[derive(Debug)]
struct Outer {
c8: u8,
t: Inner
}
#[cfg(any(target_os = "linux",
target_os = "macos",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "openbsd"))]
mod m {
#[cfg(target_arch = "x86")]
pub mod m {
pub fn align() -> uint { 4 }
pub fn size() -> uint { 12 }
}
#[cfg(any(target_arch = "x86_64", target_arch = "arm", target_arch = "aarch64"))]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
}
#[cfg(target_os = "bitrig")]
mod m {
#[cfg(target_arch = "x86_64")]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
}
#[cfg(target_os = "windows")]
mod m {
#[cfg(target_arch = "x86")]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
#[cfg(target_arch = "x86_64")]
pub mod m {
pub fn
|
() -> uint { 8 }
pub fn size() -> uint { 16 }
}
}
#[cfg(target_os = "android")]
mod m {
#[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
}
pub fn main() {
unsafe {
let x = Outer {c8: 22, t: Inner {c64: 44}};
let y = format!("{:?}", x);
println!("align inner = {:?}", rusti::min_align_of::<Inner>());
println!("size outer = {:?}", mem::size_of::<Outer>());
println!("y = {:?}", y);
// per clang/gcc the alignment of `Inner` is 4 on x86.
assert_eq!(rusti::min_align_of::<Inner>(), m::m::align());
// per clang/gcc the size of `Outer` should be 12
// because `Inner`s alignment was 4.
assert_eq!(mem::size_of::<Outer>(), m::m::size());
assert_eq!(y, "Outer { c8: 22, t: Inner { c64: 44 } }".to_string());
}
}
|
align
|
identifier_name
|
rec-align-u64.rs
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #2303
#![feature(intrinsics)]
use std::mem;
mod rusti {
extern "rust-intrinsic" {
pub fn pref_align_of<T>() -> uint;
pub fn min_align_of<T>() -> uint;
}
}
// This is the type with the questionable alignment
#[derive(Debug)]
struct Inner {
c64: u64
}
// This is the type that contains the type with the
// questionable alignment, for testing
#[derive(Debug)]
struct Outer {
c8: u8,
t: Inner
}
#[cfg(any(target_os = "linux",
target_os = "macos",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "openbsd"))]
mod m {
#[cfg(target_arch = "x86")]
pub mod m {
pub fn align() -> uint { 4 }
pub fn size() -> uint { 12 }
}
#[cfg(any(target_arch = "x86_64", target_arch = "arm", target_arch = "aarch64"))]
pub mod m {
pub fn align() -> uint
|
pub fn size() -> uint { 16 }
}
}
#[cfg(target_os = "bitrig")]
mod m {
#[cfg(target_arch = "x86_64")]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
}
#[cfg(target_os = "windows")]
mod m {
#[cfg(target_arch = "x86")]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
#[cfg(target_arch = "x86_64")]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
}
#[cfg(target_os = "android")]
mod m {
#[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
pub mod m {
pub fn align() -> uint { 8 }
pub fn size() -> uint { 16 }
}
}
pub fn main() {
unsafe {
let x = Outer {c8: 22, t: Inner {c64: 44}};
let y = format!("{:?}", x);
println!("align inner = {:?}", rusti::min_align_of::<Inner>());
println!("size outer = {:?}", mem::size_of::<Outer>());
println!("y = {:?}", y);
// per clang/gcc the alignment of `Inner` is 4 on x86.
assert_eq!(rusti::min_align_of::<Inner>(), m::m::align());
// per clang/gcc the size of `Outer` should be 12
// because `Inner`s alignment was 4.
assert_eq!(mem::size_of::<Outer>(), m::m::size());
assert_eq!(y, "Outer { c8: 22, t: Inner { c64: 44 } }".to_string());
}
}
|
{ 8 }
|
identifier_body
|
elrte.jp.js
|
/**
* Japanese translation
* @author Tomoaki Yoshida <[email protected]>
* @version 2010-09-18
*/
(function($) {
elRTE.prototype.i18Messages.jp = {
'_translator' : 'Tomoaki Yoshida <[email protected]>',
'_translation' : 'Japanese translation',
'Editor' : 'エディター',
'Source' : 'ソース',
// Panel Name
'Copy/Pase' : 'コピー/ペースト',
'Undo/Redo' : '元に戻す/やり直し',
'Text styles' : 'テキストスタイル',
'Colors' : '色',
'Alignment' : '行揃え',
'Indent/Outdent' : 'インデント/アウトデント',
'Text format' : 'テキストフォーマット',
'Lists' : 'リスト',
'Misc elements' : 'その他',
'Links' : 'リンク',
'Images' : '画像',
'Media' : 'メディア',
'Tables' : 'テーブル',
'File manager (elFinder)' : 'ファイルマネージャ(elFinder)',
// button names
'About this software' : 'このソフトウェアについて',
'Save' : '保存',
'Copy' : 'コピー',
'Cut' : '切り取り',
'Paste' : '貼り付け',
'Paste only text' : 'テキストのみ貼り付け',
'Paste formatted text' : 'フォーマットされたテキストの貼り付け',
'Clean format' : 'フォーマット消去',
'Undo last action' : '元に戻す',
'Redo previous action' : 'やり直し',
'Bold' : '太字',
'Italic' : '斜体',
'Underline' : '下線',
'Strikethrough' : '打ち消し線',
'Superscript' : '上付き文字',
'Subscript' : '添え字',
'Align left' : '左揃え',
'Ailgn right' : '右揃え',
'Align center' : '中央揃え',
'Align full' : '両端揃え',
'Font color' : 'テキスト色',
'Background color' : '背景色',
'Indent' : 'インデント',
'Outdent' : 'アウトデント',
'Format' : 'フォーマット',
'Font size' : 'サイズ',
'Font' : 'フォント',
'Ordered list' : '段落番号',
'Unordered list' : '箇条書き',
'Horizontal rule' : '横罫線',
'Blockquote' : 'ブロック引用',
|
'Delete link' : 'リンク削除',
'Bookmark' : 'アンカー挿入/編集',
'Image' : 'イメージ',
'Table' : 'テーブル',
'Delete table' : 'テーブル削除',
'Insert row before' : '前に行を挿入',
'Insert row after' : '後ろに行を挿入',
'Delete row' : '行を削除',
'Insert column before' : '前に列を挿入',
'Insert column after' : '後ろに列を挿入',
'Delete column' : '列を削除',
'Merge table cells' : 'セルを結合する',
'Split table cell' : 'セルを分割する',
'Toggle display document structure' : '構成要素の表示',
'Table cell properties' : 'テーブルセルプロパティ',
'Table properties' : 'テーブルプロパティ',
'Toggle full screen mode' : '最大化',
'Open file manager' : 'ファイルマネージャを開く',
'Non breakable space' : '改行なしスペース',
'Stop element floating' : 'フロートの解除',
// dialogs
'Warning' : '警告',
'Properties' : 'プロパティ',
'Popup' : 'ポップアップ',
'Advanced' : 'アドバンス',
'Events' : 'イベント',
'Width' : '幅',
'Height' : '高さ',
'Left' : '左',
'Center' : '中央',
'Right' : '右',
'Border' : 'ボーダー',
'Background' : '背景',
'Css class' : 'CSS class',
'Css style' : 'CSS style',
'No' : 'No',
'Title' : 'タイトル',
'Script direction' : '文字表記方向',
'Language' : '言語',
'Charset' : 'Charset',
'Not set' : '設定しない',
'Left to right' : '左から右',
'Right to left' : '右から左',
'In this window' : '同じウィンドウ (_self)',
'In new window (_blank)' : '新しいウィンドウ (_blank)',
'In new parent window (_parent)' : '新しい親ウィンドウ (_parent)',
'In top frame (_top)' : 'トップフレーム (_top)',
'URL' : 'URL',
'Open in' : 'Open in',
// copy
'This operation is disabled in your browser on security reason. Use shortcut instead.' : 'この機能はお使いのブラウザではセキュリティの観点からご利用できません。ショートカットをご利用ください。',
// format
'Heading 1' : '見出し1',
'Heading 2' : '見出し2',
'Heading 3' : '見出し3',
'Heading 4' : '見出し4',
'Heading 5' : '見出し5',
'Heading 6' : '見出し6',
'Paragraph' : '段落',
'Address' : 'アドレス',
'Preformatted' : '',
// font size
'Small (8pt)' : '小(8pt)',
'Small (10px)' : '小(10pt)',
'Small (12pt)' : '小(12pt)',
'Normal (14pt)' : '中(14pt)',
'Large (18pt)' : '大(18pt)',
'Large (24pt)' : '大(24pt)',
'Large (36pt)' : '大(36pt)',
// bookmark
'Bookmark name' : 'アンカー名',
// link
'Link URL' : '(URL)',
'Target' : 'ターゲット',
'Open link in popup window' : 'リンク先をポップアップで開く',
'URL' : 'URL',
'Window name' : 'ウィンドウ名',
'Window size' : 'ウィンドウサイズ',
'Window position' : 'ウィンドウ位置',
'Location bar' : 'ロケーションバー',
'Menu bar' : 'メニューバー',
'Toolbar' : 'ツールバー',
'Scrollbars' : 'スクロールバー',
'Status bar' : 'ステータスバー',
'Resizable' : 'サイズ可変',
'Depedent' : 'ウィンドウ連動',
'Add return false' : 'return falseを加える',
'Target MIME type' : 'Target MIME type',
'Relationship page to target (rel)' : 'ページからターゲットの関係 (rel)',
'Relationship target to page (rev)' : 'ターゲットからページの関係 (rev)',
'Tab index' : 'タブインデックス',
'Access key' : 'アクセスキー',
// image
'Size' : 'サイズ',
'Preview' : 'プレビュー',
'Margins' : 'マージン',
'Alt text' : '代替テキスト',
'Image URL' : '画像URL',
// table
'Spacing' : 'セル間隔 (spacing)',
'Padding' : 'セル内余白 (padding)',
'Rows' : '行',
'Columns' : '列',
'Groups' : 'グループ',
'Cells' : 'セル',
'Caption' : 'キャプション',
'Inner borders' : '内部ボーダー',
// about
'About elRTE' : 'elRTEについて',
'Version' : 'バージョン',
'Licence' : 'ライセンス',
'elRTE is an open-source JavaScript based WYSIWYG HTML-editor.' : 'elRTEはJavascriptベースのオープンソースWYSIWYG HTMLエディタです。',
'Main goal of the editor - simplify work with text and formating (HTML) on sites, blogs, forums and other online services.' : 'このエディタの主な目的は、ウェブサイト、ブログ、フォーラムやその他のオンラインサービスのテキストとHTMLフォーマット入力作業をシンプルにすることです。',
'You can use it in any commercial or non-commercial projects.' : '商用・非商用に関わらずご利用いただけます。',
'Authors' : '著作者',
'Chief developer' : 'チーフデベロッパー',
'Developer, tech support' : 'デベロッパー・テクニカルサポート',
'Interface designer' : 'インターフェイスデザイナー',
'Spanish localization' : 'スペイン語化ローカライゼーション',
'Japanese localization' : '日本語化ローカライゼーション',
'Latvian localization' : 'ラトビア語化ローカライゼーション',
'German localization' : 'ドイツ語化ローカライゼーション',
'Ukranian localization' : 'ウクライナ語化ローカライゼーション',
'For more information about this software visit the' : '次のURLにてこのソフトウェアのより詳しい情報を公開しています。',
'elRTE website' : 'elRTE ウェブサイト'
}
})(jQuery);
|
'Block element (DIV)' : 'ブロック要素 (DIV)',
'Link' : 'リンク',
|
random_line_split
|
easy.rs
|
use std::sync::{Once, ONCE_INIT};
use std::c_vec::CVec;
use std::{io,mem};
use std::collections::HashMap;
use libc::{c_void,c_int,c_long,c_double,size_t};
use super::{consts,err,info,opt};
use super::err::ErrCode;
use http::body::Body;
use http::{header,Response};
type CURL = c_void;
pub type ProgressCb<'a> = |uint, uint, uint, uint|:'a -> ();
#[link(name = "curl")]
extern {
pub fn curl_easy_init() -> *mut CURL;
pub fn curl_easy_setopt(curl: *mut CURL, option: opt::Opt, ...) -> ErrCode;
pub fn curl_easy_perform(curl: *mut CURL) -> ErrCode;
pub fn curl_easy_cleanup(curl: *mut CURL);
pub fn curl_easy_getinfo(curl: *const CURL, info: info::Key, ...) -> ErrCode;
pub fn curl_global_cleanup();
}
pub struct Easy {
curl: *mut CURL
}
impl Easy {
pub fn new() -> Easy {
// Ensure that curl is globally initialized
global_init();
let handle = unsafe {
let p = curl_easy_init();
curl_easy_setopt(p, opt::NOPROGRESS, 0u);
p
};
Easy { curl: handle }
}
#[inline]
pub fn setopt<T: opt::OptVal>(&mut self, option: opt::Opt, val: T) -> Result<(), err::ErrCode> {
// TODO: Prevent setting callback related options
let mut res = err::OK;
unsafe {
val.with_c_repr(|repr| {
res = curl_easy_setopt(self.curl, option, repr);
})
}
if res.is_success() { Ok(()) } else { Err(res) }
}
#[inline]
pub fn perform(&mut self, body: Option<&mut Body>, progress: Option<ProgressCb>) -> Result<Response, err::ErrCode> {
let mut builder = ResponseBuilder::new();
unsafe {
let resp_p: uint = mem::transmute(&builder);
let body_p: uint = match body {
Some(b) => mem::transmute(b),
None => 0
};
let progress_p: uint = match progress.as_ref() {
Some(cb) => mem::transmute(cb),
None => 0
};
debug!("setting read fn: {}", body_p != 0);
// Set callback options
curl_easy_setopt(self.curl, opt::READFUNCTION, curl_read_fn);
curl_easy_setopt(self.curl, opt::READDATA, body_p);
curl_easy_setopt(self.curl, opt::WRITEFUNCTION, curl_write_fn);
curl_easy_setopt(self.curl, opt::WRITEDATA, resp_p);
curl_easy_setopt(self.curl, opt::HEADERFUNCTION, curl_header_fn);
curl_easy_setopt(self.curl, opt::HEADERDATA, resp_p);
curl_easy_setopt(self.curl, opt::PROGRESSFUNCTION, curl_progress_fn);
curl_easy_setopt(self.curl, opt::PROGRESSDATA, progress_p);
}
let err = unsafe { curl_easy_perform(self.curl) };
// If the request failed, abort here
if !err.is_success() {
return Err(err);
}
// Try to get the response code
builder.code = try!(self.get_response_code());
Ok(builder.build())
}
pub fn get_response_code(&self) -> Result<uint, err::ErrCode> {
Ok(try!(self.get_info_long(info::RESPONSE_CODE)) as uint)
}
pub fn get_total_time(&self) -> Result<uint, err::ErrCode> {
Ok(try!(self.get_info_long(info::TOTAL_TIME)) as uint)
}
fn get_info_long(&self, key: info::Key) -> Result<c_long, err::ErrCode> {
let v: c_long = 0;
let res = unsafe {
curl_easy_getinfo(self.curl as *const CURL, key, &v)
};
if !res.is_success() {
return Err(res);
}
Ok(v)
}
}
#[inline]
fn
|
() {
// Schedule curl to be cleaned up after we're done with this whole process
static mut INIT: Once = ONCE_INIT;
unsafe {
INIT.doit(|| ::std::rt::at_exit(proc() curl_global_cleanup()))
}
}
impl Drop for Easy {
fn drop(&mut self) {
unsafe { curl_easy_cleanup(self.curl) }
}
}
/*
*
* TODO: Move this into handle
*
*/
struct ResponseBuilder {
code: uint,
hdrs: HashMap<String,Vec<String>>,
body: Vec<u8>
}
impl ResponseBuilder {
fn new() -> ResponseBuilder {
ResponseBuilder {
code: 0,
hdrs: HashMap::new(),
body: Vec::new()
}
}
fn add_header(&mut self, name: &str, val: &str) {
// TODO: Reduce allocations
use std::ascii::OwnedAsciiExt;
let name = name.to_string().into_ascii_lower();
let inserted = match self.hdrs.find_mut(&name) {
Some(vals) => {
vals.push(val.to_string());
true
}
None => false
};
if !inserted {
self.hdrs.insert(name, vec!(val.to_string()));
}
}
fn build(self) -> Response {
let ResponseBuilder { code, hdrs, body } = self;
Response::new(code, hdrs, body)
}
}
/*
*
* ===== Callbacks =====
*/
pub extern "C" fn curl_read_fn(p: *mut u8, size: size_t, nmemb: size_t, body: *mut Body) -> size_t {
if body.is_null() {
return 0;
}
let mut dst = unsafe { CVec::new(p, (size * nmemb) as uint) };
let body: &mut Body = unsafe { mem::transmute(body) };
match body.read(dst.as_mut_slice()) {
Ok(len) => len as size_t,
Err(e) => {
match e.kind {
io::EndOfFile => 0 as size_t,
_ => consts::CURL_READFUNC_ABORT as size_t
}
}
}
}
pub extern "C" fn curl_write_fn(p: *mut u8, size: size_t, nmemb: size_t, resp: *mut ResponseBuilder) -> size_t {
if !resp.is_null() {
let builder: &mut ResponseBuilder = unsafe { mem::transmute(resp) };
let chunk = unsafe { CVec::new(p, (size * nmemb) as uint) };
builder.body.push_all(chunk.as_slice());
}
size * nmemb
}
pub extern "C" fn curl_header_fn(p: *mut u8, size: size_t, nmemb: size_t, resp: &mut ResponseBuilder) -> size_t {
// TODO: Skip the first call (it seems to be the status line)
let vec = unsafe { CVec::new(p, (size * nmemb) as uint) };
match header::parse(vec.as_slice()) {
Some((name, val)) => {
resp.add_header(name, val);
}
None => {}
}
vec.len() as size_t
}
pub extern "C" fn curl_progress_fn(cb: *mut ProgressCb, dltotal: c_double, dlnow: c_double, ultotal: c_double, ulnow: c_double) -> c_int {
#[inline]
fn to_uint(v: c_double) -> uint {
if v > 0.0 { v as uint } else { 0 }
}
if !cb.is_null() {
let cb: &mut ProgressCb = unsafe { &mut *cb };
(*cb)(to_uint(dltotal), to_uint(dlnow), to_uint(ultotal), to_uint(ulnow));
}
0
}
|
global_init
|
identifier_name
|
easy.rs
|
use std::sync::{Once, ONCE_INIT};
use std::c_vec::CVec;
use std::{io,mem};
use std::collections::HashMap;
use libc::{c_void,c_int,c_long,c_double,size_t};
use super::{consts,err,info,opt};
use super::err::ErrCode;
use http::body::Body;
use http::{header,Response};
type CURL = c_void;
pub type ProgressCb<'a> = |uint, uint, uint, uint|:'a -> ();
#[link(name = "curl")]
extern {
pub fn curl_easy_init() -> *mut CURL;
pub fn curl_easy_setopt(curl: *mut CURL, option: opt::Opt, ...) -> ErrCode;
pub fn curl_easy_perform(curl: *mut CURL) -> ErrCode;
pub fn curl_easy_cleanup(curl: *mut CURL);
pub fn curl_easy_getinfo(curl: *const CURL, info: info::Key, ...) -> ErrCode;
pub fn curl_global_cleanup();
}
pub struct Easy {
curl: *mut CURL
}
impl Easy {
pub fn new() -> Easy {
// Ensure that curl is globally initialized
global_init();
let handle = unsafe {
let p = curl_easy_init();
curl_easy_setopt(p, opt::NOPROGRESS, 0u);
p
};
Easy { curl: handle }
}
#[inline]
pub fn setopt<T: opt::OptVal>(&mut self, option: opt::Opt, val: T) -> Result<(), err::ErrCode> {
// TODO: Prevent setting callback related options
let mut res = err::OK;
unsafe {
val.with_c_repr(|repr| {
res = curl_easy_setopt(self.curl, option, repr);
})
}
if res.is_success() { Ok(()) } else { Err(res) }
}
#[inline]
pub fn perform(&mut self, body: Option<&mut Body>, progress: Option<ProgressCb>) -> Result<Response, err::ErrCode> {
let mut builder = ResponseBuilder::new();
unsafe {
let resp_p: uint = mem::transmute(&builder);
let body_p: uint = match body {
Some(b) => mem::transmute(b),
None => 0
};
let progress_p: uint = match progress.as_ref() {
Some(cb) => mem::transmute(cb),
None => 0
};
debug!("setting read fn: {}", body_p != 0);
// Set callback options
curl_easy_setopt(self.curl, opt::READFUNCTION, curl_read_fn);
curl_easy_setopt(self.curl, opt::READDATA, body_p);
curl_easy_setopt(self.curl, opt::WRITEFUNCTION, curl_write_fn);
curl_easy_setopt(self.curl, opt::WRITEDATA, resp_p);
curl_easy_setopt(self.curl, opt::HEADERFUNCTION, curl_header_fn);
curl_easy_setopt(self.curl, opt::HEADERDATA, resp_p);
curl_easy_setopt(self.curl, opt::PROGRESSFUNCTION, curl_progress_fn);
curl_easy_setopt(self.curl, opt::PROGRESSDATA, progress_p);
}
let err = unsafe { curl_easy_perform(self.curl) };
// If the request failed, abort here
if !err.is_success() {
return Err(err);
}
// Try to get the response code
builder.code = try!(self.get_response_code());
Ok(builder.build())
}
pub fn get_response_code(&self) -> Result<uint, err::ErrCode> {
Ok(try!(self.get_info_long(info::RESPONSE_CODE)) as uint)
}
pub fn get_total_time(&self) -> Result<uint, err::ErrCode> {
Ok(try!(self.get_info_long(info::TOTAL_TIME)) as uint)
}
fn get_info_long(&self, key: info::Key) -> Result<c_long, err::ErrCode> {
let v: c_long = 0;
let res = unsafe {
curl_easy_getinfo(self.curl as *const CURL, key, &v)
};
if !res.is_success() {
return Err(res);
}
Ok(v)
}
}
#[inline]
fn global_init() {
// Schedule curl to be cleaned up after we're done with this whole process
static mut INIT: Once = ONCE_INIT;
unsafe {
INIT.doit(|| ::std::rt::at_exit(proc() curl_global_cleanup()))
}
}
impl Drop for Easy {
fn drop(&mut self) {
unsafe { curl_easy_cleanup(self.curl) }
}
}
/*
*
* TODO: Move this into handle
*
*/
struct ResponseBuilder {
code: uint,
hdrs: HashMap<String,Vec<String>>,
body: Vec<u8>
}
impl ResponseBuilder {
fn new() -> ResponseBuilder {
ResponseBuilder {
code: 0,
hdrs: HashMap::new(),
body: Vec::new()
}
}
fn add_header(&mut self, name: &str, val: &str) {
// TODO: Reduce allocations
use std::ascii::OwnedAsciiExt;
let name = name.to_string().into_ascii_lower();
let inserted = match self.hdrs.find_mut(&name) {
Some(vals) => {
vals.push(val.to_string());
true
}
None => false
};
if !inserted {
self.hdrs.insert(name, vec!(val.to_string()));
}
}
fn build(self) -> Response {
let ResponseBuilder { code, hdrs, body } = self;
Response::new(code, hdrs, body)
}
}
/*
*
* ===== Callbacks =====
*/
pub extern "C" fn curl_read_fn(p: *mut u8, size: size_t, nmemb: size_t, body: *mut Body) -> size_t {
if body.is_null() {
return 0;
}
let mut dst = unsafe { CVec::new(p, (size * nmemb) as uint) };
let body: &mut Body = unsafe { mem::transmute(body) };
match body.read(dst.as_mut_slice()) {
Ok(len) => len as size_t,
Err(e) => {
match e.kind {
io::EndOfFile => 0 as size_t,
_ => consts::CURL_READFUNC_ABORT as size_t
}
}
}
}
pub extern "C" fn curl_write_fn(p: *mut u8, size: size_t, nmemb: size_t, resp: *mut ResponseBuilder) -> size_t
|
pub extern "C" fn curl_header_fn(p: *mut u8, size: size_t, nmemb: size_t, resp: &mut ResponseBuilder) -> size_t {
// TODO: Skip the first call (it seems to be the status line)
let vec = unsafe { CVec::new(p, (size * nmemb) as uint) };
match header::parse(vec.as_slice()) {
Some((name, val)) => {
resp.add_header(name, val);
}
None => {}
}
vec.len() as size_t
}
pub extern "C" fn curl_progress_fn(cb: *mut ProgressCb, dltotal: c_double, dlnow: c_double, ultotal: c_double, ulnow: c_double) -> c_int {
#[inline]
fn to_uint(v: c_double) -> uint {
if v > 0.0 { v as uint } else { 0 }
}
if !cb.is_null() {
let cb: &mut ProgressCb = unsafe { &mut *cb };
(*cb)(to_uint(dltotal), to_uint(dlnow), to_uint(ultotal), to_uint(ulnow));
}
0
}
|
{
if !resp.is_null() {
let builder: &mut ResponseBuilder = unsafe { mem::transmute(resp) };
let chunk = unsafe { CVec::new(p, (size * nmemb) as uint) };
builder.body.push_all(chunk.as_slice());
}
size * nmemb
}
|
identifier_body
|
easy.rs
|
use std::sync::{Once, ONCE_INIT};
use std::c_vec::CVec;
use std::{io,mem};
use std::collections::HashMap;
use libc::{c_void,c_int,c_long,c_double,size_t};
use super::{consts,err,info,opt};
use super::err::ErrCode;
use http::body::Body;
use http::{header,Response};
type CURL = c_void;
pub type ProgressCb<'a> = |uint, uint, uint, uint|:'a -> ();
#[link(name = "curl")]
extern {
pub fn curl_easy_init() -> *mut CURL;
pub fn curl_easy_setopt(curl: *mut CURL, option: opt::Opt, ...) -> ErrCode;
pub fn curl_easy_perform(curl: *mut CURL) -> ErrCode;
pub fn curl_easy_cleanup(curl: *mut CURL);
pub fn curl_easy_getinfo(curl: *const CURL, info: info::Key, ...) -> ErrCode;
pub fn curl_global_cleanup();
}
pub struct Easy {
curl: *mut CURL
}
impl Easy {
pub fn new() -> Easy {
// Ensure that curl is globally initialized
global_init();
let handle = unsafe {
let p = curl_easy_init();
curl_easy_setopt(p, opt::NOPROGRESS, 0u);
p
};
Easy { curl: handle }
}
#[inline]
pub fn setopt<T: opt::OptVal>(&mut self, option: opt::Opt, val: T) -> Result<(), err::ErrCode> {
// TODO: Prevent setting callback related options
let mut res = err::OK;
unsafe {
val.with_c_repr(|repr| {
res = curl_easy_setopt(self.curl, option, repr);
})
}
if res.is_success() { Ok(()) } else { Err(res) }
}
#[inline]
pub fn perform(&mut self, body: Option<&mut Body>, progress: Option<ProgressCb>) -> Result<Response, err::ErrCode> {
let mut builder = ResponseBuilder::new();
unsafe {
let resp_p: uint = mem::transmute(&builder);
let body_p: uint = match body {
Some(b) => mem::transmute(b),
None => 0
};
let progress_p: uint = match progress.as_ref() {
Some(cb) => mem::transmute(cb),
None => 0
};
debug!("setting read fn: {}", body_p != 0);
// Set callback options
curl_easy_setopt(self.curl, opt::READFUNCTION, curl_read_fn);
curl_easy_setopt(self.curl, opt::READDATA, body_p);
curl_easy_setopt(self.curl, opt::WRITEFUNCTION, curl_write_fn);
curl_easy_setopt(self.curl, opt::WRITEDATA, resp_p);
curl_easy_setopt(self.curl, opt::HEADERFUNCTION, curl_header_fn);
curl_easy_setopt(self.curl, opt::HEADERDATA, resp_p);
curl_easy_setopt(self.curl, opt::PROGRESSFUNCTION, curl_progress_fn);
curl_easy_setopt(self.curl, opt::PROGRESSDATA, progress_p);
}
let err = unsafe { curl_easy_perform(self.curl) };
// If the request failed, abort here
if !err.is_success() {
return Err(err);
}
// Try to get the response code
builder.code = try!(self.get_response_code());
Ok(builder.build())
}
pub fn get_response_code(&self) -> Result<uint, err::ErrCode> {
Ok(try!(self.get_info_long(info::RESPONSE_CODE)) as uint)
}
pub fn get_total_time(&self) -> Result<uint, err::ErrCode> {
Ok(try!(self.get_info_long(info::TOTAL_TIME)) as uint)
}
fn get_info_long(&self, key: info::Key) -> Result<c_long, err::ErrCode> {
let v: c_long = 0;
let res = unsafe {
curl_easy_getinfo(self.curl as *const CURL, key, &v)
};
|
return Err(res);
}
Ok(v)
}
}
#[inline]
fn global_init() {
// Schedule curl to be cleaned up after we're done with this whole process
static mut INIT: Once = ONCE_INIT;
unsafe {
INIT.doit(|| ::std::rt::at_exit(proc() curl_global_cleanup()))
}
}
impl Drop for Easy {
fn drop(&mut self) {
unsafe { curl_easy_cleanup(self.curl) }
}
}
/*
*
* TODO: Move this into handle
*
*/
struct ResponseBuilder {
code: uint,
hdrs: HashMap<String,Vec<String>>,
body: Vec<u8>
}
impl ResponseBuilder {
fn new() -> ResponseBuilder {
ResponseBuilder {
code: 0,
hdrs: HashMap::new(),
body: Vec::new()
}
}
fn add_header(&mut self, name: &str, val: &str) {
// TODO: Reduce allocations
use std::ascii::OwnedAsciiExt;
let name = name.to_string().into_ascii_lower();
let inserted = match self.hdrs.find_mut(&name) {
Some(vals) => {
vals.push(val.to_string());
true
}
None => false
};
if !inserted {
self.hdrs.insert(name, vec!(val.to_string()));
}
}
fn build(self) -> Response {
let ResponseBuilder { code, hdrs, body } = self;
Response::new(code, hdrs, body)
}
}
/*
*
* ===== Callbacks =====
*/
pub extern "C" fn curl_read_fn(p: *mut u8, size: size_t, nmemb: size_t, body: *mut Body) -> size_t {
if body.is_null() {
return 0;
}
let mut dst = unsafe { CVec::new(p, (size * nmemb) as uint) };
let body: &mut Body = unsafe { mem::transmute(body) };
match body.read(dst.as_mut_slice()) {
Ok(len) => len as size_t,
Err(e) => {
match e.kind {
io::EndOfFile => 0 as size_t,
_ => consts::CURL_READFUNC_ABORT as size_t
}
}
}
}
pub extern "C" fn curl_write_fn(p: *mut u8, size: size_t, nmemb: size_t, resp: *mut ResponseBuilder) -> size_t {
if !resp.is_null() {
let builder: &mut ResponseBuilder = unsafe { mem::transmute(resp) };
let chunk = unsafe { CVec::new(p, (size * nmemb) as uint) };
builder.body.push_all(chunk.as_slice());
}
size * nmemb
}
pub extern "C" fn curl_header_fn(p: *mut u8, size: size_t, nmemb: size_t, resp: &mut ResponseBuilder) -> size_t {
// TODO: Skip the first call (it seems to be the status line)
let vec = unsafe { CVec::new(p, (size * nmemb) as uint) };
match header::parse(vec.as_slice()) {
Some((name, val)) => {
resp.add_header(name, val);
}
None => {}
}
vec.len() as size_t
}
pub extern "C" fn curl_progress_fn(cb: *mut ProgressCb, dltotal: c_double, dlnow: c_double, ultotal: c_double, ulnow: c_double) -> c_int {
#[inline]
fn to_uint(v: c_double) -> uint {
if v > 0.0 { v as uint } else { 0 }
}
if !cb.is_null() {
let cb: &mut ProgressCb = unsafe { &mut *cb };
(*cb)(to_uint(dltotal), to_uint(dlnow), to_uint(ultotal), to_uint(ulnow));
}
0
}
|
if !res.is_success() {
|
random_line_split
|
webostv.py
|
"""
Support for interface with an LG webOS Smart TV.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.webostv/
"""
import logging
import asyncio
from datetime import timedelta
from urllib.parse import urlparse
import voluptuous as vol
import homeassistant.util as util
from homeassistant.components.media_player import (
SUPPORT_TURN_ON, SUPPORT_TURN_OFF, SUPPORT_PLAY,
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PREVIOUS_TRACK,
SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_STEP,
SUPPORT_SELECT_SOURCE, SUPPORT_PLAY_MEDIA, MEDIA_TYPE_CHANNEL,
MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_HOST, CONF_MAC, CONF_CUSTOMIZE, STATE_OFF,
STATE_PLAYING, STATE_PAUSED,
STATE_UNKNOWN, CONF_NAME, CONF_FILENAME)
from homeassistant.loader import get_component
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pylgtv==0.1.7',
'websockets==3.2',
'wakeonlan==0.2.2']
_CONFIGURING = {} # type: Dict[str, str]
_LOGGER = logging.getLogger(__name__)
CONF_SOURCES = 'sources'
DEFAULT_NAME = 'LG webOS Smart TV'
WEBOSTV_CONFIG_FILE = 'webostv.conf'
SUPPORT_WEBOSTV = SUPPORT_TURN_OFF | \
SUPPORT_NEXT_TRACK | SUPPORT_PAUSE | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_STEP | \
SUPPORT_SELECT_SOURCE | SUPPORT_PLAY_MEDIA | SUPPORT_PLAY
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(seconds=1)
CUSTOMIZE_SCHEMA = vol.Schema({
vol.Optional(CONF_SOURCES):
vol.All(cv.ensure_list, [cv.string]),
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_MAC): cv.string,
vol.Optional(CONF_CUSTOMIZE, default={}): CUSTOMIZE_SCHEMA,
vol.Optional(CONF_FILENAME, default=WEBOSTV_CONFIG_FILE): cv.string
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the LG WebOS TV platform."""
if discovery_info is not None:
host = urlparse(discovery_info[1]).hostname
else:
host = config.get(CONF_HOST)
if host is None:
_LOGGER.error("No TV found in configuration file or with discovery")
return False
# Only act if we are not already configuring this host
if host in _CONFIGURING:
return
mac = config.get(CONF_MAC)
name = config.get(CONF_NAME)
customize = config.get(CONF_CUSTOMIZE)
config = hass.config.path(config.get(CONF_FILENAME))
setup_tv(host, mac, name, customize, config, hass, add_devices)
def setup_tv(host, mac, name, customize, config, hass, add_devices):
"""Set up a LG WebOS TV based on host parameter."""
from pylgtv import WebOsClient
from pylgtv import PyLGTVPairException
from websockets.exceptions import ConnectionClosed
|
# Try to pair.
try:
client.register()
except PyLGTVPairException:
_LOGGER.warning(
"Connected to LG webOS TV %s but not paired", host)
return
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
_LOGGER.error("Unable to connect to host %s", host)
return
else:
# Not registered, request configuration.
_LOGGER.warning("LG webOS TV %s needs to be paired", host)
request_configuration(
host, mac, name, customize, config, hass, add_devices)
return
# If we came here and configuring this host, mark as done.
if client.is_registered() and host in _CONFIGURING:
request_id = _CONFIGURING.pop(host)
configurator = get_component('configurator')
configurator.request_done(request_id)
add_devices([LgWebOSDevice(host, mac, name, customize, config)], True)
def request_configuration(
host, mac, name, customize, config, hass, add_devices):
"""Request configuration steps from the user."""
configurator = get_component('configurator')
# We got an error if this method is called while we are configuring
if host in _CONFIGURING:
configurator.notify_errors(
_CONFIGURING[host], 'Failed to pair, please try again.')
return
# pylint: disable=unused-argument
def lgtv_configuration_callback(data):
"""Handle configuration changes."""
setup_tv(host, mac, name, customize, config, hass, add_devices)
_CONFIGURING[host] = configurator.request_config(
hass, name, lgtv_configuration_callback,
description='Click start and accept the pairing request on your TV.',
description_image='/static/images/config_webos.png',
submit_caption='Start pairing request'
)
class LgWebOSDevice(MediaPlayerDevice):
"""Representation of a LG WebOS TV."""
def __init__(self, host, mac, name, customize, config):
"""Initialize the webos device."""
from pylgtv import WebOsClient
from wakeonlan import wol
self._client = WebOsClient(host, config)
self._wol = wol
self._mac = mac
self._customize = customize
self._name = name
# Assume that the TV is not muted
self._muted = False
# Assume that the TV is in Play mode
self._playing = True
self._volume = 0
self._current_source = None
self._current_source_id = None
self._state = STATE_UNKNOWN
self._source_list = {}
self._app_list = {}
@util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
def update(self):
"""Retrieve the latest data."""
from websockets.exceptions import ConnectionClosed
try:
current_input = self._client.get_input()
if current_input is not None:
self._current_source_id = current_input
if self._state in (STATE_UNKNOWN, STATE_OFF):
self._state = STATE_PLAYING
else:
self._state = STATE_OFF
self._current_source = None
self._current_source_id = None
if self._state is not STATE_OFF:
self._muted = self._client.get_muted()
self._volume = self._client.get_volume()
self._source_list = {}
self._app_list = {}
conf_sources = self._customize.get(CONF_SOURCES, [])
for app in self._client.get_apps():
self._app_list[app['id']] = app
if conf_sources:
if app['id'] == self._current_source_id:
self._current_source = app['title']
self._source_list[app['title']] = app
elif (app['id'] in conf_sources or
any(word in app['title']
for word in conf_sources) or
any(word in app['id']
for word in conf_sources)):
self._source_list[app['title']] = app
else:
self._current_source = app['title']
self._source_list[app['title']] = app
for source in self._client.get_inputs():
if conf_sources:
if source['id'] == self._current_source_id:
self._source_list[source['label']] = source
elif (source['label'] in conf_sources or
any(source['label'].find(word) != -1
for word in conf_sources)):
self._source_list[source['label']] = source
else:
self._source_list[source['label']] = source
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
self._state = STATE_OFF
self._current_source = None
self._current_source_id = None
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume / 100.0
@property
def source(self):
"""Return the current input source."""
return self._current_source
@property
def source_list(self):
"""List of available input sources."""
return sorted(self._source_list.keys())
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_CHANNEL
@property
def media_image_url(self):
"""Image url of current playing media."""
if self._current_source_id in self._app_list:
icon = self._app_list[self._current_source_id]['largeIcon']
if not icon.startswith('http'):
icon = self._app_list[self._current_source_id]['icon']
return icon
return None
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self._mac:
return SUPPORT_WEBOSTV | SUPPORT_TURN_ON
return SUPPORT_WEBOSTV
def turn_off(self):
"""Turn off media player."""
from websockets.exceptions import ConnectionClosed
self._state = STATE_OFF
try:
self._client.power_off()
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
pass
def turn_on(self):
"""Turn on the media player."""
if self._mac:
self._wol.send_magic_packet(self._mac)
def volume_up(self):
"""Volume up the media player."""
self._client.volume_up()
def volume_down(self):
"""Volume down media player."""
self._client.volume_down()
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
tv_volume = volume * 100
self._client.set_volume(tv_volume)
def mute_volume(self, mute):
"""Send mute command."""
self._muted = mute
self._client.set_mute(mute)
def media_play_pause(self):
"""Simulate play pause media player."""
if self._playing:
self.media_pause()
else:
self.media_play()
def select_source(self, source):
"""Select input source."""
if self._source_list.get(source).get('title'):
self._current_source_id = self._source_list[source]['id']
self._current_source = self._source_list[source]['title']
self._client.launch_app(self._source_list[source]['id'])
elif self._source_list.get(source).get('label'):
self._current_source_id = self._source_list[source]['id']
self._current_source = self._source_list[source]['label']
self._client.set_input(self._source_list[source]['id'])
def media_play(self):
"""Send play command."""
self._playing = True
self._state = STATE_PLAYING
self._client.play()
def media_pause(self):
"""Send media pause command to media player."""
self._playing = False
self._state = STATE_PAUSED
self._client.pause()
def media_next_track(self):
"""Send next track command."""
self._client.fast_forward()
def media_previous_track(self):
"""Send the previous track command."""
self._client.rewind()
|
client = WebOsClient(host, config)
if not client.is_registered():
if host in _CONFIGURING:
|
random_line_split
|
webostv.py
|
"""
Support for interface with an LG webOS Smart TV.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.webostv/
"""
import logging
import asyncio
from datetime import timedelta
from urllib.parse import urlparse
import voluptuous as vol
import homeassistant.util as util
from homeassistant.components.media_player import (
SUPPORT_TURN_ON, SUPPORT_TURN_OFF, SUPPORT_PLAY,
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PREVIOUS_TRACK,
SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_STEP,
SUPPORT_SELECT_SOURCE, SUPPORT_PLAY_MEDIA, MEDIA_TYPE_CHANNEL,
MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_HOST, CONF_MAC, CONF_CUSTOMIZE, STATE_OFF,
STATE_PLAYING, STATE_PAUSED,
STATE_UNKNOWN, CONF_NAME, CONF_FILENAME)
from homeassistant.loader import get_component
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pylgtv==0.1.7',
'websockets==3.2',
'wakeonlan==0.2.2']
_CONFIGURING = {} # type: Dict[str, str]
_LOGGER = logging.getLogger(__name__)
CONF_SOURCES = 'sources'
DEFAULT_NAME = 'LG webOS Smart TV'
WEBOSTV_CONFIG_FILE = 'webostv.conf'
SUPPORT_WEBOSTV = SUPPORT_TURN_OFF | \
SUPPORT_NEXT_TRACK | SUPPORT_PAUSE | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_STEP | \
SUPPORT_SELECT_SOURCE | SUPPORT_PLAY_MEDIA | SUPPORT_PLAY
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(seconds=1)
CUSTOMIZE_SCHEMA = vol.Schema({
vol.Optional(CONF_SOURCES):
vol.All(cv.ensure_list, [cv.string]),
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_MAC): cv.string,
vol.Optional(CONF_CUSTOMIZE, default={}): CUSTOMIZE_SCHEMA,
vol.Optional(CONF_FILENAME, default=WEBOSTV_CONFIG_FILE): cv.string
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the LG WebOS TV platform."""
if discovery_info is not None:
host = urlparse(discovery_info[1]).hostname
else:
host = config.get(CONF_HOST)
if host is None:
_LOGGER.error("No TV found in configuration file or with discovery")
return False
# Only act if we are not already configuring this host
if host in _CONFIGURING:
return
mac = config.get(CONF_MAC)
name = config.get(CONF_NAME)
customize = config.get(CONF_CUSTOMIZE)
config = hass.config.path(config.get(CONF_FILENAME))
setup_tv(host, mac, name, customize, config, hass, add_devices)
def setup_tv(host, mac, name, customize, config, hass, add_devices):
"""Set up a LG WebOS TV based on host parameter."""
from pylgtv import WebOsClient
from pylgtv import PyLGTVPairException
from websockets.exceptions import ConnectionClosed
client = WebOsClient(host, config)
if not client.is_registered():
if host in _CONFIGURING:
# Try to pair.
try:
client.register()
except PyLGTVPairException:
_LOGGER.warning(
"Connected to LG webOS TV %s but not paired", host)
return
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
_LOGGER.error("Unable to connect to host %s", host)
return
else:
# Not registered, request configuration.
_LOGGER.warning("LG webOS TV %s needs to be paired", host)
request_configuration(
host, mac, name, customize, config, hass, add_devices)
return
# If we came here and configuring this host, mark as done.
if client.is_registered() and host in _CONFIGURING:
request_id = _CONFIGURING.pop(host)
configurator = get_component('configurator')
configurator.request_done(request_id)
add_devices([LgWebOSDevice(host, mac, name, customize, config)], True)
def request_configuration(
host, mac, name, customize, config, hass, add_devices):
"""Request configuration steps from the user."""
configurator = get_component('configurator')
# We got an error if this method is called while we are configuring
if host in _CONFIGURING:
configurator.notify_errors(
_CONFIGURING[host], 'Failed to pair, please try again.')
return
# pylint: disable=unused-argument
def lgtv_configuration_callback(data):
"""Handle configuration changes."""
setup_tv(host, mac, name, customize, config, hass, add_devices)
_CONFIGURING[host] = configurator.request_config(
hass, name, lgtv_configuration_callback,
description='Click start and accept the pairing request on your TV.',
description_image='/static/images/config_webos.png',
submit_caption='Start pairing request'
)
class LgWebOSDevice(MediaPlayerDevice):
"""Representation of a LG WebOS TV."""
def __init__(self, host, mac, name, customize, config):
"""Initialize the webos device."""
from pylgtv import WebOsClient
from wakeonlan import wol
self._client = WebOsClient(host, config)
self._wol = wol
self._mac = mac
self._customize = customize
self._name = name
# Assume that the TV is not muted
self._muted = False
# Assume that the TV is in Play mode
self._playing = True
self._volume = 0
self._current_source = None
self._current_source_id = None
self._state = STATE_UNKNOWN
self._source_list = {}
self._app_list = {}
@util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
def update(self):
"""Retrieve the latest data."""
from websockets.exceptions import ConnectionClosed
try:
current_input = self._client.get_input()
if current_input is not None:
self._current_source_id = current_input
if self._state in (STATE_UNKNOWN, STATE_OFF):
|
else:
self._state = STATE_OFF
self._current_source = None
self._current_source_id = None
if self._state is not STATE_OFF:
self._muted = self._client.get_muted()
self._volume = self._client.get_volume()
self._source_list = {}
self._app_list = {}
conf_sources = self._customize.get(CONF_SOURCES, [])
for app in self._client.get_apps():
self._app_list[app['id']] = app
if conf_sources:
if app['id'] == self._current_source_id:
self._current_source = app['title']
self._source_list[app['title']] = app
elif (app['id'] in conf_sources or
any(word in app['title']
for word in conf_sources) or
any(word in app['id']
for word in conf_sources)):
self._source_list[app['title']] = app
else:
self._current_source = app['title']
self._source_list[app['title']] = app
for source in self._client.get_inputs():
if conf_sources:
if source['id'] == self._current_source_id:
self._source_list[source['label']] = source
elif (source['label'] in conf_sources or
any(source['label'].find(word) != -1
for word in conf_sources)):
self._source_list[source['label']] = source
else:
self._source_list[source['label']] = source
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
self._state = STATE_OFF
self._current_source = None
self._current_source_id = None
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume / 100.0
@property
def source(self):
"""Return the current input source."""
return self._current_source
@property
def source_list(self):
"""List of available input sources."""
return sorted(self._source_list.keys())
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_CHANNEL
@property
def media_image_url(self):
"""Image url of current playing media."""
if self._current_source_id in self._app_list:
icon = self._app_list[self._current_source_id]['largeIcon']
if not icon.startswith('http'):
icon = self._app_list[self._current_source_id]['icon']
return icon
return None
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self._mac:
return SUPPORT_WEBOSTV | SUPPORT_TURN_ON
return SUPPORT_WEBOSTV
def turn_off(self):
"""Turn off media player."""
from websockets.exceptions import ConnectionClosed
self._state = STATE_OFF
try:
self._client.power_off()
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
pass
def turn_on(self):
"""Turn on the media player."""
if self._mac:
self._wol.send_magic_packet(self._mac)
def volume_up(self):
"""Volume up the media player."""
self._client.volume_up()
def volume_down(self):
"""Volume down media player."""
self._client.volume_down()
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
tv_volume = volume * 100
self._client.set_volume(tv_volume)
def mute_volume(self, mute):
"""Send mute command."""
self._muted = mute
self._client.set_mute(mute)
def media_play_pause(self):
"""Simulate play pause media player."""
if self._playing:
self.media_pause()
else:
self.media_play()
def select_source(self, source):
"""Select input source."""
if self._source_list.get(source).get('title'):
self._current_source_id = self._source_list[source]['id']
self._current_source = self._source_list[source]['title']
self._client.launch_app(self._source_list[source]['id'])
elif self._source_list.get(source).get('label'):
self._current_source_id = self._source_list[source]['id']
self._current_source = self._source_list[source]['label']
self._client.set_input(self._source_list[source]['id'])
def media_play(self):
"""Send play command."""
self._playing = True
self._state = STATE_PLAYING
self._client.play()
def media_pause(self):
"""Send media pause command to media player."""
self._playing = False
self._state = STATE_PAUSED
self._client.pause()
def media_next_track(self):
"""Send next track command."""
self._client.fast_forward()
def media_previous_track(self):
"""Send the previous track command."""
self._client.rewind()
|
self._state = STATE_PLAYING
|
conditional_block
|
webostv.py
|
"""
Support for interface with an LG webOS Smart TV.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.webostv/
"""
import logging
import asyncio
from datetime import timedelta
from urllib.parse import urlparse
import voluptuous as vol
import homeassistant.util as util
from homeassistant.components.media_player import (
SUPPORT_TURN_ON, SUPPORT_TURN_OFF, SUPPORT_PLAY,
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PREVIOUS_TRACK,
SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_STEP,
SUPPORT_SELECT_SOURCE, SUPPORT_PLAY_MEDIA, MEDIA_TYPE_CHANNEL,
MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_HOST, CONF_MAC, CONF_CUSTOMIZE, STATE_OFF,
STATE_PLAYING, STATE_PAUSED,
STATE_UNKNOWN, CONF_NAME, CONF_FILENAME)
from homeassistant.loader import get_component
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pylgtv==0.1.7',
'websockets==3.2',
'wakeonlan==0.2.2']
_CONFIGURING = {} # type: Dict[str, str]
_LOGGER = logging.getLogger(__name__)
CONF_SOURCES = 'sources'
DEFAULT_NAME = 'LG webOS Smart TV'
WEBOSTV_CONFIG_FILE = 'webostv.conf'
SUPPORT_WEBOSTV = SUPPORT_TURN_OFF | \
SUPPORT_NEXT_TRACK | SUPPORT_PAUSE | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_STEP | \
SUPPORT_SELECT_SOURCE | SUPPORT_PLAY_MEDIA | SUPPORT_PLAY
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(seconds=1)
CUSTOMIZE_SCHEMA = vol.Schema({
vol.Optional(CONF_SOURCES):
vol.All(cv.ensure_list, [cv.string]),
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_MAC): cv.string,
vol.Optional(CONF_CUSTOMIZE, default={}): CUSTOMIZE_SCHEMA,
vol.Optional(CONF_FILENAME, default=WEBOSTV_CONFIG_FILE): cv.string
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the LG WebOS TV platform."""
if discovery_info is not None:
host = urlparse(discovery_info[1]).hostname
else:
host = config.get(CONF_HOST)
if host is None:
_LOGGER.error("No TV found in configuration file or with discovery")
return False
# Only act if we are not already configuring this host
if host in _CONFIGURING:
return
mac = config.get(CONF_MAC)
name = config.get(CONF_NAME)
customize = config.get(CONF_CUSTOMIZE)
config = hass.config.path(config.get(CONF_FILENAME))
setup_tv(host, mac, name, customize, config, hass, add_devices)
def setup_tv(host, mac, name, customize, config, hass, add_devices):
"""Set up a LG WebOS TV based on host parameter."""
from pylgtv import WebOsClient
from pylgtv import PyLGTVPairException
from websockets.exceptions import ConnectionClosed
client = WebOsClient(host, config)
if not client.is_registered():
if host in _CONFIGURING:
# Try to pair.
try:
client.register()
except PyLGTVPairException:
_LOGGER.warning(
"Connected to LG webOS TV %s but not paired", host)
return
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
_LOGGER.error("Unable to connect to host %s", host)
return
else:
# Not registered, request configuration.
_LOGGER.warning("LG webOS TV %s needs to be paired", host)
request_configuration(
host, mac, name, customize, config, hass, add_devices)
return
# If we came here and configuring this host, mark as done.
if client.is_registered() and host in _CONFIGURING:
request_id = _CONFIGURING.pop(host)
configurator = get_component('configurator')
configurator.request_done(request_id)
add_devices([LgWebOSDevice(host, mac, name, customize, config)], True)
def request_configuration(
host, mac, name, customize, config, hass, add_devices):
"""Request configuration steps from the user."""
configurator = get_component('configurator')
# We got an error if this method is called while we are configuring
if host in _CONFIGURING:
configurator.notify_errors(
_CONFIGURING[host], 'Failed to pair, please try again.')
return
# pylint: disable=unused-argument
def lgtv_configuration_callback(data):
"""Handle configuration changes."""
setup_tv(host, mac, name, customize, config, hass, add_devices)
_CONFIGURING[host] = configurator.request_config(
hass, name, lgtv_configuration_callback,
description='Click start and accept the pairing request on your TV.',
description_image='/static/images/config_webos.png',
submit_caption='Start pairing request'
)
class LgWebOSDevice(MediaPlayerDevice):
"""Representation of a LG WebOS TV."""
def __init__(self, host, mac, name, customize, config):
"""Initialize the webos device."""
from pylgtv import WebOsClient
from wakeonlan import wol
self._client = WebOsClient(host, config)
self._wol = wol
self._mac = mac
self._customize = customize
self._name = name
# Assume that the TV is not muted
self._muted = False
# Assume that the TV is in Play mode
self._playing = True
self._volume = 0
self._current_source = None
self._current_source_id = None
self._state = STATE_UNKNOWN
self._source_list = {}
self._app_list = {}
@util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
def update(self):
"""Retrieve the latest data."""
from websockets.exceptions import ConnectionClosed
try:
current_input = self._client.get_input()
if current_input is not None:
self._current_source_id = current_input
if self._state in (STATE_UNKNOWN, STATE_OFF):
self._state = STATE_PLAYING
else:
self._state = STATE_OFF
self._current_source = None
self._current_source_id = None
if self._state is not STATE_OFF:
self._muted = self._client.get_muted()
self._volume = self._client.get_volume()
self._source_list = {}
self._app_list = {}
conf_sources = self._customize.get(CONF_SOURCES, [])
for app in self._client.get_apps():
self._app_list[app['id']] = app
if conf_sources:
if app['id'] == self._current_source_id:
self._current_source = app['title']
self._source_list[app['title']] = app
elif (app['id'] in conf_sources or
any(word in app['title']
for word in conf_sources) or
any(word in app['id']
for word in conf_sources)):
self._source_list[app['title']] = app
else:
self._current_source = app['title']
self._source_list[app['title']] = app
for source in self._client.get_inputs():
if conf_sources:
if source['id'] == self._current_source_id:
self._source_list[source['label']] = source
elif (source['label'] in conf_sources or
any(source['label'].find(word) != -1
for word in conf_sources)):
self._source_list[source['label']] = source
else:
self._source_list[source['label']] = source
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
self._state = STATE_OFF
self._current_source = None
self._current_source_id = None
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume / 100.0
@property
def source(self):
"""Return the current input source."""
return self._current_source
@property
def source_list(self):
"""List of available input sources."""
return sorted(self._source_list.keys())
@property
def media_content_type(self):
|
@property
def media_image_url(self):
"""Image url of current playing media."""
if self._current_source_id in self._app_list:
icon = self._app_list[self._current_source_id]['largeIcon']
if not icon.startswith('http'):
icon = self._app_list[self._current_source_id]['icon']
return icon
return None
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self._mac:
return SUPPORT_WEBOSTV | SUPPORT_TURN_ON
return SUPPORT_WEBOSTV
def turn_off(self):
"""Turn off media player."""
from websockets.exceptions import ConnectionClosed
self._state = STATE_OFF
try:
self._client.power_off()
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
pass
def turn_on(self):
"""Turn on the media player."""
if self._mac:
self._wol.send_magic_packet(self._mac)
def volume_up(self):
"""Volume up the media player."""
self._client.volume_up()
def volume_down(self):
"""Volume down media player."""
self._client.volume_down()
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
tv_volume = volume * 100
self._client.set_volume(tv_volume)
def mute_volume(self, mute):
"""Send mute command."""
self._muted = mute
self._client.set_mute(mute)
def media_play_pause(self):
"""Simulate play pause media player."""
if self._playing:
self.media_pause()
else:
self.media_play()
def select_source(self, source):
"""Select input source."""
if self._source_list.get(source).get('title'):
self._current_source_id = self._source_list[source]['id']
self._current_source = self._source_list[source]['title']
self._client.launch_app(self._source_list[source]['id'])
elif self._source_list.get(source).get('label'):
self._current_source_id = self._source_list[source]['id']
self._current_source = self._source_list[source]['label']
self._client.set_input(self._source_list[source]['id'])
def media_play(self):
"""Send play command."""
self._playing = True
self._state = STATE_PLAYING
self._client.play()
def media_pause(self):
"""Send media pause command to media player."""
self._playing = False
self._state = STATE_PAUSED
self._client.pause()
def media_next_track(self):
"""Send next track command."""
self._client.fast_forward()
def media_previous_track(self):
"""Send the previous track command."""
self._client.rewind()
|
"""Content type of current playing media."""
return MEDIA_TYPE_CHANNEL
|
identifier_body
|
webostv.py
|
"""
Support for interface with an LG webOS Smart TV.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.webostv/
"""
import logging
import asyncio
from datetime import timedelta
from urllib.parse import urlparse
import voluptuous as vol
import homeassistant.util as util
from homeassistant.components.media_player import (
SUPPORT_TURN_ON, SUPPORT_TURN_OFF, SUPPORT_PLAY,
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PREVIOUS_TRACK,
SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_STEP,
SUPPORT_SELECT_SOURCE, SUPPORT_PLAY_MEDIA, MEDIA_TYPE_CHANNEL,
MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_HOST, CONF_MAC, CONF_CUSTOMIZE, STATE_OFF,
STATE_PLAYING, STATE_PAUSED,
STATE_UNKNOWN, CONF_NAME, CONF_FILENAME)
from homeassistant.loader import get_component
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pylgtv==0.1.7',
'websockets==3.2',
'wakeonlan==0.2.2']
_CONFIGURING = {} # type: Dict[str, str]
_LOGGER = logging.getLogger(__name__)
CONF_SOURCES = 'sources'
DEFAULT_NAME = 'LG webOS Smart TV'
WEBOSTV_CONFIG_FILE = 'webostv.conf'
SUPPORT_WEBOSTV = SUPPORT_TURN_OFF | \
SUPPORT_NEXT_TRACK | SUPPORT_PAUSE | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_STEP | \
SUPPORT_SELECT_SOURCE | SUPPORT_PLAY_MEDIA | SUPPORT_PLAY
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(seconds=1)
CUSTOMIZE_SCHEMA = vol.Schema({
vol.Optional(CONF_SOURCES):
vol.All(cv.ensure_list, [cv.string]),
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_MAC): cv.string,
vol.Optional(CONF_CUSTOMIZE, default={}): CUSTOMIZE_SCHEMA,
vol.Optional(CONF_FILENAME, default=WEBOSTV_CONFIG_FILE): cv.string
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the LG WebOS TV platform."""
if discovery_info is not None:
host = urlparse(discovery_info[1]).hostname
else:
host = config.get(CONF_HOST)
if host is None:
_LOGGER.error("No TV found in configuration file or with discovery")
return False
# Only act if we are not already configuring this host
if host in _CONFIGURING:
return
mac = config.get(CONF_MAC)
name = config.get(CONF_NAME)
customize = config.get(CONF_CUSTOMIZE)
config = hass.config.path(config.get(CONF_FILENAME))
setup_tv(host, mac, name, customize, config, hass, add_devices)
def setup_tv(host, mac, name, customize, config, hass, add_devices):
"""Set up a LG WebOS TV based on host parameter."""
from pylgtv import WebOsClient
from pylgtv import PyLGTVPairException
from websockets.exceptions import ConnectionClosed
client = WebOsClient(host, config)
if not client.is_registered():
if host in _CONFIGURING:
# Try to pair.
try:
client.register()
except PyLGTVPairException:
_LOGGER.warning(
"Connected to LG webOS TV %s but not paired", host)
return
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
_LOGGER.error("Unable to connect to host %s", host)
return
else:
# Not registered, request configuration.
_LOGGER.warning("LG webOS TV %s needs to be paired", host)
request_configuration(
host, mac, name, customize, config, hass, add_devices)
return
# If we came here and configuring this host, mark as done.
if client.is_registered() and host in _CONFIGURING:
request_id = _CONFIGURING.pop(host)
configurator = get_component('configurator')
configurator.request_done(request_id)
add_devices([LgWebOSDevice(host, mac, name, customize, config)], True)
def request_configuration(
host, mac, name, customize, config, hass, add_devices):
"""Request configuration steps from the user."""
configurator = get_component('configurator')
# We got an error if this method is called while we are configuring
if host in _CONFIGURING:
configurator.notify_errors(
_CONFIGURING[host], 'Failed to pair, please try again.')
return
# pylint: disable=unused-argument
def lgtv_configuration_callback(data):
"""Handle configuration changes."""
setup_tv(host, mac, name, customize, config, hass, add_devices)
_CONFIGURING[host] = configurator.request_config(
hass, name, lgtv_configuration_callback,
description='Click start and accept the pairing request on your TV.',
description_image='/static/images/config_webos.png',
submit_caption='Start pairing request'
)
class LgWebOSDevice(MediaPlayerDevice):
"""Representation of a LG WebOS TV."""
def __init__(self, host, mac, name, customize, config):
"""Initialize the webos device."""
from pylgtv import WebOsClient
from wakeonlan import wol
self._client = WebOsClient(host, config)
self._wol = wol
self._mac = mac
self._customize = customize
self._name = name
# Assume that the TV is not muted
self._muted = False
# Assume that the TV is in Play mode
self._playing = True
self._volume = 0
self._current_source = None
self._current_source_id = None
self._state = STATE_UNKNOWN
self._source_list = {}
self._app_list = {}
@util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
def update(self):
"""Retrieve the latest data."""
from websockets.exceptions import ConnectionClosed
try:
current_input = self._client.get_input()
if current_input is not None:
self._current_source_id = current_input
if self._state in (STATE_UNKNOWN, STATE_OFF):
self._state = STATE_PLAYING
else:
self._state = STATE_OFF
self._current_source = None
self._current_source_id = None
if self._state is not STATE_OFF:
self._muted = self._client.get_muted()
self._volume = self._client.get_volume()
self._source_list = {}
self._app_list = {}
conf_sources = self._customize.get(CONF_SOURCES, [])
for app in self._client.get_apps():
self._app_list[app['id']] = app
if conf_sources:
if app['id'] == self._current_source_id:
self._current_source = app['title']
self._source_list[app['title']] = app
elif (app['id'] in conf_sources or
any(word in app['title']
for word in conf_sources) or
any(word in app['id']
for word in conf_sources)):
self._source_list[app['title']] = app
else:
self._current_source = app['title']
self._source_list[app['title']] = app
for source in self._client.get_inputs():
if conf_sources:
if source['id'] == self._current_source_id:
self._source_list[source['label']] = source
elif (source['label'] in conf_sources or
any(source['label'].find(word) != -1
for word in conf_sources)):
self._source_list[source['label']] = source
else:
self._source_list[source['label']] = source
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
self._state = STATE_OFF
self._current_source = None
self._current_source_id = None
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume / 100.0
@property
def
|
(self):
"""Return the current input source."""
return self._current_source
@property
def source_list(self):
"""List of available input sources."""
return sorted(self._source_list.keys())
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_CHANNEL
@property
def media_image_url(self):
"""Image url of current playing media."""
if self._current_source_id in self._app_list:
icon = self._app_list[self._current_source_id]['largeIcon']
if not icon.startswith('http'):
icon = self._app_list[self._current_source_id]['icon']
return icon
return None
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self._mac:
return SUPPORT_WEBOSTV | SUPPORT_TURN_ON
return SUPPORT_WEBOSTV
def turn_off(self):
"""Turn off media player."""
from websockets.exceptions import ConnectionClosed
self._state = STATE_OFF
try:
self._client.power_off()
except (OSError, ConnectionClosed, TypeError,
asyncio.TimeoutError):
pass
def turn_on(self):
"""Turn on the media player."""
if self._mac:
self._wol.send_magic_packet(self._mac)
def volume_up(self):
"""Volume up the media player."""
self._client.volume_up()
def volume_down(self):
"""Volume down media player."""
self._client.volume_down()
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
tv_volume = volume * 100
self._client.set_volume(tv_volume)
def mute_volume(self, mute):
"""Send mute command."""
self._muted = mute
self._client.set_mute(mute)
def media_play_pause(self):
"""Simulate play pause media player."""
if self._playing:
self.media_pause()
else:
self.media_play()
def select_source(self, source):
"""Select input source."""
if self._source_list.get(source).get('title'):
self._current_source_id = self._source_list[source]['id']
self._current_source = self._source_list[source]['title']
self._client.launch_app(self._source_list[source]['id'])
elif self._source_list.get(source).get('label'):
self._current_source_id = self._source_list[source]['id']
self._current_source = self._source_list[source]['label']
self._client.set_input(self._source_list[source]['id'])
def media_play(self):
"""Send play command."""
self._playing = True
self._state = STATE_PLAYING
self._client.play()
def media_pause(self):
"""Send media pause command to media player."""
self._playing = False
self._state = STATE_PAUSED
self._client.pause()
def media_next_track(self):
"""Send next track command."""
self._client.fast_forward()
def media_previous_track(self):
"""Send the previous track command."""
self._client.rewind()
|
source
|
identifier_name
|
term.rs
|
use std::fmt;
use super::Field;
use byteorder::{BigEndian, ByteOrder};
use common;
use schema::Facet;
use std::str;
use DateTime;
/// Size (in bytes) of the buffer of a int field.
const INT_TERM_LEN: usize = 4 + 8;
/// Term represents the value that the token can take.
///
/// It actually wraps a `Vec<u8>`.
#[derive(Clone, PartialEq, PartialOrd, Ord, Eq, Hash)]
pub struct Term<B = Vec<u8>>(B)
where
B: AsRef<[u8]>;
impl Term {
/// Builds a term given a field, and a u64-value
///
/// Assuming the term has a field id of 1, and a u64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the u64 value.
pub fn from_field_i64(field: Field, val: i64) -> Term {
let val_u64: u64 = common::i64_to_u64(val);
Term::from_field_u64(field, val_u64)
}
/// Builds a term given a field, and a DateTime value
///
/// Assuming the term has a field id of 1, and a timestamp i64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the DateTime as i64 timestamp value.
pub fn from_field_date(field: Field, val: &DateTime) -> Term {
let val_timestamp = val.timestamp();
Term::from_field_i64(field, val_timestamp)
}
/// Creates a `Term` given a facet.
pub fn from_facet(field: Field, facet: &Facet) -> Term
|
/// Builds a term given a field, and a string value
///
/// Assuming the term has a field id of 2, and a text value of "abc",
/// the Term will have 4 bytes.
/// The first byte is 2, and the three following bytes are the utf-8
/// representation of "abc".
pub fn from_field_text(field: Field, text: &str) -> Term {
let buffer = Vec::with_capacity(4 + text.len());
let mut term = Term(buffer);
term.set_field(field);
term.set_text(text);
term
}
/// Builds a term given a field, and a u64-value
///
/// Assuming the term has a field id of 1, and a u64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the u64 value.
pub fn from_field_u64(field: Field, val: u64) -> Term {
let mut term = Term(vec![0u8; INT_TERM_LEN]);
term.set_field(field);
term.set_u64(val);
term
}
/// Creates a new Term for a given field.
pub(crate) fn for_field(field: Field) -> Term {
let mut term = Term(Vec::with_capacity(100));
term.set_field(field);
term
}
/// Returns the field.
pub fn set_field(&mut self, field: Field) {
if self.0.len() < 4 {
self.0.resize(4, 0u8);
}
BigEndian::write_u32(&mut self.0[0..4], field.0);
}
/// Sets a u64 value in the term.
///
/// U64 are serialized using (8-byte) BigEndian
/// representation.
/// The use of BigEndian has the benefit of preserving
/// the natural order of the values.
pub fn set_u64(&mut self, val: u64) {
self.0.resize(INT_TERM_LEN, 0u8);
BigEndian::write_u64(&mut self.0[4..], val);
}
/// Sets a `i64` value in the term.
pub fn set_i64(&mut self, val: i64) {
self.set_u64(common::i64_to_u64(val));
}
fn set_bytes(&mut self, bytes: &[u8]) {
self.0.resize(4, 0u8);
self.0.extend(bytes);
}
pub(crate) fn from_field_bytes(field: Field, bytes: &[u8]) -> Term {
let mut term = Term::for_field(field);
term.set_bytes(bytes);
term
}
/// Set the texts only, keeping the field untouched.
pub fn set_text(&mut self, text: &str) {
self.set_bytes(text.as_bytes());
}
}
impl<B> Term<B>
where
B: AsRef<[u8]>,
{
/// Wraps a source of data
pub fn wrap(data: B) -> Term<B> {
Term(data)
}
/// Returns the field.
pub fn field(&self) -> Field {
Field(BigEndian::read_u32(&self.0.as_ref()[..4]))
}
/// Returns the `u64` value stored in a term.
///
/// # Panics
/// ... or returns an invalid value
/// if the term is not a `u64` field.
pub fn get_u64(&self) -> u64 {
BigEndian::read_u64(&self.0.as_ref()[4..])
}
/// Returns the `i64` value stored in a term.
///
/// # Panics
/// ... or returns an invalid value
/// if the term is not a `i64` field.
pub fn get_i64(&self) -> i64 {
common::u64_to_i64(BigEndian::read_u64(&self.0.as_ref()[4..]))
}
/// Returns the text associated with the term.
///
/// # Panics
/// If the value is not valid utf-8. This may happen
/// if the index is corrupted or if you try to
/// call this method on a non-string type.
pub fn text(&self) -> &str {
str::from_utf8(self.value_bytes()).expect("Term does not contain valid utf-8.")
}
/// Returns the serialized value of the term.
/// (this does not include the field.)
///
/// If the term is a string, its value is utf-8 encoded.
/// If the term is a u64, its value is encoded according
/// to `byteorder::LittleEndian`.
pub fn value_bytes(&self) -> &[u8] {
&self.0.as_ref()[4..]
}
/// Returns the underlying `&[u8]`
pub fn as_slice(&self) -> &[u8] {
self.0.as_ref()
}
}
impl<B> AsRef<[u8]> for Term<B>
where
B: AsRef<[u8]>,
{
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl fmt::Debug for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Term({:?})", &self.0[..])
}
}
#[cfg(test)]
mod tests {
use schema::*;
#[test]
pub fn test_term() {
let mut schema_builder = Schema::builder();
schema_builder.add_text_field("text", STRING);
let title_field = schema_builder.add_text_field("title", STRING);
let count_field = schema_builder.add_text_field("count", STRING);
{
let term = Term::from_field_text(title_field, "test");
assert_eq!(term.field(), title_field);
assert_eq!(&term.as_slice()[0..4], &[0u8, 0u8, 0u8, 1u8]);
assert_eq!(&term.as_slice()[4..], "test".as_bytes());
}
{
let term = Term::from_field_u64(count_field, 983u64);
assert_eq!(term.field(), count_field);
assert_eq!(&term.as_slice()[0..4], &[0u8, 0u8, 0u8, 2u8]);
assert_eq!(term.as_slice().len(), 4 + 8);
assert_eq!(term.as_slice()[4], 0u8);
assert_eq!(term.as_slice()[5], 0u8);
assert_eq!(term.as_slice()[6], 0u8);
assert_eq!(term.as_slice()[7], 0u8);
assert_eq!(term.as_slice()[8], 0u8);
assert_eq!(term.as_slice()[9], 0u8);
assert_eq!(term.as_slice()[10], (933u64 / 256u64) as u8);
assert_eq!(term.as_slice()[11], (983u64 % 256u64) as u8);
}
}
}
|
{
let bytes = facet.encoded_str().as_bytes();
let buffer = Vec::with_capacity(4 + bytes.len());
let mut term = Term(buffer);
term.set_field(field);
term.set_bytes(bytes);
term
}
|
identifier_body
|
term.rs
|
use std::fmt;
use super::Field;
use byteorder::{BigEndian, ByteOrder};
use common;
use schema::Facet;
use std::str;
use DateTime;
/// Size (in bytes) of the buffer of a int field.
const INT_TERM_LEN: usize = 4 + 8;
/// Term represents the value that the token can take.
///
/// It actually wraps a `Vec<u8>`.
#[derive(Clone, PartialEq, PartialOrd, Ord, Eq, Hash)]
pub struct Term<B = Vec<u8>>(B)
where
B: AsRef<[u8]>;
impl Term {
/// Builds a term given a field, and a u64-value
///
/// Assuming the term has a field id of 1, and a u64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the u64 value.
pub fn from_field_i64(field: Field, val: i64) -> Term {
let val_u64: u64 = common::i64_to_u64(val);
Term::from_field_u64(field, val_u64)
}
/// Builds a term given a field, and a DateTime value
///
/// Assuming the term has a field id of 1, and a timestamp i64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the DateTime as i64 timestamp value.
pub fn from_field_date(field: Field, val: &DateTime) -> Term {
let val_timestamp = val.timestamp();
Term::from_field_i64(field, val_timestamp)
}
/// Creates a `Term` given a facet.
pub fn from_facet(field: Field, facet: &Facet) -> Term {
let bytes = facet.encoded_str().as_bytes();
let buffer = Vec::with_capacity(4 + bytes.len());
let mut term = Term(buffer);
term.set_field(field);
term.set_bytes(bytes);
term
}
/// Builds a term given a field, and a string value
///
/// Assuming the term has a field id of 2, and a text value of "abc",
/// the Term will have 4 bytes.
/// The first byte is 2, and the three following bytes are the utf-8
/// representation of "abc".
pub fn from_field_text(field: Field, text: &str) -> Term {
let buffer = Vec::with_capacity(4 + text.len());
let mut term = Term(buffer);
term.set_field(field);
term.set_text(text);
term
}
/// Builds a term given a field, and a u64-value
///
/// Assuming the term has a field id of 1, and a u64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the u64 value.
pub fn from_field_u64(field: Field, val: u64) -> Term {
let mut term = Term(vec![0u8; INT_TERM_LEN]);
term.set_field(field);
term.set_u64(val);
term
}
/// Creates a new Term for a given field.
pub(crate) fn for_field(field: Field) -> Term {
let mut term = Term(Vec::with_capacity(100));
term.set_field(field);
term
}
/// Returns the field.
pub fn set_field(&mut self, field: Field) {
if self.0.len() < 4 {
self.0.resize(4, 0u8);
}
BigEndian::write_u32(&mut self.0[0..4], field.0);
}
/// Sets a u64 value in the term.
///
/// U64 are serialized using (8-byte) BigEndian
/// representation.
/// The use of BigEndian has the benefit of preserving
/// the natural order of the values.
pub fn set_u64(&mut self, val: u64) {
self.0.resize(INT_TERM_LEN, 0u8);
BigEndian::write_u64(&mut self.0[4..], val);
}
/// Sets a `i64` value in the term.
pub fn set_i64(&mut self, val: i64) {
self.set_u64(common::i64_to_u64(val));
}
fn set_bytes(&mut self, bytes: &[u8]) {
self.0.resize(4, 0u8);
self.0.extend(bytes);
}
pub(crate) fn from_field_bytes(field: Field, bytes: &[u8]) -> Term {
let mut term = Term::for_field(field);
term.set_bytes(bytes);
term
}
/// Set the texts only, keeping the field untouched.
pub fn set_text(&mut self, text: &str) {
self.set_bytes(text.as_bytes());
}
}
impl<B> Term<B>
where
B: AsRef<[u8]>,
{
/// Wraps a source of data
pub fn wrap(data: B) -> Term<B> {
Term(data)
}
/// Returns the field.
pub fn field(&self) -> Field {
Field(BigEndian::read_u32(&self.0.as_ref()[..4]))
|
}
/// Returns the `u64` value stored in a term.
///
/// # Panics
/// ... or returns an invalid value
/// if the term is not a `u64` field.
pub fn get_u64(&self) -> u64 {
BigEndian::read_u64(&self.0.as_ref()[4..])
}
/// Returns the `i64` value stored in a term.
///
/// # Panics
/// ... or returns an invalid value
/// if the term is not a `i64` field.
pub fn get_i64(&self) -> i64 {
common::u64_to_i64(BigEndian::read_u64(&self.0.as_ref()[4..]))
}
/// Returns the text associated with the term.
///
/// # Panics
/// If the value is not valid utf-8. This may happen
/// if the index is corrupted or if you try to
/// call this method on a non-string type.
pub fn text(&self) -> &str {
str::from_utf8(self.value_bytes()).expect("Term does not contain valid utf-8.")
}
/// Returns the serialized value of the term.
/// (this does not include the field.)
///
/// If the term is a string, its value is utf-8 encoded.
/// If the term is a u64, its value is encoded according
/// to `byteorder::LittleEndian`.
pub fn value_bytes(&self) -> &[u8] {
&self.0.as_ref()[4..]
}
/// Returns the underlying `&[u8]`
pub fn as_slice(&self) -> &[u8] {
self.0.as_ref()
}
}
impl<B> AsRef<[u8]> for Term<B>
where
B: AsRef<[u8]>,
{
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl fmt::Debug for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Term({:?})", &self.0[..])
}
}
#[cfg(test)]
mod tests {
use schema::*;
#[test]
pub fn test_term() {
let mut schema_builder = Schema::builder();
schema_builder.add_text_field("text", STRING);
let title_field = schema_builder.add_text_field("title", STRING);
let count_field = schema_builder.add_text_field("count", STRING);
{
let term = Term::from_field_text(title_field, "test");
assert_eq!(term.field(), title_field);
assert_eq!(&term.as_slice()[0..4], &[0u8, 0u8, 0u8, 1u8]);
assert_eq!(&term.as_slice()[4..], "test".as_bytes());
}
{
let term = Term::from_field_u64(count_field, 983u64);
assert_eq!(term.field(), count_field);
assert_eq!(&term.as_slice()[0..4], &[0u8, 0u8, 0u8, 2u8]);
assert_eq!(term.as_slice().len(), 4 + 8);
assert_eq!(term.as_slice()[4], 0u8);
assert_eq!(term.as_slice()[5], 0u8);
assert_eq!(term.as_slice()[6], 0u8);
assert_eq!(term.as_slice()[7], 0u8);
assert_eq!(term.as_slice()[8], 0u8);
assert_eq!(term.as_slice()[9], 0u8);
assert_eq!(term.as_slice()[10], (933u64 / 256u64) as u8);
assert_eq!(term.as_slice()[11], (983u64 % 256u64) as u8);
}
}
}
|
random_line_split
|
|
term.rs
|
use std::fmt;
use super::Field;
use byteorder::{BigEndian, ByteOrder};
use common;
use schema::Facet;
use std::str;
use DateTime;
/// Size (in bytes) of the buffer of a int field.
const INT_TERM_LEN: usize = 4 + 8;
/// Term represents the value that the token can take.
///
/// It actually wraps a `Vec<u8>`.
#[derive(Clone, PartialEq, PartialOrd, Ord, Eq, Hash)]
pub struct Term<B = Vec<u8>>(B)
where
B: AsRef<[u8]>;
impl Term {
/// Builds a term given a field, and a u64-value
///
/// Assuming the term has a field id of 1, and a u64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the u64 value.
pub fn from_field_i64(field: Field, val: i64) -> Term {
let val_u64: u64 = common::i64_to_u64(val);
Term::from_field_u64(field, val_u64)
}
/// Builds a term given a field, and a DateTime value
///
/// Assuming the term has a field id of 1, and a timestamp i64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the DateTime as i64 timestamp value.
pub fn from_field_date(field: Field, val: &DateTime) -> Term {
let val_timestamp = val.timestamp();
Term::from_field_i64(field, val_timestamp)
}
/// Creates a `Term` given a facet.
pub fn from_facet(field: Field, facet: &Facet) -> Term {
let bytes = facet.encoded_str().as_bytes();
let buffer = Vec::with_capacity(4 + bytes.len());
let mut term = Term(buffer);
term.set_field(field);
term.set_bytes(bytes);
term
}
/// Builds a term given a field, and a string value
///
/// Assuming the term has a field id of 2, and a text value of "abc",
/// the Term will have 4 bytes.
/// The first byte is 2, and the three following bytes are the utf-8
/// representation of "abc".
pub fn from_field_text(field: Field, text: &str) -> Term {
let buffer = Vec::with_capacity(4 + text.len());
let mut term = Term(buffer);
term.set_field(field);
term.set_text(text);
term
}
/// Builds a term given a field, and a u64-value
///
/// Assuming the term has a field id of 1, and a u64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the u64 value.
pub fn from_field_u64(field: Field, val: u64) -> Term {
let mut term = Term(vec![0u8; INT_TERM_LEN]);
term.set_field(field);
term.set_u64(val);
term
}
/// Creates a new Term for a given field.
pub(crate) fn for_field(field: Field) -> Term {
let mut term = Term(Vec::with_capacity(100));
term.set_field(field);
term
}
/// Returns the field.
pub fn set_field(&mut self, field: Field) {
if self.0.len() < 4
|
BigEndian::write_u32(&mut self.0[0..4], field.0);
}
/// Sets a u64 value in the term.
///
/// U64 are serialized using (8-byte) BigEndian
/// representation.
/// The use of BigEndian has the benefit of preserving
/// the natural order of the values.
pub fn set_u64(&mut self, val: u64) {
self.0.resize(INT_TERM_LEN, 0u8);
BigEndian::write_u64(&mut self.0[4..], val);
}
/// Sets a `i64` value in the term.
pub fn set_i64(&mut self, val: i64) {
self.set_u64(common::i64_to_u64(val));
}
fn set_bytes(&mut self, bytes: &[u8]) {
self.0.resize(4, 0u8);
self.0.extend(bytes);
}
pub(crate) fn from_field_bytes(field: Field, bytes: &[u8]) -> Term {
let mut term = Term::for_field(field);
term.set_bytes(bytes);
term
}
/// Set the texts only, keeping the field untouched.
pub fn set_text(&mut self, text: &str) {
self.set_bytes(text.as_bytes());
}
}
impl<B> Term<B>
where
B: AsRef<[u8]>,
{
/// Wraps a source of data
pub fn wrap(data: B) -> Term<B> {
Term(data)
}
/// Returns the field.
pub fn field(&self) -> Field {
Field(BigEndian::read_u32(&self.0.as_ref()[..4]))
}
/// Returns the `u64` value stored in a term.
///
/// # Panics
/// ... or returns an invalid value
/// if the term is not a `u64` field.
pub fn get_u64(&self) -> u64 {
BigEndian::read_u64(&self.0.as_ref()[4..])
}
/// Returns the `i64` value stored in a term.
///
/// # Panics
/// ... or returns an invalid value
/// if the term is not a `i64` field.
pub fn get_i64(&self) -> i64 {
common::u64_to_i64(BigEndian::read_u64(&self.0.as_ref()[4..]))
}
/// Returns the text associated with the term.
///
/// # Panics
/// If the value is not valid utf-8. This may happen
/// if the index is corrupted or if you try to
/// call this method on a non-string type.
pub fn text(&self) -> &str {
str::from_utf8(self.value_bytes()).expect("Term does not contain valid utf-8.")
}
/// Returns the serialized value of the term.
/// (this does not include the field.)
///
/// If the term is a string, its value is utf-8 encoded.
/// If the term is a u64, its value is encoded according
/// to `byteorder::LittleEndian`.
pub fn value_bytes(&self) -> &[u8] {
&self.0.as_ref()[4..]
}
/// Returns the underlying `&[u8]`
pub fn as_slice(&self) -> &[u8] {
self.0.as_ref()
}
}
impl<B> AsRef<[u8]> for Term<B>
where
B: AsRef<[u8]>,
{
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl fmt::Debug for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Term({:?})", &self.0[..])
}
}
#[cfg(test)]
mod tests {
use schema::*;
#[test]
pub fn test_term() {
let mut schema_builder = Schema::builder();
schema_builder.add_text_field("text", STRING);
let title_field = schema_builder.add_text_field("title", STRING);
let count_field = schema_builder.add_text_field("count", STRING);
{
let term = Term::from_field_text(title_field, "test");
assert_eq!(term.field(), title_field);
assert_eq!(&term.as_slice()[0..4], &[0u8, 0u8, 0u8, 1u8]);
assert_eq!(&term.as_slice()[4..], "test".as_bytes());
}
{
let term = Term::from_field_u64(count_field, 983u64);
assert_eq!(term.field(), count_field);
assert_eq!(&term.as_slice()[0..4], &[0u8, 0u8, 0u8, 2u8]);
assert_eq!(term.as_slice().len(), 4 + 8);
assert_eq!(term.as_slice()[4], 0u8);
assert_eq!(term.as_slice()[5], 0u8);
assert_eq!(term.as_slice()[6], 0u8);
assert_eq!(term.as_slice()[7], 0u8);
assert_eq!(term.as_slice()[8], 0u8);
assert_eq!(term.as_slice()[9], 0u8);
assert_eq!(term.as_slice()[10], (933u64 / 256u64) as u8);
assert_eq!(term.as_slice()[11], (983u64 % 256u64) as u8);
}
}
}
|
{
self.0.resize(4, 0u8);
}
|
conditional_block
|
term.rs
|
use std::fmt;
use super::Field;
use byteorder::{BigEndian, ByteOrder};
use common;
use schema::Facet;
use std::str;
use DateTime;
/// Size (in bytes) of the buffer of a int field.
const INT_TERM_LEN: usize = 4 + 8;
/// Term represents the value that the token can take.
///
/// It actually wraps a `Vec<u8>`.
#[derive(Clone, PartialEq, PartialOrd, Ord, Eq, Hash)]
pub struct Term<B = Vec<u8>>(B)
where
B: AsRef<[u8]>;
impl Term {
/// Builds a term given a field, and a u64-value
///
/// Assuming the term has a field id of 1, and a u64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the u64 value.
pub fn from_field_i64(field: Field, val: i64) -> Term {
let val_u64: u64 = common::i64_to_u64(val);
Term::from_field_u64(field, val_u64)
}
/// Builds a term given a field, and a DateTime value
///
/// Assuming the term has a field id of 1, and a timestamp i64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the DateTime as i64 timestamp value.
pub fn from_field_date(field: Field, val: &DateTime) -> Term {
let val_timestamp = val.timestamp();
Term::from_field_i64(field, val_timestamp)
}
/// Creates a `Term` given a facet.
pub fn from_facet(field: Field, facet: &Facet) -> Term {
let bytes = facet.encoded_str().as_bytes();
let buffer = Vec::with_capacity(4 + bytes.len());
let mut term = Term(buffer);
term.set_field(field);
term.set_bytes(bytes);
term
}
/// Builds a term given a field, and a string value
///
/// Assuming the term has a field id of 2, and a text value of "abc",
/// the Term will have 4 bytes.
/// The first byte is 2, and the three following bytes are the utf-8
/// representation of "abc".
pub fn from_field_text(field: Field, text: &str) -> Term {
let buffer = Vec::with_capacity(4 + text.len());
let mut term = Term(buffer);
term.set_field(field);
term.set_text(text);
term
}
/// Builds a term given a field, and a u64-value
///
/// Assuming the term has a field id of 1, and a u64 value of 3234,
/// the Term will have 8 bytes.
///
/// The first four byte are dedicated to storing the field id as a u64.
/// The 4 following bytes are encoding the u64 value.
pub fn from_field_u64(field: Field, val: u64) -> Term {
let mut term = Term(vec![0u8; INT_TERM_LEN]);
term.set_field(field);
term.set_u64(val);
term
}
/// Creates a new Term for a given field.
pub(crate) fn for_field(field: Field) -> Term {
let mut term = Term(Vec::with_capacity(100));
term.set_field(field);
term
}
/// Returns the field.
pub fn set_field(&mut self, field: Field) {
if self.0.len() < 4 {
self.0.resize(4, 0u8);
}
BigEndian::write_u32(&mut self.0[0..4], field.0);
}
/// Sets a u64 value in the term.
///
/// U64 are serialized using (8-byte) BigEndian
/// representation.
/// The use of BigEndian has the benefit of preserving
/// the natural order of the values.
pub fn set_u64(&mut self, val: u64) {
self.0.resize(INT_TERM_LEN, 0u8);
BigEndian::write_u64(&mut self.0[4..], val);
}
/// Sets a `i64` value in the term.
pub fn
|
(&mut self, val: i64) {
self.set_u64(common::i64_to_u64(val));
}
fn set_bytes(&mut self, bytes: &[u8]) {
self.0.resize(4, 0u8);
self.0.extend(bytes);
}
pub(crate) fn from_field_bytes(field: Field, bytes: &[u8]) -> Term {
let mut term = Term::for_field(field);
term.set_bytes(bytes);
term
}
/// Set the texts only, keeping the field untouched.
pub fn set_text(&mut self, text: &str) {
self.set_bytes(text.as_bytes());
}
}
impl<B> Term<B>
where
B: AsRef<[u8]>,
{
/// Wraps a source of data
pub fn wrap(data: B) -> Term<B> {
Term(data)
}
/// Returns the field.
pub fn field(&self) -> Field {
Field(BigEndian::read_u32(&self.0.as_ref()[..4]))
}
/// Returns the `u64` value stored in a term.
///
/// # Panics
/// ... or returns an invalid value
/// if the term is not a `u64` field.
pub fn get_u64(&self) -> u64 {
BigEndian::read_u64(&self.0.as_ref()[4..])
}
/// Returns the `i64` value stored in a term.
///
/// # Panics
/// ... or returns an invalid value
/// if the term is not a `i64` field.
pub fn get_i64(&self) -> i64 {
common::u64_to_i64(BigEndian::read_u64(&self.0.as_ref()[4..]))
}
/// Returns the text associated with the term.
///
/// # Panics
/// If the value is not valid utf-8. This may happen
/// if the index is corrupted or if you try to
/// call this method on a non-string type.
pub fn text(&self) -> &str {
str::from_utf8(self.value_bytes()).expect("Term does not contain valid utf-8.")
}
/// Returns the serialized value of the term.
/// (this does not include the field.)
///
/// If the term is a string, its value is utf-8 encoded.
/// If the term is a u64, its value is encoded according
/// to `byteorder::LittleEndian`.
pub fn value_bytes(&self) -> &[u8] {
&self.0.as_ref()[4..]
}
/// Returns the underlying `&[u8]`
pub fn as_slice(&self) -> &[u8] {
self.0.as_ref()
}
}
impl<B> AsRef<[u8]> for Term<B>
where
B: AsRef<[u8]>,
{
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl fmt::Debug for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Term({:?})", &self.0[..])
}
}
#[cfg(test)]
mod tests {
use schema::*;
#[test]
pub fn test_term() {
let mut schema_builder = Schema::builder();
schema_builder.add_text_field("text", STRING);
let title_field = schema_builder.add_text_field("title", STRING);
let count_field = schema_builder.add_text_field("count", STRING);
{
let term = Term::from_field_text(title_field, "test");
assert_eq!(term.field(), title_field);
assert_eq!(&term.as_slice()[0..4], &[0u8, 0u8, 0u8, 1u8]);
assert_eq!(&term.as_slice()[4..], "test".as_bytes());
}
{
let term = Term::from_field_u64(count_field, 983u64);
assert_eq!(term.field(), count_field);
assert_eq!(&term.as_slice()[0..4], &[0u8, 0u8, 0u8, 2u8]);
assert_eq!(term.as_slice().len(), 4 + 8);
assert_eq!(term.as_slice()[4], 0u8);
assert_eq!(term.as_slice()[5], 0u8);
assert_eq!(term.as_slice()[6], 0u8);
assert_eq!(term.as_slice()[7], 0u8);
assert_eq!(term.as_slice()[8], 0u8);
assert_eq!(term.as_slice()[9], 0u8);
assert_eq!(term.as_slice()[10], (933u64 / 256u64) as u8);
assert_eq!(term.as_slice()[11], (983u64 % 256u64) as u8);
}
}
}
|
set_i64
|
identifier_name
|
cadastro.ts
|
import { Component } from '@angular/core';
import { IonicPage, NavController, NavParams, ToastController, Events } from 'ionic-angular';
import { AppPreferences } from '@ionic-native/app-preferences';
import { Cliente } from '../../models/cliente';
import { EnderecoPage } from '../endereco/endereco';
import { Link } from '../../models/link';
import { LogineventProvider } from '../../providers/loginevent/loginevent';
import { Http } from '@angular/http';
import 'rxjs/add/operator/map';
/**
* Generated class for the Cadastro page.
*
|
@IonicPage()
@Component({
selector: 'page-cadastro',
templateUrl: 'cadastro.html',
})
export class CadastroPage {
public data: any;
public link: Link;
nome: string = '';
email: string = '';
senha: string = '';
cliente: Cliente;
constructor(private toastCtrl: ToastController, public navCtrl: NavController, public navParams: NavParams, public http: Http, private appPreferences: AppPreferences, public loginevent: LogineventProvider) {
this.link = new Link();
}
ionViewDidLoad() {
}
usuario_add() {
if (this.validaCampos()) {
this.http.post(this.link.api_url + 'clientes/add', {'Cliente': {'nome': this.nome, 'email': this.email, 'senha': this.senha}})
.map(res => res.json())
.subscribe(data => {
if (typeof data.message == "object") {
this.cliente = data.message['0'];
this.appPreferences.store('key', this.cliente['Cliente']['id'].toString()).then((res) => {
this.loginevent.cadastro();
this.goToEndereco(0);
});
} else {
this.toast(data.message);
}
});
} else {
let toast = this.toastCtrl.create({
message: "Preencha os campos, por gentileza",
duration: 3000,
position: 'top'
});
toast.present();
}
}
toast(cod: Number) {
switch (cod) {
case -2:
let toast = this.toastCtrl.create({
message: "Usuário já existe, tente novamente",
duration: 3000,
position: 'top'
});
toast.present();
break;
case -10:
toast = this.toastCtrl.create({
message: "Ocorreu algum erro, tente novamente",
duration: 3000,
position: 'top'
});
toast.present();
break;
}
}
goToEndereco(id: number) {
if (id != 0) {
this.navCtrl.setRoot(EnderecoPage, {cliente: id});
} else {
this.navCtrl.setRoot(EnderecoPage, {cliente: this.cliente});
}
}
validaCampos() {
if (this.nome == "" || this.email == "" || this.senha == "") {
return false;
}
return true;
}
}
|
* See http://ionicframework.com/docs/components/#navigation for more info
* on Ionic pages and navigation.
*/
|
random_line_split
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.