file_name (string, 4-140 chars) | prefix (string, 0-39k chars) | suffix (string, 0-36.1k chars) | middle (string, 0-29.4k chars) | fim_type (string, 4 classes)
---|---|---|---|---
search.py | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Search module that uses Google App Engine's full text search."""
__author__ = 'Ellis Michael ([email protected])'
import collections
import gettext
import logging
import math
import mimetypes
import os
import time
import traceback
import jinja2
import messages
import resources
import webapp2
import appengine_config
from common import crypto
from common import safe_dom
from common import schema_fields
from controllers import sites
from controllers import utils
from models import config
from models import counters
from models import courses
from models import custom_modules
from models import jobs
from models import services
from models import transforms
from modules.dashboard import dashboard
from google.appengine.api import namespace_manager
from google.appengine.api import search
from google.appengine.ext import db
MODULE_NAME = 'Full Text Search'
DEPRECATED = config.ConfigProperty(
'gcb_can_index_automatically', bool, safe_dom.Text(
'This property has been deprecated; it is retained so that we '
'will not generate no-such-variable error messages for existing '
'installations that have this property set.'),
default_value=False, label='Automatically index search', deprecated=True)
SEARCH_QUERIES_MADE = counters.PerfCounter(
'gcb-search-queries-made',
'The number of student queries made to the search module.')
SEARCH_RESULTS_RETURNED = counters.PerfCounter(
'gcb-search-results-returned',
'The number of search results returned across all student queries.')
SEARCH_FAILURES = counters.PerfCounter(
'gcb-search-failures',
'The number of search failure messages returned across all student '
'queries.')
INDEX_NAME = 'gcb_search_index_loc_%s'
RESULTS_LIMIT = 10
GCB_SEARCH_FOLDER_NAME = os.path.normpath('/modules/search/')
MAX_RETRIES = 5
# Name of a per-course setting determining whether automatic indexing is enabled
AUTO_INDEX_SETTING = 'auto_index'
# I18N: Message displayed on search results page when error occurs.
SEARCH_ERROR_TEXT = gettext.gettext('Search is currently unavailable.')
class ModuleDisabledException(Exception):
"""Exception thrown when the search module is disabled."""
pass
def get_index(namespace, locale):
assert locale, 'Must have a non-null locale'
return search.Index(name=INDEX_NAME % locale, namespace=namespace)
def index_all_docs(course, incremental):
"""Index all of the docs for a given models.Course object.
Args:
course: models.courses.Course. the course to index.
incremental: boolean. whether or not to index only new or out-of-date
items.
Returns:
A dict with three keys.
'num_indexed_docs' maps to an int, the number of documents added to the
index.
'doc_type' maps to a counter with resource types as keys mapping to the
number of that resource added to the index.
'indexing_time_secs' maps to a float representing the number of seconds
the indexing job took.
Raises:
ModuleDisabledException: The search module is currently disabled.
"""
if not custom_module.enabled:
raise ModuleDisabledException('The search module is disabled.')
start_time = time.time()
index = get_index(
course.app_context.get_namespace_name(),
course.app_context.get_current_locale())
timestamps, doc_types = (_get_index_metadata(index) if incremental
else ({}, {}))
for doc in resources.generate_all_documents(course, timestamps):
retry_count = 0
while retry_count < MAX_RETRIES:
try:
index.put(doc)
timestamps[doc.doc_id] = doc['date'][0].value
doc_types[doc.doc_id] = doc['type'][0].value
break
            except search.Error as e:
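                # Only transient backend failures are retried below; any other
                # search.Error is logged once and the document is skipped.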
if e.results[0].code == search.OperationResult.TRANSIENT_ERROR:
retry_count += 1
if retry_count >= MAX_RETRIES:
logging.error(
'Multiple transient errors indexing doc_id: %s',
doc.doc_id)
else:
logging.error('Failed to index doc_id: %s', doc.doc_id)
break
indexed_doc_types = collections.Counter()
for type_name in doc_types.values():
indexed_doc_types[type_name] += 1
return {'num_indexed_docs': len(timestamps),
'doc_types': indexed_doc_types,
'indexing_time_secs': time.time() - start_time}
def clear_index(namespace, locale):
"""Delete all docs in the index for a given models.Course object."""
if not custom_module.enabled:
raise ModuleDisabledException('The search module is disabled.')
index = get_index(namespace, locale)
doc_ids = [document.doc_id for document in index.get_range(ids_only=True)]
total_docs = len(doc_ids)
while doc_ids:
index.delete(doc_ids)
doc_ids = [document.doc_id
for document in index.get_range(ids_only=True)]
return {'deleted_docs': total_docs}
def _get_index_metadata(index):
"""Returns dict from doc_id to timestamp and one from doc_id to doc_type."""
timestamps = []
doc_types = []
cursor = search.Cursor()
while cursor:
options = search.QueryOptions(
limit=1000,
cursor=cursor,
returned_fields=['date', 'type'])
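        # An empty query string matches every document; the cursor pages through them.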
query = search.Query(query_string='', options=options)
current_docs = index.search(query)
cursor = current_docs.cursor
for doc in current_docs:
timestamps.append((doc.doc_id, doc['date'][0].value))
doc_types.append((doc.doc_id, doc['type'][0].value))
return dict(timestamps), dict(doc_types)
def fetch(course, query_string, offset=0, limit=RESULTS_LIMIT):
"""Return an HTML fragment with the results of a search for query_string.
Args:
course: models.courses.Course. the course to search.
query_string: str. the user's specified query.
offset: int. the number of results to skip.
limit: int. the number of results to return.
Returns:
A dict with two keys.
'results' maps to an ordered list of resources.Result objects.
'total_found' maps to the total number of results in the index which
match query_string.
Raises:
ModuleDisabledException: The search module is currently disabled.
"""
if not custom_module.enabled:
raise ModuleDisabledException('The search module is disabled.')
index = get_index(
course.app_context.get_namespace_name(),
course.app_context.get_current_locale())
try:
# TODO(emichael): Don't compute these for every query
returned_fields = resources.get_returned_fields()
snippeted_fields = resources.get_snippeted_fields()
options = search.QueryOptions(
limit=limit,
offset=offset,
returned_fields=returned_fields,
number_found_accuracy=100,
snippeted_fields=snippeted_fields)
query = search.Query(query_string=query_string, options=options)
results = index.search(query)
except search.Error:
logging.info('Failed searching for: %s', query_string)
return {'results': None, 'total_found': 0}
processed_results = resources.process_results(results)
return {'results': processed_results, 'total_found': results.number_found}
class SearchHandler(utils.BaseHandler):
"""Handler for generating the search results page."""
def get(self):
"""Process GET request."""
# TODO(emichael): move timing to Javascript
if not custom_module.enabled:
self.error(404)
return
student = self.personalize_page_and_get_enrolled(
supports_transient_student=True)
if not student:
return
try:
start = time.time()
# TODO(emichael): Don't use get because it can't handle utf-8
query = self.request.get('query')
offset = self.request.get('offset')
self.template_value['navbar'] = {}
if query:
try:
offset = int(offset)
except (ValueError, TypeError):
offset = 0
self.template_value['query'] = query
SEARCH_QUERIES_MADE.inc()
response = fetch(self.get_course(), query, offset=offset)
response = self.filter(response, student)
self.template_value['time'] = '%.2f' % (time.time() - start)
self.template_value['search_results'] = response['results']
total_found = response['total_found']
if offset + RESULTS_LIMIT < total_found:
self.template_value['next_link'] = (
'search?query=%s&offset=%d' %
(query, offset + RESULTS_LIMIT))
if offset - RESULTS_LIMIT >= 0:
self.template_value['previous_link'] = (
'search?query=%s&offset=%d' %
(query, offset - RESULTS_LIMIT))
                self.template_value['page_number'] = offset // RESULTS_LIMIT + 1
self.template_value['total_pages'] = int(math.ceil(
float(total_found) / RESULTS_LIMIT))
if response['results']:
SEARCH_RESULTS_RETURNED.inc(len(response['results']))
# TODO(emichael): Remove this check when the unicode issue is fixed in
# dev_appserver.
except UnicodeEncodeError as e:
SEARCH_FAILURES.inc()
if not appengine_config.PRODUCTION_MODE:
# This message will only be displayed to the course author in
# dev, so it does not need to be I18N'd
self.template_value['search_error'] = (
'There is a known issue in App Engine\'s SDK '
'(code.google.com/p/googleappengine/issues/detail?id=9335) '
'which causes an error when generating search snippets '
'which contain non-ASCII characters. This error does not '
'occur in the production environment, so you can safely '
'run your course with unicode characters on appspot.com.')
logging.error('[Unicode/Dev server issue] Error rendering the '
'search page: %s.', e)
else:
self.template_value['search_error'] = SEARCH_ERROR_TEXT
logging.error('Error rendering the search page: %s. %s',
e, traceback.format_exc())
except Exception as e: # pylint: disable=broad-except
SEARCH_FAILURES.inc()
self.template_value['search_error'] = SEARCH_ERROR_TEXT
logging.error('Error rendering the search page: %s. %s',
e, traceback.format_exc())
finally:
path = sites.abspath(self.app_context.get_home_folder(),
GCB_SEARCH_FOLDER_NAME)
template = self.get_template('search.html', additional_dirs=[path])
self.template_value['navbar'] = {}
self.response.out.write(template.render(self.template_value))
def filter(self, response, student):
if not response['results']:
return response
filtered_results = []
units, lessons = self.get_course().get_track_matching_student(student)
available_unit_ids = set(str(unit.unit_id) for unit in units)
for result in response['results']:
if not result.unit_id or str(result.unit_id) in available_unit_ids:
filtered_results.append(result)
return {
'results': filtered_results,
'total_found': len(filtered_results)
}
class AssetsHandler(webapp2.RequestHandler):
"""Content handler for assets associated with search."""
def get(self):
"""Respond to HTTP GET methods."""
if not custom_module.enabled:
self.error(404)
return
path = self.request.path
if path.startswith('/'):
path = path[1:]
path = os.path.normpath(path)
if os.path.basename(os.path.dirname(path)) != 'assets':
self.error(404)
return
resource_file = os.path.join(appengine_config.BUNDLE_ROOT, path)
mimetype = mimetypes.guess_type(resource_file)[0]
if mimetype is None:
mimetype = 'application/octet-stream'
try:
sites.set_static_resource_cache_control(self)
self.response.status = 200
            with open(resource_file, 'rb') as stream:
                content = stream.read()  # binary-safe read; the handle is closed promptly
            self.response.headers['Content-Type'] = mimetype
            self.response.write(content)
except IOError:
self.error(404)
def _get_search(handler):
|
def _post_index_course(handler):
"""Submits a new indexing operation."""
try:
check_job_and_submit(handler.app_context, incremental=False)
except db.TransactionFailedError:
# Double submission from multiple browsers, just pass
pass
handler.redirect('/dashboard?action=settings_search')
class CronIndexCourse(utils.AbstractAllCoursesCronHandler):
"""Index courses where auto-indexing is enabled.
All jobs should be submitted through the transactional check_job_and_submit
method to prevent multiple index operations from running at the same time.
If an index job is currently running when this cron job attempts to start
one, this operation will be a noop for that course.
"""
URL = '/cron/search/index_courses'
@classmethod
def is_globally_enabled(cls):
return True
@classmethod
def is_enabled_for_course(cls, app_context):
course_settings = app_context.get_environ().get('course')
return course_settings and course_settings.get(AUTO_INDEX_SETTING)
def cron_action(self, app_context, unused_global_state):
try:
check_job_and_submit(app_context, incremental=True)
logging.info('Index submitted for namespace %s.',
app_context.get_namespace_name())
except db.TransactionFailedError as e:
logging.info(
'Failed to submit re-index job in namespace %s: %s',
app_context.get_namespace_name(), e)
@db.transactional(xg=True)
def check_job_and_submit(app_context, incremental=True):
"""Determines whether an indexing job is running and submits if not."""
    indexing_job = IndexCourse(app_context, incremental=incremental)
job_entity = IndexCourse(app_context).load()
bad_status_codes = [jobs.STATUS_CODE_STARTED, jobs.STATUS_CODE_QUEUED]
if job_entity and job_entity.status_code in bad_status_codes:
raise db.TransactionFailedError('Index job is currently running.')
indexing_job.non_transactional_submit()
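# Note: because check_job_and_submit is wrapped in @db.transactional(xg=True),
# the status check and the submit are atomic, so two concurrent callers cannot
# both pass the check and enqueue duplicate index jobs; the loser sees
# db.TransactionFailedError instead.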
class IndexCourse(jobs.DurableJob):
"""A job that indexes the course."""
@staticmethod
def get_description():
return 'course index'
def __init__(self, app_context, incremental=True):
super(IndexCourse, self).__init__(app_context)
self.incremental = incremental
def run(self):
"""Index the course."""
namespace = namespace_manager.get_namespace()
logging.info('Running indexing job for namespace %s. Incremental: %s',
namespace_manager.get_namespace(), self.incremental)
app_context = sites.get_app_context_for_namespace(namespace)
# Make a request URL to make sites.get_course_for_current_request work
sites.set_path_info(app_context.slug)
indexing_stats = {
'deleted_docs': 0,
'num_indexed_docs': 0,
'doc_types': collections.Counter(),
'indexing_time_secs': 0,
'locales': []
}
for locale in app_context.get_allowed_locales():
stats = clear_index(namespace, locale)
indexing_stats['deleted_docs'] += stats['deleted_docs']
for locale in app_context.get_allowed_locales():
app_context.set_current_locale(locale)
course = courses.Course(None, app_context=app_context)
stats = index_all_docs(course, self.incremental)
indexing_stats['num_indexed_docs'] += stats['num_indexed_docs']
indexing_stats['doc_types'] += stats['doc_types']
indexing_stats['indexing_time_secs'] += stats['indexing_time_secs']
indexing_stats['locales'].append(locale)
return indexing_stats
# Module registration
custom_module = None
def register_module():
"""Registers this module in the registry."""
global_routes = [
('/modules/search/assets/.*', AssetsHandler),
(CronIndexCourse.URL, CronIndexCourse)
]
namespaced_routes = [
('/search', SearchHandler)
]
auto_index_enabled = schema_fields.SchemaField(
'course:' + AUTO_INDEX_SETTING, 'Auto-Index', 'boolean',
description=services.help_urls.make_learn_more_message(
messages.SEARCH_AUTO_INDEX_DESCRIPTION, 'course:auto_index'),
i18n=False, optional=True)
course_settings_fields = [
lambda course: auto_index_enabled
]
def notify_module_enabled():
dashboard.DashboardHandler.add_sub_nav_mapping(
'publish', 'search', 'Search', action='settings_search',
contents=_get_search, placement=1000)
dashboard.DashboardHandler.add_custom_post_action(
'index_course', _post_index_course)
courses.Course.OPTIONS_SCHEMA_PROVIDERS[
courses.Course.SCHEMA_SECTION_COURSE] += course_settings_fields
global custom_module # pylint: disable=global-statement
custom_module = custom_modules.Module(
MODULE_NAME,
'Provides search capabilities for courses',
global_routes, namespaced_routes,
notify_module_enabled=notify_module_enabled)
return custom_module
| """Renders course indexing view."""
template_values = {'page_title': handler.format_title('Search')}
mc_template_value = {}
mc_template_value['module_enabled'] = custom_module.enabled
indexing_job = IndexCourse(handler.app_context).load()
if indexing_job:
if indexing_job.status_code in [jobs.STATUS_CODE_STARTED,
jobs.STATUS_CODE_QUEUED]:
mc_template_value['status_message'] = 'Indexing in progress.'
mc_template_value['job_in_progress'] = True
elif indexing_job.status_code == jobs.STATUS_CODE_COMPLETED:
mc_template_value['indexed'] = True
mc_template_value['last_updated'] = (
indexing_job.updated_on.strftime(
utils.HUMAN_READABLE_DATETIME_FORMAT))
mc_template_value['index_info'] = transforms.loads(
indexing_job.output)
elif indexing_job.status_code == jobs.STATUS_CODE_FAILED:
mc_template_value['status_message'] = (
'Indexing job failed with error: %s' % indexing_job.output)
else:
mc_template_value['status_message'] = (
'No indexing job has been run yet.')
mc_template_value['index_course_xsrf_token'] = (
crypto.XsrfTokenManager.create_xsrf_token('index_course'))
template_values['main_content'] = jinja2.Markup(handler.get_template(
'search_dashboard.html', [os.path.dirname(__file__)]
).render(mc_template_value, autoescape=True))
return template_values | identifier_body |
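The put/retry loop in `index_all_docs` above retries only transient search-service errors and gives up after `MAX_RETRIES` attempts. A minimal standalone sketch of that pattern, with a stand-in exception type in place of the App Engine one:

```python
MAX_RETRIES = 5

class TransientError(Exception):
    """Stand-in for a retryable backend failure."""

def put_with_retries(put_fn, doc):
    """Call put_fn(doc), retrying transient failures up to MAX_RETRIES times."""
    for _ in range(MAX_RETRIES):
        try:
            return put_fn(doc)
        except TransientError:
            continue  # transient: try again
    raise RuntimeError('gave up after %d transient errors' % MAX_RETRIES)
```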
plat_win.py | # Copyright 2013 Hardcoded Software (http://www.hardcoded.net)
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at | from ctypes.wintypes import HWND, UINT, LPCWSTR, BOOL
import os.path as op
from .compat import text_type
shell32 = windll.shell32
SHFileOperationW = shell32.SHFileOperationW
class SHFILEOPSTRUCTW(Structure):
_fields_ = [
("hwnd", HWND),
("wFunc", UINT),
("pFrom", LPCWSTR),
("pTo", LPCWSTR),
("fFlags", c_uint),
("fAnyOperationsAborted", BOOL),
("hNameMappings", c_uint),
("lpszProgressTitle", LPCWSTR),
]
FO_MOVE = 1
FO_COPY = 2
FO_DELETE = 3
FO_RENAME = 4
FOF_MULTIDESTFILES = 1
FOF_SILENT = 4
FOF_NOCONFIRMATION = 16
FOF_ALLOWUNDO = 64
FOF_NOERRORUI = 1024
def send2trash(path):
if not isinstance(path, text_type):
path = text_type(path, 'mbcs')
if not op.isabs(path):
path = op.abspath(path)
fileop = SHFILEOPSTRUCTW()
fileop.hwnd = 0
fileop.wFunc = FO_DELETE
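    # SHFileOperationW requires pFrom to be double-null-terminated; the explicit
    # '\0' below plus the terminator ctypes appends satisfies that.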
fileop.pFrom = LPCWSTR(path + '\0')
fileop.pTo = None
fileop.fFlags = FOF_ALLOWUNDO | FOF_NOCONFIRMATION | FOF_NOERRORUI | FOF_SILENT
fileop.fAnyOperationsAborted = 0
fileop.hNameMappings = 0
fileop.lpszProgressTitle = None
result = SHFileOperationW(byref(fileop))
if result:
msg = "Couldn't perform operation. Error code: %d" % result
raise OSError(msg) | # http://www.hardcoded.net/licenses/bsd_license
from __future__ import unicode_literals
from ctypes import windll, Structure, byref, c_uint | random_line_split |
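A minimal usage sketch, assuming the standard Send2Trash package layout in which this file is the Windows backend re-exported as `send2trash`:

```python
from send2trash import send2trash  # resolves to this module's send2trash on Windows

send2trash(u'C:\\temp\\old_report.txt')  # moved to the Recycle Bin, not deleted
```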
plat_win.py | # Copyright 2013 Hardcoded Software (http://www.hardcoded.net)
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
from __future__ import unicode_literals
from ctypes import windll, Structure, byref, c_uint
from ctypes.wintypes import HWND, UINT, LPCWSTR, BOOL
import os.path as op
from .compat import text_type
shell32 = windll.shell32
SHFileOperationW = shell32.SHFileOperationW
class SHFILEOPSTRUCTW(Structure):
_fields_ = [
("hwnd", HWND),
("wFunc", UINT),
("pFrom", LPCWSTR),
("pTo", LPCWSTR),
("fFlags", c_uint),
("fAnyOperationsAborted", BOOL),
("hNameMappings", c_uint),
("lpszProgressTitle", LPCWSTR),
]
FO_MOVE = 1
FO_COPY = 2
FO_DELETE = 3
FO_RENAME = 4
FOF_MULTIDESTFILES = 1
FOF_SILENT = 4
FOF_NOCONFIRMATION = 16
FOF_ALLOWUNDO = 64
FOF_NOERRORUI = 1024
def send2trash(path):
| if not isinstance(path, text_type):
path = text_type(path, 'mbcs')
if not op.isabs(path):
path = op.abspath(path)
fileop = SHFILEOPSTRUCTW()
fileop.hwnd = 0
fileop.wFunc = FO_DELETE
fileop.pFrom = LPCWSTR(path + '\0')
fileop.pTo = None
fileop.fFlags = FOF_ALLOWUNDO | FOF_NOCONFIRMATION | FOF_NOERRORUI | FOF_SILENT
fileop.fAnyOperationsAborted = 0
fileop.hNameMappings = 0
fileop.lpszProgressTitle = None
result = SHFileOperationW(byref(fileop))
if result:
msg = "Couldn't perform operation. Error code: %d" % result
raise OSError(msg) | identifier_body |
|
plat_win.py | # Copyright 2013 Hardcoded Software (http://www.hardcoded.net)
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
from __future__ import unicode_literals
from ctypes import windll, Structure, byref, c_uint
from ctypes.wintypes import HWND, UINT, LPCWSTR, BOOL
import os.path as op
from .compat import text_type
shell32 = windll.shell32
SHFileOperationW = shell32.SHFileOperationW
class SHFILEOPSTRUCTW(Structure):
_fields_ = [
("hwnd", HWND),
("wFunc", UINT),
("pFrom", LPCWSTR),
("pTo", LPCWSTR),
("fFlags", c_uint),
("fAnyOperationsAborted", BOOL),
("hNameMappings", c_uint),
("lpszProgressTitle", LPCWSTR),
]
FO_MOVE = 1
FO_COPY = 2
FO_DELETE = 3
FO_RENAME = 4
FOF_MULTIDESTFILES = 1
FOF_SILENT = 4
FOF_NOCONFIRMATION = 16
FOF_ALLOWUNDO = 64
FOF_NOERRORUI = 1024
def send2trash(path):
if not isinstance(path, text_type):
|
if not op.isabs(path):
path = op.abspath(path)
fileop = SHFILEOPSTRUCTW()
fileop.hwnd = 0
fileop.wFunc = FO_DELETE
fileop.pFrom = LPCWSTR(path + '\0')
fileop.pTo = None
fileop.fFlags = FOF_ALLOWUNDO | FOF_NOCONFIRMATION | FOF_NOERRORUI | FOF_SILENT
fileop.fAnyOperationsAborted = 0
fileop.hNameMappings = 0
fileop.lpszProgressTitle = None
result = SHFileOperationW(byref(fileop))
if result:
msg = "Couldn't perform operation. Error code: %d" % result
raise OSError(msg)
| path = text_type(path, 'mbcs') | conditional_block |
plat_win.py | # Copyright 2013 Hardcoded Software (http://www.hardcoded.net)
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
from __future__ import unicode_literals
from ctypes import windll, Structure, byref, c_uint
from ctypes.wintypes import HWND, UINT, LPCWSTR, BOOL
import os.path as op
from .compat import text_type
shell32 = windll.shell32
SHFileOperationW = shell32.SHFileOperationW
class | (Structure):
_fields_ = [
("hwnd", HWND),
("wFunc", UINT),
("pFrom", LPCWSTR),
("pTo", LPCWSTR),
("fFlags", c_uint),
("fAnyOperationsAborted", BOOL),
("hNameMappings", c_uint),
("lpszProgressTitle", LPCWSTR),
]
FO_MOVE = 1
FO_COPY = 2
FO_DELETE = 3
FO_RENAME = 4
FOF_MULTIDESTFILES = 1
FOF_SILENT = 4
FOF_NOCONFIRMATION = 16
FOF_ALLOWUNDO = 64
FOF_NOERRORUI = 1024
def send2trash(path):
if not isinstance(path, text_type):
path = text_type(path, 'mbcs')
if not op.isabs(path):
path = op.abspath(path)
fileop = SHFILEOPSTRUCTW()
fileop.hwnd = 0
fileop.wFunc = FO_DELETE
fileop.pFrom = LPCWSTR(path + '\0')
fileop.pTo = None
fileop.fFlags = FOF_ALLOWUNDO | FOF_NOCONFIRMATION | FOF_NOERRORUI | FOF_SILENT
fileop.fAnyOperationsAborted = 0
fileop.hNameMappings = 0
fileop.lpszProgressTitle = None
result = SHFileOperationW(byref(fileop))
if result:
msg = "Couldn't perform operation. Error code: %d" % result
raise OSError(msg)
| SHFILEOPSTRUCTW | identifier_name |
revealer.rs | // Copyright 2013-2015, The Rust-GNOME Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! Hide and show with animation
use ffi;
use cast::{GTK_REVEALER};
use glib::{to_bool, to_gboolean};
/// GtkRevealer — Hide and show with animation
struct_Widget!(Revealer);
impl Revealer {
pub fn new() -> Option<Revealer> {
let tmp_pointer = unsafe { ffi::gtk_revealer_new() };
check_pointer!(tmp_pointer, Revealer)
}
pub fn get_reveal_child(&self) -> bool {
unsafe {
to_bool(ffi::gtk_revealer_get_reveal_child(GTK_REVEALER(self.pointer)))
}
}
pub fn set_reveal_child(&self, reveal_child: bool) {
unsafe {
ffi::gtk_revealer_set_reveal_child(GTK_REVEALER(self.pointer),
to_gboolean(reveal_child))
}
}
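    /// Note: unlike `get_reveal_child` (the target state), the getter below
    /// reports whether the reveal animation has actually finished.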
pub fn is_child_revealed(&self) -> bool {
| pub fn get_transition_duration(&self) -> u32 {
unsafe {
ffi::gtk_revealer_get_transition_duration(GTK_REVEALER(self.pointer))
}
}
pub fn set_transition_duration(&self, duration: u32) {
unsafe {
ffi::gtk_revealer_set_transition_duration(GTK_REVEALER(self.pointer), duration)
}
}
pub fn set_transition_type(&self, transition: ::RevealerTransitionType) {
unsafe {
ffi::gtk_revealer_set_transition_type(GTK_REVEALER(self.pointer), transition)
}
}
pub fn get_transition_type(&self) -> ::RevealerTransitionType {
unsafe {
ffi::gtk_revealer_get_transition_type(GTK_REVEALER(self.pointer))
}
}
}
impl_drop!(Revealer);
impl_TraitWidget!(Revealer);
impl ::ContainerTrait for Revealer {}
impl ::BinTrait for Revealer {}
| unsafe {
to_bool(ffi::gtk_revealer_get_child_revealed(GTK_REVEALER(self.pointer)))
}
}
| identifier_body |
revealer.rs | // Copyright 2013-2015, The Rust-GNOME Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! Hide and show with animation
use ffi;
use cast::{GTK_REVEALER};
use glib::{to_bool, to_gboolean};
/// GtkRevealer — Hide and show with animation
struct_Widget!(Revealer);
impl Revealer {
pub fn new() -> Option<Revealer> {
let tmp_pointer = unsafe { ffi::gtk_revealer_new() };
check_pointer!(tmp_pointer, Revealer)
}
pub fn get_reveal_child(&self) -> bool {
unsafe {
to_bool(ffi::gtk_revealer_get_reveal_child(GTK_REVEALER(self.pointer)))
}
}
pub fn set_reveal_child(&self, reveal_child: bool) {
unsafe {
ffi::gtk_revealer_set_reveal_child(GTK_REVEALER(self.pointer),
to_gboolean(reveal_child))
}
}
pub fn is_child_revealed(&self) -> bool {
unsafe {
to_bool(ffi::gtk_revealer_get_child_revealed(GTK_REVEALER(self.pointer)))
}
}
pub fn get_transition_duration(&self) -> u32 {
unsafe {
ffi::gtk_revealer_get_transition_duration(GTK_REVEALER(self.pointer))
}
}
pub fn set_transition_duration(&self, duration: u32) {
unsafe {
ffi::gtk_revealer_set_transition_duration(GTK_REVEALER(self.pointer), duration)
}
}
pub fn set_transition_type(&self, transition: ::RevealerTransitionType) {
unsafe {
ffi::gtk_revealer_set_transition_type(GTK_REVEALER(self.pointer), transition)
}
}
pub fn get_transition_type(&self) -> ::RevealerTransitionType {
unsafe {
ffi::gtk_revealer_get_transition_type(GTK_REVEALER(self.pointer))
}
}
} | impl ::ContainerTrait for Revealer {}
impl ::BinTrait for Revealer {} |
impl_drop!(Revealer);
impl_TraitWidget!(Revealer);
| random_line_split |
revealer.rs | // Copyright 2013-2015, The Rust-GNOME Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! Hide and show with animation
use ffi;
use cast::{GTK_REVEALER};
use glib::{to_bool, to_gboolean};
/// GtkRevealer — Hide and show with animation
struct_Widget!(Revealer);
impl Revealer {
pub fn new() -> Option<Revealer> {
let tmp_pointer = unsafe { ffi::gtk_revealer_new() };
check_pointer!(tmp_pointer, Revealer)
}
pub fn get_reveal_child(&self) -> bool {
unsafe {
to_bool(ffi::gtk_revealer_get_reveal_child(GTK_REVEALER(self.pointer)))
}
}
pub fn set_reveal_child(&self, reveal_child: bool) {
unsafe {
ffi::gtk_revealer_set_reveal_child(GTK_REVEALER(self.pointer),
to_gboolean(reveal_child))
}
}
pub fn is_child_revealed(&self) -> bool {
unsafe {
to_bool(ffi::gtk_revealer_get_child_revealed(GTK_REVEALER(self.pointer)))
}
}
pub fn get_transition_duration(&self) -> u32 {
unsafe {
ffi::gtk_revealer_get_transition_duration(GTK_REVEALER(self.pointer))
}
}
pub fn se | self, duration: u32) {
unsafe {
ffi::gtk_revealer_set_transition_duration(GTK_REVEALER(self.pointer), duration)
}
}
pub fn set_transition_type(&self, transition: ::RevealerTransitionType) {
unsafe {
ffi::gtk_revealer_set_transition_type(GTK_REVEALER(self.pointer), transition)
}
}
pub fn get_transition_type(&self) -> ::RevealerTransitionType {
unsafe {
ffi::gtk_revealer_get_transition_type(GTK_REVEALER(self.pointer))
}
}
}
impl_drop!(Revealer);
impl_TraitWidget!(Revealer);
impl ::ContainerTrait for Revealer {}
impl ::BinTrait for Revealer {}
| t_transition_duration(& | identifier_name |
recent_commits_test.py | # coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for recent commit controllers."""
from __future__ import annotations
from core import feconf
from core.platform import models
from core.tests import test_utils
(exp_models,) = models.Registry.import_models([models.NAMES.exploration])
class RecentCommitsHandlerUnitTests(test_utils.GenericTestBase):
"""Test the RecentCommitsHandler class."""
def setUp(self):
super(RecentCommitsHandlerUnitTests, self).setUp()
self.signup(self.MODERATOR_EMAIL, self.MODERATOR_USERNAME)
self.set_moderators([self.MODERATOR_USERNAME])
self.signup(self.VIEWER_EMAIL, self.VIEWER_USERNAME)
self.committer_1_id = self.get_user_id_from_email(self.VIEWER_EMAIL)
self.signup(self.NEW_USER_EMAIL, self.NEW_USER_USERNAME)
self.committer_2_id = self.get_user_id_from_email(self.NEW_USER_EMAIL)
commit1 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_1', 0, self.committer_1_id, 'create',
'created first commit', [], 'public', True)
commit2 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_1', 1, self.committer_2_id, 'edit', 'edited commit', [], | 'public', True)
commit3 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_2', 0, self.committer_1_id, 'create',
'created second commit', [], 'private', False)
commit1.exploration_id = 'exp_1'
commit2.exploration_id = 'exp_1'
commit3.exploration_id = 'exp_2'
commit1.update_timestamps()
commit1.put()
commit2.update_timestamps()
commit2.put()
commit3.update_timestamps()
commit3.put()
def test_get_recent_commits(self):
"""Test that this method should return all nonprivate commits."""
self.login(self.MODERATOR_EMAIL)
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(len(response_dict['results']), 2)
self.assertDictContainsSubset(
{'username': self.VIEWER_USERNAME, 'exploration_id': 'exp_1',
'post_commit_status': 'public', 'version': 0,
'commit_message': 'created first commit',
'commit_type': 'create'},
response_dict['results'][1])
self.assertDictContainsSubset(
{'username': self.NEW_USER_USERNAME, 'exploration_id': 'exp_1',
'post_commit_status': 'public', 'version': 1,
'commit_message': 'edited commit',
'commit_type': 'edit'},
response_dict['results'][0])
self.logout()
def test_get_recent_commits_explorations(self):
"""Test that the response dict contains the correct exploration."""
self.login(self.MODERATOR_EMAIL)
self.save_new_default_exploration(
'exp_1', 'owner0', title='MyExploration')
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(len(response_dict['exp_ids_to_exp_data']), 1)
self.assertEqual(
response_dict['exp_ids_to_exp_data']['exp_1']['title'],
'MyExploration')
self.logout()
def test_get_recent_commits_three_pages_with_cursor(self):
self.login(self.MODERATOR_EMAIL)
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertFalse(response_dict['more'])
for i in range(feconf.COMMIT_LIST_PAGE_SIZE * 2):
entity_id = 'my_entity_%s' % i
exp_id = 'exp_%s' % i
commit_i = exp_models.ExplorationCommitLogEntryModel.create(
entity_id, 0, self.committer_2_id, 'create', 'created commit',
[], 'public', True)
commit_i.exploration_id = exp_id
commit_i.update_timestamps()
commit_i.put()
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(
len(response_dict['results']), feconf.COMMIT_LIST_PAGE_SIZE)
self.assertTrue(response_dict['more'])
cursor = response_dict['cursor']
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={
'query_type': 'all_non_private_commits',
'cursor': cursor
})
self.assertEqual(
len(response_dict['results']),
feconf.COMMIT_LIST_PAGE_SIZE)
self.assertTrue(response_dict['more'])
cursor = response_dict['cursor']
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={
'query_type': 'all_non_private_commits',
'cursor': cursor
})
self.assertFalse(response_dict['more'])
self.assertEqual(len(response_dict['results']), 2)
self.logout()
def test_get_recent_commits_with_invalid_query_type_returns_404_status(
self):
self.login(self.MODERATOR_EMAIL)
self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'invalid_query_type'},
expected_status_int=404)
self.logout() | random_line_split |
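The three-page test above drives the endpoint by feeding each response's `cursor` back until `more` is false. A client-side sketch of that drain loop (the `get_json` callable and URL are stand-ins, not part of the module under test):

```python
def fetch_all_commits(get_json, url):
    """Collect every page from a cursor-paginated commits endpoint."""
    results = []
    params = {'query_type': 'all_non_private_commits'}
    while True:
        page = get_json(url, params=params)
        results.extend(page['results'])
        if not page['more']:
            return results
        params['cursor'] = page['cursor']  # resume where the last page ended
```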
|
recent_commits_test.py | # coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for recent commit controllers."""
from __future__ import annotations
from core import feconf
from core.platform import models
from core.tests import test_utils
(exp_models,) = models.Registry.import_models([models.NAMES.exploration])
class RecentCommitsHandlerUnitTests(test_utils.GenericTestBase):
| """Test the RecentCommitsHandler class."""
def setUp(self):
super(RecentCommitsHandlerUnitTests, self).setUp()
self.signup(self.MODERATOR_EMAIL, self.MODERATOR_USERNAME)
self.set_moderators([self.MODERATOR_USERNAME])
self.signup(self.VIEWER_EMAIL, self.VIEWER_USERNAME)
self.committer_1_id = self.get_user_id_from_email(self.VIEWER_EMAIL)
self.signup(self.NEW_USER_EMAIL, self.NEW_USER_USERNAME)
self.committer_2_id = self.get_user_id_from_email(self.NEW_USER_EMAIL)
commit1 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_1', 0, self.committer_1_id, 'create',
'created first commit', [], 'public', True)
commit2 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_1', 1, self.committer_2_id, 'edit', 'edited commit', [],
'public', True)
commit3 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_2', 0, self.committer_1_id, 'create',
'created second commit', [], 'private', False)
commit1.exploration_id = 'exp_1'
commit2.exploration_id = 'exp_1'
commit3.exploration_id = 'exp_2'
commit1.update_timestamps()
commit1.put()
commit2.update_timestamps()
commit2.put()
commit3.update_timestamps()
commit3.put()
def test_get_recent_commits(self):
"""Test that this method should return all nonprivate commits."""
self.login(self.MODERATOR_EMAIL)
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(len(response_dict['results']), 2)
self.assertDictContainsSubset(
{'username': self.VIEWER_USERNAME, 'exploration_id': 'exp_1',
'post_commit_status': 'public', 'version': 0,
'commit_message': 'created first commit',
'commit_type': 'create'},
response_dict['results'][1])
self.assertDictContainsSubset(
{'username': self.NEW_USER_USERNAME, 'exploration_id': 'exp_1',
'post_commit_status': 'public', 'version': 1,
'commit_message': 'edited commit',
'commit_type': 'edit'},
response_dict['results'][0])
self.logout()
def test_get_recent_commits_explorations(self):
"""Test that the response dict contains the correct exploration."""
self.login(self.MODERATOR_EMAIL)
self.save_new_default_exploration(
'exp_1', 'owner0', title='MyExploration')
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(len(response_dict['exp_ids_to_exp_data']), 1)
self.assertEqual(
response_dict['exp_ids_to_exp_data']['exp_1']['title'],
'MyExploration')
self.logout()
def test_get_recent_commits_three_pages_with_cursor(self):
self.login(self.MODERATOR_EMAIL)
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertFalse(response_dict['more'])
for i in range(feconf.COMMIT_LIST_PAGE_SIZE * 2):
entity_id = 'my_entity_%s' % i
exp_id = 'exp_%s' % i
commit_i = exp_models.ExplorationCommitLogEntryModel.create(
entity_id, 0, self.committer_2_id, 'create', 'created commit',
[], 'public', True)
commit_i.exploration_id = exp_id
commit_i.update_timestamps()
commit_i.put()
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(
len(response_dict['results']), feconf.COMMIT_LIST_PAGE_SIZE)
self.assertTrue(response_dict['more'])
cursor = response_dict['cursor']
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={
'query_type': 'all_non_private_commits',
'cursor': cursor
})
self.assertEqual(
len(response_dict['results']),
feconf.COMMIT_LIST_PAGE_SIZE)
self.assertTrue(response_dict['more'])
cursor = response_dict['cursor']
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={
'query_type': 'all_non_private_commits',
'cursor': cursor
})
self.assertFalse(response_dict['more'])
self.assertEqual(len(response_dict['results']), 2)
self.logout()
def test_get_recent_commits_with_invalid_query_type_returns_404_status(
self):
self.login(self.MODERATOR_EMAIL)
self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'invalid_query_type'},
expected_status_int=404)
self.logout() | identifier_body |
|
recent_commits_test.py | # coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for recent commit controllers."""
from __future__ import annotations
from core import feconf
from core.platform import models
from core.tests import test_utils
(exp_models,) = models.Registry.import_models([models.NAMES.exploration])
class | (test_utils.GenericTestBase):
"""Test the RecentCommitsHandler class."""
def setUp(self):
super(RecentCommitsHandlerUnitTests, self).setUp()
self.signup(self.MODERATOR_EMAIL, self.MODERATOR_USERNAME)
self.set_moderators([self.MODERATOR_USERNAME])
self.signup(self.VIEWER_EMAIL, self.VIEWER_USERNAME)
self.committer_1_id = self.get_user_id_from_email(self.VIEWER_EMAIL)
self.signup(self.NEW_USER_EMAIL, self.NEW_USER_USERNAME)
self.committer_2_id = self.get_user_id_from_email(self.NEW_USER_EMAIL)
commit1 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_1', 0, self.committer_1_id, 'create',
'created first commit', [], 'public', True)
commit2 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_1', 1, self.committer_2_id, 'edit', 'edited commit', [],
'public', True)
commit3 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_2', 0, self.committer_1_id, 'create',
'created second commit', [], 'private', False)
commit1.exploration_id = 'exp_1'
commit2.exploration_id = 'exp_1'
commit3.exploration_id = 'exp_2'
commit1.update_timestamps()
commit1.put()
commit2.update_timestamps()
commit2.put()
commit3.update_timestamps()
commit3.put()
def test_get_recent_commits(self):
"""Test that this method should return all nonprivate commits."""
self.login(self.MODERATOR_EMAIL)
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(len(response_dict['results']), 2)
self.assertDictContainsSubset(
{'username': self.VIEWER_USERNAME, 'exploration_id': 'exp_1',
'post_commit_status': 'public', 'version': 0,
'commit_message': 'created first commit',
'commit_type': 'create'},
response_dict['results'][1])
self.assertDictContainsSubset(
{'username': self.NEW_USER_USERNAME, 'exploration_id': 'exp_1',
'post_commit_status': 'public', 'version': 1,
'commit_message': 'edited commit',
'commit_type': 'edit'},
response_dict['results'][0])
self.logout()
def test_get_recent_commits_explorations(self):
"""Test that the response dict contains the correct exploration."""
self.login(self.MODERATOR_EMAIL)
self.save_new_default_exploration(
'exp_1', 'owner0', title='MyExploration')
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(len(response_dict['exp_ids_to_exp_data']), 1)
self.assertEqual(
response_dict['exp_ids_to_exp_data']['exp_1']['title'],
'MyExploration')
self.logout()
def test_get_recent_commits_three_pages_with_cursor(self):
self.login(self.MODERATOR_EMAIL)
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertFalse(response_dict['more'])
for i in range(feconf.COMMIT_LIST_PAGE_SIZE * 2):
entity_id = 'my_entity_%s' % i
exp_id = 'exp_%s' % i
commit_i = exp_models.ExplorationCommitLogEntryModel.create(
entity_id, 0, self.committer_2_id, 'create', 'created commit',
[], 'public', True)
commit_i.exploration_id = exp_id
commit_i.update_timestamps()
commit_i.put()
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(
len(response_dict['results']), feconf.COMMIT_LIST_PAGE_SIZE)
self.assertTrue(response_dict['more'])
cursor = response_dict['cursor']
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={
'query_type': 'all_non_private_commits',
'cursor': cursor
})
self.assertEqual(
len(response_dict['results']),
feconf.COMMIT_LIST_PAGE_SIZE)
self.assertTrue(response_dict['more'])
cursor = response_dict['cursor']
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={
'query_type': 'all_non_private_commits',
'cursor': cursor
})
self.assertFalse(response_dict['more'])
self.assertEqual(len(response_dict['results']), 2)
self.logout()
def test_get_recent_commits_with_invalid_query_type_returns_404_status(
self):
self.login(self.MODERATOR_EMAIL)
self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'invalid_query_type'},
expected_status_int=404)
self.logout()
| RecentCommitsHandlerUnitTests | identifier_name |
recent_commits_test.py | # coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for recent commit controllers."""
from __future__ import annotations
from core import feconf
from core.platform import models
from core.tests import test_utils
(exp_models,) = models.Registry.import_models([models.NAMES.exploration])
class RecentCommitsHandlerUnitTests(test_utils.GenericTestBase):
"""Test the RecentCommitsHandler class."""
def setUp(self):
super(RecentCommitsHandlerUnitTests, self).setUp()
self.signup(self.MODERATOR_EMAIL, self.MODERATOR_USERNAME)
self.set_moderators([self.MODERATOR_USERNAME])
self.signup(self.VIEWER_EMAIL, self.VIEWER_USERNAME)
self.committer_1_id = self.get_user_id_from_email(self.VIEWER_EMAIL)
self.signup(self.NEW_USER_EMAIL, self.NEW_USER_USERNAME)
self.committer_2_id = self.get_user_id_from_email(self.NEW_USER_EMAIL)
commit1 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_1', 0, self.committer_1_id, 'create',
'created first commit', [], 'public', True)
commit2 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_1', 1, self.committer_2_id, 'edit', 'edited commit', [],
'public', True)
commit3 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_2', 0, self.committer_1_id, 'create',
'created second commit', [], 'private', False)
commit1.exploration_id = 'exp_1'
commit2.exploration_id = 'exp_1'
commit3.exploration_id = 'exp_2'
commit1.update_timestamps()
commit1.put()
commit2.update_timestamps()
commit2.put()
commit3.update_timestamps()
commit3.put()
def test_get_recent_commits(self):
"""Test that this method should return all nonprivate commits."""
self.login(self.MODERATOR_EMAIL)
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(len(response_dict['results']), 2)
self.assertDictContainsSubset(
{'username': self.VIEWER_USERNAME, 'exploration_id': 'exp_1',
'post_commit_status': 'public', 'version': 0,
'commit_message': 'created first commit',
'commit_type': 'create'},
response_dict['results'][1])
self.assertDictContainsSubset(
{'username': self.NEW_USER_USERNAME, 'exploration_id': 'exp_1',
'post_commit_status': 'public', 'version': 1,
'commit_message': 'edited commit',
'commit_type': 'edit'},
response_dict['results'][0])
self.logout()
def test_get_recent_commits_explorations(self):
"""Test that the response dict contains the correct exploration."""
self.login(self.MODERATOR_EMAIL)
self.save_new_default_exploration(
'exp_1', 'owner0', title='MyExploration')
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(len(response_dict['exp_ids_to_exp_data']), 1)
self.assertEqual(
response_dict['exp_ids_to_exp_data']['exp_1']['title'],
'MyExploration')
self.logout()
def test_get_recent_commits_three_pages_with_cursor(self):
self.login(self.MODERATOR_EMAIL)
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertFalse(response_dict['more'])
for i in range(feconf.COMMIT_LIST_PAGE_SIZE * 2):
|
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(
len(response_dict['results']), feconf.COMMIT_LIST_PAGE_SIZE)
self.assertTrue(response_dict['more'])
cursor = response_dict['cursor']
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={
'query_type': 'all_non_private_commits',
'cursor': cursor
})
self.assertEqual(
len(response_dict['results']),
feconf.COMMIT_LIST_PAGE_SIZE)
self.assertTrue(response_dict['more'])
cursor = response_dict['cursor']
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={
'query_type': 'all_non_private_commits',
'cursor': cursor
})
self.assertFalse(response_dict['more'])
self.assertEqual(len(response_dict['results']), 2)
self.logout()
def test_get_recent_commits_with_invalid_query_type_returns_404_status(
self):
self.login(self.MODERATOR_EMAIL)
self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'invalid_query_type'},
expected_status_int=404)
self.logout()
| entity_id = 'my_entity_%s' % i
exp_id = 'exp_%s' % i
commit_i = exp_models.ExplorationCommitLogEntryModel.create(
entity_id, 0, self.committer_2_id, 'create', 'created commit',
[], 'public', True)
commit_i.exploration_id = exp_id
commit_i.update_timestamps()
commit_i.put() | conditional_block |
test_uptime.py | import unittest
try:
from unittest import mock
except ImportError:
import mock
from pi3bar.plugins.uptime import get_uptime_seconds, uptime_format, Uptime
class GetUptimeSecondsTestCase(unittest.TestCase):
def test(self):
m = mock.mock_open(read_data='5')
m.return_value.readline.return_value = '5' # py33
with mock.patch('pi3bar.plugins.uptime.open', m, create=True):
seconds = get_uptime_seconds()
self.assertEqual(5, seconds)
class UptimeFormatTestCase(unittest.TestCase):
def test_seconds(self):
s = uptime_format(5)
self.assertEqual('0:00:00:05', s)
def test_minutes(self):
s = uptime_format(3540)
self.assertEqual('0:00:59:00', s)
def test_hours(self):
s = uptime_format(49020)
self.assertEqual('0:13:37:00', s)
def test_days(self):
|
def test_format_days_applied_to_hours(self):
s = uptime_format(135420, '%H:%M:%S')
self.assertEqual('37:37:00', s)
def test_format_hours_applied_to_minutes(self):
s = uptime_format(49020, '%M:%S')
self.assertEqual('817:00', s)
class UptimeTestCase(unittest.TestCase):
def test(self):
plugin = Uptime()
self.assertEqual('%d days %H:%M:%S up', plugin.full_format)
self.assertEqual('%dd %H:%M up', plugin.short_format)
@mock.patch('pi3bar.plugins.uptime.get_uptime_seconds')
def test_cycle(self, mock_get_uptime_seconds):
plugin = Uptime()
mock_get_uptime_seconds.return_value = 49020
plugin.cycle()
self.assertEqual('0 days 13:37:00 up', plugin.full_text)
self.assertEqual('0d 13:37 up', plugin.short_text)
| s = uptime_format(135420)
self.assertEqual('1:13:37:00', s) | identifier_body |
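Taken together, the expected strings pin the default format down to `%d:%H:%M:%S`, with days unpadded and overflow folded into the leftmost unit present. One implementation consistent with these tests (a sketch; the real plugin may differ):

```python
def uptime_format(seconds, fmt='%d:%H:%M:%S'):
    days, rem = divmod(seconds, 86400)
    hours, rem = divmod(rem, 3600)
    minutes, secs = divmod(rem, 60)
    if '%d' not in fmt:        # no days field: fold days into hours
        hours += days * 24
        if '%H' not in fmt:    # no hours field either: fold hours into minutes
            minutes += hours * 60
    return (fmt.replace('%d', str(days))
               .replace('%H', '%02d' % hours)
               .replace('%M', '%02d' % minutes)
               .replace('%S', '%02d' % secs))
```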
test_uptime.py | import unittest
try:
from unittest import mock
except ImportError:
import mock
from pi3bar.plugins.uptime import get_uptime_seconds, uptime_format, Uptime
class GetUptimeSecondsTestCase(unittest.TestCase):
def test(self):
m = mock.mock_open(read_data='5')
m.return_value.readline.return_value = '5' # py33
with mock.patch('pi3bar.plugins.uptime.open', m, create=True):
seconds = get_uptime_seconds()
self.assertEqual(5, seconds)
class | (unittest.TestCase):
def test_seconds(self):
s = uptime_format(5)
self.assertEqual('0:00:00:05', s)
def test_minutes(self):
s = uptime_format(3540)
self.assertEqual('0:00:59:00', s)
def test_hours(self):
s = uptime_format(49020)
self.assertEqual('0:13:37:00', s)
def test_days(self):
s = uptime_format(135420)
self.assertEqual('1:13:37:00', s)
def test_format_days_applied_to_hours(self):
s = uptime_format(135420, '%H:%M:%S')
self.assertEqual('37:37:00', s)
def test_format_hours_applied_to_minutes(self):
s = uptime_format(49020, '%M:%S')
self.assertEqual('817:00', s)
class UptimeTestCase(unittest.TestCase):
def test(self):
plugin = Uptime()
self.assertEqual('%d days %H:%M:%S up', plugin.full_format)
self.assertEqual('%dd %H:%M up', plugin.short_format)
@mock.patch('pi3bar.plugins.uptime.get_uptime_seconds')
def test_cycle(self, mock_get_uptime_seconds):
plugin = Uptime()
mock_get_uptime_seconds.return_value = 49020
plugin.cycle()
self.assertEqual('0 days 13:37:00 up', plugin.full_text)
self.assertEqual('0d 13:37 up', plugin.short_text)
| UptimeFormatTestCase | identifier_name |
test_uptime.py | import unittest
try:
from unittest import mock
except ImportError:
import mock
from pi3bar.plugins.uptime import get_uptime_seconds, uptime_format, Uptime
class GetUptimeSecondsTestCase(unittest.TestCase):
def test(self):
m = mock.mock_open(read_data='5') |
class UptimeFormatTestCase(unittest.TestCase):
def test_seconds(self):
s = uptime_format(5)
self.assertEqual('0:00:00:05', s)
def test_minutes(self):
s = uptime_format(3540)
self.assertEqual('0:00:59:00', s)
def test_hours(self):
s = uptime_format(49020)
self.assertEqual('0:13:37:00', s)
def test_days(self):
s = uptime_format(135420)
self.assertEqual('1:13:37:00', s)
def test_format_days_applied_to_hours(self):
s = uptime_format(135420, '%H:%M:%S')
self.assertEqual('37:37:00', s)
def test_format_hours_applied_to_minutes(self):
s = uptime_format(49020, '%M:%S')
self.assertEqual('817:00', s)
class UptimeTestCase(unittest.TestCase):
def test(self):
plugin = Uptime()
self.assertEqual('%d days %H:%M:%S up', plugin.full_format)
self.assertEqual('%dd %H:%M up', plugin.short_format)
@mock.patch('pi3bar.plugins.uptime.get_uptime_seconds')
def test_cycle(self, mock_get_uptime_seconds):
plugin = Uptime()
mock_get_uptime_seconds.return_value = 49020
plugin.cycle()
self.assertEqual('0 days 13:37:00 up', plugin.full_text)
self.assertEqual('0d 13:37 up', plugin.short_text) | m.return_value.readline.return_value = '5' # py33
with mock.patch('pi3bar.plugins.uptime.open', m, create=True):
seconds = get_uptime_seconds()
self.assertEqual(5, seconds) | random_line_split |
minesweeperNN.js | // ==UserScript==
// @name mineAI
// @namespace minesAI
// @include http://minesweeperonline.com/#beginner-night
// @version 1
// @require http://localhost:8000/convnetjs.js
// @grant none
// ==/UserScript==
// Load the library.
var D = document;
var appTarg = D.getElementsByTagName ('head')[0] || D.body || D.documentElement;
var jsNode = D.createElement ('script');
jsNode.src = 'http://localhost:8000/convnetjs.js';
jsNode.addEventListener ("load", initConvNetJsOnDelay, false);
appTarg.appendChild (jsNode);
// Allow some time for the library to initialize after loading.
function initConvNetJsOnDelay () |
// Call the library's start-up function, if any. Note needed use of unsafeWindow.
function initConvNetJs () {
// specifies a small fully-connected network (two hidden layers of 4 and 3 neurons, declared below)
var layer_defs = [];
// ConvNetJS works on 3-Dimensional volumes (sx, sy, depth), but if you're not dealing with images
// then the first two dimensions (sx, sy) will always be kept at size 1
layer_defs.push({type:'input', out_sx:1, out_sy:1, out_depth:2});
// declare 4 neurons, followed by ReLU (rectified linear unit non-linearity)
layer_defs.push({type:'fc', num_neurons:4, activation:'relu'});
// 3 more for good measure
layer_defs.push({type:'fc', num_neurons:3, activation:'relu'});
// declare the linear classifier on top of the previous hidden layer
layer_defs.push({type:'softmax', num_classes:2});
// defined our net with unsafeWindow for use in GreaseMonkey
var net = new unsafeWindow.convnetjs.Net();
// create our net with layers as defined above
net.makeLayers(layer_defs);
// define trainer
var trainer = new convnetjs.SGDTrainer(net, {learning_rate:0.01, l2_decay:0.001});
// define inputs (XOR)
var t1 = new convnetjs.Vol([0, 0]); // class 0
var t2 = new convnetjs.Vol([0, 1]); // class 1
var t3 = new convnetjs.Vol([1, 0]); // class 1
var t4 = new convnetjs.Vol([1, 1]); // class 0
// train for 1000 iterations with corresponding classes
for (var i = 0; i < 1000; i++) {
trainer.train(t1, 0);
trainer.train(t2, 1);
trainer.train(t3, 1);
trainer.train(t4, 0);
}
// learned probability
var prob00 = net.forward(t1);
var prob01 = net.forward(t2);
var prob10 = net.forward(t3);
var prob11 = net.forward(t4);
// log probability
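// Expected after training on XOR: p(1 | 01) and p(1 | 10) near 1; p(1 | 00) and p(1 | 11) near 0.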
console.log('p(0 | 00): ' + prob00.w[0] + ", p(1 | 00): " + prob00.w[1]);
console.log('p(0 | 01): ' + prob01.w[0] + ", p(1 | 01): " + prob01.w[1]);
console.log('p(0 | 10): ' + prob10.w[0] + ", p(1 | 10): " + prob10.w[1]);
console.log('p(0 | 11): ' + prob11.w[0] + ", p(1 | 11): " + prob11.w[1]);
}
alert("Done"); | {
setTimeout (initConvNetJs, 666);
} | identifier_body |
minesweeperNN.js | // ==UserScript==
// @name mineAI
// @namespace minesAI
// @include http://minesweeperonline.com/#beginner-night
// @version 1
// @require http://localhost:8000/convnetjs.js
// @grant none
// ==/UserScript==
// Load the library.
var D = document;
var appTarg = D.getElementsByTagName ('head')[0] || D.body || D.documentElement;
var jsNode = D.createElement ('script');
jsNode.src = 'http://localhost:8000/convnetjs.js';
jsNode.addEventListener ("load", initConvNetJsOnDelay, false);
appTarg.appendChild (jsNode);
// Allow some time for the library to initialize after loading.
function initConvNetJsOnDelay () {
setTimeout (initConvNetJs, 666);
}
// Call the library's start-up function, if any. Note needed use of unsafeWindow.
function initConvNetJs () {
// specifies a small fully-connected network (two hidden layers of 4 and 3 neurons, declared below)
var layer_defs = [];
// ConvNetJS works on 3-Dimensional volumes (sx, sy, depth), but if you're not dealing with images
// then the first two dimensions (sx, sy) will always be kept at size 1
layer_defs.push({type:'input', out_sx:1, out_sy:1, out_depth:2});
// declare 4 neurons, followed by ReLU (rectified linear unit non-linearity)
layer_defs.push({type:'fc', num_neurons:4, activation:'relu'});
// 3 more for good measure
layer_defs.push({type:'fc', num_neurons:3, activation:'relu'});
// declare the linear classifier on top of the previous hidden layer
layer_defs.push({type:'softmax', num_classes:2});
// define our net via unsafeWindow for use in Greasemonkey
var net = new unsafeWindow.convnetjs.Net();
// create our net with layers as defined above
net.makeLayers(layer_defs);
// define trainer (also via unsafeWindow, since the page-injected convnetjs is not in the sandbox scope)
var trainer = new unsafeWindow.convnetjs.SGDTrainer(net, {learning_rate:0.01, l2_decay:0.001});
// define inputs (XOR)
var t1 = new unsafeWindow.convnetjs.Vol([0, 0]); // class 0
var t2 = new unsafeWindow.convnetjs.Vol([0, 1]); // class 1
var t3 = new unsafeWindow.convnetjs.Vol([1, 0]); // class 1
var t4 = new unsafeWindow.convnetjs.Vol([1, 1]); // class 0
// train for 1000 iterations with corresponding classes
for (var i = 0; i < 1000; i++) |
// learned probability
var prob00 = net.forward(t1);
var prob01 = net.forward(t2);
var prob10 = net.forward(t3);
var prob11 = net.forward(t4);
// log probability
console.log('p(0 | 00): ' + prob00.w[0] + ", p(1 | 00): " + prob00.w[1]);
console.log('p(0 | 01): ' + prob01.w[0] + ", p(1 | 01): " + prob01.w[1]);
console.log('p(0 | 10): ' + prob10.w[0] + ", p(1 | 10): " + prob10.w[1]);
console.log('p(0 | 11): ' + prob11.w[0] + ", p(1 | 11): " + prob11.w[1]);
}
alert("Done"); | {
trainer.train(t1, 0);
trainer.train(t2, 1);
trainer.train(t3, 1);
trainer.train(t4, 0);
} | conditional_block |
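The `conditional_block` marker on the record above means the held-out middle is the body of the training loop. Given the schema of this dump, a record can be sanity-checked by reassembling the three parts; a small sketch, with the literals abbreviated by hand:

# Reassembling a FIM record: the original source is prefix + middle + suffix.
def reconstruct(prefix, middle, suffix):
    return prefix + middle + suffix

prefix = 'for (var i = 0; i < 1000; i++) '
middle = '{\n trainer.train(t1, 0);\n trainer.train(t2, 1);\n trainer.train(t3, 1);\n trainer.train(t4, 0);\n}'
suffix = '\n// learned probability'
source = reconstruct(prefix, middle, suffix)
assert source.startswith(prefix) and source.endswith(suffix)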
minesweeperNN.js | // ==UserScript==
// @name mineAI
// @namespace minesAI
// @include http://minesweeperonline.com/#beginner-night
// @version 1
// @require http://localhost:8000/convnetjs.js
// @grant none
// ==/UserScript==
// Load the library.
var D = document;
var appTarg = D.getElementsByTagName ('head')[0] || D.body || D.documentElement;
var jsNode = D.createElement ('script');
jsNode.src = 'http://localhost:8000/convnetjs.js';
jsNode.addEventListener ("load", initConvNetJsOnDelay, false);
appTarg.appendChild (jsNode);
// Allow some time for the library to initialize after loading.
function initConvNetJsOnDelay () {
setTimeout (initConvNetJs, 666);
}
// Call the library's start-up function, if any. Note the required use of unsafeWindow.
function | () {
// specifies a small neural network with two hidden layers (4 and 3 neurons)
var layer_defs = [];
// ConvNetJS works on 3-Dimensional volumes (sx, sy, depth), but if you're not dealing with images
// then the first two dimensions (sx, sy) will always be kept at size 1
layer_defs.push({type:'input', out_sx:1, out_sy:1, out_depth:2});
// declare 4 neurons, followed by ReLU (rectified linear unit non-linearity)
layer_defs.push({type:'fc', num_neurons:4, activation:'relu'});
// a second hidden layer of 3 neurons
layer_defs.push({type:'fc', num_neurons:3, activation:'relu'});
// declare the linear classifier on top of the previous hidden layer
layer_defs.push({type:'softmax', num_classes:2});
// define our net via unsafeWindow for use in Greasemonkey
var net = new unsafeWindow.convnetjs.Net();
// create our net with layers as defined above
net.makeLayers(layer_defs);
// define trainer (also via unsafeWindow, since the page-injected convnetjs is not in the sandbox scope)
var trainer = new unsafeWindow.convnetjs.SGDTrainer(net, {learning_rate:0.01, l2_decay:0.001});
// define inputs (XOR)
var t1 = new unsafeWindow.convnetjs.Vol([0, 0]); // class 0
var t2 = new unsafeWindow.convnetjs.Vol([0, 1]); // class 1
var t3 = new unsafeWindow.convnetjs.Vol([1, 0]); // class 1
var t4 = new unsafeWindow.convnetjs.Vol([1, 1]); // class 0
// train for 1000 iterations with corresponding classes
for (var i = 0; i < 1000; i++) {
trainer.train(t1, 0);
trainer.train(t2, 1);
trainer.train(t3, 1);
trainer.train(t4, 0);
}
// learned probability
var prob00 = net.forward(t1);
var prob01 = net.forward(t2);
var prob10 = net.forward(t3);
var prob11 = net.forward(t4);
// log probability
console.log('p(0 | 00): ' + prob00.w[0] + ", p(1 | 00): " + prob00.w[1]);
console.log('p(0 | 01): ' + prob01.w[0] + ", p(1 | 01): " + prob01.w[1]);
console.log('p(0 | 10): ' + prob10.w[0] + ", p(1 | 10): " + prob10.w[1]);
console.log('p(0 | 11): ' + prob11.w[0] + ", p(1 | 11): " + prob11.w[1]);
}
alert("Done"); | initConvNetJs | identifier_name |
minesweeperNN.js | // ==UserScript==
// @name mineAI
// @namespace minesAI
// @include http://minesweeperonline.com/#beginner-night
// @version 1
// @require http://localhost:8000/convnetjs.js
// @grant none
// ==/UserScript==
// Load the library.
var D = document;
var appTarg = D.getElementsByTagName ('head')[0] || D.body || D.documentElement;
var jsNode = D.createElement ('script');
jsNode.src = 'http://localhost:8000/convnetjs.js';
jsNode.addEventListener ("load", initConvNetJsOnDelay, false);
appTarg.appendChild (jsNode);
// Allow some time for the library to initialize after loading.
function initConvNetJsOnDelay () {
setTimeout (initConvNetJs, 666); | // specifies a small neural network with two hidden layers (4 and 3 neurons)
var layer_defs = [];
// ConvNetJS works on 3-Dimensional volumes (sx, sy, depth), but if you're not dealing with images
// then the first two dimensions (sx, sy) will always be kept at size 1
layer_defs.push({type:'input', out_sx:1, out_sy:1, out_depth:2});
// declare 4 neurons, followed by ReLU (rectified linear unit non-linearity)
layer_defs.push({type:'fc', num_neurons:4, activation:'relu'});
// a second hidden layer of 3 neurons
layer_defs.push({type:'fc', num_neurons:3, activation:'relu'});
// declare the linear classifier on top of the previous hidden layer
layer_defs.push({type:'softmax', num_classes:2});
// define our net via unsafeWindow for use in Greasemonkey
var net = new unsafeWindow.convnetjs.Net();
// create our net with layers as defined above
net.makeLayers(layer_defs);
// define trainer (also via unsafeWindow, since the page-injected convnetjs is not in the sandbox scope)
var trainer = new unsafeWindow.convnetjs.SGDTrainer(net, {learning_rate:0.01, l2_decay:0.001});
// define inputs (XOR)
var t1 = new unsafeWindow.convnetjs.Vol([0, 0]); // class 0
var t2 = new unsafeWindow.convnetjs.Vol([0, 1]); // class 1
var t3 = new unsafeWindow.convnetjs.Vol([1, 0]); // class 1
var t4 = new unsafeWindow.convnetjs.Vol([1, 1]); // class 0
// train for 1000 iterations with corresponding classes
for (var i = 0; i < 1000; i++) {
trainer.train(t1, 0);
trainer.train(t2, 1);
trainer.train(t3, 1);
trainer.train(t4, 0);
}
// learned probability
var prob00 = net.forward(t1);
var prob01 = net.forward(t2);
var prob10 = net.forward(t3);
var prob11 = net.forward(t4);
// log probability
console.log('p(0 | 00): ' + prob00.w[0] + ", p(1 | 00): " + prob00.w[1]);
console.log('p(0 | 01): ' + prob01.w[0] + ", p(1 | 01): " + prob01.w[1]);
console.log('p(0 | 10): ' + prob10.w[0] + ", p(1 | 10): " + prob10.w[1]);
console.log('p(0 | 11): ' + prob11.w[0] + ", p(1 | 11): " + prob11.w[1]);
}
alert("Done"); | }
// Call the library's start-up function, if any. Note the required use of unsafeWindow.
function initConvNetJs () {
| random_line_split |
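A `random_line_split` record like the one above cuts the file at two line boundaries. A minimal sketch of that split follows; drawing the two cut points uniformly at random is an assumption about how these records were produced:

import random

def random_line_split(text, rng):
    lines = text.splitlines(keepends=True)
    i, j = sorted(rng.sample(range(len(lines) + 1), 2))
    prefix, middle, suffix = ''.join(lines[:i]), ''.join(lines[i:j]), ''.join(lines[j:])
    assert prefix + middle + suffix == text   # lossless by construction
    return prefix, middle, suffix

p, m, s = random_line_split('a\nb\nc\nd\n', random.Random(7))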
state.directive.ts | import {Directive, ElementRef, Renderer, Input, OnChanges} from '@angular/core';
@Directive({
selector: '[state]'
})
export class StateDirective implements OnChanges { |
constructor(renderer: Renderer, el: ElementRef) {
this.element = el;
this.renderer = renderer;
}
ngOnChanges() {
let cssClass = `state-${this.itemState}`;
let text = 'A livrer'; // French: "to deliver" (default label)
let elementNode = this.element.nativeElement;
switch (this.itemState) {
case 1 :
text = "aaaaa";
break;
case 2 :
text = "xxxxx";
break;
case 3 :
text = "wwwww";
break;
default:
text = "zied !!!";
}
this.renderer.setElementClass(elementNode, cssClass, true);
this.renderer.setText(elementNode, text);
}
} | @Input('state') itemState: any;
private element: ElementRef;
private renderer: Renderer; | random_line_split |
state.directive.ts | import {Directive, ElementRef, Renderer, Input, OnChanges} from '@angular/core';
@Directive({
selector: '[state]'
})
export class StateDirective implements OnChanges {
@Input('state') itemState: any;
private element: ElementRef;
private renderer: Renderer;
constructor(renderer: Renderer, el: ElementRef) |
ngOnChanges() {
let cssClass = `state-${this.itemState}`;
let text = 'A livrer'; // French: "to deliver" (default label)
let elementNode = this.element.nativeElement;
switch (this.itemState) {
case 1 :
text = "aaaaa";
break;
case 2 :
text = "xxxxx";
break;
case 3 :
text = "wwwww";
break;
default:
text = "zied !!!";
}
this.renderer.setElementClass(elementNode, cssClass, true);
this.renderer.setText(elementNode, text);
}
} | {
this.element = el;
this.renderer = renderer;
} | identifier_body |
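The `identifier_body` records in this dump hold out an entire function or constructor body. For the Python files later in this section, the body span can be located with the standard `ast` module (Python 3.8+ for `end_lineno`); a TypeScript file like the one above would need a TypeScript-aware parser instead. A sketch:

import ast

src = 'def add(a, b):\n    total = a + b\n    return total\n'
fn = ast.parse(src).body[0]
lines = src.splitlines(keepends=True)
body_start = fn.body[0].lineno - 1                 # first statement of the body
prefix = ''.join(lines[:body_start])               # 'def add(a, b):\n'
middle = ''.join(lines[body_start:fn.end_lineno])  # the held-out body
print(repr(prefix)); print(repr(middle))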
state.directive.ts | import {Directive, ElementRef, Renderer, Input, OnChanges} from '@angular/core';
@Directive({
selector: '[state]'
})
export class | implements OnChanges {
@Input('state') itemState: any;
private element: ElementRef;
private renderer: Renderer;
constructor(renderer: Renderer, el: ElementRef) {
this.element = el;
this.renderer = renderer;
}
ngOnChanges() {
let cssClass = `state-${this.itemState}`;
let text = 'A livrer'; // French: "to deliver" (default label)
let elementNode = this.element.nativeElement;
switch (this.itemState) {
case 1 :
text = "aaaaa";
break;
case 2 :
text = "xxxxx";
break;
case 3 :
text = "wwwww";
break;
default:
text = "zied !!!";
}
this.renderer.setElementClass(elementNode, cssClass, true);
this.renderer.setText(elementNode, text);
}
} | StateDirective | identifier_name |
vsoFormatterTests.ts | /*
* Copyright 2013 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { assert } from "chai";
import * as ts from "typescript";
import { IFormatter, TestUtils } from "../lint";
import { createFailure } from "./utils";
describe("VSO Formatter", () => {
const TEST_FILE = "formatters/vsoFormatter.test.ts";
let sourceFile: ts.SourceFile;
let formatter: IFormatter;
before(() => {
const Formatter = TestUtils.getFormatter("vso");
sourceFile = TestUtils.getSourceFile(TEST_FILE);
formatter = new Formatter();
});
it("formats failures", () => {
const maxPosition = sourceFile.getFullWidth();
const failures = [
createFailure(sourceFile, 0, 1, "first failure", "first-name", undefined, "error"),
createFailure(sourceFile, 32, 36, "mid failure", "mid-name", undefined, "error"),
createFailure(sourceFile, maxPosition - 1, maxPosition, "last failure", "last-name", undefined, "error"),
];
const expectedResult =
getFailureString(TEST_FILE, 1, 1, "first failure", "first-name") +
getFailureString(TEST_FILE, 2, 12, "mid failure", "mid-name") +
getFailureString(TEST_FILE, 9, 2, "last failure", "last-name");
const actualResult = formatter.format(failures);
assert.equal(actualResult, expectedResult);
});
it("does not duplicate output for fixed failures", () => {
const maxPosition = sourceFile.getFullWidth();
const failures = [
createFailure(sourceFile, 0, 1, "first failure", "first-name", undefined, "error"),
createFailure(sourceFile, 32, 36, "mid failure", "mid-name", undefined, "error"),
createFailure(sourceFile, maxPosition - 1, maxPosition, "last failure", "last-name", undefined, "error"),
];
const expectedResult =
getFailureString(TEST_FILE, 1, 1, "first failure", "first-name") +
getFailureString(TEST_FILE, 2, 12, "mid failure", "mid-name") +
getFailureString(TEST_FILE, 9, 2, "last failure", "last-name");
const fixed = failures.slice();
const actualResult = formatter.format(failures, fixed);
assert.equal(actualResult, expectedResult);
});
it("handles no failures", () => {
const result = formatter.format([]);
assert.equal(result, "\n");
});
function getFailureString(file: string, line: number, character: number, reason: string, code: string) |
});
| {
return `##vso[task.logissue type=warning;sourcepath=${file};linenumber=${line};columnnumber=${character};code=${code};]${reason}\n`;
} | identifier_body |
vsoFormatterTests.ts | /*
* Copyright 2013 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { assert } from "chai";
import * as ts from "typescript";
import { IFormatter, TestUtils } from "../lint";
import { createFailure } from "./utils";
describe("VSO Formatter", () => {
const TEST_FILE = "formatters/vsoFormatter.test.ts";
let sourceFile: ts.SourceFile;
let formatter: IFormatter;
before(() => {
const Formatter = TestUtils.getFormatter("vso");
sourceFile = TestUtils.getSourceFile(TEST_FILE);
formatter = new Formatter();
});
it("formats failures", () => {
const maxPosition = sourceFile.getFullWidth();
const failures = [
createFailure(sourceFile, 0, 1, "first failure", "first-name", undefined, "error"),
createFailure(sourceFile, 32, 36, "mid failure", "mid-name", undefined, "error"),
createFailure(sourceFile, maxPosition - 1, maxPosition, "last failure", "last-name", undefined, "error"),
];
const expectedResult =
getFailureString(TEST_FILE, 1, 1, "first failure", "first-name") +
getFailureString(TEST_FILE, 2, 12, "mid failure", "mid-name") +
getFailureString(TEST_FILE, 9, 2, "last failure", "last-name");
const actualResult = formatter.format(failures);
assert.equal(actualResult, expectedResult);
});
it("does not duplicate output for fixed failures", () => {
const maxPosition = sourceFile.getFullWidth();
const failures = [
createFailure(sourceFile, 0, 1, "first failure", "first-name", undefined, "error"),
createFailure(sourceFile, 32, 36, "mid failure", "mid-name", undefined, "error"),
createFailure(sourceFile, maxPosition - 1, maxPosition, "last failure", "last-name", undefined, "error"),
];
const expectedResult =
getFailureString(TEST_FILE, 1, 1, "first failure", "first-name") +
getFailureString(TEST_FILE, 2, 12, "mid failure", "mid-name") +
getFailureString(TEST_FILE, 9, 2, "last failure", "last-name");
const fixed = failures.slice();
const actualResult = formatter.format(failures, fixed);
assert.equal(actualResult, expectedResult);
});
it("handles no failures", () => {
const result = formatter.format([]);
assert.equal(result, "\n");
});
function | (file: string, line: number, character: number, reason: string, code: string) {
return `##vso[task.logissue type=warning;sourcepath=${file};linenumber=${line};columnnumber=${character};code=${code};]${reason}\n`;
}
});
| getFailureString | identifier_name |
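The expected strings in the tests above follow the Azure DevOps (VSO) logging-command syntax, `##vso[task.logissue key=value;...]message`. For reference, a Python equivalent of the `getFailureString` helper:

def vso_log_issue(path, line, col, code, reason, issue_type='warning'):
    # Mirrors the template asserted in the tests above.
    return ('##vso[task.logissue type=%s;sourcepath=%s;linenumber=%d;'
            'columnnumber=%d;code=%s;]%s\n'
            % (issue_type, path, line, col, code, reason))

print(vso_log_issue('formatters/vsoFormatter.test.ts', 1, 1, 'first-name', 'first failure'), end='')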
vsoFormatterTests.ts | /*
* Copyright 2013 Palantir Technologies, Inc.
* | * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { assert } from "chai";
import * as ts from "typescript";
import { IFormatter, TestUtils } from "../lint";
import { createFailure } from "./utils";
describe("VSO Formatter", () => {
const TEST_FILE = "formatters/vsoFormatter.test.ts";
let sourceFile: ts.SourceFile;
let formatter: IFormatter;
before(() => {
const Formatter = TestUtils.getFormatter("vso");
sourceFile = TestUtils.getSourceFile(TEST_FILE);
formatter = new Formatter();
});
it("formats failures", () => {
const maxPosition = sourceFile.getFullWidth();
const failures = [
createFailure(sourceFile, 0, 1, "first failure", "first-name", undefined, "error"),
createFailure(sourceFile, 32, 36, "mid failure", "mid-name", undefined, "error"),
createFailure(sourceFile, maxPosition - 1, maxPosition, "last failure", "last-name", undefined, "error"),
];
const expectedResult =
getFailureString(TEST_FILE, 1, 1, "first failure", "first-name") +
getFailureString(TEST_FILE, 2, 12, "mid failure", "mid-name") +
getFailureString(TEST_FILE, 9, 2, "last failure", "last-name");
const actualResult = formatter.format(failures);
assert.equal(actualResult, expectedResult);
});
it("does not duplicate output for fixed failures", () => {
const maxPosition = sourceFile.getFullWidth();
const failures = [
createFailure(sourceFile, 0, 1, "first failure", "first-name", undefined, "error"),
createFailure(sourceFile, 32, 36, "mid failure", "mid-name", undefined, "error"),
createFailure(sourceFile, maxPosition - 1, maxPosition, "last failure", "last-name", undefined, "error"),
];
const expectedResult =
getFailureString(TEST_FILE, 1, 1, "first failure", "first-name") +
getFailureString(TEST_FILE, 2, 12, "mid failure", "mid-name") +
getFailureString(TEST_FILE, 9, 2, "last failure", "last-name");
const fixed = failures.slice();
const actualResult = formatter.format(failures, fixed);
assert.equal(actualResult, expectedResult);
});
it("handles no failures", () => {
const result = formatter.format([]);
assert.equal(result, "\n");
});
function getFailureString(file: string, line: number, character: number, reason: string, code: string) {
return `##vso[task.logissue type=warning;sourcepath=${file};linenumber=${line};columnnumber=${character};code=${code};]${reason}\n`;
}
}); | random_line_split |
|
mainStaticPersonTask.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name:
# Purpose: This .py file is the main Framework file
# It ranks images of a specific person of interest in a static manner
#
# Required libs: python-dateutil, numpy,matplotlib,pyparsing
# Author: konkonst
#
# Created: 30/03/2014
# Copyright: (c) ITI (CERTH) 2014
# Licence: <apache licence 2.0>
#-------------------------------------------------------------------------------
import time,os,pickle,glob,shutil, personPopularity
from staticCommPersonTask import communitystatic
print('staticCommPersonCentered')
print(time.asctime( time.localtime(time.time()) ))
'''PARAMETERS'''
#Construct the data class from scratch: 1-yes / 2- from the community detection/ else-perform only the ranking
dataextract = 1
#Provide a time Limit (unix timestamp) about when the dataset begins in case you only want part of the dataset. If it is set to 0 the whole dataset is considered.
timeLimit = 0 #1071561600
#Community detection method. 'Ahn','Demon' and 'Copra' for overlapping and 'Louvain' for non. Ahn carries a threshold.
commDetectMethod = ['Demon', 0.66]
#User sets desired number of displayed top images
topImages = 8
#User sets desired number of most frequent people to retrieve
topPeople = 200
#Provide people set or leave empty to retrieve images for the number of topPeople as set above
peopleSet = ['justin_timberlake','oprah_winfrey','lady_gaga','justin_bieber','michael_schumacher','miley_cyrus','jk_rowling','zinedine_zidane','barack_obama','prince_william','brad_pitt_actor','leonardo_dicaprio','natalie_portman']
peopleSet.sort()
##peopleSet = [] #Uncomment this to activate the use of the rankedPeople.txt pool of users
#Delete all previous folders containing results? (Does not apply to the html files)
delFolders = 0
#If there are any nodes that should not be considered, please place them in './data/txt/stopNodes.txt'
'''Functions'''
t = time.time()
filename = [f for f in os.listdir("./data/txt/")]
for idx,files in enumerate(filename):
print(str(idx+1) + '.' + files) |
selection = int(input('Select a dataset from the above: '))-1
dataset_path_results = "./data/"+filename[selection][:-4]+"/staticPersonCentered_"+commDetectMethod[0]+"/results/"
dataset_path_tmp = "./data/"+filename[selection][:-4]+"/staticPersonCentered_"+commDetectMethod[0]+"/tmp/"
datasetFilename = './data/txt/'+filename[selection]
if not os.path.exists(dataset_path_results):
os.makedirs(dataset_path_results)
os.makedirs(dataset_path_tmp)
if not os.path.exists(dataset_path_results+"rankedPeople.txt"):
personPopularity.popPerson(datasetFilename, dataset_path_results, dataset_path_tmp, commDetectMethod,timeLimit=timeLimit)
if dataextract==1:#Start from scratch
data = communitystatic.from_txt(datasetFilename,dataset_path_results,dataset_path_tmp,timeLimit=timeLimit)
dataPck = open(dataset_path_tmp + "allPersondata.pck", "wb")
pickle.dump(data, dataPck , protocol = 2)
dataPck.close()
del(data)
elapsed = time.time() - t
print('Stage 1: %.2f seconds' % elapsed)
if dataextract==1 or dataextract==2:#If the basic data (authors, mentions, time) has been created
data = pickle.load(open(dataset_path_tmp + "allPersondata.pck", "rb"))
captiondict = data.captiondict
print('static Community detection method selected is :'+commDetectMethod[0])
dataStatic=data.extraction(commDetectMethod)
del(data)
elapsed = time.time() - t
print('\nStage 2: %.2f seconds' % elapsed)
decisionforAll = input('\nRetrieve the topImages by screening them one by one???(y or n) ')
if dataextract ==1 or dataextract ==2 or dataextract ==3:#Only ranking beyond this point
data = pickle.load(open(dataset_path_tmp + "allPersondata.pck", "rb"))
captiondict = data.captiondict
del(data)
dataStatic = pickle.load(open(dataset_path_tmp + 'comm_'+commDetectMethod[0]+'.pck','rb'))
#delete folders if you're starting from scratch
if delFolders == 1:
result_files = glob.glob(dataset_path_results+'/analysis/*.txt')
if result_files:
for file in result_files:
os.remove(file)
if not peopleSet:
with open(dataset_path_results+'rankedPeople.txt','r') as f:
for lineId,line in enumerate(f):
if lineId>topPeople-1:
break
line = line.split('\t')
peopleSet.append(line[0])
for person in peopleSet:
if decisionforAll != str('n') and not os.path.exists(dataset_path_results+'html/'+person):
os.makedirs(dataset_path_results+'html/'+person)
if decisionforAll != str('n'):
personDecision = input('\nRetrieve images for '+person+'???(y or n) ')
if decisionforAll == str('n'):
print("\nRetrieval Commences for "+person)
if decisionforAll == str('n') or personDecision == str('y'):
dataStatic.photoRetrieval(topImages, person, captiondict,decisionforAll)
dataStatic.popularity_coappearence(topImages, person, captiondict)
elapsed = time.time() - t
print('\nStage 3: %.2f seconds' % elapsed) | random_line_split |
|
mainStaticPersonTask.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name:
# Purpose: This .py file is the main Framework file
# It ranks images of a specific person of interest in a static manner
#
# Required libs: python-dateutil, numpy,matplotlib,pyparsing
# Author: konkonst
#
# Created: 30/03/2014
# Copyright: (c) ITI (CERTH) 2014
# Licence: <apache licence 2.0>
#-------------------------------------------------------------------------------
import time,os,pickle,glob,shutil, personPopularity
from staticCommPersonTask import communitystatic
print('staticCommPersonCentered')
print(time.asctime( time.localtime(time.time()) ))
'''PARAMETERS'''
#Construct the data class from scratch: 1-yes / 2- from the community detection/ else-perform only the ranking
dataextract = 1
#Provide a time Limit (unix timestamp) about when the dataset begins in case you only want part of the dataset. If it is set to 0 the whole dataset is considered.
timeLimit = 0 #1071561600
#Community detection method. 'Ahn','Demon' and 'Copra' for overlapping and 'Louvain' for non. Ahn carries a threshold.
commDetectMethod = ['Demon', 0.66]
#User sets desired number of displayed top images
topImages = 8
#User sets desired number of most frequent people to retrieve
topPeople = 200
#Provide people set or leave empty to retrieve images for the number of topPeople as set above
peopleSet = ['justin_timberlake','oprah_winfrey','lady_gaga','justin_bieber','michael_schumacher','miley_cyrus','jk_rowling','zinedine_zidane','barack_obama','prince_william','brad_pitt_actor','leonardo_dicaprio','natalie_portman']
peopleSet.sort()
##peopleSet = [] #Uncomment this to activate the use of the rankedPeople.txt pool of users
#Delete all previous folders containing results? (Does not apply to the html files)
delFolders = 0
#If there are any nodes that should not be considered, please place them in './data/txt/stopNodes.txt'
'''Functions'''
t = time.time()
filename = [f for f in os.listdir("./data/txt/")]
for idx,files in enumerate(filename):
print(str(idx+1) + '.' + files)
selection = int(input('Select a dataset from the above: '))-1
dataset_path_results = "./data/"+filename[selection][:-4]+"/staticPersonCentered_"+commDetectMethod[0]+"/results/"
dataset_path_tmp = "./data/"+filename[selection][:-4]+"/staticPersonCentered_"+commDetectMethod[0]+"/tmp/"
datasetFilename = './data/txt/'+filename[selection]
if not os.path.exists(dataset_path_results):
os.makedirs(dataset_path_results)
os.makedirs(dataset_path_tmp)
if not os.path.exists(dataset_path_results+"rankedPeople.txt"):
personPopularity.popPerson(datasetFilename, dataset_path_results, dataset_path_tmp, commDetectMethod,timeLimit=timeLimit)
if dataextract==1:#Start from scratch
|
if dataextract==1 or dataextract==2:#If the basic data (authors, mentions, time) has been created
data = pickle.load(open(dataset_path_tmp + "allPersondata.pck", "rb"))
captiondict = data.captiondict
print('static Community detection method selected is :'+commDetectMethod[0])
dataStatic=data.extraction(commDetectMethod)
del(data)
elapsed = time.time() - t
print('\nStage 2: %.2f seconds' % elapsed)
decisionforAll = input('\nRetrieve the topImages by screening them one by one???(y or n) ')
if dataextract ==1 or dataextract ==2 or dataextract ==3:#Only ranking beyond this point
data = pickle.load(open(dataset_path_tmp + "allPersondata.pck", "rb"))
captiondict = data.captiondict
del(data)
dataStatic = pickle.load(open(dataset_path_tmp + 'comm_'+commDetectMethod[0]+'.pck','rb'))
#delete folders if you're starting from scratch
if delFolders == 1:
result_files = glob.glob(dataset_path_results+'/analysis/*.txt')
if result_files:
for file in result_files:
os.remove(file)
if not peopleSet:
with open(dataset_path_results+'rankedPeople.txt','r') as f:
for lineId,line in enumerate(f):
if lineId>topPeople-1:
break
line = line.split('\t')
peopleSet.append(line[0])
for person in peopleSet:
if decisionforAll != str('n') and not os.path.exists(dataset_path_results+'html/'+person):
os.makedirs(dataset_path_results+'html/'+person)
if decisionforAll != str('n'):
personDecision = input('\nRetrieve images for '+person+'???(y or n) ')
if decisionforAll == str('n'):
print("\nRetrieval Commences for "+person)
if decisionforAll == str('n') or personDecision == str('y'):
dataStatic.photoRetrieval(topImages, person, captiondict,decisionforAll)
dataStatic.popularity_coappearence(topImages, person, captiondict)
elapsed = time.time() - t
print('\nStage 3: %.2f seconds' % elapsed)
| data = communitystatic.from_txt(datasetFilename,dataset_path_results,dataset_path_tmp,timeLimit=timeLimit)
dataPck = open(dataset_path_tmp + "allPersondata.pck", "wb")
pickle.dump(data, dataPck , protocol = 2)
dataPck.close()
del(data)
elapsed = time.time() - t
print('Stage 1: %.2f seconds' % elapsed) | conditional_block |
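The `dataextract` switch above is simple stage checkpointing: stage 1 builds the data object and pickles it, and later stages reload the pickle instead of rebuilding. A compact sketch of the same pattern; the function and path names here are placeholders:

import pickle, time

def run_stage1(build_fn, checkpoint):
    t = time.time()
    data = build_fn()                         # the expensive extraction step
    with open(checkpoint, 'wb') as fh:
        pickle.dump(data, fh, protocol=2)     # protocol 2, as in the script
    print('Stage 1: %.2f seconds' % (time.time() - t))
    return data

def load_stage1(checkpoint):
    with open(checkpoint, 'rb') as fh:
        return pickle.load(fh)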
test_instructor_dashboard.py | """
End to end tests for Instructor Dashboard.
"""
from bok_choy.web_app_test import WebAppTest
from regression.pages.lms.course_page_lms import CourseHomePageExtended
from regression.pages.lms.dashboard_lms import DashboardPageExtended
from regression.pages.lms.instructor_dashboard import InstructorDashboardPageExtended
from regression.pages.lms.utils import get_course_key
from regression.tests.helpers.api_clients import LmsLoginApi
from regression.tests.helpers.utils import get_course_display_name, get_course_info | """
Regression tests on Analytics on Instructor Dashboard
"""
def setUp(self):
super().setUp()
login_api = LmsLoginApi()
login_api.authenticate(self.browser)
course_info = get_course_info()
self.dashboard_page = DashboardPageExtended(self.browser)
self.instructor_dashboard = InstructorDashboardPageExtended(
self.browser,
get_course_key(course_info)
)
self.course_page = CourseHomePageExtended(
self.browser,
get_course_key(course_info)
)
self.dashboard_page.visit()
self.dashboard_page.select_course(get_course_display_name())
self.course_page.wait_for_page()
self.instructor_dashboard.visit() |
class AnalyticsTest(WebAppTest): | random_line_split |
test_instructor_dashboard.py | """
End to end tests for Instructor Dashboard.
"""
from bok_choy.web_app_test import WebAppTest
from regression.pages.lms.course_page_lms import CourseHomePageExtended
from regression.pages.lms.dashboard_lms import DashboardPageExtended
from regression.pages.lms.instructor_dashboard import InstructorDashboardPageExtended
from regression.pages.lms.utils import get_course_key
from regression.tests.helpers.api_clients import LmsLoginApi
from regression.tests.helpers.utils import get_course_display_name, get_course_info
class AnalyticsTest(WebAppTest):
| """
Regression tests on Analytics on Instructor Dashboard
"""
def setUp(self):
super().setUp()
login_api = LmsLoginApi()
login_api.authenticate(self.browser)
course_info = get_course_info()
self.dashboard_page = DashboardPageExtended(self.browser)
self.instructor_dashboard = InstructorDashboardPageExtended(
self.browser,
get_course_key(course_info)
)
self.course_page = CourseHomePageExtended(
self.browser,
get_course_key(course_info)
)
self.dashboard_page.visit()
self.dashboard_page.select_course(get_course_display_name())
self.course_page.wait_for_page()
self.instructor_dashboard.visit() | identifier_body |
|
test_instructor_dashboard.py | """
End to end tests for Instructor Dashboard.
"""
from bok_choy.web_app_test import WebAppTest
from regression.pages.lms.course_page_lms import CourseHomePageExtended
from regression.pages.lms.dashboard_lms import DashboardPageExtended
from regression.pages.lms.instructor_dashboard import InstructorDashboardPageExtended
from regression.pages.lms.utils import get_course_key
from regression.tests.helpers.api_clients import LmsLoginApi
from regression.tests.helpers.utils import get_course_display_name, get_course_info
class AnalyticsTest(WebAppTest):
"""
Regression tests on Analytics on Instructor Dashboard
"""
def | (self):
super().setUp()
login_api = LmsLoginApi()
login_api.authenticate(self.browser)
course_info = get_course_info()
self.dashboard_page = DashboardPageExtended(self.browser)
self.instructor_dashboard = InstructorDashboardPageExtended(
self.browser,
get_course_key(course_info)
)
self.course_page = CourseHomePageExtended(
self.browser,
get_course_key(course_info)
)
self.dashboard_page.visit()
self.dashboard_page.select_course(get_course_display_name())
self.course_page.wait_for_page()
self.instructor_dashboard.visit()
| setUp | identifier_name |
reportcommon.py | #!/usr/local/munkireport/munkireport-python2
# encoding: utf-8
from . import display
from . import prefs
from . import constants
from . import FoundationPlist
from munkilib.purl import Purl
from munkilib.phpserialize import *
import subprocess
import pwd
import sys
import hashlib
import platform
from urllib import urlencode
import re
import time
import os
# PyLint cannot properly find names inside Cocoa libraries, so issues bogus
# No name 'Foo' in module 'Bar' warnings. Disable them.
# pylint: disable=E0611
from Foundation import NSArray, NSDate, NSMetadataQuery, NSPredicate
from Foundation import CFPreferencesAppSynchronize
from Foundation import CFPreferencesCopyAppValue
from Foundation import CFPreferencesCopyKeyList
from Foundation import CFPreferencesSetValue
from Foundation import kCFPreferencesAnyUser
from Foundation import kCFPreferencesCurrentUser
from Foundation import kCFPreferencesCurrentHost
from Foundation import NSHTTPURLResponse
from SystemConfiguration import SCDynamicStoreCopyConsoleUser
# pylint: enable=E0611
# our preferences "bundle_id"
BUNDLE_ID = "MunkiReport"
class CurlError(Exception):
def __init__(self, status, message):
display_error(message)
finish_run()
def set_verbosity(level):
"""Set verbosity level."""
display.verbose = int(level)
def display_error(msg, *args):
"""Call display error msg handler."""
display.display_error("%s" % msg, *args)
def display_warning(msg, *args):
"""Call display warning msg handler."""
display.display_warning("%s" % msg, *args)
def display_detail(msg, *args):
|
def finish_run():
remove_run_file()
display_detail("## Finished run")
exit(0)
def remove_run_file():
touchfile = '/Users/Shared/.com.github.munkireport.run'
if os.path.exists(touchfile):
os.remove(touchfile)
def curl(url, values):
options = dict()
options["url"] = url
options["method"] = "POST"
options["content_type"] = "application/x-www-form-urlencoded"
options["body"] = urlencode(values)
options["logging_function"] = display_detail
options["connection_timeout"] = 60
if pref("UseMunkiAdditionalHttpHeaders"):
custom_headers = prefs.pref(constants.ADDITIONAL_HTTP_HEADERS_KEY)
if custom_headers:
options["additional_headers"] = dict()
for header in custom_headers:
m = re.search(r"^(?P<header_name>.*?): (?P<header_value>.*?)$", header)
if m:
options["additional_headers"][m.group("header_name")] = m.group(
"header_value"
)
else:
raise CurlError(
-1,
"UseMunkiAdditionalHttpHeaders defined, "
"but not found in Munki preferences",
)
# Build Purl with initial settings
connection = Purl.alloc().initWithOptions_(options)
connection.start()
try:
while True:
# if we did `while not connection.isDone()` we'd miss printing
# messages if we exit the loop first
if connection.isDone():
break
except (KeyboardInterrupt, SystemExit):
# safely kill the connection then re-raise
connection.cancel()
raise
except Exception, err: # too general, I know
# Let us out! ... Safely! Unexpectedly quit dialogs are annoying...
connection.cancel()
# Re-raise the error as a GurlError
raise CurlError(-1, str(err))
if connection.error != None:
# Gurl returned an error
display.display_detail(
"Download error %s: %s",
connection.error.code(),
connection.error.localizedDescription(),
)
if connection.SSLerror:
display_detail("SSL error detail: %s", str(connection.SSLerror))
display_detail("Headers: %s", connection.headers)
raise CurlError(
connection.error.code(), connection.error.localizedDescription()
)
if connection.response != None and connection.status != 200:
display.display_detail("Status: %s", connection.status)
display.display_detail("Headers: %s", connection.headers)
if connection.redirection != []:
display.display_detail("Redirection: %s", connection.redirection)
connection.headers["http_result_code"] = str(connection.status)
description = NSHTTPURLResponse.localizedStringForStatusCode_(connection.status)
connection.headers["http_result_description"] = description
if str(connection.status).startswith("2"):
return connection.get_response_data()
else:
# there was an HTTP error of some sort.
raise CurlError(
connection.status,
"%s failed, HTTP returncode %s (%s)"
% (
url,
connection.status,
connection.headers.get("http_result_description", "Failed"),
),
)
def get_hardware_info():
"""Uses system profiler to get hardware info for this machine."""
cmd = ["/usr/sbin/system_profiler", "SPHardwareDataType", "-xml"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, dummy_error) = proc.communicate()
try:
plist = FoundationPlist.readPlistFromString(output)
# system_profiler xml is an array
sp_dict = plist[0]
items = sp_dict["_items"]
sp_hardware_dict = items[0]
return sp_hardware_dict
except BaseException:
return {}
def get_long_username(username):
try:
long_name = pwd.getpwnam(username)[4]
except:
long_name = ""
return long_name.decode("utf-8")
def get_uid(username):
try:
uid = pwd.getpwnam(username)[2]
except:
uid = ""
return uid
def get_computername():
cmd = ["/usr/sbin/scutil", "--get", "ComputerName"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_cpuinfo():
cmd = ["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_buildversion():
cmd = ["/usr/bin/sw_vers", "-buildVersion"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_uptime():
cmd = ["/usr/sbin/sysctl", "-n", "kern.boottime"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
sec = int(re.sub(".*sec = (\d+),.*", "\\1", output))
up = int(time.time() - sec)
return up if up > 0 else -1
def set_pref(pref_name, pref_value):
"""Sets a preference, See prefs.py for details."""
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
print "set pref"
try:
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
except Exception:
pass
def pref(pref_name):
"""Return a preference.
See prefs.py for details
"""
pref_value = CFPreferencesCopyAppValue(pref_name, BUNDLE_ID)
return pref_value
def process(serial, items):
"""Process receives a list of items, checks if they need updating and
updates them if necessary."""
# Sanitize serial
serial = "".join([c for c in serial if c.isalnum()])
# Get prefs
baseurl = pref("BaseUrl") or prefs.pref("SoftwareRepoURL") + "/report/"
hashurl = baseurl + "index.php?/report/hash_check"
checkurl = baseurl + "index.php?/report/check_in"
# Get passphrase
passphrase = pref("Passphrase")
# Get hashes for all scripts
for key, i in items.items():
if i.get("path"):
i["hash"] = getmd5hash(i.get("path"))
# Check dict
check = {}
for key, i in items.items():
if i.get("hash"):
check[key] = {"hash": i.get("hash")}
# Send hashes to server
values = {"serial": serial, "items": serialize(check), "passphrase": passphrase}
server_data = curl(hashurl, values)
# = response.read()
# Decode response
try:
result = unserialize(server_data)
except Exception, e:
display_error("Could not unserialize server data: %s" % str(e))
display_error("Request: %s" % str(values))
display_error("Response: %s" % str(server_data))
return -1
if result.get("error") != "":
display_error("Server error: %s" % result["error"])
return -1
if result.get("info") != "":
display_detail("Server info: %s" % result["info"])
# Retrieve hashes that need updating
total_size = 0
for i in items.keys():
if i in result:
if items[i].get("path"):
try:
f = open(items[i]["path"], "r")
items[i]["data"] = f.read()
except:
display_warning("Can't open %s" % items[i]["path"])
del items[i]
continue
size = len(items[i]["data"])
display_detail("Need to update %s (%s)" % (i, sizeof_fmt(size)))
total_size = total_size + size
else: # delete items that don't have to be uploaded
del items[i]
# Send new files with hashes
if len(items):
display_detail("Sending items (%s)" % sizeof_fmt(total_size))
response = curl(
checkurl,
{"serial": serial, "items": serialize(items), "passphrase": passphrase},
)
display_detail(response)
else:
display_detail("No changes")
def runExternalScriptWithTimeout(
script, allow_insecure=False, script_args=(), timeout=30
):
"""Run a script (e.g. preflight/postflight) and return its exit status.
Args:
script: string path to the script to execute.
allow_insecure: bool skip the permissions check of executable.
args: args to pass to the script.
Returns:
Tuple. (integer exit status from script, str stdout, str stderr).
Raises:
ScriptNotFoundError: the script was not found at the given path.
RunExternalScriptError: there was an error running the script.
"""
from munkilib import utils
if not os.path.exists(script):
raise utils.ScriptNotFoundError("script does not exist: %s" % script)
if not allow_insecure:
try:
utils.verifyFileOnlyWritableByMunkiAndRoot(script)
except utils.VerifyFilePermissionsError, e:
msg = (
"Skipping execution due to failed file permissions "
"verification: %s\n%s" % (script, str(e))
)
raise utils.RunExternalScriptError(msg)
if os.access(script, os.X_OK):
cmd = [script]
if script_args:
cmd.extend(script_args)
proc = subprocess.Popen(
cmd,
shell=False,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
while timeout > 0:
if proc.poll() is not None:
(stdout, stderr) = proc.communicate()
return (
proc.returncode,
stdout.decode("UTF-8", "replace"),
stderr.decode("UTF-8", "replace"),
)
time.sleep(0.1)
timeout -= 0.1
else:
try:
proc.kill()
except OSError, e:
if e.errno != 3:
raise
raise utils.RunExternalScriptError("%s timed out" % script)
return (0, None, None)
else:
raise utils.RunExternalScriptError("%s not executable" % script)
def rundir(scriptdir, runtype, abort=False, submitscript=""):
"""Run scripts in directory scriptdir runtype is passed to the script if
abort is True, a non-zero exit status will abort munki submitscript is put
at the end of the scriptlist."""
if os.path.exists(scriptdir):
from munkilib import utils
# Get timeout for scripts
scriptTimeOut = 30
if pref("scriptTimeOut"):
scriptTimeOut = int(pref("scriptTimeOut"))
display_detail("# Set custom script timeout to %s seconds" % scriptTimeOut)
# Directory containing the scripts
parentdir = os.path.basename(scriptdir)
display_detail("# Executing scripts in %s" % parentdir)
# Get all files in scriptdir
files = os.listdir(scriptdir)
# Sort files
files.sort()
# Find submit script and stick it on the end of the list
if submitscript:
try:
sub = files.pop(files.index(submitscript))
files.append(sub)
except Exception, e:
display_error("%s not found in %s" % (submitscript, parentdir))
for script in files:
# Skip files that start with a period
if script.startswith("."):
continue
# Concatenate dir and filename
scriptpath = os.path.join(scriptdir, script)
# Skip directories
if os.path.isdir(scriptpath):
continue
try:
# Attempt to execute script
display_detail("Running %s" % script)
result, stdout, stderr = runExternalScriptWithTimeout(
scriptpath,
allow_insecure=False,
script_args=[runtype],
timeout=scriptTimeOut,
)
if stdout:
display_detail(stdout)
if stderr:
display_detail("%s Error: %s" % (script, stderr))
if result:
if abort:
display_detail("Aborted by %s" % script)
exit(1)
else:
display_warning("%s return code: %d" % (script, result))
except utils.ScriptNotFoundError:
pass # Script has disappeared - pass.
except Exception, e:
display_warning("%s: %s" % (script, str(e)))
def sizeof_fmt(num):
for unit in ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"]:
if abs(num) < 1000.0:
return "%.0f%s" % (num, unit)
num /= 1000.0
return "%.1f%s" % (num, "YB")
def gethash(filename, hash_function):
"""Calculates the hashvalue of the given file with the given hash_function.
Args:
filename: The file name to calculate the hash value of.
hash_function: The hash function object to use, which was instantiated
before calling this function, e.g. hashlib.md5().
Returns:
The hashvalue of the given file as hex string.
"""
if not os.path.isfile(filename):
return "NOT A FILE"
fileref = open(filename, "rb")
while 1:
chunk = fileref.read(2 ** 16)
if not chunk:
break
hash_function.update(chunk)
fileref.close()
return hash_function.hexdigest()
def getmd5hash(filename):
"""Returns hex of MD5 checksum of a file."""
hash_function = hashlib.md5()
return gethash(filename, hash_function)
def getOsVersion(only_major_minor=True, as_tuple=False):
"""Returns an OS version.
Args:
only_major_minor: Boolean. If True, only include major/minor versions.
as_tuple: Boolean. If True, return a tuple of ints, otherwise a string.
"""
os.environ["SYSTEM_VERSION_COMPAT"] = '0'
cmd = ["/usr/bin/sw_vers -productVersion"]
proc = subprocess.Popen(
cmd,
shell=True,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
os_version_tuple = output.split(".")
if only_major_minor:
os_version_tuple = os_version_tuple[0:2]
if as_tuple:
return tuple(map(int, os_version_tuple))
else:
return ".".join(os_version_tuple)
def getconsoleuser():
"""Return console user."""
cfuser = SCDynamicStoreCopyConsoleUser(None, None, None)
return cfuser[0]
# End of reportcommon
| """Call display detail msg handler."""
display.display_detail("%s" % msg, *args) | identifier_body |
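Pieced together from the module above, a check-in hashes the local payloads, asks the server which hashes are stale, and uploads only those. A hypothetical caller, assuming the module is importable and the BaseUrl/Passphrase preferences are set; the serial and paths are made-up examples:

# Hypothetical usage of process() from this module; values are examples only.
items = {
    'machine': {'path': '/usr/local/munki/preflight.d/machine.py'},
    'munkireport': {'path': '/usr/local/munki/preflight.d/munkireport.py'},
}
serial = 'C02ABC123XYZ'    # process() strips non-alphanumerics itself
process(serial, items)     # hash check first, then upload only changed items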
reportcommon.py | #!/usr/local/munkireport/munkireport-python2
# encoding: utf-8
from . import display
from . import prefs
from . import constants
from . import FoundationPlist
from munkilib.purl import Purl
from munkilib.phpserialize import *
import subprocess
import pwd
import sys
import hashlib
import platform
from urllib import urlencode
import re
import time
import os
# PyLint cannot properly find names inside Cocoa libraries, so issues bogus
# No name 'Foo' in module 'Bar' warnings. Disable them.
# pylint: disable=E0611
from Foundation import NSArray, NSDate, NSMetadataQuery, NSPredicate
from Foundation import CFPreferencesAppSynchronize
from Foundation import CFPreferencesCopyAppValue
from Foundation import CFPreferencesCopyKeyList
from Foundation import CFPreferencesSetValue
from Foundation import kCFPreferencesAnyUser
from Foundation import kCFPreferencesCurrentUser
from Foundation import kCFPreferencesCurrentHost
from Foundation import NSHTTPURLResponse
from SystemConfiguration import SCDynamicStoreCopyConsoleUser
# pylint: enable=E0611
# our preferences "bundle_id"
BUNDLE_ID = "MunkiReport"
class CurlError(Exception):
def __init__(self, status, message):
display_error(message)
finish_run()
def set_verbosity(level):
"""Set verbosity level."""
display.verbose = int(level)
def display_error(msg, *args):
"""Call display error msg handler."""
display.display_error("%s" % msg, *args)
def display_warning(msg, *args):
"""Call display warning msg handler."""
display.display_warning("%s" % msg, *args)
def display_detail(msg, *args):
"""Call display detail msg handler."""
display.display_detail("%s" % msg, *args)
def finish_run():
remove_run_file()
display_detail("## Finished run")
exit(0)
def remove_run_file():
touchfile = '/Users/Shared/.com.github.munkireport.run'
if os.path.exists(touchfile):
os.remove(touchfile)
def curl(url, values):
options = dict()
options["url"] = url
options["method"] = "POST"
options["content_type"] = "application/x-www-form-urlencoded"
options["body"] = urlencode(values)
options["logging_function"] = display_detail
options["connection_timeout"] = 60
if pref("UseMunkiAdditionalHttpHeaders"):
custom_headers = prefs.pref(constants.ADDITIONAL_HTTP_HEADERS_KEY)
if custom_headers:
options["additional_headers"] = dict()
for header in custom_headers:
m = re.search(r"^(?P<header_name>.*?): (?P<header_value>.*?)$", header)
if m:
options["additional_headers"][m.group("header_name")] = m.group(
"header_value"
)
else:
raise CurlError(
-1,
"UseMunkiAdditionalHttpHeaders defined, "
"but not found in Munki preferences",
)
# Build Purl with initial settings
connection = Purl.alloc().initWithOptions_(options)
connection.start()
try:
while True:
# if we did `while not connection.isDone()` we'd miss printing
# messages if we exit the loop first
if connection.isDone():
break
except (KeyboardInterrupt, SystemExit):
# safely kill the connection then re-raise
connection.cancel()
raise
except Exception, err: # too general, I know
# Let us out! ... Safely! Unexpectedly quit dialogs are annoying...
connection.cancel()
# Re-raise the error as a GurlError
raise CurlError(-1, str(err))
if connection.error != None:
# Gurl returned an error
display.display_detail(
"Download error %s: %s",
connection.error.code(),
connection.error.localizedDescription(),
)
if connection.SSLerror:
display_detail("SSL error detail: %s", str(connection.SSLerror))
display_detail("Headers: %s", connection.headers)
raise CurlError(
connection.error.code(), connection.error.localizedDescription()
)
if connection.response != None and connection.status != 200:
display.display_detail("Status: %s", connection.status)
display.display_detail("Headers: %s", connection.headers)
if connection.redirection != []:
display.display_detail("Redirection: %s", connection.redirection)
connection.headers["http_result_code"] = str(connection.status)
description = NSHTTPURLResponse.localizedStringForStatusCode_(connection.status)
connection.headers["http_result_description"] = description
if str(connection.status).startswith("2"):
return connection.get_response_data()
else:
# there was an HTTP error of some sort.
raise CurlError(
connection.status,
"%s failed, HTTP returncode %s (%s)"
% (
url,
connection.status,
connection.headers.get("http_result_description", "Failed"),
),
)
def get_hardware_info():
"""Uses system profiler to get hardware info for this machine."""
cmd = ["/usr/sbin/system_profiler", "SPHardwareDataType", "-xml"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, dummy_error) = proc.communicate()
try:
plist = FoundationPlist.readPlistFromString(output)
# system_profiler xml is an array
sp_dict = plist[0]
items = sp_dict["_items"]
sp_hardware_dict = items[0]
return sp_hardware_dict
except BaseException:
return {}
def get_long_username(username):
try:
long_name = pwd.getpwnam(username)[4]
except:
long_name = ""
return long_name.decode("utf-8")
def get_uid(username):
try:
uid = pwd.getpwnam(username)[2]
except:
uid = ""
return uid
def get_computername():
cmd = ["/usr/sbin/scutil", "--get", "ComputerName"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_cpuinfo():
cmd = ["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_buildversion():
cmd = ["/usr/bin/sw_vers", "-buildVersion"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_uptime():
cmd = ["/usr/sbin/sysctl", "-n", "kern.boottime"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
sec = int(re.sub(".*sec = (\d+),.*", "\\1", output))
up = int(time.time() - sec)
return up if up > 0 else -1
def set_pref(pref_name, pref_value):
"""Sets a preference, See prefs.py for details."""
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
print "set pref"
try:
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
except Exception:
pass
def pref(pref_name):
"""Return a preference.
See prefs.py for details
"""
pref_value = CFPreferencesCopyAppValue(pref_name, BUNDLE_ID)
return pref_value
def process(serial, items):
"""Process receives a list of items, checks if they need updating and
updates them if necessary."""
# Sanitize serial
serial = "".join([c for c in serial if c.isalnum()])
# Get prefs
baseurl = pref("BaseUrl") or prefs.pref("SoftwareRepoURL") + "/report/"
hashurl = baseurl + "index.php?/report/hash_check"
checkurl = baseurl + "index.php?/report/check_in"
# Get passphrase
passphrase = pref("Passphrase")
# Get hashes for all scripts
for key, i in items.items():
if i.get("path"):
i["hash"] = getmd5hash(i.get("path"))
# Check dict
check = {}
for key, i in items.items():
if i.get("hash"):
check[key] = {"hash": i.get("hash")}
# Send hashes to server
values = {"serial": serial, "items": serialize(check), "passphrase": passphrase}
server_data = curl(hashurl, values)
# = response.read()
# Decode response
try:
result = unserialize(server_data)
except Exception, e:
display_error("Could not unserialize server data: %s" % str(e))
display_error("Request: %s" % str(values))
display_error("Response: %s" % str(server_data))
return -1
if result.get("error") != "":
display_error("Server error: %s" % result["error"])
return -1
if result.get("info") != "":
display_detail("Server info: %s" % result["info"])
# Retrieve hashes that need updating
total_size = 0
for i in items.keys():
if i in result:
if items[i].get("path"):
try:
f = open(items[i]["path"], "r")
items[i]["data"] = f.read()
except:
display_warning("Can't open %s" % items[i]["path"])
del items[i]
continue
size = len(items[i]["data"])
display_detail("Need to update %s (%s)" % (i, sizeof_fmt(size)))
total_size = total_size + size
else: # delete items that don't have to be uploaded
del items[i]
# Send new files with hashes
if len(items):
display_detail("Sending items (%s)" % sizeof_fmt(total_size))
response = curl(
checkurl,
{"serial": serial, "items": serialize(items), "passphrase": passphrase},
)
display_detail(response)
else:
display_detail("No changes")
def runExternalScriptWithTimeout(
script, allow_insecure=False, script_args=(), timeout=30
):
"""Run a script (e.g. preflight/postflight) and return its exit status.
Args:
script: string path to the script to execute.
allow_insecure: bool skip the permissions check of executable.
args: args to pass to the script.
Returns:
Tuple. (integer exit status from script, str stdout, str stderr).
Raises:
ScriptNotFoundError: the script was not found at the given path. | if not os.path.exists(script):
raise utils.ScriptNotFoundError("script does not exist: %s" % script)
if not allow_insecure:
try:
utils.verifyFileOnlyWritableByMunkiAndRoot(script)
except utils.VerifyFilePermissionsError, e:
msg = (
"Skipping execution due to failed file permissions "
"verification: %s\n%s" % (script, str(e))
)
raise utils.RunExternalScriptError(msg)
if os.access(script, os.X_OK):
cmd = [script]
if script_args:
cmd.extend(script_args)
proc = subprocess.Popen(
cmd,
shell=False,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
while timeout > 0:
if proc.poll() is not None:
(stdout, stderr) = proc.communicate()
return (
proc.returncode,
stdout.decode("UTF-8", "replace"),
stderr.decode("UTF-8", "replace"),
)
time.sleep(0.1)
timeout -= 0.1
else:
try:
proc.kill()
except OSError, e:
if e.errno != 3:
raise
raise utils.RunExternalScriptError("%s timed out" % script)
return (0, None, None)
else:
raise utils.RunExternalScriptError("%s not executable" % script)
def rundir(scriptdir, runtype, abort=False, submitscript=""):
"""Run scripts in directory scriptdir runtype is passed to the script if
abort is True, a non-zero exit status will abort munki submitscript is put
at the end of the scriptlist."""
if os.path.exists(scriptdir):
from munkilib import utils
# Get timeout for scripts
scriptTimeOut = 30
if pref("scriptTimeOut"):
scriptTimeOut = int(pref("scriptTimeOut"))
display_detail("# Set custom script timeout to %s seconds" % scriptTimeOut)
# Directory containing the scripts
parentdir = os.path.basename(scriptdir)
display_detail("# Executing scripts in %s" % parentdir)
# Get all files in scriptdir
files = os.listdir(scriptdir)
# Sort files
files.sort()
# Find submit script and stick it on the end of the list
if submitscript:
try:
sub = files.pop(files.index(submitscript))
files.append(sub)
except Exception, e:
display_error("%s not found in %s" % (submitscript, parentdir))
for script in files:
# Skip files that start with a period
if script.startswith("."):
continue
# Concatenate dir and filename
scriptpath = os.path.join(scriptdir, script)
# Skip directories
if os.path.isdir(scriptpath):
continue
try:
# Attempt to execute script
display_detail("Running %s" % script)
result, stdout, stderr = runExternalScriptWithTimeout(
scriptpath,
allow_insecure=False,
script_args=[runtype],
timeout=scriptTimeOut,
)
if stdout:
display_detail(stdout)
if stderr:
display_detail("%s Error: %s" % (script, stderr))
if result:
if abort:
display_detail("Aborted by %s" % script)
exit(1)
else:
display_warning("%s return code: %d" % (script, result))
except utils.ScriptNotFoundError:
pass # Script has disappeared - pass.
except Exception, e:
display_warning("%s: %s" % (script, str(e)))
def sizeof_fmt(num):
for unit in ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"]:
if abs(num) < 1000.0:
return "%.0f%s" % (num, unit)
num /= 1000.0
return "%.1f%s" % (num, "YB")
def gethash(filename, hash_function):
"""Calculates the hashvalue of the given file with the given hash_function.
Args:
filename: The file name to calculate the hash value of.
hash_function: The hash function object to use, which was instantiated
before calling this function, e.g. hashlib.md5().
Returns:
The hashvalue of the given file as hex string.
"""
if not os.path.isfile(filename):
return "NOT A FILE"
fileref = open(filename, "rb")
while 1:
chunk = fileref.read(2 ** 16)
if not chunk:
break
hash_function.update(chunk)
fileref.close()
return hash_function.hexdigest()
def getmd5hash(filename):
"""Returns hex of MD5 checksum of a file."""
hash_function = hashlib.md5()
return gethash(filename, hash_function)
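# Sketch: the chunked reader in gethash() works with any hashlib object;
# hashlib.sha256 is stdlib, the default path is illustrative.
def _example_sha256(path="/etc/hosts"):
    return gethash(path, hashlib.sha256())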
def getOsVersion(only_major_minor=True, as_tuple=False):
"""Returns an OS version.
Args:
only_major_minor: Boolean. If True, only include major/minor versions.
as_tuple: Boolean. If True, return a tuple of ints, otherwise a string.
"""
os.environ["SYSTEM_VERSION_COMPAT"] = '0'
cmd = ["/usr/bin/sw_vers -productVersion"]
proc = subprocess.Popen(
cmd,
shell=True,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
os_version_tuple = output.split(".")
if only_major_minor:
os_version_tuple = os_version_tuple[0:2]
if as_tuple:
return tuple(map(int, os_version_tuple))
else:
return ".".join(os_version_tuple)
def getconsoleuser():
"""Return console user."""
cfuser = SCDynamicStoreCopyConsoleUser(None, None, None)
return cfuser[0]
# End of reportcommon | RunExternalScriptError: there was an error running the script.
"""
from munkilib import utils
| random_line_split |
reportcommon.py | #!/usr/local/munkireport/munkireport-python2
# encoding: utf-8
from . import display
from . import prefs
from . import constants
from . import FoundationPlist
from munkilib.purl import Purl
from munkilib.phpserialize import *
import subprocess
import pwd
import sys
import hashlib
import platform
from urllib import urlencode
import re
import time
import os
# PyLint cannot properly find names inside Cocoa libraries, so issues bogus
# No name 'Foo' in module 'Bar' warnings. Disable them.
# pylint: disable=E0611
from Foundation import NSArray, NSDate, NSMetadataQuery, NSPredicate
from Foundation import CFPreferencesAppSynchronize
from Foundation import CFPreferencesCopyAppValue
from Foundation import CFPreferencesCopyKeyList
from Foundation import CFPreferencesSetValue
from Foundation import kCFPreferencesAnyUser
from Foundation import kCFPreferencesCurrentUser
from Foundation import kCFPreferencesCurrentHost
from Foundation import NSHTTPURLResponse
from SystemConfiguration import SCDynamicStoreCopyConsoleUser
# pylint: enable=E0611
# our preferences "bundle_id"
BUNDLE_ID = "MunkiReport"
class CurlError(Exception):
def __init__(self, status, message):
display_error(message)
finish_run()
def set_verbosity(level):
"""Set verbosity level."""
display.verbose = int(level)
def display_error(msg, *args):
"""Call display error msg handler."""
display.display_error("%s" % msg, *args)
def display_warning(msg, *args):
"""Call display warning msg handler."""
display.display_warning("%s" % msg, *args)
def display_detail(msg, *args):
"""Call display detail msg handler."""
display.display_detail("%s" % msg, *args)
def finish_run():
remove_run_file()
display_detail("## Finished run")
exit(0)
def remove_run_file():
touchfile = '/Users/Shared/.com.github.munkireport.run'
if os.path.exists(touchfile):
os.remove(touchfile)
def curl(url, values):
options = dict()
options["url"] = url
options["method"] = "POST"
options["content_type"] = "application/x-www-form-urlencoded"
options["body"] = urlencode(values)
options["logging_function"] = display_detail
options["connection_timeout"] = 60
if pref("UseMunkiAdditionalHttpHeaders"):
custom_headers = prefs.pref(constants.ADDITIONAL_HTTP_HEADERS_KEY)
if custom_headers:
options["additional_headers"] = dict()
for header in custom_headers:
m = re.search(r"^(?P<header_name>.*?): (?P<header_value>.*?)$", header)
if m:
options["additional_headers"][m.group("header_name")] = m.group(
"header_value"
)
else:
raise CurlError(
-1,
"UseMunkiAdditionalHttpHeaders defined, "
"but not found in Munki preferences",
)
# Build Purl with initial settings
connection = Purl.alloc().initWithOptions_(options)
connection.start()
try:
while True:
# if we did `while not connection.isDone()` we'd miss printing
# messages if we exit the loop first
if connection.isDone():
break
except (KeyboardInterrupt, SystemExit):
# safely kill the connection then re-raise
connection.cancel()
raise
except Exception, err: # too general, I know
# Let us out! ... Safely! Unexpectedly quit dialogs are annoying...
connection.cancel()
# Re-raise the error as a GurlError
raise CurlError(-1, str(err))
if connection.error is not None:
# Gurl returned an error
display.display_detail(
"Download error %s: %s",
connection.error.code(),
connection.error.localizedDescription(),
)
if connection.SSLerror:
display_detail("SSL error detail: %s", str(connection.SSLerror))
display_detail("Headers: %s", connection.headers)
raise CurlError(
connection.error.code(), connection.error.localizedDescription()
)
if connection.response is not None and connection.status != 200:
display.display_detail("Status: %s", connection.status)
display.display_detail("Headers: %s", connection.headers)
if connection.redirection != []:
display.display_detail("Redirection: %s", connection.redirection)
connection.headers["http_result_code"] = str(connection.status)
description = NSHTTPURLResponse.localizedStringForStatusCode_(connection.status)
connection.headers["http_result_description"] = description
if str(connection.status).startswith("2"):
return connection.get_response_data()
else:
# there was an HTTP error of some sort.
raise CurlError(
connection.status,
"%s failed, HTTP returncode %s (%s)"
% (
url,
connection.status,
connection.headers.get("http_result_description", "Failed"),
),
)
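# Hedged sketch: the endpoint URL is illustrative; real URLs are built from
# pref("BaseUrl") in process() below. curl() POSTs the urlencoded values and
# returns the response body on a 2xx status.
def _example_checkin_ping(serial):
    return curl(
        "https://munkireport.example/index.php?/report/check_in",
        {"serial": serial, "passphrase": pref("Passphrase")},
    )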
def get_hardware_info():
"""Uses system profiler to get hardware info for this machine."""
cmd = ["/usr/sbin/system_profiler", "SPHardwareDataType", "-xml"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, dummy_error) = proc.communicate()
try:
plist = FoundationPlist.readPlistFromString(output)
# system_profiler xml is an array
sp_dict = plist[0]
items = sp_dict["_items"]
sp_hardware_dict = items[0]
return sp_hardware_dict
except BaseException:
return {}
def get_long_username(username):
try:
long_name = pwd.getpwnam(username)[4]
except:
long_name = ""
return long_name.decode("utf-8")
def get_uid(username):
try:
uid = pwd.getpwnam(username)[2]
except:
uid = ""
return uid
def get_computername():
cmd = ["/usr/sbin/scutil", "--get", "ComputerName"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_cpuinfo():
cmd = ["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_buildversion():
cmd = ["/usr/bin/sw_vers", "-buildVersion"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_uptime():
cmd = ["/usr/sbin/sysctl", "-n", "kern.boottime"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
sec = int(re.sub(r".*sec = (\d+),.*", r"\1", output))
up = int(time.time() - sec)
return up if up > 0 else -1
def set_pref(pref_name, pref_value):
"""Sets a preference, See prefs.py for details."""
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
print "set pref"
try:
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
except Exception:
pass
def pref(pref_name):
"""Return a preference.
See prefs.py for details
"""
pref_value = CFPreferencesCopyAppValue(pref_name, BUNDLE_ID)
return pref_value
def process(serial, items):
"""Process receives a list of items, checks if they need updating and
updates them if necessary."""
# Sanitize serial
serial = "".join([c for c in serial if c.isalnum()])
# Get prefs
baseurl = pref("BaseUrl") or prefs.pref("SoftwareRepoURL") + "/report/"
hashurl = baseurl + "index.php?/report/hash_check"
checkurl = baseurl + "index.php?/report/check_in"
# Get passphrase
passphrase = pref("Passphrase")
# Get hashes for all scripts
for key, i in items.items():
if i.get("path"):
i["hash"] = getmd5hash(i.get("path"))
# Check dict
check = {}
for key, i in items.items():
if i.get("hash"):
check[key] = {"hash": i.get("hash")}
# Send hashes to server
values = {"serial": serial, "items": serialize(check), "passphrase": passphrase}
server_data = curl(hashurl, values)
# Decode response
try:
result = unserialize(server_data)
except Exception, e:
display_error("Could not unserialize server data: %s" % str(e))
display_error("Request: %s" % str(values))
display_error("Response: %s" % str(server_data))
return -1
if result.get("error") != "":
display_error("Server error: %s" % result["error"])
return -1
if result.get("info") != "":
display_detail("Server info: %s" % result["info"])
# Retrieve hashes that need updating
total_size = 0
for i in items.keys():
if i in result:
if items[i].get("path"):
try:
f = open(items[i]["path"], "r")
items[i]["data"] = f.read()
f.close()
except:
display_warning("Can't open %s" % items[i]["path"])
del items[i]
continue
size = len(items[i]["data"])
display_detail("Need to update %s (%s)" % (i, sizeof_fmt(size)))
total_size = total_size + size
else: # delete items that don't have to be uploaded
del items[i]
# Send new files with hashes
if len(items):
display_detail("Sending items (%s)" % sizeof_fmt(total_size))
response = curl(
checkurl,
{"serial": serial, "items": serialize(items), "passphrase": passphrase},
)
display_detail(response)
else:
display_detail("No changes")
def | (
script, allow_insecure=False, script_args=(), timeout=30
):
"""Run a script (e.g. preflight/postflight) and return its exit status.
Args:
script: string path to the script to execute.
allow_insecure: bool; skip the permissions check of the executable.
script_args: args to pass to the script.
Returns:
Tuple. (integer exit status from script, str stdout, str stderr).
Raises:
ScriptNotFoundError: the script was not found at the given path.
RunExternalScriptError: there was an error running the script.
"""
from munkilib import utils
if not os.path.exists(script):
raise ScriptNotFoundError("script does not exist: %s" % script)
if not allow_insecure:
try:
utils.verifyFileOnlyWritableByMunkiAndRoot(script)
except utils.VerifyFilePermissionsError, e:
msg = (
"Skipping execution due to failed file permissions "
"verification: %s\n%s" % (script, str(e))
)
raise utils.RunExternalScriptError(msg)
if os.access(script, os.X_OK):
cmd = [script]
if script_args:
cmd.extend(script_args)
proc = subprocess.Popen(
cmd,
shell=False,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
while timeout > 0:
if proc.poll() is not None:
(stdout, stderr) = proc.communicate()
return (
proc.returncode,
stdout.decode("UTF-8", "replace"),
stderr.decode("UTF-8", "replace"),
)
time.sleep(0.1)
timeout -= 0.1
else:
try:
proc.kill()
except OSError, e:
if e.errno != 3:
raise
raise utils.RunExternalScriptError("%s timed out" % script)
return (0, None, None)
else:
raise utils.RunExternalScriptError("%s not executable" % script)
def rundir(scriptdir, runtype, abort=False, submitscript=""):
"""Run scripts in directory scriptdir runtype is passed to the script if
abort is True, a non-zero exit status will abort munki submitscript is put
at the end of the scriptlist."""
if os.path.exists(scriptdir):
from munkilib import utils
# Get timeout for scripts
scriptTimeOut = 30
if pref("scriptTimeOut"):
scriptTimeOut = int(pref("scriptTimeOut"))
display_detail("# Set custom script timeout to %s seconds" % scriptTimeOut)
# Directory containing the scripts
parentdir = os.path.basename(scriptdir)
display_detail("# Executing scripts in %s" % parentdir)
# Get all files in scriptdir
files = os.listdir(scriptdir)
# Sort files
files.sort()
# Find submit script and stick it on the end of the list
if submitscript:
try:
sub = files.pop(files.index(submitscript))
files.append(sub)
except Exception, e:
display_error("%s not found in %s" % (submitscript, parentdir))
for script in files:
# Skip files that start with a period
if script.startswith("."):
continue
# Concatenate dir and filename
scriptpath = os.path.join(scriptdir, script)
# Skip directories
if os.path.isdir(scriptpath):
continue
try:
# Attempt to execute script
display_detail("Running %s" % script)
result, stdout, stderr = runExternalScriptWithTimeout(
scriptpath,
allow_insecure=False,
script_args=[runtype],
timeout=scriptTimeOut,
)
if stdout:
display_detail(stdout)
if stderr:
display_detail("%s Error: %s" % (script, stderr))
if result:
if abort:
display_detail("Aborted by %s" % script)
exit(1)
else:
display_warning("%s return code: %d" % (script, result))
except utils.ScriptNotFoundError:
pass # Script has disappeared - pass.
except Exception, e:
display_warning("%s: %s" % (script, str(e)))
def sizeof_fmt(num):
for unit in ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"]:
if abs(num) < 1000.0:
return "%.0f%s" % (num, unit)
num /= 1000.0
return "%.1f%s" % (num, "YB")
def gethash(filename, hash_function):
"""Calculates the hashvalue of the given file with the given hash_function.
Args:
filename: The file name to calculate the hash value of.
hash_function: The hash function object to use, which was instantiated
before calling this function, e.g. hashlib.md5().
Returns:
The hashvalue of the given file as hex string.
"""
if not os.path.isfile(filename):
return "NOT A FILE"
fileref = open(filename, "rb")
while 1:
chunk = fileref.read(2 ** 16)
if not chunk:
break
hash_function.update(chunk)
fileref.close()
return hash_function.hexdigest()
def getmd5hash(filename):
"""Returns hex of MD5 checksum of a file."""
hash_function = hashlib.md5()
return gethash(filename, hash_function)
def getOsVersion(only_major_minor=True, as_tuple=False):
"""Returns an OS version.
Args:
only_major_minor: Boolean. If True, only include major/minor versions.
as_tuple: Boolean. If True, return a tuple of ints, otherwise a string.
"""
os.environ["SYSTEM_VERSION_COMPAT"] = '0'
cmd = ["/usr/bin/sw_vers -productVersion"]
proc = subprocess.Popen(
cmd,
shell=True,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
os_version_tuple = output.split(".")
if only_major_minor:
os_version_tuple = os_version_tuple[0:2]
if as_tuple:
return tuple(map(int, os_version_tuple))
else:
return ".".join(os_version_tuple)
def getconsoleuser():
"""Return console user."""
cfuser = SCDynamicStoreCopyConsoleUser(None, None, None)
return cfuser[0]
# End of reportcommon
| runExternalScriptWithTimeout | identifier_name |
reportcommon.py | #!/usr/local/munkireport/munkireport-python2
# encoding: utf-8
from . import display
from . import prefs
from . import constants
from . import FoundationPlist
from munkilib.purl import Purl
from munkilib.phpserialize import *
import subprocess
import pwd
import sys
import hashlib
import platform
from urllib import urlencode
import re
import time
import os
# PyLint cannot properly find names inside Cocoa libraries, so issues bogus
# No name 'Foo' in module 'Bar' warnings. Disable them.
# pylint: disable=E0611
from Foundation import NSArray, NSDate, NSMetadataQuery, NSPredicate
from Foundation import CFPreferencesAppSynchronize
from Foundation import CFPreferencesCopyAppValue
from Foundation import CFPreferencesCopyKeyList
from Foundation import CFPreferencesSetValue
from Foundation import kCFPreferencesAnyUser
from Foundation import kCFPreferencesCurrentUser
from Foundation import kCFPreferencesCurrentHost
from Foundation import NSHTTPURLResponse
from SystemConfiguration import SCDynamicStoreCopyConsoleUser
# pylint: enable=E0611
# our preferences "bundle_id"
BUNDLE_ID = "MunkiReport"
class CurlError(Exception):
def __init__(self, status, message):
display_error(message)
finish_run()
def set_verbosity(level):
"""Set verbosity level."""
display.verbose = int(level)
def display_error(msg, *args):
"""Call display error msg handler."""
display.display_error("%s" % msg, *args)
def display_warning(msg, *args):
"""Call display warning msg handler."""
display.display_warning("%s" % msg, *args)
def display_detail(msg, *args):
"""Call display detail msg handler."""
display.display_detail("%s" % msg, *args)
def finish_run():
remove_run_file()
display_detail("## Finished run")
exit(0)
def remove_run_file():
touchfile = '/Users/Shared/.com.github.munkireport.run'
if os.path.exists(touchfile):
os.remove(touchfile)
def curl(url, values):
options = dict()
options["url"] = url
options["method"] = "POST"
options["content_type"] = "application/x-www-form-urlencoded"
options["body"] = urlencode(values)
options["logging_function"] = display_detail
options["connection_timeout"] = 60
if pref("UseMunkiAdditionalHttpHeaders"):
custom_headers = prefs.pref(constants.ADDITIONAL_HTTP_HEADERS_KEY)
if custom_headers:
options["additional_headers"] = dict()
for header in custom_headers:
m = re.search(r"^(?P<header_name>.*?): (?P<header_value>.*?)$", header)
if m:
options["additional_headers"][m.group("header_name")] = m.group(
"header_value"
)
else:
raise CurlError(
-1,
"UseMunkiAdditionalHttpHeaders defined, "
"but not found in Munki preferences",
)
# Build Purl with initial settings
connection = Purl.alloc().initWithOptions_(options)
connection.start()
try:
while True:
# if we did `while not connection.isDone()` we'd miss printing
# messages if we exit the loop first
if connection.isDone():
break
except (KeyboardInterrupt, SystemExit):
# safely kill the connection then re-raise
connection.cancel()
raise
except Exception, err: # too general, I know
# Let us out! ... Safely! Unexpectedly quit dialogs are annoying...
connection.cancel()
# Re-raise the error as a GurlError
raise CurlError(-1, str(err))
if connection.error is not None:
# Gurl returned an error
display.display_detail(
"Download error %s: %s",
connection.error.code(),
connection.error.localizedDescription(),
)
if connection.SSLerror:
display_detail("SSL error detail: %s", str(connection.SSLerror))
display_detail("Headers: %s", connection.headers)
raise CurlError(
connection.error.code(), connection.error.localizedDescription()
)
if connection.response is not None and connection.status != 200:
display.display_detail("Status: %s", connection.status)
display.display_detail("Headers: %s", connection.headers)
if connection.redirection != []:
display.display_detail("Redirection: %s", connection.redirection)
connection.headers["http_result_code"] = str(connection.status)
description = NSHTTPURLResponse.localizedStringForStatusCode_(connection.status)
connection.headers["http_result_description"] = description
if str(connection.status).startswith("2"):
return connection.get_response_data()
else:
# there was an HTTP error of some sort.
raise CurlError(
connection.status,
"%s failed, HTTP returncode %s (%s)"
% (
url,
connection.status,
connection.headers.get("http_result_description", "Failed"),
),
)
def get_hardware_info():
"""Uses system profiler to get hardware info for this machine."""
cmd = ["/usr/sbin/system_profiler", "SPHardwareDataType", "-xml"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, dummy_error) = proc.communicate()
try:
plist = FoundationPlist.readPlistFromString(output)
# system_profiler xml is an array
sp_dict = plist[0]
items = sp_dict["_items"]
sp_hardware_dict = items[0]
return sp_hardware_dict
except BaseException:
return {}
def get_long_username(username):
try:
long_name = pwd.getpwnam(username)[4]
except:
long_name = ""
return long_name.decode("utf-8")
def get_uid(username):
try:
uid = pwd.getpwnam(username)[2]
except:
uid = ""
return uid
def get_computername():
cmd = ["/usr/sbin/scutil", "--get", "ComputerName"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_cpuinfo():
cmd = ["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_buildversion():
cmd = ["/usr/bin/sw_vers", "-buildVersion"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
return output.decode("utf-8")
def get_uptime():
cmd = ["/usr/sbin/sysctl", "-n", "kern.boottime"]
proc = subprocess.Popen(
cmd,
shell=False,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
sec = int(re.sub(r".*sec = (\d+),.*", r"\1", output))
up = int(time.time() - sec)
return up if up > 0 else -1
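# Sketch: get_uptime() returns whole seconds since boot (-1 on clock skew),
# so whole days are:
def _example_uptime_days():
    return max(get_uptime(), 0) // 86400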
def set_pref(pref_name, pref_value):
"""Sets a preference, See prefs.py for details."""
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
print "set pref"
try:
CFPreferencesSetValue(
pref_name,
pref_value,
BUNDLE_ID,
kCFPreferencesAnyUser,
kCFPreferencesCurrentHost,
)
CFPreferencesAppSynchronize(BUNDLE_ID)
except Exception:
pass
def pref(pref_name):
"""Return a preference.
See prefs.py for details
"""
pref_value = CFPreferencesCopyAppValue(pref_name, BUNDLE_ID)
return pref_value
def process(serial, items):
"""Process receives a list of items, checks if they need updating and
updates them if necessary."""
# Sanitize serial
serial = "".join([c for c in serial if c.isalnum()])
# Get prefs
baseurl = pref("BaseUrl") or prefs.pref("SoftwareRepoURL") + "/report/"
hashurl = baseurl + "index.php?/report/hash_check"
checkurl = baseurl + "index.php?/report/check_in"
# Get passphrase
passphrase = pref("Passphrase")
# Get hashes for all scripts
for key, i in items.items():
if i.get("path"):
i["hash"] = getmd5hash(i.get("path"))
# Check dict
check = {}
for key, i in items.items():
if i.get("hash"):
check[key] = {"hash": i.get("hash")}
# Send hashes to server
values = {"serial": serial, "items": serialize(check), "passphrase": passphrase}
server_data = curl(hashurl, values)
# Decode response
try:
result = unserialize(server_data)
except Exception, e:
display_error("Could not unserialize server data: %s" % str(e))
display_error("Request: %s" % str(values))
display_error("Response: %s" % str(server_data))
return -1
if result.get("error") != "":
display_error("Server error: %s" % result["error"])
return -1
if result.get("info") != "":
display_detail("Server info: %s" % result["info"])
# Retrieve hashes that need updating
total_size = 0
for i in items.keys():
if i in result:
if items[i].get("path"):
try:
f = open(items[i]["path"], "r")
items[i]["data"] = f.read()
f.close()
except:
display_warning("Can't open %s" % items[i]["path"])
del items[i]
continue
size = len(items[i]["data"])
display_detail("Need to update %s (%s)" % (i, sizeof_fmt(size)))
total_size = total_size + size
else: # delete items that don't have to be uploaded
del items[i]
# Send new files with hashes
if len(items):
display_detail("Sending items (%s)" % sizeof_fmt(total_size))
response = curl(
checkurl,
{"serial": serial, "items": serialize(items), "passphrase": passphrase},
)
display_detail(response)
else:
display_detail("No changes")
def runExternalScriptWithTimeout(
script, allow_insecure=False, script_args=(), timeout=30
):
"""Run a script (e.g. preflight/postflight) and return its exit status.
Args:
script: string path to the script to execute.
allow_insecure: bool; skip the permissions check of the executable.
script_args: args to pass to the script.
Returns:
Tuple. (integer exit status from script, str stdout, str stderr).
Raises:
ScriptNotFoundError: the script was not found at the given path.
RunExternalScriptError: there was an error running the script.
"""
from munkilib import utils
if not os.path.exists(script):
raise ScriptNotFoundError("script does not exist: %s" % script)
if not allow_insecure:
try:
utils.verifyFileOnlyWritableByMunkiAndRoot(script)
except utils.VerifyFilePermissionsError, e:
msg = (
"Skipping execution due to failed file permissions "
"verification: %s\n%s" % (script, str(e))
)
raise utils.RunExternalScriptError(msg)
if os.access(script, os.X_OK):
cmd = [script]
if script_args:
cmd.extend(script_args)
proc = subprocess.Popen(
cmd,
shell=False,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
while timeout > 0:
if proc.poll() is not None:
(stdout, stderr) = proc.communicate()
return (
proc.returncode,
stdout.decode("UTF-8", "replace"),
stderr.decode("UTF-8", "replace"),
)
time.sleep(0.1)
timeout -= 0.1
else:
try:
proc.kill()
except OSError, e:
if e.errno != 3:
raise
raise utils.RunExternalScriptError("%s timed out" % script)
return (0, None, None)
else:
raise utils.RunExternalScriptError("%s not executable" % script)
def rundir(scriptdir, runtype, abort=False, submitscript=""):
"""Run scripts in directory scriptdir runtype is passed to the script if
abort is True, a non-zero exit status will abort munki submitscript is put
at the end of the scriptlist."""
if os.path.exists(scriptdir):
from munkilib import utils
# Get timeout for scripts
scriptTimeOut = 30
if pref("scriptTimeOut"):
scriptTimeOut = int(pref("scriptTimeOut"))
display_detail("# Set custom script timeout to %s seconds" % scriptTimeOut)
# Directory containing the scripts
parentdir = os.path.basename(scriptdir)
display_detail("# Executing scripts in %s" % parentdir)
# Get all files in scriptdir
files = os.listdir(scriptdir)
# Sort files
files.sort()
# Find submit script and stick it on the end of the list
if submitscript:
try:
sub = files.pop(files.index(submitscript))
files.append(sub)
except Exception, e:
display_error("%s not found in %s" % (submitscript, parentdir))
for script in files:
# Skip files that start with a period
if script.startswith("."):
continue
# Concatenate dir and filename
scriptpath = os.path.join(scriptdir, script)
# Skip directories
if os.path.isdir(scriptpath):
continue
try:
# Attempt to execute script
display_detail("Running %s" % script)
result, stdout, stderr = runExternalScriptWithTimeout(
scriptpath,
allow_insecure=False,
script_args=[runtype],
timeout=scriptTimeOut,
)
if stdout:
display_detail(stdout)
if stderr:
display_detail("%s Error: %s" % (script, stderr))
if result:
if abort:
display_detail("Aborted by %s" % script)
exit(1)
else:
display_warning("%s return code: %d" % (script, result))
except utils.ScriptNotFoundError:
pass # Script has disappeared - pass.
except Exception, e:
display_warning("%s: %s" % (script, str(e)))
def sizeof_fmt(num):
for unit in ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"]:
if abs(num) < 1000.0:
return "%.0f%s" % (num, unit)
num /= 1000.0
return "%.1f%s" % (num, "YB")
def gethash(filename, hash_function):
"""Calculates the hashvalue of the given file with the given hash_function.
Args:
filename: The file name to calculate the hash value of.
hash_function: The hash function object to use, which was instantiated
before calling this function, e.g. hashlib.md5().
Returns:
The hashvalue of the given file as hex string.
"""
if not os.path.isfile(filename):
return "NOT A FILE"
fileref = open(filename, "rb")
while 1:
|
fileref.close()
return hash_function.hexdigest()
def getmd5hash(filename):
"""Returns hex of MD5 checksum of a file."""
hash_function = hashlib.md5()
return gethash(filename, hash_function)
def getOsVersion(only_major_minor=True, as_tuple=False):
"""Returns an OS version.
Args:
only_major_minor: Boolean. If True, only include major/minor versions.
as_tuple: Boolean. If True, return a tuple of ints, otherwise a string.
"""
os.environ["SYSTEM_VERSION_COMPAT"] = '0'
cmd = ["/usr/bin/sw_vers -productVersion"]
proc = subprocess.Popen(
cmd,
shell=True,
bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
(output, unused_error) = proc.communicate()
output = output.strip()
os_version_tuple = output.split(".")
if only_major_minor:
os_version_tuple = os_version_tuple[0:2]
if as_tuple:
return tuple(map(int, os_version_tuple))
else:
return ".".join(os_version_tuple)
def getconsoleuser():
"""Return console user."""
cfuser = SCDynamicStoreCopyConsoleUser(None, None, None)
return cfuser[0]
# End of reportcommon
| chunk = fileref.read(2 ** 16)
if not chunk:
break
hash_function.update(chunk) | conditional_block |
regroup_gradient_stops.rs | // svgcleaner could help you to clean up your SVG files
// from unnecessary data.
// Copyright (C) 2012-2018 Evgeniy Reizner
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, write to the Free Software Foundation, Inc.,
// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
use svgdom::{
Document,
ElementType,
Node,
};
use task::short::{EId, AId};
pub fn regroup_gradient_stops(doc: &mut Document) |
fn gen_id(doc: &Document, prefix: &str) -> String {
let mut n = 1;
let mut s = String::new();
loop {
s.clear();
s.push_str(prefix);
s.push_str(&n.to_string());
// TODO: very slow
if !doc.descendants().any(|n| *n.id() == s) {
break;
}
n += 1;
}
s
}
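// Hedged sketch (same Document::from_str API as the tests below): gen_id
// probes "lg1", "lg2", ... until an id is unused.
//
// let doc = Document::from_str("<svg><linearGradient id='lg1'/></svg>").unwrap();
// assert_eq!(gen_id(&doc, "lg"), "lg2");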
#[cfg(test)]
mod tests {
use super::*;
use svgdom::{Document, ToStringWithOptions};
use task;
macro_rules! test {
($name:ident, $in_text:expr, $out_text:expr) => (
#[test]
fn $name() {
let mut doc = Document::from_str($in_text).unwrap();
task::resolve_linear_gradient_attributes(&doc);
task::resolve_radial_gradient_attributes(&doc);
task::resolve_stop_attributes(&doc).unwrap();
regroup_gradient_stops(&mut doc);
assert_eq_text!(doc.to_string_with_opt(&write_opt_for_tests!()), $out_text);
}
)
}
test!(rm_1,
"<svg>
<linearGradient id='lg1' x1='50'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg2' x1='100'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
</svg>",
"<svg>
<linearGradient id='lg3'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg1' x1='50' xlink:href='#lg3'/>
<linearGradient id='lg2' x1='100' xlink:href='#lg3'/>
</svg>
");
test!(rm_2,
"<svg>
<linearGradient id='lg1' x1='50'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg3' x1='50'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg2' x1='100'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg4' x1='100'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
</svg>",
"<svg>
<linearGradient id='lg5'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg1' x1='50' xlink:href='#lg5'/>
<linearGradient id='lg6'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg3' x1='50' xlink:href='#lg6'/>
<linearGradient id='lg2' x1='100' xlink:href='#lg5'/>
<linearGradient id='lg4' x1='100' xlink:href='#lg6'/>
</svg>
");
}
| {
let mut nodes: Vec<Node> = doc.descendants()
.filter(|n| n.is_gradient())
.filter(|n| n.has_children())
.filter(|n| !n.has_attribute(AId::XlinkHref))
.collect();
let mut is_changed = false;
let mut join_nodes = Vec::new();
// TODO: join with rm_dupl_defs::rm_loop
let mut i1 = 0;
while i1 < nodes.len() {
let mut node1 = nodes[i1].clone();
let mut i2 = i1 + 1;
while i2 < nodes.len() {
let node2 = nodes[i2].clone();
i2 += 1;
if super::rm_dupl_defs::is_equal_stops(&node1, &node2) {
join_nodes.push(node2.clone());
nodes.remove(i2 - 1);
i2 -= 1;
}
}
if !join_nodes.is_empty() {
is_changed = true;
let mut new_lg = doc.create_element(EId::LinearGradient);
let new_id = gen_id(doc, "lg");
new_lg.set_id(new_id);
while node1.has_children() {
let mut c = node1.first_child().unwrap();
c.detach();
new_lg.append(&c);
}
node1.set_attribute((AId::XlinkHref, new_lg.clone()));
node1.insert_before(&new_lg);
for jn in &mut join_nodes {
while jn.has_children() {
let mut c = jn.first_child().unwrap();
c.remove();
}
jn.set_attribute((AId::XlinkHref, new_lg.clone()));
}
join_nodes.clear();
}
i1 += 1;
}
if is_changed {
// We must resolve attributes for gradients created above.
super::resolve_linear_gradient_attributes(doc);
}
} | identifier_body |
regroup_gradient_stops.rs | // svgcleaner could help you to clean up your SVG files
// from unnecessary data.
// Copyright (C) 2012-2018 Evgeniy Reizner
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, write to the Free Software Foundation, Inc.,
// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
use svgdom::{
Document,
ElementType,
Node,
};
use task::short::{EId, AId};
pub fn | (doc: &mut Document) {
let mut nodes: Vec<Node> = doc.descendants()
.filter(|n| n.is_gradient())
.filter(|n| n.has_children())
.filter(|n| !n.has_attribute(AId::XlinkHref))
.collect();
let mut is_changed = false;
let mut join_nodes = Vec::new();
// TODO: join with rm_dupl_defs::rm_loop
let mut i1 = 0;
while i1 < nodes.len() {
let mut node1 = nodes[i1].clone();
let mut i2 = i1 + 1;
while i2 < nodes.len() {
let node2 = nodes[i2].clone();
i2 += 1;
if super::rm_dupl_defs::is_equal_stops(&node1, &node2) {
join_nodes.push(node2.clone());
nodes.remove(i2 - 1);
i2 -= 1;
}
}
if !join_nodes.is_empty() {
is_changed = true;
let mut new_lg = doc.create_element(EId::LinearGradient);
let new_id = gen_id(doc, "lg");
new_lg.set_id(new_id);
while node1.has_children() {
let mut c = node1.first_child().unwrap();
c.detach();
new_lg.append(&c);
}
node1.set_attribute((AId::XlinkHref, new_lg.clone()));
node1.insert_before(&new_lg);
for jn in &mut join_nodes {
while jn.has_children() {
let mut c = jn.first_child().unwrap();
c.remove();
}
jn.set_attribute((AId::XlinkHref, new_lg.clone()));
}
join_nodes.clear();
}
i1 += 1;
}
if is_changed {
// We must resolve attributes for gradients created above.
super::resolve_linear_gradient_attributes(doc);
}
}
fn gen_id(doc: &Document, prefix: &str) -> String {
let mut n = 1;
let mut s = String::new();
loop {
s.clear();
s.push_str(prefix);
s.push_str(&n.to_string());
// TODO: very slow
if !doc.descendants().any(|n| *n.id() == s) {
break;
}
n += 1;
}
s
}
#[cfg(test)]
mod tests {
use super::*;
use svgdom::{Document, ToStringWithOptions};
use task;
macro_rules! test {
($name:ident, $in_text:expr, $out_text:expr) => (
#[test]
fn $name() {
let mut doc = Document::from_str($in_text).unwrap();
task::resolve_linear_gradient_attributes(&doc);
task::resolve_radial_gradient_attributes(&doc);
task::resolve_stop_attributes(&doc).unwrap();
regroup_gradient_stops(&mut doc);
assert_eq_text!(doc.to_string_with_opt(&write_opt_for_tests!()), $out_text);
}
)
}
test!(rm_1,
"<svg>
<linearGradient id='lg1' x1='50'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg2' x1='100'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
</svg>",
"<svg>
<linearGradient id='lg3'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg1' x1='50' xlink:href='#lg3'/>
<linearGradient id='lg2' x1='100' xlink:href='#lg3'/>
</svg>
");
test!(rm_2,
"<svg>
<linearGradient id='lg1' x1='50'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg3' x1='50'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg2' x1='100'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg4' x1='100'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
</svg>",
"<svg>
<linearGradient id='lg5'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg1' x1='50' xlink:href='#lg5'/>
<linearGradient id='lg6'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg3' x1='50' xlink:href='#lg6'/>
<linearGradient id='lg2' x1='100' xlink:href='#lg5'/>
<linearGradient id='lg4' x1='100' xlink:href='#lg6'/>
</svg>
");
}
| regroup_gradient_stops | identifier_name |
regroup_gradient_stops.rs | // svgcleaner could help you to clean up your SVG files
// from unnecessary data.
// Copyright (C) 2012-2018 Evgeniy Reizner
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, write to the Free Software Foundation, Inc.,
// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
use svgdom::{
Document,
ElementType,
Node,
};
use task::short::{EId, AId};
pub fn regroup_gradient_stops(doc: &mut Document) {
let mut nodes: Vec<Node> = doc.descendants()
.filter(|n| n.is_gradient())
.filter(|n| n.has_children())
.filter(|n| !n.has_attribute(AId::XlinkHref))
.collect();
let mut is_changed = false;
let mut join_nodes = Vec::new();
// TODO: join with rm_dupl_defs::rm_loop
let mut i1 = 0;
while i1 < nodes.len() {
let mut node1 = nodes[i1].clone();
let mut i2 = i1 + 1;
while i2 < nodes.len() {
let node2 = nodes[i2].clone();
i2 += 1;
if super::rm_dupl_defs::is_equal_stops(&node1, &node2) {
join_nodes.push(node2.clone());
nodes.remove(i2 - 1);
i2 -= 1;
}
}
if !join_nodes.is_empty() {
is_changed = true;
let mut new_lg = doc.create_element(EId::LinearGradient);
let new_id = gen_id(doc, "lg");
new_lg.set_id(new_id);
while node1.has_children() {
let mut c = node1.first_child().unwrap();
c.detach();
new_lg.append(&c);
}
node1.set_attribute((AId::XlinkHref, new_lg.clone()));
node1.insert_before(&new_lg);
for jn in &mut join_nodes {
while jn.has_children() {
let mut c = jn.first_child().unwrap();
c.remove();
}
jn.set_attribute((AId::XlinkHref, new_lg.clone()));
}
join_nodes.clear();
}
i1 += 1;
}
if is_changed {
// We must resolve attributes for gradients created above.
super::resolve_linear_gradient_attributes(doc);
}
}
fn gen_id(doc: &Document, prefix: &str) -> String {
let mut n = 1;
let mut s = String::new();
loop {
s.clear();
s.push_str(prefix);
s.push_str(&n.to_string());
// TODO: very slow
if !doc.descendants().any(|n| *n.id() == s) |
n += 1;
}
s
}
#[cfg(test)]
mod tests {
use super::*;
use svgdom::{Document, ToStringWithOptions};
use task;
macro_rules! test {
($name:ident, $in_text:expr, $out_text:expr) => (
#[test]
fn $name() {
let mut doc = Document::from_str($in_text).unwrap();
task::resolve_linear_gradient_attributes(&doc);
task::resolve_radial_gradient_attributes(&doc);
task::resolve_stop_attributes(&doc).unwrap();
regroup_gradient_stops(&mut doc);
assert_eq_text!(doc.to_string_with_opt(&write_opt_for_tests!()), $out_text);
}
)
}
test!(rm_1,
"<svg>
<linearGradient id='lg1' x1='50'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg2' x1='100'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
</svg>",
"<svg>
<linearGradient id='lg3'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg1' x1='50' xlink:href='#lg3'/>
<linearGradient id='lg2' x1='100' xlink:href='#lg3'/>
</svg>
");
test!(rm_2,
"<svg>
<linearGradient id='lg1' x1='50'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg3' x1='50'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg2' x1='100'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg4' x1='100'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
</svg>",
"<svg>
<linearGradient id='lg5'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg1' x1='50' xlink:href='#lg5'/>
<linearGradient id='lg6'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg3' x1='50' xlink:href='#lg6'/>
<linearGradient id='lg2' x1='100' xlink:href='#lg5'/>
<linearGradient id='lg4' x1='100' xlink:href='#lg6'/>
</svg>
");
}
| {
break;
} | conditional_block |
regroup_gradient_stops.rs | // svgcleaner could help you to clean up your SVG files
// from unnecessary data.
// Copyright (C) 2012-2018 Evgeniy Reizner
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, write to the Free Software Foundation, Inc.,
// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
use svgdom::{
Document,
ElementType,
Node,
};
use task::short::{EId, AId};
pub fn regroup_gradient_stops(doc: &mut Document) {
let mut nodes: Vec<Node> = doc.descendants()
.filter(|n| n.is_gradient())
.filter(|n| n.has_children())
.filter(|n| !n.has_attribute(AId::XlinkHref))
.collect();
let mut is_changed = false;
let mut join_nodes = Vec::new();
// TODO: join with rm_dupl_defs::rm_loop
let mut i1 = 0;
while i1 < nodes.len() {
let mut node1 = nodes[i1].clone();
let mut i2 = i1 + 1;
while i2 < nodes.len() {
let node2 = nodes[i2].clone();
i2 += 1;
if super::rm_dupl_defs::is_equal_stops(&node1, &node2) {
join_nodes.push(node2.clone());
nodes.remove(i2 - 1);
i2 -= 1;
}
}
if !join_nodes.is_empty() {
is_changed = true;
let mut new_lg = doc.create_element(EId::LinearGradient);
let new_id = gen_id(doc, "lg");
new_lg.set_id(new_id);
while node1.has_children() {
let mut c = node1.first_child().unwrap();
c.detach();
new_lg.append(&c);
}
node1.set_attribute((AId::XlinkHref, new_lg.clone()));
node1.insert_before(&new_lg);
for jn in &mut join_nodes {
while jn.has_children() {
let mut c = jn.first_child().unwrap();
c.remove();
}
jn.set_attribute((AId::XlinkHref, new_lg.clone()));
}
join_nodes.clear();
}
i1 += 1;
}
if is_changed {
// We must resolve attributes for gradients created above.
super::resolve_linear_gradient_attributes(doc);
}
}
fn gen_id(doc: &Document, prefix: &str) -> String {
let mut n = 1;
let mut s = String::new();
loop {
s.clear();
s.push_str(prefix);
s.push_str(&n.to_string());
// TODO: very slow
if !doc.descendants().any(|n| *n.id() == s) {
break;
}
n += 1;
}
s
}
#[cfg(test)]
mod tests {
use super::*;
use svgdom::{Document, ToStringWithOptions};
use task;
macro_rules! test {
($name:ident, $in_text:expr, $out_text:expr) => (
#[test]
fn $name() {
let mut doc = Document::from_str($in_text).unwrap();
task::resolve_linear_gradient_attributes(&doc);
task::resolve_radial_gradient_attributes(&doc);
task::resolve_stop_attributes(&doc).unwrap(); | )
}
test!(rm_1,
"<svg>
<linearGradient id='lg1' x1='50'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg2' x1='100'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
</svg>",
"<svg>
<linearGradient id='lg3'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg1' x1='50' xlink:href='#lg3'/>
<linearGradient id='lg2' x1='100' xlink:href='#lg3'/>
</svg>
");
test!(rm_2,
"<svg>
<linearGradient id='lg1' x1='50'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg3' x1='50'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg2' x1='100'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg4' x1='100'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
</svg>",
"<svg>
<linearGradient id='lg5'>
<stop offset='0'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg1' x1='50' xlink:href='#lg5'/>
<linearGradient id='lg6'>
<stop offset='0.5'/>
<stop offset='1'/>
</linearGradient>
<linearGradient id='lg3' x1='50' xlink:href='#lg6'/>
<linearGradient id='lg2' x1='100' xlink:href='#lg5'/>
<linearGradient id='lg4' x1='100' xlink:href='#lg6'/>
</svg>
");
} | regroup_gradient_stops(&mut doc);
assert_eq_text!(doc.to_string_with_opt(&write_opt_for_tests!()), $out_text);
} | random_line_split |
document_loader.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tracking of pending loads in a document.
//! https://html.spec.whatwg.org/multipage/#the-end
use dom::bindings::js::JS;
use dom::document::Document;
use ipc_channel::ipc::IpcSender;
use net_traits::{CoreResourceMsg, FetchResponseMsg, ResourceThreads, IpcSend};
use net_traits::request::RequestInit;
use std::thread;
use url::Url;
#[derive(JSTraceable, PartialEq, Clone, Debug, HeapSizeOf)]
pub enum LoadType {
Image(Url),
Script(Url),
Subframe(Url),
Stylesheet(Url),
PageSource(Url),
Media(Url),
}
impl LoadType {
fn url(&self) -> &Url {
match *self {
LoadType::Image(ref url) |
LoadType::Script(ref url) |
LoadType::Subframe(ref url) |
LoadType::Stylesheet(ref url) |
LoadType::Media(ref url) |
LoadType::PageSource(ref url) => url,
}
}
}
/// Canary value ensuring that manually added blocking loads (i.e. ones that weren't
/// created via DocumentLoader::fetch_async) are always removed by the time
/// that the owner is destroyed.
#[derive(JSTraceable, HeapSizeOf)]
#[must_root]
pub struct LoadBlocker {
/// The document whose load event is blocked by this object existing.
doc: JS<Document>,
/// The load that is blocking the document's load event.
load: Option<LoadType>,
}
impl LoadBlocker {
/// Mark the document's load event as blocked on this new load.
pub fn new(doc: &Document, load: LoadType) -> LoadBlocker {
doc.mut_loader().add_blocking_load(load.clone());
LoadBlocker {
doc: JS::from_ref(doc),
load: Some(load),
}
}
/// Remove this load from the associated document's list of blocking loads.
pub fn terminate(blocker: &mut Option<LoadBlocker>) {
if let Some(this) = blocker.as_mut() {
this.doc.finish_load(this.load.take().unwrap());
}
*blocker = None;
}
/// Return the url associated with this load.
pub fn url(&self) -> Option<&Url> {
self.load.as_ref().map(LoadType::url)
}
}
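// Hedged usage sketch (Document and Url construction elided; names are from
// this module): a caller keeps the blocker alive while the load is in flight.
//
// let mut blocker = Some(LoadBlocker::new(&document, LoadType::Image(url)));
// /* ...fetch completes... */
// LoadBlocker::terminate(&mut blocker);
// debug_assert!(blocker.is_none());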
impl Drop for LoadBlocker {
fn drop(&mut self) {
if !thread::panicking() {
debug_assert!(self.load.is_none());
}
}
}
#[derive(JSTraceable, HeapSizeOf)]
pub struct DocumentLoader {
resource_threads: ResourceThreads,
blocking_loads: Vec<LoadType>,
events_inhibited: bool,
}
impl DocumentLoader {
pub fn new(existing: &DocumentLoader) -> DocumentLoader {
DocumentLoader::new_with_threads(existing.resource_threads.clone(), None)
}
pub fn new_with_threads(resource_threads: ResourceThreads,
initial_load: Option<Url>) -> DocumentLoader |
/// Add a load to the list of blocking loads.
fn add_blocking_load(&mut self, load: LoadType) {
debug!("Adding blocking load {:?} ({}).", load, self.blocking_loads.len());
self.blocking_loads.push(load);
}
/// Initiate a new fetch.
pub fn fetch_async(&mut self,
load: LoadType,
request: RequestInit,
fetch_target: IpcSender<FetchResponseMsg>) {
self.add_blocking_load(load);
self.resource_threads.sender().send(CoreResourceMsg::Fetch(request, fetch_target)).unwrap();
}
/// Mark an in-progress network request complete.
pub fn finish_load(&mut self, load: &LoadType) {
debug!("Removing blocking load {:?} ({}).", load, self.blocking_loads.len());
let idx = self.blocking_loads.iter().position(|unfinished| *unfinished == *load);
self.blocking_loads.remove(idx.expect(&format!("unknown completed load {:?}", load)));
}
pub fn is_blocked(&self) -> bool {
// TODO: Ensure that we report blocked if parsing is still ongoing.
!self.blocking_loads.is_empty()
}
pub fn inhibit_events(&mut self) {
self.events_inhibited = true;
}
pub fn events_inhibited(&self) -> bool {
self.events_inhibited
}
pub fn resource_threads(&self) -> &ResourceThreads {
&self.resource_threads
}
}
| {
debug!("Initial blocking load {:?}.", initial_load);
let initial_loads = initial_load.into_iter().map(LoadType::PageSource).collect();
DocumentLoader {
resource_threads: resource_threads,
blocking_loads: initial_loads,
events_inhibited: false,
}
} | identifier_body |
document_loader.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tracking of pending loads in a document.
//! https://html.spec.whatwg.org/multipage/#the-end
use dom::bindings::js::JS;
use dom::document::Document;
use ipc_channel::ipc::IpcSender;
use net_traits::{CoreResourceMsg, FetchResponseMsg, ResourceThreads, IpcSend};
use net_traits::request::RequestInit;
use std::thread;
use url::Url;
#[derive(JSTraceable, PartialEq, Clone, Debug, HeapSizeOf)]
pub enum LoadType {
Image(Url),
Script(Url),
Subframe(Url),
Stylesheet(Url),
PageSource(Url),
Media(Url),
}
impl LoadType {
fn url(&self) -> &Url {
match *self {
LoadType::Image(ref url) |
LoadType::Script(ref url) |
LoadType::Subframe(ref url) |
LoadType::Stylesheet(ref url) |
LoadType::Media(ref url) |
LoadType::PageSource(ref url) => url,
}
}
}
/// Canary value ensuring that manually added blocking loads (i.e. ones that weren't
/// created via DocumentLoader::fetch_async) are always removed by the time
/// that the owner is destroyed.
#[derive(JSTraceable, HeapSizeOf)]
#[must_root]
pub struct LoadBlocker {
/// The document whose load event is blocked by this object existing.
doc: JS<Document>,
/// The load that is blocking the document's load event.
load: Option<LoadType>,
}
impl LoadBlocker {
/// Mark the document's load event as blocked on this new load.
pub fn new(doc: &Document, load: LoadType) -> LoadBlocker {
doc.mut_loader().add_blocking_load(load.clone());
LoadBlocker {
doc: JS::from_ref(doc),
load: Some(load),
}
}
/// Remove this load from the associated document's list of blocking loads.
pub fn terminate(blocker: &mut Option<LoadBlocker>) {
if let Some(this) = blocker.as_mut() {
this.doc.finish_load(this.load.take().unwrap());
}
*blocker = None;
}
/// Return the url associated with this load.
pub fn url(&self) -> Option<&Url> {
self.load.as_ref().map(LoadType::url)
}
}
impl Drop for LoadBlocker {
fn drop(&mut self) {
if !thread::panicking() |
}
}
#[derive(JSTraceable, HeapSizeOf)]
pub struct DocumentLoader {
resource_threads: ResourceThreads,
blocking_loads: Vec<LoadType>,
events_inhibited: bool,
}
impl DocumentLoader {
pub fn new(existing: &DocumentLoader) -> DocumentLoader {
DocumentLoader::new_with_threads(existing.resource_threads.clone(), None)
}
pub fn new_with_threads(resource_threads: ResourceThreads,
initial_load: Option<Url>) -> DocumentLoader {
debug!("Initial blocking load {:?}.", initial_load);
let initial_loads = initial_load.into_iter().map(LoadType::PageSource).collect();
DocumentLoader {
resource_threads: resource_threads,
blocking_loads: initial_loads,
events_inhibited: false,
}
}
/// Add a load to the list of blocking loads.
fn add_blocking_load(&mut self, load: LoadType) {
debug!("Adding blocking load {:?} ({}).", load, self.blocking_loads.len());
self.blocking_loads.push(load);
}
/// Initiate a new fetch.
pub fn fetch_async(&mut self,
load: LoadType,
request: RequestInit,
fetch_target: IpcSender<FetchResponseMsg>) {
self.add_blocking_load(load);
self.resource_threads.sender().send(CoreResourceMsg::Fetch(request, fetch_target)).unwrap();
}
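// Hedged sketch (channel plumbing and RequestInit fields elided; assumes the
// ipc_channel API used elsewhere in Servo):
//
// let (sender, receiver) = ipc::channel().unwrap();
// loader.fetch_async(LoadType::Image(url.clone()), request, sender);
// // FetchResponseMsgs arrive on `receiver`; the owner calls finish_load()
// // when the response completes, unblocking the document's load event.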
/// Mark an in-progress network request complete.
pub fn finish_load(&mut self, load: &LoadType) {
debug!("Removing blocking load {:?} ({}).", load, self.blocking_loads.len());
let idx = self.blocking_loads.iter().position(|unfinished| *unfinished == *load);
self.blocking_loads.remove(idx.expect(&format!("unknown completed load {:?}", load)));
}
pub fn is_blocked(&self) -> bool {
// TODO: Ensure that we report blocked if parsing is still ongoing.
!self.blocking_loads.is_empty()
}
pub fn inhibit_events(&mut self) {
self.events_inhibited = true;
}
pub fn events_inhibited(&self) -> bool {
self.events_inhibited
}
pub fn resource_threads(&self) -> &ResourceThreads {
&self.resource_threads
}
}
| {
debug_assert!(self.load.is_none());
} | conditional_block |
document_loader.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tracking of pending loads in a document.
//! https://html.spec.whatwg.org/multipage/#the-end
use dom::bindings::js::JS;
use dom::document::Document;
use ipc_channel::ipc::IpcSender;
use net_traits::{CoreResourceMsg, FetchResponseMsg, ResourceThreads, IpcSend};
use net_traits::request::RequestInit;
use std::thread;
use url::Url;
#[derive(JSTraceable, PartialEq, Clone, Debug, HeapSizeOf)]
pub enum LoadType {
Image(Url),
Script(Url),
Subframe(Url),
Stylesheet(Url),
PageSource(Url),
Media(Url),
}
impl LoadType {
fn url(&self) -> &Url {
match *self {
LoadType::Image(ref url) |
LoadType::Script(ref url) |
LoadType::Subframe(ref url) |
LoadType::Stylesheet(ref url) |
LoadType::Media(ref url) |
LoadType::PageSource(ref url) => url,
}
}
}
/// Canary value ensuring that manually added blocking loads (i.e. ones that weren't
/// created via DocumentLoader::fetch_async) are always removed by the time
/// that the owner is destroyed.
#[derive(JSTraceable, HeapSizeOf)]
#[must_root]
pub struct LoadBlocker {
/// The document whose load event is blocked by this object existing.
doc: JS<Document>,
/// The load that is blocking the document's load event.
load: Option<LoadType>,
}
impl LoadBlocker {
/// Mark the document's load event as blocked on this new load. | doc: JS::from_ref(doc),
load: Some(load),
}
}
/// Remove this load from the associated document's list of blocking loads.
pub fn terminate(blocker: &mut Option<LoadBlocker>) {
if let Some(this) = blocker.as_mut() {
this.doc.finish_load(this.load.take().unwrap());
}
*blocker = None;
}
/// Return the url associated with this load.
pub fn url(&self) -> Option<&Url> {
self.load.as_ref().map(LoadType::url)
}
}
impl Drop for LoadBlocker {
fn drop(&mut self) {
if !thread::panicking() {
debug_assert!(self.load.is_none());
}
}
}
#[derive(JSTraceable, HeapSizeOf)]
pub struct DocumentLoader {
resource_threads: ResourceThreads,
blocking_loads: Vec<LoadType>,
events_inhibited: bool,
}
impl DocumentLoader {
pub fn new(existing: &DocumentLoader) -> DocumentLoader {
DocumentLoader::new_with_threads(existing.resource_threads.clone(), None)
}
pub fn new_with_threads(resource_threads: ResourceThreads,
initial_load: Option<Url>) -> DocumentLoader {
debug!("Initial blocking load {:?}.", initial_load);
let initial_loads = initial_load.into_iter().map(LoadType::PageSource).collect();
DocumentLoader {
resource_threads: resource_threads,
blocking_loads: initial_loads,
events_inhibited: false,
}
}
/// Add a load to the list of blocking loads.
fn add_blocking_load(&mut self, load: LoadType) {
debug!("Adding blocking load {:?} ({}).", load, self.blocking_loads.len());
self.blocking_loads.push(load);
}
/// Initiate a new fetch.
pub fn fetch_async(&mut self,
load: LoadType,
request: RequestInit,
fetch_target: IpcSender<FetchResponseMsg>) {
self.add_blocking_load(load);
self.resource_threads.sender().send(CoreResourceMsg::Fetch(request, fetch_target)).unwrap();
}
/// Mark an in-progress network request complete.
pub fn finish_load(&mut self, load: &LoadType) {
debug!("Removing blocking load {:?} ({}).", load, self.blocking_loads.len());
let idx = self.blocking_loads.iter().position(|unfinished| *unfinished == *load);
self.blocking_loads.remove(idx.expect(&format!("unknown completed load {:?}", load)));
}
pub fn is_blocked(&self) -> bool {
// TODO: Ensure that we report blocked if parsing is still ongoing.
!self.blocking_loads.is_empty()
}
pub fn inhibit_events(&mut self) {
self.events_inhibited = true;
}
pub fn events_inhibited(&self) -> bool {
self.events_inhibited
}
pub fn resource_threads(&self) -> &ResourceThreads {
&self.resource_threads
}
} | pub fn new(doc: &Document, load: LoadType) -> LoadBlocker {
doc.mut_loader().add_blocking_load(load.clone());
LoadBlocker { | random_line_split |
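The row above captures Servo's LoadBlocker in full, which makes it a good place to see the canary pattern it encodes: a load is registered when the blocker is created, must be removed through terminate(), and Drop asserts that the removal happened. A minimal standalone sketch of that pattern; every type here (Load, Loader, Blocker) is a simplified stand-in invented for illustration, not Servo's real API:

// Standalone sketch of the canary pattern used by LoadBlocker above:
// the blocker must be terminated explicitly, and Drop asserts that it was.
#[derive(Debug, PartialEq, Clone)]
enum Load { Script(String) }

struct Loader { blocking: Vec<Load> }

impl Loader {
    fn add(&mut self, l: Load) { self.blocking.push(l); }
    fn finish(&mut self, l: &Load) {
        let i = self.blocking.iter().position(|u| u == l).expect("unknown load");
        self.blocking.remove(i);
    }
}

struct Blocker { load: Option<Load> }

impl Blocker {
    fn new(loader: &mut Loader, l: Load) -> Blocker {
        loader.add(l.clone());
        Blocker { load: Some(l) }
    }
    fn terminate(loader: &mut Loader, b: &mut Option<Blocker>) {
        if let Some(this) = b.as_mut() {
            loader.finish(&this.load.take().unwrap());
        }
        *b = None;
    }
}

impl Drop for Blocker {
    fn drop(&mut self) {
        // Mirrors the debug_assert! above: dropping a live blocker is a bug.
        if !std::thread::panicking() {
            debug_assert!(self.load.is_none());
        }
    }
}

fn main() {
    let mut loader = Loader { blocking: vec![] };
    let mut b = Some(Blocker::new(&mut loader, Load::Script("app.js".into())));
    assert!(!loader.blocking.is_empty());      // document load event is blocked
    Blocker::terminate(&mut loader, &mut b);   // must run before the owner dies
    assert!(loader.blocking.is_empty());
}

The Drop assertion is what makes the canary useful: forgetting to terminate a blocker fails loudly in debug builds instead of silently blocking the load event forever.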
document_loader.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tracking of pending loads in a document.
//! https://html.spec.whatwg.org/multipage/#the-end
use dom::bindings::js::JS;
use dom::document::Document;
use ipc_channel::ipc::IpcSender;
use net_traits::{CoreResourceMsg, FetchResponseMsg, ResourceThreads, IpcSend};
use net_traits::request::RequestInit;
use std::thread;
use url::Url;
#[derive(JSTraceable, PartialEq, Clone, Debug, HeapSizeOf)]
pub enum LoadType {
Image(Url),
Script(Url),
Subframe(Url),
Stylesheet(Url),
PageSource(Url),
Media(Url),
}
impl LoadType {
fn url(&self) -> &Url {
match *self {
LoadType::Image(ref url) |
LoadType::Script(ref url) |
LoadType::Subframe(ref url) |
LoadType::Stylesheet(ref url) |
LoadType::Media(ref url) |
LoadType::PageSource(ref url) => url,
}
}
}
/// Canary value ensuring that manually added blocking loads (i.e. ones that weren't
/// created via DocumentLoader::fetch_async) are always removed by the time
/// that the owner is destroyed.
#[derive(JSTraceable, HeapSizeOf)]
#[must_root]
pub struct LoadBlocker {
/// The document whose load event is blocked by this object existing.
doc: JS<Document>,
/// The load that is blocking the document's load event.
load: Option<LoadType>,
}
impl LoadBlocker {
/// Mark the document's load event as blocked on this new load.
pub fn new(doc: &Document, load: LoadType) -> LoadBlocker {
doc.mut_loader().add_blocking_load(load.clone());
LoadBlocker {
doc: JS::from_ref(doc),
load: Some(load),
}
}
/// Remove this load from the associated document's list of blocking loads.
pub fn terminate(blocker: &mut Option<LoadBlocker>) {
if let Some(this) = blocker.as_mut() {
this.doc.finish_load(this.load.take().unwrap());
}
*blocker = None;
}
/// Return the URL associated with this load.
pub fn url(&self) -> Option<&Url> {
self.load.as_ref().map(LoadType::url)
}
}
impl Drop for LoadBlocker {
fn drop(&mut self) {
if !thread::panicking() {
debug_assert!(self.load.is_none());
}
}
}
#[derive(JSTraceable, HeapSizeOf)]
pub struct DocumentLoader {
resource_threads: ResourceThreads,
blocking_loads: Vec<LoadType>,
events_inhibited: bool,
}
impl DocumentLoader {
pub fn | (existing: &DocumentLoader) -> DocumentLoader {
DocumentLoader::new_with_threads(existing.resource_threads.clone(), None)
}
pub fn new_with_threads(resource_threads: ResourceThreads,
initial_load: Option<Url>) -> DocumentLoader {
debug!("Initial blocking load {:?}.", initial_load);
let initial_loads = initial_load.into_iter().map(LoadType::PageSource).collect();
DocumentLoader {
resource_threads: resource_threads,
blocking_loads: initial_loads,
events_inhibited: false,
}
}
/// Add a load to the list of blocking loads.
fn add_blocking_load(&mut self, load: LoadType) {
debug!("Adding blocking load {:?} ({}).", load, self.blocking_loads.len());
self.blocking_loads.push(load);
}
/// Initiate a new fetch.
pub fn fetch_async(&mut self,
load: LoadType,
request: RequestInit,
fetch_target: IpcSender<FetchResponseMsg>) {
self.add_blocking_load(load);
self.resource_threads.sender().send(CoreResourceMsg::Fetch(request, fetch_target)).unwrap();
}
/// Mark an in-progress network request complete.
pub fn finish_load(&mut self, load: &LoadType) {
debug!("Removing blocking load {:?} ({}).", load, self.blocking_loads.len());
let idx = self.blocking_loads.iter().position(|unfinished| *unfinished == *load);
self.blocking_loads.remove(idx.expect(&format!("unknown completed load {:?}", load)));
}
pub fn is_blocked(&self) -> bool {
// TODO: Ensure that we report blocked if parsing is still ongoing.
!self.blocking_loads.is_empty()
}
pub fn inhibit_events(&mut self) {
self.events_inhibited = true;
}
pub fn events_inhibited(&self) -> bool {
self.events_inhibited
}
pub fn resource_threads(&self) -> &ResourceThreads {
&self.resource_threads
}
}
| new | identifier_name |
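This row masks the identifier `new`; the surrounding code also shows the or-pattern idiom that LoadType::url relies on, where several enum variants carrying the same payload type are bound by a single match arm. A small self-contained sketch of that idiom (types invented for the example):

// Several variants share a String payload, so one arm can bind them all.
enum Resource {
    Image(String),
    Script(String),
    Stylesheet(String),
}

fn url(r: &Resource) -> &String {
    match *r {
        Resource::Image(ref u) |
        Resource::Script(ref u) |
        Resource::Stylesheet(ref u) => u,
    }
}

fn main() {
    let r = Resource::Script("https://example.com/app.js".to_string());
    assert_eq!(url(&r), "https://example.com/app.js");
}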
test_rowcount.py | from sqlalchemy import *
from sqlalchemy.test import *
class FoundRowsTest(TestBase, AssertsExecutionResults):
"""tests rowcount functionality"""
__requires__ = ('sane_rowcount', )
@classmethod
def | (cls):
global employees_table, metadata
metadata = MetaData(testing.db)
employees_table = Table('employees', metadata,
Column('employee_id', Integer,
Sequence('employee_id_seq', optional=True),
primary_key=True),
Column('name', String(50)),
Column('department', String(1)),
)
metadata.create_all()
def setup(self):
global data
data = [ ('Angela', 'A'),
('Andrew', 'A'),
('Anand', 'A'),
('Bob', 'B'),
('Bobette', 'B'),
('Buffy', 'B'),
('Charlie', 'C'),
('Cynthia', 'C'),
('Chris', 'C') ]
i = employees_table.insert()
i.execute(*[{'name':n, 'department':d} for n, d in data])
def teardown(self):
employees_table.delete().execute()
@classmethod
def teardown_class(cls):
metadata.drop_all()
def testbasic(self):
s = employees_table.select()
r = s.execute().fetchall()
assert len(r) == len(data)
def test_update_rowcount1(self):
# WHERE matches 3, 3 rows changed
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='Z')
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
def test_update_rowcount2(self):
# WHERE matches 3, 0 rows changed
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='C')
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
def test_delete_rowcount(self):
# WHERE matches 3, 3 rows deleted
department = employees_table.c.department
r = employees_table.delete(department=='C').execute()
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
| setup_class | identifier_name |
test_rowcount.py | from sqlalchemy import *
from sqlalchemy.test import *
class FoundRowsTest(TestBase, AssertsExecutionResults):
"""tests rowcount functionality"""
__requires__ = ('sane_rowcount', )
@classmethod
def setup_class(cls):
global employees_table, metadata
metadata = MetaData(testing.db)
employees_table = Table('employees', metadata,
Column('employee_id', Integer,
Sequence('employee_id_seq', optional=True),
primary_key=True),
Column('name', String(50)),
Column('department', String(1)),
)
metadata.create_all()
def setup(self):
global data
data = [ ('Angela', 'A'),
('Andrew', 'A'),
('Anand', 'A'),
('Bob', 'B'),
('Bobette', 'B'),
('Buffy', 'B'),
('Charlie', 'C'),
('Cynthia', 'C'),
('Chris', 'C') ]
i = employees_table.insert()
i.execute(*[{'name':n, 'department':d} for n, d in data])
def teardown(self):
employees_table.delete().execute()
|
def testbasic(self):
s = employees_table.select()
r = s.execute().fetchall()
assert len(r) == len(data)
def test_update_rowcount1(self):
# WHERE matches 3, 3 rows changed
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='Z')
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
def test_update_rowcount2(self):
# WHERE matches 3, 0 rows changed
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='C')
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
def test_delete_rowcount(self):
# WHERE matches 3, 3 rows deleted
department = employees_table.c.department
r = employees_table.delete(department=='C').execute()
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3 | @classmethod
def teardown_class(cls):
metadata.drop_all() | random_line_split |
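The three test_rowcount.py rows pin down one behavioral contract: with "sane_rowcount" dialects, an UPDATE reports the number of matched rows even when the written value equals the existing one. To keep all added examples in one language, here is the same matched-vs-changed distinction sketched in Rust over an in-memory table (no SQL involved, purely illustrative):

// An UPDATE-like operation that counts matched rows, not changed rows.
struct Employee { name: &'static str, department: char }

fn update_department(rows: &mut [Employee], from: char, to: char) -> usize {
    let mut matched = 0;
    for r in rows.iter_mut() {
        if r.department == from {
            r.department = to; // counted as matched even if from == to
            matched += 1;
        }
    }
    matched
}

fn main() {
    let mut rows = vec![
        Employee { name: "Charlie", department: 'C' },
        Employee { name: "Cynthia", department: 'C' },
        Employee { name: "Chris",   department: 'C' },
        Employee { name: "Bob",     department: 'B' },
    ];
    // WHERE matches 3 and the values change:
    assert_eq!(update_department(&mut rows, 'C', 'Z'), 3);
    let mut rows2 = vec![
        Employee { name: "Charlie", department: 'C' },
        Employee { name: "Cynthia", department: 'C' },
        Employee { name: "Chris",   department: 'C' },
    ];
    // WHERE matches 3 but nothing changes; matched-row counting still says 3:
    assert_eq!(update_department(&mut rows2, 'C', 'C'), 3);
}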
test_rowcount.py | from sqlalchemy import *
from sqlalchemy.test import *
class FoundRowsTest(TestBase, AssertsExecutionResults):
"""tests rowcount functionality"""
__requires__ = ('sane_rowcount', )
@classmethod
def setup_class(cls):
global employees_table, metadata
metadata = MetaData(testing.db)
employees_table = Table('employees', metadata,
Column('employee_id', Integer,
Sequence('employee_id_seq', optional=True),
primary_key=True),
Column('name', String(50)),
Column('department', String(1)),
)
metadata.create_all()
def setup(self):
global data
data = [ ('Angela', 'A'),
('Andrew', 'A'),
('Anand', 'A'),
('Bob', 'B'),
('Bobette', 'B'),
('Buffy', 'B'),
('Charlie', 'C'),
('Cynthia', 'C'),
('Chris', 'C') ]
i = employees_table.insert()
i.execute(*[{'name':n, 'department':d} for n, d in data])
def teardown(self):
employees_table.delete().execute()
@classmethod
def teardown_class(cls):
metadata.drop_all()
def testbasic(self):
s = employees_table.select()
r = s.execute().fetchall()
assert len(r) == len(data)
def test_update_rowcount1(self):
# WHERE matches 3, 3 rows changed
|
def test_update_rowcount2(self):
# WHERE matches 3, 0 rows changed
department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='C')
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
def test_delete_rowcount(self):
# WHERE matches 3, 3 rows deleted
department = employees_table.c.department
r = employees_table.delete(department=='C').execute()
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3
| department = employees_table.c.department
r = employees_table.update(department=='C').execute(department='Z')
print "expecting 3, dialect reports %s" % r.rowcount
assert r.rowcount == 3 | identifier_body |
lib.rs | // =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
#![doc(html_logo_url = "https://raw.githubusercontent.com/rusoto/rusoto/master/assets/logo-square.png")]
//! <p>Using the Amazon Cognito User Pools API, you can create a user pool to manage directories and users. You can authenticate a user to obtain tokens related to user identity and access policies.</p> <p>This API reference provides information about user pools in Amazon Cognito User Pools.</p> <p>For more information, see the Amazon Cognito Documentation.</p>
//!
//! If you're using the service, you're probably looking for [CognitoIdentityProviderClient](struct.CognitoIdentityProviderClient.html) and [CognitoIdentityProvider](trait.CognitoIdentityProvider.html).
| #[macro_use]
extern crate serde_derive;
extern crate serde_json;
mod generated;
mod custom;
pub use generated::*;
pub use custom::*; | extern crate futures;
extern crate rusoto_core;
extern crate serde; | random_line_split |
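lib.rs above is a facade crate root: private `generated` and `custom` modules re-exported flat through `pub use`. A self-contained sketch of that layout using inline modules (the module contents here are invented; the real crate's items are the generated Cognito client):

// Callers see one flat API; the generated/custom split stays internal.
mod generated {
    pub struct Client { pub region: &'static str }
    pub fn default_client() -> Client { Client { region: "us-east-1" } }
}
mod custom {
    pub fn pretty(region: &str) -> String { format!("region={}", region) }
}
pub use generated::*;
pub use custom::*;

fn main() {
    let c = default_client();
    println!("{}", pretty(c.region));
}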
common.js | "use strict";
var core_1 = require('@angular/core');
var NgTranscludeDirective = (function () {
function NgTranscludeDirective(_viewRef) |
Object.defineProperty(NgTranscludeDirective.prototype, "ngTransclude", {
get: function () {
return this._ngTransclude;
},
set: function (templateRef) {
this._ngTransclude = templateRef;
if (templateRef) {
this.viewRef.createEmbeddedView(templateRef);
}
},
enumerable: true,
configurable: true
});
NgTranscludeDirective.decorators = [
{ type: core_1.Directive, args: [{
selector: '[ngTransclude]'
},] },
];
/** @nocollapse */
NgTranscludeDirective.ctorParameters = [
{ type: core_1.ViewContainerRef, },
];
NgTranscludeDirective.propDecorators = {
'ngTransclude': [{ type: core_1.Input },],
};
return NgTranscludeDirective;
}());
exports.NgTranscludeDirective = NgTranscludeDirective;
| {
this._viewRef = _viewRef;
this.viewRef = _viewRef;
} | identifier_body |
common.js | "use strict";
var core_1 = require('@angular/core');
var NgTranscludeDirective = (function () {
function NgTranscludeDirective(_viewRef) {
this._viewRef = _viewRef;
this.viewRef = _viewRef;
}
Object.defineProperty(NgTranscludeDirective.prototype, "ngTransclude", {
get: function () {
return this._ngTransclude;
},
set: function (templateRef) {
this._ngTransclude = templateRef;
if (templateRef) |
},
enumerable: true,
configurable: true
});
NgTranscludeDirective.decorators = [
{ type: core_1.Directive, args: [{
selector: '[ngTransclude]'
},] },
];
/** @nocollapse */
NgTranscludeDirective.ctorParameters = [
{ type: core_1.ViewContainerRef, },
];
NgTranscludeDirective.propDecorators = {
'ngTransclude': [{ type: core_1.Input },],
};
return NgTranscludeDirective;
}());
exports.NgTranscludeDirective = NgTranscludeDirective;
| {
this.viewRef.createEmbeddedView(templateRef);
} | conditional_block |
common.js | "use strict";
var core_1 = require('@angular/core');
var NgTranscludeDirective = (function () {
function | (_viewRef) {
this._viewRef = _viewRef;
this.viewRef = _viewRef;
}
Object.defineProperty(NgTranscludeDirective.prototype, "ngTransclude", {
get: function () {
return this._ngTransclude;
},
set: function (templateRef) {
this._ngTransclude = templateRef;
if (templateRef) {
this.viewRef.createEmbeddedView(templateRef);
}
},
enumerable: true,
configurable: true
});
NgTranscludeDirective.decorators = [
{ type: core_1.Directive, args: [{
selector: '[ngTransclude]'
},] },
];
/** @nocollapse */
NgTranscludeDirective.ctorParameters = [
{ type: core_1.ViewContainerRef, },
];
NgTranscludeDirective.propDecorators = {
'ngTransclude': [{ type: core_1.Input },],
};
return NgTranscludeDirective;
}());
exports.NgTranscludeDirective = NgTranscludeDirective;
| NgTranscludeDirective | identifier_name |
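The common.js rows all define the same Angular ngTransclude directive, whose property setter eagerly instantiates an embedded view the moment a template is assigned. The same "setter with side effect" shape, sketched in Rust for consistency with the other added examples; the template and host types are stand-ins, not Angular's API:

// Assigning a template immediately triggers a render, like the JS setter.
struct TemplateRef(&'static str);

struct TranscludeHost {
    template: Option<TemplateRef>,
    rendered: Vec<&'static str>, // stands in for the embedded views
}

impl TranscludeHost {
    fn set_template(&mut self, t: Option<TemplateRef>) {
        if let Some(ref tpl) = t {
            self.rendered.push(tpl.0); // createEmbeddedView analogue
        }
        self.template = t;
    }
}

fn main() {
    let mut host = TranscludeHost { template: None, rendered: vec![] };
    host.set_template(Some(TemplateRef("tab-body")));
    assert!(host.template.is_some());
    assert_eq!(host.rendered, ["tab-body"]); // assignment triggered the render
}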
common.js | "use strict";
var core_1 = require('@angular/core');
var NgTranscludeDirective = (function () {
function NgTranscludeDirective(_viewRef) {
this._viewRef = _viewRef;
this.viewRef = _viewRef;
}
Object.defineProperty(NgTranscludeDirective.prototype, "ngTransclude", {
get: function () {
return this._ngTransclude;
},
set: function (templateRef) {
this._ngTransclude = templateRef;
if (templateRef) {
this.viewRef.createEmbeddedView(templateRef);
}
}, | enumerable: true,
configurable: true
});
NgTranscludeDirective.decorators = [
{ type: core_1.Directive, args: [{
selector: '[ngTransclude]'
},] },
];
/** @nocollapse */
NgTranscludeDirective.ctorParameters = [
{ type: core_1.ViewContainerRef, },
];
NgTranscludeDirective.propDecorators = {
'ngTransclude': [{ type: core_1.Input },],
};
return NgTranscludeDirective;
}());
exports.NgTranscludeDirective = NgTranscludeDirective; | random_line_split |
|
mod.rs | // Copyright 2017, Igor Shaula
// Licensed under the MIT License <LICENSE or
// http://opensource.org/licenses/MIT>. This file
// may not be copied, modified, or distributed
// except according to those terms.
use super::enums::*;
use super::RegKey;
use std::error::Error;
use std::fmt;
use std::io;
use winapi::shared::minwindef::DWORD;
macro_rules! read_value {
($s:ident) => {
match mem::replace(&mut $s.f_name, None) {
Some(ref s) => $s.key.get_value(s).map_err(DecoderError::IoError),
None => Err(DecoderError::NoFieldName),
}
};
}
macro_rules! parse_string {
($s:ident) => {{
let s: String = read_value!($s)?;
s.parse()
.map_err(|e| DecoderError::ParseError(format!("{:?}", e)))
}};
}
macro_rules! no_impl {
($e:expr) => {
Err(DecoderError::DecodeNotImplemented($e.to_owned()))
};
}
#[cfg(feature = "serialization-serde")]
mod serialization_serde;
#[derive(Debug)]
pub enum DecoderError {
DecodeNotImplemented(String),
DeserializerError(String),
IoError(io::Error),
ParseError(String),
NoFieldName,
}
impl fmt::Display for DecoderError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl Error for DecoderError {}
impl From<io::Error> for DecoderError {
fn from(err: io::Error) -> DecoderError {
DecoderError::IoError(err)
}
}
pub type DecodeResult<T> = Result<T, DecoderError>;
#[derive(Debug)]
enum DecoderReadingState {
WaitingForKey,
WaitingForValue,
}
#[derive(Debug)]
enum DecoderEnumerationState {
EnumeratingKeys(DWORD),
EnumeratingValues(DWORD),
}
#[derive(Debug)]
pub struct Decoder {
key: RegKey, | enumeration_state: DecoderEnumerationState,
}
const DECODER_SAM: DWORD = KEY_QUERY_VALUE | KEY_ENUMERATE_SUB_KEYS;
impl Decoder {
pub fn from_key(key: &RegKey) -> DecodeResult<Decoder> {
key.open_subkey_with_flags("", DECODER_SAM)
.map(Decoder::new)
.map_err(DecoderError::IoError)
}
fn new(key: RegKey) -> Decoder {
Decoder {
key,
f_name: None,
reading_state: DecoderReadingState::WaitingForKey,
enumeration_state: DecoderEnumerationState::EnumeratingKeys(0),
}
}
} | f_name: Option<String>,
reading_state: DecoderReadingState, | random_line_split |
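The read_value!/parse_string! macros above chain "take the pending field name, fetch its raw value, parse it", converting each failure into a DecoderError variant. A runnable approximation that swaps the Windows registry for a plain string map (the names and error variants below are illustrative, not winreg's):

use std::collections::HashMap;

#[derive(Debug)]
enum DecodeErr { NoFieldName, Missing(String), Parse(String) }

// Fetch the raw string for the pending field, or fail with a typed error.
macro_rules! read_value {
    ($map:expr, $field:expr) => {
        match $field.take() {
            Some(name) => $map.get(&name).cloned().ok_or(DecodeErr::Missing(name)),
            None => Err(DecodeErr::NoFieldName),
        }
    };
}

// Read, then parse into the requested type, mapping parse failures too.
macro_rules! parse_value {
    ($map:expr, $field:expr, $ty:ty) => {{
        let raw: String = read_value!($map, $field)?;
        raw.parse::<$ty>().map_err(|e| DecodeErr::Parse(format!("{:?}", e)))
    }};
}

fn demo() -> Result<u32, DecodeErr> {
    let mut map = HashMap::new();
    map.insert("Timeout".to_string(), "30".to_string());
    let mut field = Some("Timeout".to_string());
    parse_value!(map, field, u32)
}

fn main() {
    assert_eq!(demo().unwrap(), 30);
}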
mod.rs | // Copyright 2017, Igor Shaula
// Licensed under the MIT License <LICENSE or
// http://opensource.org/licenses/MIT>. This file
// may not be copied, modified, or distributed
// except according to those terms.
use super::enums::*;
use super::RegKey;
use std::error::Error;
use std::fmt;
use std::io;
use winapi::shared::minwindef::DWORD;
macro_rules! read_value {
($s:ident) => {
match mem::replace(&mut $s.f_name, None) {
Some(ref s) => $s.key.get_value(s).map_err(DecoderError::IoError),
None => Err(DecoderError::NoFieldName),
}
};
}
macro_rules! parse_string {
($s:ident) => {{
let s: String = read_value!($s)?;
s.parse()
.map_err(|e| DecoderError::ParseError(format!("{:?}", e)))
}};
}
macro_rules! no_impl {
($e:expr) => {
Err(DecoderError::DecodeNotImplemented($e.to_owned()))
};
}
#[cfg(feature = "serialization-serde")]
mod serialization_serde;
#[derive(Debug)]
pub enum DecoderError {
DecodeNotImplemented(String),
DeserializerError(String),
IoError(io::Error),
ParseError(String),
NoFieldName,
}
impl fmt::Display for DecoderError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl Error for DecoderError {}
impl From<io::Error> for DecoderError {
fn | (err: io::Error) -> DecoderError {
DecoderError::IoError(err)
}
}
pub type DecodeResult<T> = Result<T, DecoderError>;
#[derive(Debug)]
enum DecoderReadingState {
WaitingForKey,
WaitingForValue,
}
#[derive(Debug)]
enum DecoderEnumerationState {
EnumeratingKeys(DWORD),
EnumeratingValues(DWORD),
}
#[derive(Debug)]
pub struct Decoder {
key: RegKey,
f_name: Option<String>,
reading_state: DecoderReadingState,
enumeration_state: DecoderEnumerationState,
}
const DECODER_SAM: DWORD = KEY_QUERY_VALUE | KEY_ENUMERATE_SUB_KEYS;
impl Decoder {
pub fn from_key(key: &RegKey) -> DecodeResult<Decoder> {
key.open_subkey_with_flags("", DECODER_SAM)
.map(Decoder::new)
.map_err(DecoderError::IoError)
}
fn new(key: RegKey) -> Decoder {
Decoder {
key,
f_name: None,
reading_state: DecoderReadingState::WaitingForKey,
enumeration_state: DecoderEnumerationState::EnumeratingKeys(0),
}
}
}
| from | identifier_name |
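Decoder above tracks two independent state axes: what it is waiting for (key vs. value) and which item kind it is enumerating, with a cursor index. A compact sketch of driving such a two-axis state machine (simplified types, no winreg dependency):

#[derive(Debug)]
enum Reading { WaitingForKey, WaitingForValue }

#[derive(Debug)]
enum Enumerating { Keys(u32), Values(u32) }

struct Machine { reading: Reading, enumerating: Enumerating }

impl Machine {
    fn new() -> Machine {
        Machine { reading: Reading::WaitingForKey, enumerating: Enumerating::Keys(0) }
    }
    // Advance the cursor on whichever enumeration axis is active.
    fn next_item(&mut self) {
        match self.enumerating {
            Enumerating::Keys(ref mut i) | Enumerating::Values(ref mut i) => *i += 1,
        }
    }
    fn got_key(&mut self) { self.reading = Reading::WaitingForValue; }
    fn got_value(&mut self) { self.reading = Reading::WaitingForKey; }
}

fn main() {
    let mut m = Machine::new();
    m.got_key();
    m.next_item();
    m.got_value();
    println!("{:?} / {:?}", m.reading, m.enumerating); // WaitingForKey / Keys(1)
}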
util.js |
export const ATTR_ID = 'data-referid'
export let info = {
component: {
amount: 0,
mounts: 0,
unmounts: 0
}
}
export let getId = () => Math.random().toString(36).substr(2)
export let pipe = (fn1, fn2) => function(...args) {
fn1.apply(this, args)
return fn2.apply(this, args)
}
export let createCallbackStore = name => {
let store = []
return {
name,
clear() | ,
push(item) {
store.push(item)
},
store
}
}
export let wrapNative = (obj, method, fn) => {
let nativeMethod = obj[method]
let wrapper = function(...args) {
fn.apply(this, args)
return nativeMethod.apply(this, args)
}
obj[method] = wrapper
return () => obj[method] = nativeMethod
}
if (!Object.assign) {
Object.assign = (target, ...args) => {
args.forEach(source => {
for (let key in source) {
if (!source.hasOwnProperty(key)) {
continue
}
target[key] = source[key]
}
})
return target
}
} | {
while (store.length) {
store.shift()()
}
} | identifier_body |
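createCallbackStore above drains its queue by shifting and invoking callbacks until the array is empty. The same drain loop in Rust, with boxed FnOnce closures standing in for the stored JS functions:

// `while (store.length) store.shift()()` translates to draining from the front.
struct CallbackStore {
    store: Vec<Box<dyn FnOnce()>>,
}

impl CallbackStore {
    fn push(&mut self, f: Box<dyn FnOnce()>) { self.store.push(f); }
    fn clear(&mut self) {
        while !self.store.is_empty() {
            let cb = self.store.remove(0); // shift()
            cb();                          // invoke, consuming the closure
        }
    }
}

fn main() {
    let mut cbs = CallbackStore { store: Vec::new() };
    cbs.push(Box::new(|| println!("first")));
    cbs.push(Box::new(|| println!("second")));
    cbs.clear(); // prints "first" then "second", leaving the store empty
    assert!(cbs.store.is_empty());
}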
util.js |
export const ATTR_ID = 'data-referid'
export let info = {
component: {
amount: 0,
mounts: 0,
unmounts: 0
}
}
export let getId = () => Math.random().toString(36).substr(2)
export let pipe = (fn1, fn2) => function(...args) {
fn1.apply(this, args)
return fn2.apply(this, args)
}
export let createCallbackStore = name => {
let store = []
return {
name,
clear() {
while (store.length) {
store.shift()()
}
},
| (item) {
store.push(item)
},
store
}
}
export let wrapNative = (obj, method, fn) => {
let nativeMethod = obj[method]
let wrapper = function(...args) {
fn.apply(this, args)
return nativeMethod.apply(this, args)
}
obj[method] = wrapper
return () => obj[method] = nativeMethod
}
if (!Object.assign) {
Object.assign = (target, ...args) => {
args.forEach(source => {
for (let key in source) {
if (!source.hasOwnProperty(key)) {
continue
}
target[key] = source[key]
}
})
return target
}
} | push | identifier_name |
util.js |
export const ATTR_ID = 'data-referid'
export let info = {
component: {
amount: 0,
mounts: 0,
unmounts: 0
}
}
export let getId = () => Math.random().toString(36).substr(2)
export let pipe = (fn1, fn2) => function(...args) {
fn1.apply(this, args)
return fn2.apply(this, args)
}
export let createCallbackStore = name => {
let store = []
return {
name,
clear() {
while (store.length) {
store.shift()()
}
},
push(item) {
store.push(item)
},
store
}
}
export let wrapNative = (obj, method, fn) => {
let nativeMethod = obj[method]
let wrapper = function(...args) {
fn.apply(this, args)
return nativeMethod.apply(this, args)
}
obj[method] = wrapper
return () => obj[method] = nativeMethod
}
if (!Object.assign) {
Object.assign = (target, ...args) => {
args.forEach(source => {
for (let key in source) {
if (!source.hasOwnProperty(key)) |
target[key] = source[key]
}
})
return target
}
} | {
continue
} | conditional_block |
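The Object.assign polyfill in this row copies the own properties of each source into the target, with later sources winning. A rough Rust counterpart is a shallow merge of string maps (hasOwnProperty has no direct analogue here, so the sketch only models the copy order):

use std::collections::HashMap;

fn assign(target: &mut HashMap<String, String>, sources: &[HashMap<String, String>]) {
    for source in sources {
        for (k, v) in source {
            // Later sources win, matching Object.assign's left-to-right order.
            target.insert(k.clone(), v.clone());
        }
    }
}

fn main() {
    let mut target = HashMap::new();
    target.insert("a".to_string(), "1".to_string());
    let mut s1 = HashMap::new();
    s1.insert("a".to_string(), "2".to_string());
    s1.insert("b".to_string(), "3".to_string());
    assign(&mut target, &[s1]);
    assert_eq!(target["a"], "2");
    assert_eq!(target["b"], "3");
}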
util.js | export const ATTR_ID = 'data-referid'
export let info = {
component: {
amount: 0, | }
export let getId = () => Math.random().toString(36).substr(2)
export let pipe = (fn1, fn2) => function(...args) {
fn1.apply(this, args)
return fn2.apply(this, args)
}
export let createCallbackStore = name => {
let store = []
return {
name,
clear() {
while (store.length) {
store.shift()()
}
},
push(item) {
store.push(item)
},
store
}
}
export let wrapNative = (obj, method, fn) => {
let nativeMethod = obj[method]
let wrapper = function(...args) {
fn.apply(this, args)
return nativeMethod.apply(this, args)
}
obj[method] = wrapper
return () => obj[method] = nativeMethod
}
if (!Object.assign) {
Object.assign = (target, ...args) => {
args.forEach(source => {
for (let key in source) {
if (!source.hasOwnProperty(key)) {
continue
}
target[key] = source[key]
}
})
return target
}
} | mounts: 0,
unmounts: 0
} | random_line_split |
sensor.py | """Bitcoin information service that uses blockchain.info."""
from datetime import timedelta
import logging
from blockchain import exchangerates, statistics
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_CURRENCY, CONF_DISPLAY_OPTIONS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by blockchain.info"
DEFAULT_CURRENCY = "USD"
ICON = "mdi:currency-btc"
SCAN_INTERVAL = timedelta(minutes=5)
OPTION_TYPES = {
"exchangerate": ["Exchange rate (1 BTC)", None],
"trade_volume_btc": ["Trade volume", "BTC"],
"miners_revenue_usd": ["Miners revenue", "USD"],
"btc_mined": ["Mined", "BTC"],
"trade_volume_usd": ["Trade volume", "USD"],
"difficulty": ["Difficulty", None],
"minutes_between_blocks": ["Time between Blocks", "min"],
"number_of_transactions": ["No. of Transactions", None],
"hash_rate": ["Hash rate", "PH/s"],
"timestamp": ["Timestamp", None],
"mined_blocks": ["Mined Blocks", None],
"blocks_size": ["Block size", None],
"total_fees_btc": ["Total fees", "BTC"],
"total_btc_sent": ["Total sent", "BTC"],
"estimated_btc_sent": ["Estimated sent", "BTC"],
"total_btc": ["Total", "BTC"],
"total_blocks": ["Total Blocks", None],
"next_retarget": ["Next retarget", None],
"estimated_transaction_volume_usd": ["Est. Transaction volume", "USD"],
"miners_revenue_btc": ["Miners revenue", "BTC"],
"market_price_usd": ["Market price", "USD"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DISPLAY_OPTIONS, default=[]): vol.All(
cv.ensure_list, [vol.In(OPTION_TYPES)]
),
vol.Optional(CONF_CURRENCY, default=DEFAULT_CURRENCY): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Bitcoin sensors."""
currency = config.get(CONF_CURRENCY)
if currency not in exchangerates.get_ticker():
_LOGGER.warning("Currency %s is not available. Using USD", currency)
currency = DEFAULT_CURRENCY
data = BitcoinData()
dev = []
for variable in config[CONF_DISPLAY_OPTIONS]:
dev.append(BitcoinSensor(data, variable, currency))
add_entities(dev, True)
class BitcoinSensor(Entity):
"""Representation of a Bitcoin sensor."""
def __init__(self, data, option_type, currency):
"""Initialize the sensor."""
self.data = data
self._name = OPTION_TYPES[option_type][0]
self._unit_of_measurement = OPTION_TYPES[option_type][1]
self._currency = currency
self.type = option_type
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
def update(self):
"""Get the latest data and updates the states."""
self.data.update()
stats = self.data.stats
ticker = self.data.ticker
if self.type == "exchangerate":
self._state = ticker[self._currency].p15min
self._unit_of_measurement = self._currency
elif self.type == "trade_volume_btc":
self._state = "{0:.1f}".format(stats.trade_volume_btc)
elif self.type == "miners_revenue_usd":
self._state = "{0:.0f}".format(stats.miners_revenue_usd)
elif self.type == "btc_mined":
self._state = "{}".format(stats.btc_mined * 0.00000001)
elif self.type == "trade_volume_usd":
self._state = "{0:.1f}".format(stats.trade_volume_usd)
elif self.type == "difficulty":
self._state = "{0:.0f}".format(stats.difficulty)
elif self.type == "minutes_between_blocks":
self._state = "{0:.2f}".format(stats.minutes_between_blocks)
elif self.type == "number_of_transactions":
self._state = "{}".format(stats.number_of_transactions)
elif self.type == "hash_rate":
self._state = "{0:.1f}".format(stats.hash_rate * 0.000001)
elif self.type == "timestamp":
self._state = stats.timestamp
elif self.type == "mined_blocks":
self._state = "{}".format(stats.mined_blocks)
elif self.type == "blocks_size":
self._state = "{0:.1f}".format(stats.blocks_size)
elif self.type == "total_fees_btc":
self._state = "{0:.2f}".format(stats.total_fees_btc * 0.00000001)
elif self.type == "total_btc_sent":
|
elif self.type == "estimated_btc_sent":
self._state = "{0:.2f}".format(stats.estimated_btc_sent * 0.00000001)
elif self.type == "total_btc":
self._state = "{0:.2f}".format(stats.total_btc * 0.00000001)
elif self.type == "total_blocks":
self._state = "{0:.0f}".format(stats.total_blocks)
elif self.type == "next_retarget":
self._state = "{0:.2f}".format(stats.next_retarget)
elif self.type == "estimated_transaction_volume_usd":
self._state = "{0:.2f}".format(stats.estimated_transaction_volume_usd)
elif self.type == "miners_revenue_btc":
self._state = "{0:.1f}".format(stats.miners_revenue_btc * 0.00000001)
elif self.type == "market_price_usd":
self._state = "{0:.2f}".format(stats.market_price_usd)
class BitcoinData:
"""Get the latest data and update the states."""
def __init__(self):
"""Initialize the data object."""
self.stats = None
self.ticker = None
def update(self):
"""Get the latest data from blockchain.info."""
self.stats = statistics.get()
self.ticker = exchangerates.get_ticker()
| self._state = "{0:.2f}".format(stats.total_btc_sent * 0.00000001) | conditional_block |
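BitcoinSensor.update() is one long if/elif chain from option key to formatted state string. The equivalent dispatch in Rust reads naturally as a match; the Stats struct and its values below are made up for the demo, and the * 0.00000001 factor mirrors the satoshi-to-BTC conversion used above:

struct Stats { trade_volume_btc: f64, difficulty: f64, total_btc_sent: f64 }

fn format_state(option: &str, s: &Stats) -> Option<String> {
    match option {
        "trade_volume_btc" => Some(format!("{:.1}", s.trade_volume_btc)),
        "difficulty"       => Some(format!("{:.0}", s.difficulty)),
        // Satoshi to BTC, like the * 0.00000001 factors in update().
        "total_btc_sent"   => Some(format!("{:.2}", s.total_btc_sent * 0.00000001)),
        _ => None, // unknown option types carry no state
    }
}

fn main() {
    let s = Stats { trade_volume_btc: 12345.678, difficulty: 5e12, total_btc_sent: 2.5e13 };
    assert_eq!(format_state("trade_volume_btc", &s).unwrap(), "12345.7");
    assert_eq!(format_state("total_btc_sent", &s).unwrap(), "250000.00");
}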
sensor.py | """Bitcoin information service that uses blockchain.info."""
from datetime import timedelta
import logging
from blockchain import exchangerates, statistics
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_CURRENCY, CONF_DISPLAY_OPTIONS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by blockchain.info"
DEFAULT_CURRENCY = "USD"
ICON = "mdi:currency-btc"
SCAN_INTERVAL = timedelta(minutes=5)
OPTION_TYPES = {
"exchangerate": ["Exchange rate (1 BTC)", None],
"trade_volume_btc": ["Trade volume", "BTC"],
"miners_revenue_usd": ["Miners revenue", "USD"],
"btc_mined": ["Mined", "BTC"],
"trade_volume_usd": ["Trade volume", "USD"],
"difficulty": ["Difficulty", None],
"minutes_between_blocks": ["Time between Blocks", "min"],
"number_of_transactions": ["No. of Transactions", None],
"hash_rate": ["Hash rate", "PH/s"],
"timestamp": ["Timestamp", None],
"mined_blocks": ["Mined Blocks", None],
"blocks_size": ["Block size", None],
"total_fees_btc": ["Total fees", "BTC"],
"total_btc_sent": ["Total sent", "BTC"],
"estimated_btc_sent": ["Estimated sent", "BTC"],
"total_btc": ["Total", "BTC"],
"total_blocks": ["Total Blocks", None],
"next_retarget": ["Next retarget", None],
"estimated_transaction_volume_usd": ["Est. Transaction volume", "USD"],
"miners_revenue_btc": ["Miners revenue", "BTC"],
"market_price_usd": ["Market price", "USD"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DISPLAY_OPTIONS, default=[]): vol.All(
cv.ensure_list, [vol.In(OPTION_TYPES)]
),
vol.Optional(CONF_CURRENCY, default=DEFAULT_CURRENCY): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Bitcoin sensors."""
currency = config.get(CONF_CURRENCY)
if currency not in exchangerates.get_ticker():
_LOGGER.warning("Currency %s is not available. Using USD", currency)
currency = DEFAULT_CURRENCY
data = BitcoinData()
dev = []
for variable in config[CONF_DISPLAY_OPTIONS]:
dev.append(BitcoinSensor(data, variable, currency))
add_entities(dev, True)
class BitcoinSensor(Entity):
"""Representation of a Bitcoin sensor."""
def __init__(self, data, option_type, currency):
|
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
def update(self):
"""Get the latest data and updates the states."""
self.data.update()
stats = self.data.stats
ticker = self.data.ticker
if self.type == "exchangerate":
self._state = ticker[self._currency].p15min
self._unit_of_measurement = self._currency
elif self.type == "trade_volume_btc":
self._state = "{0:.1f}".format(stats.trade_volume_btc)
elif self.type == "miners_revenue_usd":
self._state = "{0:.0f}".format(stats.miners_revenue_usd)
elif self.type == "btc_mined":
self._state = "{}".format(stats.btc_mined * 0.00000001)
elif self.type == "trade_volume_usd":
self._state = "{0:.1f}".format(stats.trade_volume_usd)
elif self.type == "difficulty":
self._state = "{0:.0f}".format(stats.difficulty)
elif self.type == "minutes_between_blocks":
self._state = "{0:.2f}".format(stats.minutes_between_blocks)
elif self.type == "number_of_transactions":
self._state = "{}".format(stats.number_of_transactions)
elif self.type == "hash_rate":
self._state = "{0:.1f}".format(stats.hash_rate * 0.000001)
elif self.type == "timestamp":
self._state = stats.timestamp
elif self.type == "mined_blocks":
self._state = "{}".format(stats.mined_blocks)
elif self.type == "blocks_size":
self._state = "{0:.1f}".format(stats.blocks_size)
elif self.type == "total_fees_btc":
self._state = "{0:.2f}".format(stats.total_fees_btc * 0.00000001)
elif self.type == "total_btc_sent":
self._state = "{0:.2f}".format(stats.total_btc_sent * 0.00000001)
elif self.type == "estimated_btc_sent":
self._state = "{0:.2f}".format(stats.estimated_btc_sent * 0.00000001)
elif self.type == "total_btc":
self._state = "{0:.2f}".format(stats.total_btc * 0.00000001)
elif self.type == "total_blocks":
self._state = "{0:.0f}".format(stats.total_blocks)
elif self.type == "next_retarget":
self._state = "{0:.2f}".format(stats.next_retarget)
elif self.type == "estimated_transaction_volume_usd":
self._state = "{0:.2f}".format(stats.estimated_transaction_volume_usd)
elif self.type == "miners_revenue_btc":
self._state = "{0:.1f}".format(stats.miners_revenue_btc * 0.00000001)
elif self.type == "market_price_usd":
self._state = "{0:.2f}".format(stats.market_price_usd)
class BitcoinData:
"""Get the latest data and update the states."""
def __init__(self):
"""Initialize the data object."""
self.stats = None
self.ticker = None
def update(self):
"""Get the latest data from blockchain.info."""
self.stats = statistics.get()
self.ticker = exchangerates.get_ticker()
| """Initialize the sensor."""
self.data = data
self._name = OPTION_TYPES[option_type][0]
self._unit_of_measurement = OPTION_TYPES[option_type][1]
self._currency = currency
self.type = option_type
self._state = None | identifier_body |
sensor.py | """Bitcoin information service that uses blockchain.info."""
from datetime import timedelta
import logging
from blockchain import exchangerates, statistics
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_CURRENCY, CONF_DISPLAY_OPTIONS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by blockchain.info"
DEFAULT_CURRENCY = "USD"
ICON = "mdi:currency-btc"
SCAN_INTERVAL = timedelta(minutes=5)
OPTION_TYPES = {
"exchangerate": ["Exchange rate (1 BTC)", None],
"trade_volume_btc": ["Trade volume", "BTC"],
"miners_revenue_usd": ["Miners revenue", "USD"],
"btc_mined": ["Mined", "BTC"],
"trade_volume_usd": ["Trade volume", "USD"],
"difficulty": ["Difficulty", None],
"minutes_between_blocks": ["Time between Blocks", "min"],
"number_of_transactions": ["No. of Transactions", None],
"hash_rate": ["Hash rate", "PH/s"],
"timestamp": ["Timestamp", None],
"mined_blocks": ["Mined Blocks", None],
"blocks_size": ["Block size", None],
"total_fees_btc": ["Total fees", "BTC"],
"total_btc_sent": ["Total sent", "BTC"],
"estimated_btc_sent": ["Estimated sent", "BTC"],
"total_btc": ["Total", "BTC"],
"total_blocks": ["Total Blocks", None],
"next_retarget": ["Next retarget", None],
"estimated_transaction_volume_usd": ["Est. Transaction volume", "USD"],
"miners_revenue_btc": ["Miners revenue", "BTC"],
"market_price_usd": ["Market price", "USD"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DISPLAY_OPTIONS, default=[]): vol.All(
cv.ensure_list, [vol.In(OPTION_TYPES)]
),
vol.Optional(CONF_CURRENCY, default=DEFAULT_CURRENCY): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Bitcoin sensors."""
currency = config.get(CONF_CURRENCY)
if currency not in exchangerates.get_ticker():
_LOGGER.warning("Currency %s is not available. Using USD", currency)
currency = DEFAULT_CURRENCY
data = BitcoinData()
dev = []
for variable in config[CONF_DISPLAY_OPTIONS]:
dev.append(BitcoinSensor(data, variable, currency))
add_entities(dev, True)
class BitcoinSensor(Entity):
"""Representation of a Bitcoin sensor."""
def __init__(self, data, option_type, currency):
"""Initialize the sensor."""
self.data = data
self._name = OPTION_TYPES[option_type][0]
self._unit_of_measurement = OPTION_TYPES[option_type][1]
self._currency = currency
self.type = option_type
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
def update(self):
"""Get the latest data and updates the states."""
self.data.update()
stats = self.data.stats
ticker = self.data.ticker
if self.type == "exchangerate":
self._state = ticker[self._currency].p15min
self._unit_of_measurement = self._currency
elif self.type == "trade_volume_btc": | elif self.type == "trade_volume_usd":
self._state = "{0:.1f}".format(stats.trade_volume_usd)
elif self.type == "difficulty":
self._state = "{0:.0f}".format(stats.difficulty)
elif self.type == "minutes_between_blocks":
self._state = "{0:.2f}".format(stats.minutes_between_blocks)
elif self.type == "number_of_transactions":
self._state = "{}".format(stats.number_of_transactions)
elif self.type == "hash_rate":
self._state = "{0:.1f}".format(stats.hash_rate * 0.000001)
elif self.type == "timestamp":
self._state = stats.timestamp
elif self.type == "mined_blocks":
self._state = "{}".format(stats.mined_blocks)
elif self.type == "blocks_size":
self._state = "{0:.1f}".format(stats.blocks_size)
elif self.type == "total_fees_btc":
self._state = "{0:.2f}".format(stats.total_fees_btc * 0.00000001)
elif self.type == "total_btc_sent":
self._state = "{0:.2f}".format(stats.total_btc_sent * 0.00000001)
elif self.type == "estimated_btc_sent":
self._state = "{0:.2f}".format(stats.estimated_btc_sent * 0.00000001)
elif self.type == "total_btc":
self._state = "{0:.2f}".format(stats.total_btc * 0.00000001)
elif self.type == "total_blocks":
self._state = "{0:.0f}".format(stats.total_blocks)
elif self.type == "next_retarget":
self._state = "{0:.2f}".format(stats.next_retarget)
elif self.type == "estimated_transaction_volume_usd":
self._state = "{0:.2f}".format(stats.estimated_transaction_volume_usd)
elif self.type == "miners_revenue_btc":
self._state = "{0:.1f}".format(stats.miners_revenue_btc * 0.00000001)
elif self.type == "market_price_usd":
self._state = "{0:.2f}".format(stats.market_price_usd)
class BitcoinData:
"""Get the latest data and update the states."""
def __init__(self):
"""Initialize the data object."""
self.stats = None
self.ticker = None
def update(self):
"""Get the latest data from blockchain.info."""
self.stats = statistics.get()
self.ticker = exchangerates.get_ticker() | self._state = "{0:.1f}".format(stats.trade_volume_btc)
elif self.type == "miners_revenue_usd":
self._state = "{0:.0f}".format(stats.miners_revenue_usd)
elif self.type == "btc_mined":
self._state = "{}".format(stats.btc_mined * 0.00000001) | random_line_split |
sensor.py | """Bitcoin information service that uses blockchain.info."""
from datetime import timedelta
import logging
from blockchain import exchangerates, statistics
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_CURRENCY, CONF_DISPLAY_OPTIONS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by blockchain.info"
DEFAULT_CURRENCY = "USD"
ICON = "mdi:currency-btc"
SCAN_INTERVAL = timedelta(minutes=5)
OPTION_TYPES = {
"exchangerate": ["Exchange rate (1 BTC)", None],
"trade_volume_btc": ["Trade volume", "BTC"],
"miners_revenue_usd": ["Miners revenue", "USD"],
"btc_mined": ["Mined", "BTC"],
"trade_volume_usd": ["Trade volume", "USD"],
"difficulty": ["Difficulty", None],
"minutes_between_blocks": ["Time between Blocks", "min"],
"number_of_transactions": ["No. of Transactions", None],
"hash_rate": ["Hash rate", "PH/s"],
"timestamp": ["Timestamp", None],
"mined_blocks": ["Mined Blocks", None],
"blocks_size": ["Block size", None],
"total_fees_btc": ["Total fees", "BTC"],
"total_btc_sent": ["Total sent", "BTC"],
"estimated_btc_sent": ["Estimated sent", "BTC"],
"total_btc": ["Total", "BTC"],
"total_blocks": ["Total Blocks", None],
"next_retarget": ["Next retarget", None],
"estimated_transaction_volume_usd": ["Est. Transaction volume", "USD"],
"miners_revenue_btc": ["Miners revenue", "BTC"],
"market_price_usd": ["Market price", "USD"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DISPLAY_OPTIONS, default=[]): vol.All(
cv.ensure_list, [vol.In(OPTION_TYPES)]
),
vol.Optional(CONF_CURRENCY, default=DEFAULT_CURRENCY): cv.string,
}
)
def | (hass, config, add_entities, discovery_info=None):
"""Set up the Bitcoin sensors."""
currency = config.get(CONF_CURRENCY)
if currency not in exchangerates.get_ticker():
_LOGGER.warning("Currency %s is not available. Using USD", currency)
currency = DEFAULT_CURRENCY
data = BitcoinData()
dev = []
for variable in config[CONF_DISPLAY_OPTIONS]:
dev.append(BitcoinSensor(data, variable, currency))
add_entities(dev, True)
class BitcoinSensor(Entity):
"""Representation of a Bitcoin sensor."""
def __init__(self, data, option_type, currency):
"""Initialize the sensor."""
self.data = data
self._name = OPTION_TYPES[option_type][0]
self._unit_of_measurement = OPTION_TYPES[option_type][1]
self._currency = currency
self.type = option_type
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
def update(self):
"""Get the latest data and updates the states."""
self.data.update()
stats = self.data.stats
ticker = self.data.ticker
if self.type == "exchangerate":
self._state = ticker[self._currency].p15min
self._unit_of_measurement = self._currency
elif self.type == "trade_volume_btc":
self._state = "{0:.1f}".format(stats.trade_volume_btc)
elif self.type == "miners_revenue_usd":
self._state = "{0:.0f}".format(stats.miners_revenue_usd)
elif self.type == "btc_mined":
self._state = "{}".format(stats.btc_mined * 0.00000001)
elif self.type == "trade_volume_usd":
self._state = "{0:.1f}".format(stats.trade_volume_usd)
elif self.type == "difficulty":
self._state = "{0:.0f}".format(stats.difficulty)
elif self.type == "minutes_between_blocks":
self._state = "{0:.2f}".format(stats.minutes_between_blocks)
elif self.type == "number_of_transactions":
self._state = "{}".format(stats.number_of_transactions)
elif self.type == "hash_rate":
self._state = "{0:.1f}".format(stats.hash_rate * 0.000001)
elif self.type == "timestamp":
self._state = stats.timestamp
elif self.type == "mined_blocks":
self._state = "{}".format(stats.mined_blocks)
elif self.type == "blocks_size":
self._state = "{0:.1f}".format(stats.blocks_size)
elif self.type == "total_fees_btc":
self._state = "{0:.2f}".format(stats.total_fees_btc * 0.00000001)
elif self.type == "total_btc_sent":
self._state = "{0:.2f}".format(stats.total_btc_sent * 0.00000001)
elif self.type == "estimated_btc_sent":
self._state = "{0:.2f}".format(stats.estimated_btc_sent * 0.00000001)
elif self.type == "total_btc":
self._state = "{0:.2f}".format(stats.total_btc * 0.00000001)
elif self.type == "total_blocks":
self._state = "{0:.0f}".format(stats.total_blocks)
elif self.type == "next_retarget":
self._state = "{0:.2f}".format(stats.next_retarget)
elif self.type == "estimated_transaction_volume_usd":
self._state = "{0:.2f}".format(stats.estimated_transaction_volume_usd)
elif self.type == "miners_revenue_btc":
self._state = "{0:.1f}".format(stats.miners_revenue_btc * 0.00000001)
elif self.type == "market_price_usd":
self._state = "{0:.2f}".format(stats.market_price_usd)
class BitcoinData:
"""Get the latest data and update the states."""
def __init__(self):
"""Initialize the data object."""
self.stats = None
self.ticker = None
def update(self):
"""Get the latest data from blockchain.info."""
self.stats = statistics.get()
self.ticker = exchangerates.get_ticker()
| setup_platform | identifier_name |
main.rs | // CITA
// Copyright 2016-2017 Cryptape Technologies LLC.
// This program is free software: you can redistribute it
// and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any
// later version.
// This program is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
extern crate serde;
extern crate serde_json;
extern crate libproto;
extern crate util;
extern crate threadpool;
extern crate rustc_serialize;
extern crate protobuf;
#[macro_use]
extern crate log;
extern crate clap;
extern crate tx_pool;
extern crate cita_crypto as crypto;
extern crate proof;
extern crate pubsub;
extern crate engine_json;
extern crate engine;
extern crate parking_lot;
extern crate cpuprofiler;
extern crate cita_log;
extern crate dotenv;
pub mod core;
use clap::App;
use core::Spec;
use core::handler;
use cpuprofiler::PROFILER;
use libproto::*;
use log::LogLevelFilter;
use pubsub::start_pubsub;
use std::sync::mpsc::channel;
use std::thread;
use std::time::{Duration, Instant};
fn main() {
dotenv::dotenv().ok();
// Always print backtrace on panic.
::std::env::set_var("RUST_BACKTRACE", "1");
cita_log::format(LogLevelFilter::Info);
println!("CITA:consensus:poa");
let matches = App::new("authority_round")
.version("0.1")
.author("Cryptape")
.about("CITA Block Chain Node powered by Rust")
.args_from_usage("-c, --config=[FILE] 'Sets a custom config file'")
.args_from_usage("--prof-start=[TIME] 'Sets profiling start time (second from app start)'")
.args_from_usage("--prof-duration=[DURATION] 'Sets duration(second) of profiling'")
.get_matches();
let mut config_path = "config";
if let Some(c) = matches.value_of("config") {
trace!("Value for config: {}", c);
config_path = c;
}
//start profiling
let mut prof_start: u64 = 0;
let mut prof_duration: u64 = 0;
if let Some(start) = matches.value_of("prof-start") {
trace!("Value for prof-start: {}", start);
prof_start = start.parse::<u64>().unwrap();
}
if let Some(duration) = matches.value_of("prof-duration") {
trace!("Value for prof-duration: {}", duration);
prof_duration = duration.parse::<u64>().unwrap();
}
if prof_start != 0 && prof_duration != 0 |
let threadpool = threadpool::ThreadPool::new(2);
let (tx, rx) = channel();
let (tx_sub, rx_sub) = channel();
let (tx_pub, rx_pub) = channel();
start_pubsub("consensus", vec!["net.tx", "jsonrpc.new_tx", "net.msg", "chain.status"], tx_sub, rx_pub);
thread::spawn(move || loop {
let (key, body) = rx_sub.recv().unwrap();
let tx = tx.clone();
handler::receive(&threadpool, &tx, key_to_id(&key), body);
});
let spec = Spec::new_test_round(config_path);
let engine = spec.engine;
let ready = spec.rx;
let process = engine.clone();
let tx_pub1 = tx_pub.clone();
thread::spawn(move || loop {
let process = process.clone();
handler::process(process, &rx, tx_pub1.clone());
});
let seal = engine.clone();
let dur = engine.duration();
let mut old_height = 0;
let mut new_height = 0;
let tx_pub = tx_pub.clone();
thread::spawn(move || loop {
let seal = seal.clone();
trace!("seal worker lock!");
loop {
new_height = ready.recv().unwrap();
if new_height > old_height {
old_height = new_height;
break;
}
}
trace!("seal worker go {}!!!", new_height);
let now = Instant::now();
trace!("seal worker ready!");
handler::seal(seal, tx_pub.clone());
let elapsed = now.elapsed();
if let Some(dur1) = dur.checked_sub(elapsed) {
trace!("seal worker sleep !!!!!{:?}", dur1);
thread::sleep(dur1);
}
});
loop {
thread::sleep(Duration::from_millis(10000));
}
}
| {
thread::spawn(move || {
thread::sleep(Duration::new(prof_start, 0));
println!("******Profiling Start******");
PROFILER.lock().unwrap().start("./consensus_poa.profile").expect("Couldn't start");
thread::sleep(Duration::new(prof_duration, 0));
println!("******Profiling Stop******");
PROFILER.lock().unwrap().stop().unwrap();
});
} | conditional_block |
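The masked conditional in this row is the profiling window: a detached thread sleeps until prof-start, starts the profiler, sleeps for prof-duration, then stops it. The same shape with a stand-in profiler instead of the cpuprofiler crate:

use std::thread;
use std::time::Duration;

fn start_profiler() { println!("******Profiling Start******"); }
fn stop_profiler() { println!("******Profiling Stop******"); }

fn arm_profiling_window(prof_start: u64, prof_duration: u64) -> Option<thread::JoinHandle<()>> {
    // Both knobs must be set, mirroring the `!= 0 && != 0` guard above.
    if prof_start == 0 || prof_duration == 0 {
        return None;
    }
    Some(thread::spawn(move || {
        thread::sleep(Duration::new(prof_start, 0));
        start_profiler();
        thread::sleep(Duration::new(prof_duration, 0));
        stop_profiler();
    }))
}

fn main() {
    // 1s in, profile for 1s; joined here only so the demo exits cleanly.
    if let Some(h) = arm_profiling_window(1, 1) {
        h.join().unwrap();
    }
}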
main.rs | // CITA
// Copyright 2016-2017 Cryptape Technologies LLC.
// This program is free software: you can redistribute it
// and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any
// later version.
// This program is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
extern crate serde;
extern crate serde_json;
extern crate libproto;
extern crate util;
extern crate threadpool;
extern crate rustc_serialize;
extern crate protobuf;
#[macro_use]
extern crate log;
extern crate clap;
extern crate tx_pool;
extern crate cita_crypto as crypto;
extern crate proof;
extern crate pubsub;
extern crate engine_json;
extern crate engine;
extern crate parking_lot;
extern crate cpuprofiler;
extern crate cita_log;
extern crate dotenv;
pub mod core;
use clap::App;
use core::Spec;
use core::handler;
use cpuprofiler::PROFILER;
use libproto::*;
use log::LogLevelFilter;
use pubsub::start_pubsub;
use std::sync::mpsc::channel;
use std::thread;
use std::time::{Duration, Instant};
fn | () {
dotenv::dotenv().ok();
// Always print backtrace on panic.
::std::env::set_var("RUST_BACKTRACE", "1");
cita_log::format(LogLevelFilter::Info);
println!("CITA:consensus:poa");
let matches = App::new("authority_round")
.version("0.1")
.author("Cryptape")
.about("CITA Block Chain Node powered by Rust")
.args_from_usage("-c, --config=[FILE] 'Sets a custom config file'")
.args_from_usage("--prof-start=[TIME] 'Sets profiling start time (second from app start)'")
.args_from_usage("--prof-duration=[DURATION] 'Sets duration(second) of profiling'")
.get_matches();
let mut config_path = "config";
if let Some(c) = matches.value_of("config") {
trace!("Value for config: {}", c);
config_path = c;
}
//start profiling
let mut prof_start: u64 = 0;
let mut prof_duration: u64 = 0;
if let Some(start) = matches.value_of("prof-start") {
trace!("Value for prof-start: {}", start);
prof_start = start.parse::<u64>().unwrap();
}
if let Some(duration) = matches.value_of("prof-duration") {
trace!("Value for prof-duration: {}", duration);
prof_duration = duration.parse::<u64>().unwrap();
}
if prof_start != 0 && prof_duration != 0 {
thread::spawn(move || {
thread::sleep(Duration::new(prof_start, 0));
println!("******Profiling Start******");
PROFILER.lock().unwrap().start("./consensus_poa.profile").expect("Couldn't start");
thread::sleep(Duration::new(prof_duration, 0));
println!("******Profiling Stop******");
PROFILER.lock().unwrap().stop().unwrap();
});
}
let threadpool = threadpool::ThreadPool::new(2);
let (tx, rx) = channel();
let (tx_sub, rx_sub) = channel();
let (tx_pub, rx_pub) = channel();
start_pubsub("consensus", vec!["net.tx", "jsonrpc.new_tx", "net.msg", "chain.status"], tx_sub, rx_pub);
thread::spawn(move || loop {
let (key, body) = rx_sub.recv().unwrap();
let tx = tx.clone();
handler::receive(&threadpool, &tx, key_to_id(&key), body);
});
let spec = Spec::new_test_round(config_path);
let engine = spec.engine;
let ready = spec.rx;
let process = engine.clone();
let tx_pub1 = tx_pub.clone();
thread::spawn(move || loop {
let process = process.clone();
handler::process(process, &rx, tx_pub1.clone());
});
let seal = engine.clone();
let dur = engine.duration();
let mut old_height = 0;
let mut new_height = 0;
let tx_pub = tx_pub.clone();
thread::spawn(move || loop {
let seal = seal.clone();
trace!("seal worker lock!");
loop {
new_height = ready.recv().unwrap();
if new_height > old_height {
old_height = new_height;
break;
}
}
trace!("seal worker go {}!!!", new_height);
let now = Instant::now();
trace!("seal worker ready!");
handler::seal(seal, tx_pub.clone());
let elapsed = now.elapsed();
if let Some(dur1) = dur.checked_sub(elapsed) {
trace!("seal worker sleep !!!!!{:?}", dur1);
thread::sleep(dur1);
}
});
loop {
thread::sleep(Duration::from_millis(10000));
}
}
| main | identifier_name |
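The seal worker paces itself with Duration::checked_sub: after the work runs, it sleeps only for whatever remains of the period, and skips the sleep entirely on overrun because checked_sub returns None. A minimal fixed-rate loop built on the same call:

use std::thread;
use std::time::{Duration, Instant};

fn paced(period: Duration, rounds: u32, mut work: impl FnMut()) {
    for _ in 0..rounds {
        let start = Instant::now();
        work();
        // None means the work overran the period; start the next round at once.
        if let Some(remaining) = period.checked_sub(start.elapsed()) {
            thread::sleep(remaining);
        }
    }
}

fn main() {
    let t0 = Instant::now();
    paced(Duration::from_millis(50), 3, || thread::sleep(Duration::from_millis(10)));
    // Three 50ms periods, each with 10ms of "work": roughly 150ms total.
    println!("elapsed: {:?}", t0.elapsed());
}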
main.rs | // CITA
// Copyright 2016-2017 Cryptape Technologies LLC.
// This program is free software: you can redistribute it
// and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any
// later version.
// This program is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
extern crate serde;
extern crate serde_json;
extern crate libproto;
extern crate util;
extern crate threadpool;
extern crate rustc_serialize;
extern crate protobuf;
#[macro_use]
extern crate log;
extern crate clap;
extern crate tx_pool;
extern crate cita_crypto as crypto;
extern crate proof;
extern crate pubsub;
extern crate engine_json;
extern crate engine;
extern crate parking_lot;
extern crate cpuprofiler;
extern crate cita_log;
extern crate dotenv;
pub mod core;
use clap::App;
use core::Spec;
use core::handler;
use cpuprofiler::PROFILER;
use libproto::*;
use log::LogLevelFilter;
use pubsub::start_pubsub;
use std::sync::mpsc::channel;
use std::thread;
use std::time::{Duration, Instant};
fn main() {
dotenv::dotenv().ok();
// Always print backtrace on panic.
::std::env::set_var("RUST_BACKTRACE", "1");
cita_log::format(LogLevelFilter::Info);
println!("CITA:consensus:poa");
let matches = App::new("authority_round")
.version("0.1")
.author("Cryptape")
.about("CITA Block Chain Node powered by Rust")
.args_from_usage("-c, --config=[FILE] 'Sets a custom config file'")
.args_from_usage("--prof-start=[TIME] 'Sets profiling start time (second from app start)'")
.args_from_usage("--prof-duration=[DURATION] 'Sets duration(second) of profiling'")
.get_matches();
let mut config_path = "config";
if let Some(c) = matches.value_of("config") {
trace!("Value for config: {}", c);
config_path = c;
}
//start profiling
let mut prof_start: u64 = 0;
let mut prof_duration: u64 = 0;
if let Some(start) = matches.value_of("prof-start") {
trace!("Value for prof-start: {}", start);
prof_start = start.parse::<u64>().unwrap();
}
if let Some(duration) = matches.value_of("prof-duration") {
trace!("Value for prof-duration: {}", duration);
prof_duration = duration.parse::<u64>().unwrap();
}
if prof_start != 0 && prof_duration != 0 {
thread::spawn(move || {
thread::sleep(Duration::new(prof_start, 0));
println!("******Profiling Start******");
PROFILER.lock().unwrap().start("./consensus_poa.profile").expect("Couldn't start");
thread::sleep(Duration::new(prof_duration, 0));
println!("******Profiling Stop******");
PROFILER.lock().unwrap().stop().unwrap();
});
}
let threadpool = threadpool::ThreadPool::new(2);
let (tx, rx) = channel();
let (tx_sub, rx_sub) = channel();
let (tx_pub, rx_pub) = channel();
start_pubsub("consensus", vec!["net.tx", "jsonrpc.new_tx", "net.msg", "chain.status"], tx_sub, rx_pub);
thread::spawn(move || loop {
let (key, body) = rx_sub.recv().unwrap();
let tx = tx.clone();
handler::receive(&threadpool, &tx, key_to_id(&key), body);
});
let spec = Spec::new_test_round(config_path);
let engine = spec.engine;
let ready = spec.rx;
let process = engine.clone();
let tx_pub1 = tx_pub.clone(); | let seal = engine.clone();
let dur = engine.duration();
let mut old_height = 0;
let mut new_height = 0;
let tx_pub = tx_pub.clone();
thread::spawn(move || loop {
let seal = seal.clone();
trace!("seal worker lock!");
loop {
new_height = ready.recv().unwrap();
if new_height > old_height {
old_height = new_height;
break;
}
}
trace!("seal worker go {}!!!", new_height);
let now = Instant::now();
trace!("seal worker ready!");
handler::seal(seal, tx_pub.clone());
let elapsed = now.elapsed();
if let Some(dur1) = dur.checked_sub(elapsed) {
trace!("seal worker sleep !!!!!{:?}", dur1);
thread::sleep(dur1);
}
});
loop {
thread::sleep(Duration::from_millis(10000));
}
} | thread::spawn(move || loop {
let process = process.clone();
handler::process(process, &rx, tx_pub1.clone());
});
| random_line_split |
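The receive thread above blocks on the subscription channel and hands every (routing key, payload) pair to handler::receive with a numeric topic id. A rough Python analogue of that fan-in; the ROUTING_IDS table is invented for illustration (the real mapping lives in libproto's key_to_id):

import threading

ROUTING_IDS = {"net.tx": 0, "jsonrpc.new_tx": 1, "net.msg": 2, "chain.status": 3}

def start_receiver(sub, handle) -> threading.Thread:
    def loop() -> None:
        while True:
            key, body = sub.get()           # like rx_sub.recv().unwrap()
            handle(ROUTING_IDS[key], body)  # like key_to_id(&key)
    thread = threading.Thread(target=loop, daemon=True)
    thread.start()
    return thread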
main.rs | // CITA
// Copyright 2016-2017 Cryptape Technologies LLC.
// This program is free software: you can redistribute it
// and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any
// later version.
// This program is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
extern crate serde;
extern crate serde_json;
extern crate libproto;
extern crate util;
extern crate threadpool;
extern crate rustc_serialize;
extern crate protobuf;
#[macro_use]
extern crate log;
extern crate clap;
extern crate tx_pool;
extern crate cita_crypto as crypto;
extern crate proof;
extern crate pubsub;
extern crate engine_json;
extern crate engine;
extern crate parking_lot;
extern crate cpuprofiler;
extern crate cita_log;
extern crate dotenv;
pub mod core;
use clap::App;
use core::Spec;
use core::handler;
use cpuprofiler::PROFILER;
use libproto::*;
use log::LogLevelFilter;
use pubsub::start_pubsub;
use std::sync::mpsc::channel;
use std::thread;
use std::time::{Duration, Instant};
fn main() | {
dotenv::dotenv().ok();
// Always print backtrace on panic.
::std::env::set_var("RUST_BACKTRACE", "1");
cita_log::format(LogLevelFilter::Info);
println!("CITA:consensus:poa");
let matches = App::new("authority_round")
.version("0.1")
.author("Cryptape")
.about("CITA Block Chain Node powered by Rust")
.args_from_usage("-c, --config=[FILE] 'Sets a custom config file'")
.args_from_usage("--prof-start=[TIME] 'Sets profiling start time (second from app start)'")
.args_from_usage("--prof-duration=[DURATION] 'Sets duration(second) of profiling'")
.get_matches();
let mut config_path = "config";
if let Some(c) = matches.value_of("config") {
trace!("Value for config: {}", c);
config_path = c;
}
//start profiling
let mut prof_start: u64 = 0;
let mut prof_duration: u64 = 0;
if let Some(start) = matches.value_of("prof-start") {
trace!("Value for prof-start: {}", start);
prof_start = start.parse::<u64>().unwrap();
}
if let Some(duration) = matches.value_of("prof-duration") {
trace!("Value for prof-duration: {}", duration);
prof_duration = duration.parse::<u64>().unwrap();
}
if prof_start != 0 && prof_duration != 0 {
thread::spawn(move || {
thread::sleep(Duration::new(prof_start, 0));
println!("******Profiling Start******");
PROFILER.lock().unwrap().start("./consensus_poa.profile").expect("Couldn't start");
thread::sleep(Duration::new(prof_duration, 0));
println!("******Profiling Stop******");
PROFILER.lock().unwrap().stop().unwrap();
});
}
let threadpool = threadpool::ThreadPool::new(2);
let (tx, rx) = channel();
let (tx_sub, rx_sub) = channel();
let (tx_pub, rx_pub) = channel();
start_pubsub("consensus", vec!["net.tx", "jsonrpc.new_tx", "net.msg", "chain.status"], tx_sub, rx_pub);
thread::spawn(move || loop {
let (key, body) = rx_sub.recv().unwrap();
let tx = tx.clone();
handler::receive(&threadpool, &tx, key_to_id(&key), body);
});
let spec = Spec::new_test_round(config_path);
let engine = spec.engine;
let ready = spec.rx;
let process = engine.clone();
let tx_pub1 = tx_pub.clone();
thread::spawn(move || loop {
let process = process.clone();
handler::process(process, &rx, tx_pub1.clone());
});
let seal = engine.clone();
let dur = engine.duration();
let mut old_height = 0;
let mut new_height = 0;
let tx_pub = tx_pub.clone();
thread::spawn(move || loop {
let seal = seal.clone();
trace!("seal worker lock!");
loop {
new_height = ready.recv().unwrap();
if new_height > old_height {
old_height = new_height;
break;
}
}
trace!("seal worker go {}!!!", new_height);
let now = Instant::now();
trace!("seal worker ready!");
handler::seal(seal, tx_pub.clone());
let elapsed = now.elapsed();
if let Some(dur1) = dur.checked_sub(elapsed) {
trace!("seal worker sleep !!!!!{:?}", dur1);
thread::sleep(dur1);
}
});
loop {
thread::sleep(Duration::from_millis(10000));
}
} | identifier_body |
|
nccl_ops_test.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for nccl ops. See also the cc test for nccl_communicator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from functools import partial
import numpy as np
from tensorflow.contrib import nccl
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients
from tensorflow.python.platform import test
def _DeviceTensors(tensors, devices):
res = []
for t, d in zip(tensors, devices):
with ops.device(d):
res.append(array_ops.identity(t))
return res
def _NcclAllReduce(nccl_fun, tensors, devices):
return nccl_fun(_DeviceTensors(tensors, devices))
def _NcclReduce(nccl_fun, tensors, devices):
|
def _NcclBroadcast(tensors, devices):
sender = np.random.randint(0, len(devices))
with ops.device(devices[sender]):
tensor = array_ops.identity(tensors[0])
broadcast = nccl.broadcast(tensor)
return _DeviceTensors([broadcast] * len(devices), devices)
class NcclTestCase(test.TestCase):
def _Test(self,
nccl_reduce,
numpy_fn,
device_sets=(['/device:GPU:1', '/device:GPU:2', '/device:GPU:0'],
['/device:GPU:1', '/device:GPU:0'])):
"""Tests that nccl_reduce does the same as reduction with numpy_fn.
Args:
nccl_reduce: A function taking a list of tensors and a list of devices,
and returning a list of reduced tensors and a list of ops to perform the
reduction.
numpy_fn: A function taking two tensors and returning the reduction of the
two.
device_sets: Tuple of virtual devices to run the test on.
"""
if not test.is_gpu_available():
return # Test requires access to a GPU
for dtype in [np.float32, np.int32, np.int64, np.float64]:
# Create session inside outer loop to test use of
# same communicator across multiple sessions.
with self.test_session(use_gpu=True) as sess:
for devices in device_sets:
shape = (3, 4)
random = (np.random.random_sample(shape) - .5) * 1024
tensors = []
for _ in devices:
tensors.append(random.astype(dtype))
np_ans = tensors[0]
for t in tensors[1:]:
np_ans = numpy_fn(np_ans, t)
reduce_tensors = nccl_reduce(tensors, devices)
self.assertNotEmpty(reduce_tensors)
# Test shape inference.
for r in reduce_tensors:
self.assertEqual(shape, r.get_shape())
result_tensors = [array_ops.identity(t) for t in reduce_tensors]
# Test execution and results.
for t in sess.run(result_tensors):
self.assertAllClose(t, np_ans)
def _TestGradient(self, nccl_reduce, numpy_fn):
"""Tests the gradient of nccl_reduce.
Args:
nccl_reduce: A function taking a list of tensors and a list of devices,
and returning a list of reduced tensors and a list of ops to perform the
reduction.
numpy_fn: A function taking two tensors and returning the gradient of the
reduction of the two.
"""
def _Gradient(tensors, devices):
inputs = [array_ops.placeholder(t.dtype, t.shape) for t in tensors]
reduce_tensors = nccl_reduce(inputs, devices)
losses = _DeviceTensors(tensors, [t.device for t in reduce_tensors])
grads = gradients.gradients(
reduce_tensors, inputs, losses, colocate_gradients_with_ops=True)
return [g for g in grads if g is not None]
self._Test(_Gradient, numpy_fn)
class AllReduceTest(NcclTestCase):
def testAllReduce(self):
self._Test(partial(_NcclAllReduce, nccl.all_sum), lambda x, y: x + y)
self._Test(partial(_NcclAllReduce, nccl.all_prod), lambda x, y: x * y)
self._Test(partial(_NcclAllReduce, nccl.all_min), np.minimum)
self._Test(partial(_NcclAllReduce, nccl.all_max), np.maximum)
def testAllSumGrad(self):
self._TestGradient(
partial(_NcclAllReduce, nccl.all_sum), lambda x, y: x + y)
def testErrors(self):
with self.assertRaisesRegexp(ValueError, 'Device assignment required'):
nccl.all_sum([array_ops.identity(np.random.random_sample((3, 4)))])
with self.assertRaisesRegexp(ValueError, 'Must pass >0 tensors'):
nccl.all_sum([])
class SingleReduceTest(NcclTestCase):
def testSum(self):
self._Test(partial(_NcclReduce, nccl.reduce_sum), lambda x, y: x + y)
def testSumGrad(self):
self._TestGradient(partial(_NcclReduce, nccl.reduce_sum), lambda x, y: x)
class BroadcastTest(NcclTestCase):
def testBroadcast(self):
self._Test(_NcclBroadcast, lambda x, y: x)
def testBroadcastSingleDevice(self):
# Broadcasts on a single device are removed completely during rewrite.
self._Test(_NcclBroadcast, lambda x, y: x,
(['/device:GPU:0', '/device:GPU:0'],))
def testBroadcastToCpuError(self):
# Broadcasts to CPU are not supported.
with self.assertRaisesRegexp(
errors.NotFoundError,
"No registered '_NcclBroadcastRecv' OpKernel for CPU devices"):
self._Test(_NcclBroadcast, lambda x, y: x,
(['/device:GPU:0', '/device:CPU:0'],))
class CombinedTest(NcclTestCase):
"""Test all-reduce vs. single-reduce plus broadcast in one session.run."""
def _Combined(self, tensors, devices):
all_reduce_tensors = _NcclAllReduce(nccl.all_sum, tensors, devices)
single_reduce_tensors = _NcclReduce(nccl.reduce_sum, tensors, devices)
broadcast_tensors = _NcclBroadcast(single_reduce_tensors, devices)
return all_reduce_tensors + broadcast_tensors
def testCombined(self):
self._Test(self._Combined, lambda x, y: x + y)
if __name__ == '__main__':
test.main()
| receiver = np.random.randint(0, len(devices))
with ops.device(devices[receiver]):
return [nccl_fun(_DeviceTensors(tensors, devices))] | identifier_body |
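_Test in the row above computes its reference result by folding numpy_fn across the per-device inputs (np_ans = numpy_fn(np_ans, t)). The same fold in plain NumPy as a quick sanity check, with arbitrary values:

import numpy as np
from functools import reduce

tensors = [np.full((3, 4), v, dtype=np.float32) for v in (1.0, 2.0, 3.0)]
expected_sum = reduce(lambda x, y: x + y, tensors)   # what all_sum should yield
expected_prod = reduce(lambda x, y: x * y, tensors)  # what all_prod should yield
assert (expected_sum == 6.0).all() and (expected_prod == 6.0).all()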
nccl_ops_test.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for nccl ops. See also the cc test for nccl_communicator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from functools import partial
import numpy as np
from tensorflow.contrib import nccl
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients
from tensorflow.python.platform import test
def | (tensors, devices):
res = []
for t, d in zip(tensors, devices):
with ops.device(d):
res.append(array_ops.identity(t))
return res
def _NcclAllReduce(nccl_fun, tensors, devices):
return nccl_fun(_DeviceTensors(tensors, devices))
def _NcclReduce(nccl_fun, tensors, devices):
receiver = np.random.randint(0, len(devices))
with ops.device(devices[receiver]):
return [nccl_fun(_DeviceTensors(tensors, devices))]
def _NcclBroadcast(tensors, devices):
sender = np.random.randint(0, len(devices))
with ops.device(devices[sender]):
tensor = array_ops.identity(tensors[0])
broadcast = nccl.broadcast(tensor)
return _DeviceTensors([broadcast] * len(devices), devices)
class NcclTestCase(test.TestCase):
def _Test(self,
nccl_reduce,
numpy_fn,
device_sets=(['/device:GPU:1', '/device:GPU:2', '/device:GPU:0'],
['/device:GPU:1', '/device:GPU:0'])):
"""Tests that nccl_reduce does the same as reduction with numpy_fn.
Args:
nccl_reduce: A function taking a list of tensors and a list of devices,
and returning a list of reduced tensors and a list of ops to perform the
reduction.
numpy_fn: A function taking two tensors and returning the reduction of the
two.
device_sets: Tuple of virtual devices to run the test on.
"""
if not test.is_gpu_available():
return # Test requires access to a GPU
for dtype in [np.float32, np.int32, np.int64, np.float64]:
# Create session inside outer loop to test use of
# same communicator across multiple sessions.
with self.test_session(use_gpu=True) as sess:
for devices in device_sets:
shape = (3, 4)
random = (np.random.random_sample(shape) - .5) * 1024
tensors = []
for _ in devices:
tensors.append(random.astype(dtype))
np_ans = tensors[0]
for t in tensors[1:]:
np_ans = numpy_fn(np_ans, t)
reduce_tensors = nccl_reduce(tensors, devices)
self.assertNotEmpty(reduce_tensors)
# Test shape inference.
for r in reduce_tensors:
self.assertEqual(shape, r.get_shape())
result_tensors = [array_ops.identity(t) for t in reduce_tensors]
# Test execution and results.
for t in sess.run(result_tensors):
self.assertAllClose(t, np_ans)
def _TestGradient(self, nccl_reduce, numpy_fn):
"""Tests the gradient of nccl_reduce.
Args:
nccl_reduce: A function taking a list of tensors and a list of devices,
and returning a list of reduced tensors and a list of ops to perform the
reduction.
numpy_fn: A function taking two tensors and returning the gradient of the
reduction of the two.
"""
def _Gradient(tensors, devices):
inputs = [array_ops.placeholder(t.dtype, t.shape) for t in tensors]
reduce_tensors = nccl_reduce(inputs, devices)
losses = _DeviceTensors(tensors, [t.device for t in reduce_tensors])
grads = gradients.gradients(
reduce_tensors, inputs, losses, colocate_gradients_with_ops=True)
return [g for g in grads if g is not None]
self._Test(_Gradient, numpy_fn)
class AllReduceTest(NcclTestCase):
def testAllReduce(self):
self._Test(partial(_NcclAllReduce, nccl.all_sum), lambda x, y: x + y)
self._Test(partial(_NcclAllReduce, nccl.all_prod), lambda x, y: x * y)
self._Test(partial(_NcclAllReduce, nccl.all_min), np.minimum)
self._Test(partial(_NcclAllReduce, nccl.all_max), np.maximum)
def testAllSumGrad(self):
self._TestGradient(
partial(_NcclAllReduce, nccl.all_sum), lambda x, y: x + y)
def testErrors(self):
with self.assertRaisesRegexp(ValueError, 'Device assignment required'):
nccl.all_sum([array_ops.identity(np.random.random_sample((3, 4)))])
with self.assertRaisesRegexp(ValueError, 'Must pass >0 tensors'):
nccl.all_sum([])
class SingleReduceTest(NcclTestCase):
def testSum(self):
self._Test(partial(_NcclReduce, nccl.reduce_sum), lambda x, y: x + y)
def testSumGrad(self):
self._TestGradient(partial(_NcclReduce, nccl.reduce_sum), lambda x, y: x)
class BroadcastTest(NcclTestCase):
def testBroadcast(self):
self._Test(_NcclBroadcast, lambda x, y: x)
def testBroadcastSingleDevice(self):
# Broadcasts on a single device are removed completely during rewrite.
self._Test(_NcclBroadcast, lambda x, y: x,
(['/device:GPU:0', '/device:GPU:0'],))
def testBroadcastToCpuError(self):
# Broadcasts to CPU are not supported.
with self.assertRaisesRegexp(
errors.NotFoundError,
"No registered '_NcclBroadcastRecv' OpKernel for CPU devices"):
self._Test(_NcclBroadcast, lambda x, y: x,
(['/device:GPU:0', '/device:CPU:0'],))
class CombinedTest(NcclTestCase):
"""Test all-reduce vs. single-reduce plus broadcast in one session.run."""
def _Combined(self, tensors, devices):
all_reduce_tensors = _NcclAllReduce(nccl.all_sum, tensors, devices)
single_reduce_tensors = _NcclReduce(nccl.reduce_sum, tensors, devices)
broadcast_tensors = _NcclBroadcast(single_reduce_tensors, devices)
return all_reduce_tensors + broadcast_tensors
def testCombined(self):
self._Test(self._Combined, lambda x, y: x + y)
if __name__ == '__main__':
test.main()
| _DeviceTensors | identifier_name |
nccl_ops_test.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for nccl ops. See also the cc test for nccl_communicator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from functools import partial
import numpy as np
from tensorflow.contrib import nccl
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients
from tensorflow.python.platform import test
def _DeviceTensors(tensors, devices):
res = []
for t, d in zip(tensors, devices):
with ops.device(d):
res.append(array_ops.identity(t))
return res
def _NcclAllReduce(nccl_fun, tensors, devices):
return nccl_fun(_DeviceTensors(tensors, devices))
def _NcclReduce(nccl_fun, tensors, devices):
receiver = np.random.randint(0, len(devices))
with ops.device(devices[receiver]):
return [nccl_fun(_DeviceTensors(tensors, devices))]
def _NcclBroadcast(tensors, devices):
sender = np.random.randint(0, len(devices))
with ops.device(devices[sender]):
tensor = array_ops.identity(tensors[0])
broadcast = nccl.broadcast(tensor)
return _DeviceTensors([broadcast] * len(devices), devices)
class NcclTestCase(test.TestCase):
def _Test(self,
nccl_reduce,
numpy_fn,
device_sets=(['/device:GPU:1', '/device:GPU:2', '/device:GPU:0'],
['/device:GPU:1', '/device:GPU:0'])):
"""Tests that nccl_reduce does the same as reduction with numpy_fn.
Args:
nccl_reduce: A function taking a list of tensors and a list of devices,
and returning a list of reduced tensors and a list of ops to perform the
reduction.
numpy_fn: A function taking two tensors and returning the reduction of the
two.
device_sets: Tuple of virtual devices to run the test on.
"""
if not test.is_gpu_available():
return # Test requires access to a GPU
for dtype in [np.float32, np.int32, np.int64, np.float64]:
# Create session inside outer loop to test use of
# same communicator across multiple sessions.
with self.test_session(use_gpu=True) as sess:
for devices in device_sets:
shape = (3, 4)
random = (np.random.random_sample(shape) - .5) * 1024
tensors = []
for _ in devices:
|
np_ans = tensors[0]
for t in tensors[1:]:
np_ans = numpy_fn(np_ans, t)
reduce_tensors = nccl_reduce(tensors, devices)
self.assertNotEmpty(reduce_tensors)
# Test shape inference.
for r in reduce_tensors:
self.assertEqual(shape, r.get_shape())
result_tensors = [array_ops.identity(t) for t in reduce_tensors]
# Test execution and results.
for t in sess.run(result_tensors):
self.assertAllClose(t, np_ans)
def _TestGradient(self, nccl_reduce, numpy_fn):
"""Tests the gradient of nccl_reduce.
Args:
nccl_reduce: A function taking a list of tensors and a list of devices,
and returning a list of reduced tensors and a list of ops to perform the
reduction.
numpy_fn: A function taking two tensors and returning the gradient of the
reduction of the two.
"""
def _Gradient(tensors, devices):
inputs = [array_ops.placeholder(t.dtype, t.shape) for t in tensors]
reduce_tensors = nccl_reduce(inputs, devices)
losses = _DeviceTensors(tensors, [t.device for t in reduce_tensors])
grads = gradients.gradients(
reduce_tensors, inputs, losses, colocate_gradients_with_ops=True)
return [g for g in grads if g is not None]
self._Test(_Gradient, numpy_fn)
class AllReduceTest(NcclTestCase):
def testAllReduce(self):
self._Test(partial(_NcclAllReduce, nccl.all_sum), lambda x, y: x + y)
self._Test(partial(_NcclAllReduce, nccl.all_prod), lambda x, y: x * y)
self._Test(partial(_NcclAllReduce, nccl.all_min), np.minimum)
self._Test(partial(_NcclAllReduce, nccl.all_max), np.maximum)
def testAllSumGrad(self):
self._TestGradient(
partial(_NcclAllReduce, nccl.all_sum), lambda x, y: x + y)
def testErrors(self):
with self.assertRaisesRegexp(ValueError, 'Device assignment required'):
nccl.all_sum([array_ops.identity(np.random.random_sample((3, 4)))])
with self.assertRaisesRegexp(ValueError, 'Must pass >0 tensors'):
nccl.all_sum([])
class SingleReduceTest(NcclTestCase):
def testSum(self):
self._Test(partial(_NcclReduce, nccl.reduce_sum), lambda x, y: x + y)
def testSumGrad(self):
self._TestGradient(partial(_NcclReduce, nccl.reduce_sum), lambda x, y: x)
class BroadcastTest(NcclTestCase):
def testBroadcast(self):
self._Test(_NcclBroadcast, lambda x, y: x)
def testBroadcastSingleDevice(self):
# Broadcasts on a single device are removed completely during rewrite.
self._Test(_NcclBroadcast, lambda x, y: x,
(['/device:GPU:0', '/device:GPU:0'],))
def testBroadcastToCpuError(self):
# Broadcasts to CPU are not supported.
with self.assertRaisesRegexp(
errors.NotFoundError,
"No registered '_NcclBroadcastRecv' OpKernel for CPU devices"):
self._Test(_NcclBroadcast, lambda x, y: x,
(['/device:GPU:0', '/device:CPU:0'],))
class CombinedTest(NcclTestCase):
"""Test all-reduce vs. single-reduce plus broadcast in one session.run."""
def _Combined(self, tensors, devices):
all_reduce_tensors = _NcclAllReduce(nccl.all_sum, tensors, devices)
single_reduce_tensors = _NcclReduce(nccl.reduce_sum, tensors, devices)
broadcast_tensors = _NcclBroadcast(single_reduce_tensors, devices)
return all_reduce_tensors + broadcast_tensors
def testCombined(self):
self._Test(self._Combined, lambda x, y: x + y)
if __name__ == '__main__':
test.main()
| tensors.append(random.astype(dtype)) | conditional_block |
nccl_ops_test.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for nccl ops. See also the cc test for nccl_communicator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from functools import partial
import numpy as np
from tensorflow.contrib import nccl
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients
from tensorflow.python.platform import test
def _DeviceTensors(tensors, devices):
res = []
for t, d in zip(tensors, devices):
with ops.device(d):
res.append(array_ops.identity(t))
return res
def _NcclAllReduce(nccl_fun, tensors, devices):
return nccl_fun(_DeviceTensors(tensors, devices))
def _NcclReduce(nccl_fun, tensors, devices):
receiver = np.random.randint(0, len(devices))
with ops.device(devices[receiver]):
return [nccl_fun(_DeviceTensors(tensors, devices))]
def _NcclBroadcast(tensors, devices):
sender = np.random.randint(0, len(devices))
with ops.device(devices[sender]):
tensor = array_ops.identity(tensors[0])
broadcast = nccl.broadcast(tensor)
return _DeviceTensors([broadcast] * len(devices), devices)
class NcclTestCase(test.TestCase):
def _Test(self,
nccl_reduce,
numpy_fn,
device_sets=(['/device:GPU:1', '/device:GPU:2', '/device:GPU:0'],
['/device:GPU:1', '/device:GPU:0'])):
"""Tests that nccl_reduce does the same as reduction with numpy_fn.
Args:
nccl_reduce: A function taking a list of tensors and a list of devices,
and returning a list of reduced tensors and a list of ops to perform the
reduction.
numpy_fn: A function taking two tensors and returning the reduction of the
two.
device_sets: Tuple of virtual devices to run the test on.
"""
if not test.is_gpu_available():
return # Test requires access to a GPU
for dtype in [np.float32, np.int32, np.int64, np.float64]:
# Create session inside outer loop to test use of
# same communicator across multiple sessions. |
for devices in device_sets:
shape = (3, 4)
random = (np.random.random_sample(shape) - .5) * 1024
tensors = []
for _ in devices:
tensors.append(random.astype(dtype))
np_ans = tensors[0]
for t in tensors[1:]:
np_ans = numpy_fn(np_ans, t)
reduce_tensors = nccl_reduce(tensors, devices)
self.assertNotEmpty(reduce_tensors)
# Test shape inference.
for r in reduce_tensors:
self.assertEqual(shape, r.get_shape())
result_tensors = [array_ops.identity(t) for t in reduce_tensors]
# Test execution and results.
for t in sess.run(result_tensors):
self.assertAllClose(t, np_ans)
def _TestGradient(self, nccl_reduce, numpy_fn):
"""Tests the gradient of nccl_reduce.
Args:
nccl_reduce: A function taking a list of tensors and a list of devices,
and returns a list of reduced tensors and a list of ops to perform the
reduction.
numpy_fn: A function taking two tensors and returning the gradient of the
reduction of the two.
"""
def _Gradient(tensors, devices):
inputs = [array_ops.placeholder(t.dtype, t.shape) for t in tensors]
reduce_tensors = nccl_reduce(inputs, devices)
losses = _DeviceTensors(tensors, [t.device for t in reduce_tensors])
grads = gradients.gradients(
reduce_tensors, inputs, losses, colocate_gradients_with_ops=True)
return [g for g in grads if g is not None]
self._Test(_Gradient, numpy_fn)
class AllReduceTest(NcclTestCase):
def testAllReduce(self):
self._Test(partial(_NcclAllReduce, nccl.all_sum), lambda x, y: x + y)
self._Test(partial(_NcclAllReduce, nccl.all_prod), lambda x, y: x * y)
self._Test(partial(_NcclAllReduce, nccl.all_min), np.minimum)
self._Test(partial(_NcclAllReduce, nccl.all_max), np.maximum)
def testAllSumGrad(self):
self._TestGradient(
partial(_NcclAllReduce, nccl.all_sum), lambda x, y: x + y)
def testErrors(self):
with self.assertRaisesRegexp(ValueError, 'Device assignment required'):
nccl.all_sum([array_ops.identity(np.random.random_sample((3, 4)))])
with self.assertRaisesRegexp(ValueError, 'Must pass >0 tensors'):
nccl.all_sum([])
class SingleReduceTest(NcclTestCase):
def testSum(self):
self._Test(partial(_NcclReduce, nccl.reduce_sum), lambda x, y: x + y)
def testSumGrad(self):
self._TestGradient(partial(_NcclReduce, nccl.reduce_sum), lambda x, y: x)
class BroadcastTest(NcclTestCase):
def testBroadcast(self):
self._Test(_NcclBroadcast, lambda x, y: x)
def testBroadcastSingleDevice(self):
# Broadcasts on a single device are removed completely during rewrite.
self._Test(_NcclBroadcast, lambda x, y: x,
(['/device:GPU:0', '/device:GPU:0'],))
def testBroadcastToCpuError(self):
# Broadcasts to CPU are not supported.
with self.assertRaisesRegexp(
errors.NotFoundError,
"No registered '_NcclBroadcastRecv' OpKernel for CPU devices"):
self._Test(_NcclBroadcast, lambda x, y: x,
(['/device:GPU:0', '/device:CPU:0'],))
class CombinedTest(NcclTestCase):
"""Test all-reduce vs. single-reduce plus broadcast in one session.run."""
def _Combined(self, tensors, devices):
all_reduce_tensors = _NcclAllReduce(nccl.all_sum, tensors, devices)
single_reduce_tensors = _NcclReduce(nccl.reduce_sum, tensors, devices)
broadcast_tensors = _NcclBroadcast(single_reduce_tensors, devices)
return all_reduce_tensors + broadcast_tensors
def testCombined(self):
self._Test(self._Combined, lambda x, y: x + y)
if __name__ == '__main__':
test.main() | with self.test_session(use_gpu=True) as sess: | random_line_split |
runner.rs | use regex::Regex;
use state::Cucumber;
use event::request::{InvokeArgument, Request};
use event::response::{InvokeResponse, Response, StepMatchesResponse};
use definitions::registration::{CucumberRegistrar, SimpleStep};
use std::panic::{self, AssertUnwindSafe};
use std::str::FromStr;
/// The step runner for [Cucumber state](../state/struct.Cucumber.html)
///
/// The runner stands in for the Cucumber instance and provides an interface for
/// [Request](../event/request/enum.Request.html) events to be translated into
/// state changes and
/// step invocations, along with a
/// [Response](../event/response/enum.Response.html). These are typically
/// supplied by a running
/// [Server](../server/struct.Server.html), but may be supplied by a native
/// Gherkin implementation
/// later.
///
/// Typically this struct will only be instantiated by the user, and then
/// passed to a Server to
/// maintain.
///
#[allow(dead_code)]
pub struct WorldRunner<World> {
cuke: Cucumber<World>,
world: World,
}
impl<World> WorldRunner<World> {
#[allow(dead_code)]
pub fn new(world: World) -> WorldRunner<World> {
WorldRunner {
cuke: Cucumber::new(),
world: world,
}
}
}
/// An interface for implementers that can consume a
/// [Request](../event/request/enum.Request.html) and yield a
/// [Response](../event/response/enum.Response.html)
///
/// This generally refers to [WorldRunner](./struct.WorldRunner.html)
pub trait CommandRunner {
fn execute_cmd(&mut self, req: Request) -> Response;
}
impl<T: Fn(Request) -> Response> CommandRunner for T {
fn execute_cmd(&mut self, req: Request) -> Response {
self(req)
}
}
impl<World> CommandRunner for WorldRunner<World> {
fn execute_cmd(&mut self, req: Request) -> Response {
match req {
Request::BeginScenario(params) => {
self.cuke.tags = params.tags;
Response::BeginScenario
},
Request::Invoke(params) => {
let step = self.cuke
.step(u32::from_str(¶ms.id).unwrap())
.unwrap();
Response::Invoke(invoke_to_response(step, &self.cuke, &mut self.world, params.args))
},
Request::StepMatches(params) => {
let matches = self.cuke.find_match(¶ms.name_to_match);
if matches.is_empty() {
Response::StepMatches(StepMatchesResponse::NoMatch)
} else {
Response::StepMatches(StepMatchesResponse::Match(matches))
}
},
Request::EndScenario(_) => {
self.cuke.tags = Vec::new();
Response::EndScenario
},
// TODO: For some reason, cucumber prints the ruby snippet too. Fix that
Request::SnippetText(params) => {
let text = format!(" // In a step registration block where cuke: &mut \
CucumberRegistrar<YourWorld>\n use cucumber::InvokeResponse;\n use \
cucumber::helpers::r;\n {}!(cuke, r(\"^{}$\"), Box::new(move |c, _, \
_| {{\n c.pending(\"TODO\")\n }}));",
params.step_keyword,
params.step_name);
Response::SnippetText(text)
},
}
}
}
impl<World> CucumberRegistrar<World> for WorldRunner<World> {
fn given(&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) |
fn when(&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) {
self.cuke.when(file, line, regex, step)
}
fn then(&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) {
self.cuke.then(file, line, regex, step)
}
}
pub fn invoke_to_response<World>(test_body: &SimpleStep<World>,
cuke: &Cucumber<World>,
world: &mut World,
args: Vec<InvokeArgument>)
-> InvokeResponse {
let result = panic::catch_unwind(AssertUnwindSafe(|| test_body(cuke, world, args)));
match result {
Ok(()) => InvokeResponse::Success,
Err(err) => {
// Yoinked from rustc libstd, with InvokeResponse added as a possible cast
let msg = match err.downcast_ref::<&'static str>() {
Some(s) => *s,
None => {
match err.downcast_ref::<String>() {
Some(s) => &s[..],
None => {
match err.downcast_ref::<InvokeResponse>() {
Some(s) => return s.clone(),
None => "Box<Any>",
}
},
}
},
};
InvokeResponse::fail_from_str(msg)
},
}
}
| {
self.cuke.given(file, line, regex, step)
} | identifier_body |
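invoke_to_response above wraps the step body in catch_unwind and walks a downcast chain so that a panicking step turns into a failure payload rather than killing the process. A rough Python analogue of that mapping; the dict shape is illustrative, not the actual wire format:

def invoke_to_response(step, *args):
    try:
        step(*args)
        return {"status": "success"}
    except AssertionError as exc:   # like downcasting the panic to &str/String
        return {"status": "fail", "message": str(exc) or "Box<Any>"}
    except Exception as exc:        # anything else still becomes a failure
        return {"status": "fail", "message": repr(exc)}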
runner.rs | use regex::Regex;
use state::Cucumber;
use event::request::{InvokeArgument, Request};
use event::response::{InvokeResponse, Response, StepMatchesResponse};
use definitions::registration::{CucumberRegistrar, SimpleStep};
use std::panic::{self, AssertUnwindSafe};
use std::str::FromStr;
/// The step runner for [Cucumber state](../state/struct.Cucumber.html)
///
/// The runner stands in for the Cucumber instance and provides an interface for
/// [Request](../event/request/enum.Request.html) events to be translated into
/// state changes and
/// step invocations, along with a
/// [Response](../event/response/enum.Response.html). These are typically
/// supplied by a running
/// [Server](../server/struct.Server.html), but may be supplied by a native
/// Gherkin implementation
/// later.
///
/// Typically this struct will only be instantiated by the user, and then
/// passed to a Server to
/// maintain.
///
#[allow(dead_code)]
pub struct WorldRunner<World> {
cuke: Cucumber<World>,
world: World,
}
impl<World> WorldRunner<World> {
#[allow(dead_code)]
pub fn new(world: World) -> WorldRunner<World> {
WorldRunner {
cuke: Cucumber::new(),
world: world,
}
}
}
/// An interface for implementers that can consume a
/// [Request](../event/request/enum.Request.html) and yield a
/// [Response](../event/response/enum.Response.html)
///
/// This generally refers to [WorldRunner](./struct.WorldRunner.html)
pub trait CommandRunner {
fn execute_cmd(&mut self, req: Request) -> Response;
}
impl<T: Fn(Request) -> Response> CommandRunner for T {
fn execute_cmd(&mut self, req: Request) -> Response {
self(req)
}
}
impl<World> CommandRunner for WorldRunner<World> {
fn execute_cmd(&mut self, req: Request) -> Response {
match req {
Request::BeginScenario(params) => {
self.cuke.tags = params.tags;
Response::BeginScenario
},
Request::Invoke(params) => {
let step = self.cuke
.step(u32::from_str(¶ms.id).unwrap())
.unwrap();
Response::Invoke(invoke_to_response(step, &self.cuke, &mut self.world, params.args))
},
Request::StepMatches(params) => {
let matches = self.cuke.find_match(¶ms.name_to_match);
if matches.is_empty() {
Response::StepMatches(StepMatchesResponse::NoMatch)
} else {
Response::StepMatches(StepMatchesResponse::Match(matches))
}
},
Request::EndScenario(_) => {
self.cuke.tags = Vec::new();
Response::EndScenario
},
// TODO: For some reason, cucumber prints the ruby snippet too. Fix that
Request::SnippetText(params) => {
let text = format!(" // In a step registration block where cuke: &mut \
CucumberRegistrar<YourWorld>\n use cucumber::InvokeResponse;\n use \
cucumber::helpers::r;\n {}!(cuke, r(\"^{}$\"), Box::new(move |c, _, \
_| {{\n c.pending(\"TODO\")\n }}));",
params.step_keyword,
params.step_name);
Response::SnippetText(text)
},
}
}
}
impl<World> CucumberRegistrar<World> for WorldRunner<World> {
fn given(&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) {
self.cuke.given(file, line, regex, step)
}
fn when(&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) {
self.cuke.when(file, line, regex, step)
}
fn then(&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) {
self.cuke.then(file, line, regex, step)
}
}
pub fn invoke_to_response<World>(test_body: &SimpleStep<World>,
cuke: &Cucumber<World>,
world: &mut World,
args: Vec<InvokeArgument>)
-> InvokeResponse {
let result = panic::catch_unwind(AssertUnwindSafe(|| test_body(cuke, world, args)));
match result {
Ok(()) => InvokeResponse::Success,
Err(err) => {
// Yoinked from rustc libstd, with InvokeResponse added as a possible cast
let msg = match err.downcast_ref::<&'static str>() {
Some(s) => *s,
None => {
match err.downcast_ref::<String>() {
Some(s) => &s[..],
None => {
match err.downcast_ref::<InvokeResponse>() {
Some(s) => return s.clone(),
None => "Box<Any>",
}
}, | }
},
};
InvokeResponse::fail_from_str(msg)
},
}
} | random_line_split |
|
runner.rs | use regex::Regex;
use state::Cucumber;
use event::request::{InvokeArgument, Request};
use event::response::{InvokeResponse, Response, StepMatchesResponse};
use definitions::registration::{CucumberRegistrar, SimpleStep};
use std::panic::{self, AssertUnwindSafe};
use std::str::FromStr;
/// The step runner for [Cucumber state](../state/struct.Cucumber.html)
///
/// The runner stands in for the Cucumber instance and provides an interface for
/// [Request](../event/request/enum.Request.html) events to be translated into
/// state changes and
/// step invocations, along with a
/// [Response](../event/response/enum.Response.html). These are typically
/// supplied by a running
/// [Server](../server/struct.Server.html), but may be supplied by a native
/// Gherkin implementation
/// later.
///
/// Typically this struct will only be instantiated by the user, and then
/// passed to a Server to
/// maintain.
///
#[allow(dead_code)]
pub struct WorldRunner<World> {
cuke: Cucumber<World>,
world: World,
}
impl<World> WorldRunner<World> {
#[allow(dead_code)]
pub fn new(world: World) -> WorldRunner<World> {
WorldRunner {
cuke: Cucumber::new(),
world: world,
}
}
}
/// An interface for implementers that can consume a
/// [Request](../event/request/enum.Request.html) and yield a
/// [Response](../event/response/enum.Response.html)
///
/// This generally refers to [WorldRunner](./struct.WorldRunner.html)
pub trait CommandRunner {
fn execute_cmd(&mut self, req: Request) -> Response;
}
impl<T: Fn(Request) -> Response> CommandRunner for T {
fn execute_cmd(&mut self, req: Request) -> Response {
self(req)
}
}
impl<World> CommandRunner for WorldRunner<World> {
fn execute_cmd(&mut self, req: Request) -> Response {
match req {
Request::BeginScenario(params) => {
self.cuke.tags = params.tags;
Response::BeginScenario
},
Request::Invoke(params) => {
let step = self.cuke
.step(u32::from_str(¶ms.id).unwrap())
.unwrap();
Response::Invoke(invoke_to_response(step, &self.cuke, &mut self.world, params.args))
},
Request::StepMatches(params) => | ,
Request::EndScenario(_) => {
self.cuke.tags = Vec::new();
Response::EndScenario
},
// TODO: For some reason, cucumber prints the ruby snippet too. Fix that
Request::SnippetText(params) => {
let text = format!(" // In a step registration block where cuke: &mut \
CucumberRegistrar<YourWorld>\n use cucumber::InvokeResponse;\n use \
cucumber::helpers::r;\n {}!(cuke, r(\"^{}$\"), Box::new(move |c, _, \
_| {{\n c.pending(\"TODO\")\n }}));",
params.step_keyword,
params.step_name);
Response::SnippetText(text)
},
}
}
}
impl<World> CucumberRegistrar<World> for WorldRunner<World> {
fn given(&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) {
self.cuke.given(file, line, regex, step)
}
fn when(&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) {
self.cuke.when(file, line, regex, step)
}
fn then(&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) {
self.cuke.then(file, line, regex, step)
}
}
pub fn invoke_to_response<World>(test_body: &SimpleStep<World>,
cuke: &Cucumber<World>,
world: &mut World,
args: Vec<InvokeArgument>)
-> InvokeResponse {
let result = panic::catch_unwind(AssertUnwindSafe(|| test_body(cuke, world, args)));
match result {
Ok(()) => InvokeResponse::Success,
Err(err) => {
// Yoinked from rustc libstd, with InvokeResponse added as a possible cast
let msg = match err.downcast_ref::<&'static str>() {
Some(s) => *s,
None => {
match err.downcast_ref::<String>() {
Some(s) => &s[..],
None => {
match err.downcast_ref::<InvokeResponse>() {
Some(s) => return s.clone(),
None => "Box<Any>",
}
},
}
},
};
InvokeResponse::fail_from_str(msg)
},
}
}
| {
let matches = self.cuke.find_match(¶ms.name_to_match);
if matches.is_empty() {
Response::StepMatches(StepMatchesResponse::NoMatch)
} else {
Response::StepMatches(StepMatchesResponse::Match(matches))
}
} | conditional_block |
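The StepMatches arm extracted in this row branches purely on whether find_match produced any hits. A compact Python sketch of the same dispatch against a hypothetical one-entry step table:

import re

STEPS = {r"^I have (\d+) cukes$": "step_0"}  # pattern -> step id (made up)

def step_matches(name):
    hits = [(step_id, m.groups()) for pattern, step_id in STEPS.items()
            if (m := re.search(pattern, name))]
    return ("NoMatch", []) if not hits else ("Match", hits)

assert step_matches("I have 5 cukes") == ("Match", [("step_0", ("5",))])
assert step_matches("unrelated text")[0] == "NoMatch"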
runner.rs | use regex::Regex;
use state::Cucumber;
use event::request::{InvokeArgument, Request};
use event::response::{InvokeResponse, Response, StepMatchesResponse};
use definitions::registration::{CucumberRegistrar, SimpleStep};
use std::panic::{self, AssertUnwindSafe};
use std::str::FromStr;
/// The step runner for [Cucumber state](../state/struct.Cucumber.html)
///
/// The runner stands in for the Cucumber instance and provides an interface for
/// [Request](../event/request/enum.Request.html) events to be translated into
/// state changes and
/// step invocations, along with a
/// [Response](../event/response/enum.Response.html). These are typically
/// supplied by a running
/// [Server](../server/struct.Server.html), but may be supplied by a native
/// Gherkin implementation
/// later.
///
/// Typically this struct will only be instantiated by the user, and then
/// passed to a Server to
/// maintain.
///
#[allow(dead_code)]
pub struct WorldRunner<World> {
cuke: Cucumber<World>,
world: World,
}
impl<World> WorldRunner<World> {
#[allow(dead_code)]
pub fn new(world: World) -> WorldRunner<World> {
WorldRunner {
cuke: Cucumber::new(),
world: world,
}
}
}
/// An interface for implementers that can consume a
/// [Request](../event/request/enum.Request.html) and yield a
/// [Response](../event/response/enum.Response.html)
///
/// This generally refers to [WorldRunner](./struct.WorldRunner.html)
pub trait CommandRunner {
fn execute_cmd(&mut self, req: Request) -> Response;
}
impl<T: Fn(Request) -> Response> CommandRunner for T {
fn execute_cmd(&mut self, req: Request) -> Response {
self(req)
}
}
impl<World> CommandRunner for WorldRunner<World> {
fn execute_cmd(&mut self, req: Request) -> Response {
match req {
Request::BeginScenario(params) => {
self.cuke.tags = params.tags;
Response::BeginScenario
},
Request::Invoke(params) => {
let step = self.cuke
.step(u32::from_str(¶ms.id).unwrap())
.unwrap();
Response::Invoke(invoke_to_response(step, &self.cuke, &mut self.world, params.args))
},
Request::StepMatches(params) => {
let matches = self.cuke.find_match(¶ms.name_to_match);
if matches.is_empty() {
Response::StepMatches(StepMatchesResponse::NoMatch)
} else {
Response::StepMatches(StepMatchesResponse::Match(matches))
}
},
Request::EndScenario(_) => {
self.cuke.tags = Vec::new();
Response::EndScenario
},
// TODO: For some reason, cucumber prints the ruby snippet too. Fix that
Request::SnippetText(params) => {
let text = format!(" // In a step registration block where cuke: &mut \
CucumberRegistrar<YourWorld>\n use cucumber::InvokeResponse;\n use \
cucumber::helpers::r;\n {}!(cuke, r(\"^{}$\"), Box::new(move |c, _, \
_| {{\n c.pending(\"TODO\")\n }}));",
params.step_keyword,
params.step_name);
Response::SnippetText(text)
},
}
}
}
impl<World> CucumberRegistrar<World> for WorldRunner<World> {
fn given(&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) {
self.cuke.given(file, line, regex, step)
}
fn | (&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) {
self.cuke.when(file, line, regex, step)
}
fn then(&mut self, file: &str, line: u32, regex: Regex, step: SimpleStep<World>) {
self.cuke.then(file, line, regex, step)
}
}
pub fn invoke_to_response<World>(test_body: &SimpleStep<World>,
cuke: &Cucumber<World>,
world: &mut World,
args: Vec<InvokeArgument>)
-> InvokeResponse {
let result = panic::catch_unwind(AssertUnwindSafe(|| test_body(cuke, world, args)));
match result {
Ok(()) => InvokeResponse::Success,
Err(err) => {
// Yoinked from rustc libstd, with InvokeResponse added as a possible cast
let msg = match err.downcast_ref::<&'static str>() {
Some(s) => *s,
None => {
match err.downcast_ref::<String>() {
Some(s) => &s[..],
None => {
match err.downcast_ref::<InvokeResponse>() {
Some(s) => return s.clone(),
None => "Box<Any>",
}
},
}
},
};
InvokeResponse::fail_from_str(msg)
},
}
}
| when | identifier_name |
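given, when, and then above are deliberately thin: each forwards to the same underlying Cucumber registry with only the keyword differing. A compact Python equivalent of that registrar shape (all names illustrative):

class Registry:
    def __init__(self):
        self.steps = []

    def _register(self, keyword, file, line, pattern, step):
        self.steps.append((keyword, file, line, pattern, step))

    def given(self, file, line, pattern, step):
        self._register("given", file, line, pattern, step)

    def when(self, file, line, pattern, step):
        self._register("when", file, line, pattern, step)

    def then(self, file, line, pattern, step):
        self._register("then", file, line, pattern, step)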
counting.py | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple, hierarchical distributed counter."""
import threading
import time
from typing import Dict, Mapping, Optional, Union
from acme import core
Number = Union[int, float]
class Counter(core.Saveable):
"""A simple counter object that can periodically sync with a parent."""
def __init__(self,
parent: Optional['Counter'] = None,
prefix: str = '',
time_delta: float = 1.0,
return_only_prefixed: bool = False):
"""Initialize the counter.
Args:
parent: a Counter object to cache locally (or None for no caching).
prefix: string prefix to use for all local counts.
time_delta: time difference in seconds between syncing with the parent
counter.
return_only_prefixed: if True, and if `prefix` isn't empty, return counts
restricted to the given `prefix` on each call to `increment` and
`get_counts`. The `prefix` is stripped from returned count names.
"""
self._parent = parent
self._prefix = prefix
self._time_delta = time_delta
# Hold local counts and we'll lock around that.
# These are counts to be synced to the parent and the cache.
self._counts = {}
self._lock = threading.Lock()
# We'll sync periodically (when the last sync was more than self._time_delta
# seconds ago).
self._cache = {}
self._last_sync_time = 0.0
self._return_only_prefixed = return_only_prefixed
def increment(self, **counts: Number) -> Dict[str, Number]:
"""Increment a set of counters.
Args:
**counts: keyword arguments specifying count increments.
Returns:
The [name, value] mapping of all counters stored, i.e. this will also
include counts that were not updated by this call to increment.
"""
with self._lock:
for key, value in counts.items():
self._counts.setdefault(key, 0)
self._counts[key] += value
return self.get_counts()
def get_counts(self) -> Dict[str, Number]:
"""Return all counts tracked by this counter."""
now = time.time()
# TODO(b/144421838): use futures instead of blocking.
if self._parent and (now - self._last_sync_time) > self._time_delta:
with self._lock:
counts = _prefix_keys(self._counts, self._prefix)
# Reset the local counts, as they will be merged into the parent and the
# cache.
self._counts = {}
self._cache = self._parent.increment(**counts)
self._last_sync_time = now
# Potentially prefix the keys in the counts dictionary.
counts = _prefix_keys(self._counts, self._prefix)
# If there's no prefix make a copy of the dictionary so we don't modify the
# internal self._counts.
if not self._prefix:
counts = dict(counts)
# Combine local counts with any parent counts.
for key, value in self._cache.items():
|
if self._prefix and self._return_only_prefixed:
counts = dict([(key[len(self._prefix) + 1:], value)
for key, value in counts.items()
if key.startswith(f'{self._prefix}_')])
return counts
def save(self) -> Mapping[str, Mapping[str, Number]]:
return {'counts': self._counts, 'cache': self._cache}
def restore(self, state: Mapping[str, Mapping[str, Number]]):
# Force a sync, if necessary, on the next get_counts call.
self._last_sync_time = 0.
self._counts = state['counts']
self._cache = state['cache']
def _prefix_keys(dictionary: Dict[str, Number], prefix: str):
"""Return a dictionary with prefixed keys.
Args:
dictionary: dictionary to return a copy of.
prefix: string to use as the prefix.
Returns:
Return a copy of the given dictionary whose keys are replaced by
"{prefix}_{key}". If the prefix is the empty string it returns the given
dictionary unchanged.
"""
if prefix:
dictionary = {f'{prefix}_{k}': v for k, v in dictionary.items()}
return dictionary
| counts[key] = counts.get(key, 0) + value | conditional_block |
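The elided line in this row is the cache merge, counts[key] = counts.get(key, 0) + value, which folds the parent-synced cache into the local counts. The same merge in isolation, with made-up numbers:

local = {"steps": 5}
cache = {"steps": 100, "episodes": 3}  # counts already absorbed by the parent
merged = dict(local)
for key, value in cache.items():
    merged[key] = merged.get(key, 0) + value
assert merged == {"steps": 105, "episodes": 3}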
counting.py | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple, hierarchical distributed counter."""
import threading
import time
from typing import Dict, Mapping, Optional, Union
from acme import core
Number = Union[int, float]
class Counter(core.Saveable):
"""A simple counter object that can periodically sync with a parent."""
def __init__(self,
parent: Optional['Counter'] = None,
prefix: str = '',
time_delta: float = 1.0,
return_only_prefixed: bool = False):
"""Initialize the counter.
Args:
parent: a Counter object to cache locally (or None for no caching).
prefix: string prefix to use for all local counts.
time_delta: time difference in seconds between syncing with the parent
counter.
return_only_prefixed: if True, and if `prefix` isn't empty, return counts
restricted to the given `prefix` on each call to `increment` and
`get_counts`. The `prefix` is stripped from returned count names.
"""
self._parent = parent
self._prefix = prefix
self._time_delta = time_delta
# Hold local counts and we'll lock around that.
# These are counts to be synced to the parent and the cache.
self._counts = {}
self._lock = threading.Lock()
# We'll sync periodically (when the last sync was more than self._time_delta
# seconds ago).
self._cache = {}
self._last_sync_time = 0.0
self._return_only_prefixed = return_only_prefixed
def increment(self, **counts: Number) -> Dict[str, Number]:
"""Increment a set of counters.
Args:
**counts: keyword arguments specifying count increments.
Returns:
The [name, value] mapping of all counters stored, i.e. this will also
include counts that were not updated by this call to increment.
"""
with self._lock:
for key, value in counts.items():
self._counts.setdefault(key, 0)
self._counts[key] += value
return self.get_counts()
def get_counts(self) -> Dict[str, Number]:
"""Return all counts tracked by this counter."""
now = time.time()
# TODO(b/144421838): use futures instead of blocking.
if self._parent and (now - self._last_sync_time) > self._time_delta:
with self._lock:
counts = _prefix_keys(self._counts, self._prefix)
# Reset the local counts, as they will be merged into the parent and the
# cache.
self._counts = {}
self._cache = self._parent.increment(**counts)
self._last_sync_time = now
# Potentially prefix the keys in the counts dictionary.
counts = _prefix_keys(self._counts, self._prefix)
# If there's no prefix make a copy of the dictionary so we don't modify the
# internal self._counts.
if not self._prefix:
counts = dict(counts)
# Combine local counts with any parent counts.
for key, value in self._cache.items():
counts[key] = counts.get(key, 0) + value
if self._prefix and self._return_only_prefixed:
counts = dict([(key[len(self._prefix) + 1:], value)
for key, value in counts.items()
if key.startswith(f'{self._prefix}_')])
return counts
def save(self) -> Mapping[str, Mapping[str, Number]]:
return {'counts': self._counts, 'cache': self._cache}
def restore(self, state: Mapping[str, Mapping[str, Number]]):
# Force a sync, if necessary, on the next get_counts call.
self._last_sync_time = 0.
self._counts = state['counts']
self._cache = state['cache']
def _prefix_keys(dictionary: Dict[str, Number], prefix: str):
| """Return a dictionary with prefixed keys.
Args:
dictionary: dictionary to return a copy of.
prefix: string to use as the prefix.
Returns:
Return a copy of the given dictionary whose keys are replaced by
"{prefix}_{key}". If the prefix is the empty string it returns the given
dictionary unchanged.
"""
if prefix:
dictionary = {f'{prefix}_{k}': v for k, v in dictionary.items()}
return dictionary | identifier_body |
|
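_prefix_keys, whose body this row elides, rewrites keys only when a prefix is set and otherwise returns the dictionary untouched. Restated and exercised standalone:

def _prefix_keys(dictionary, prefix):
    if prefix:
        dictionary = {f'{prefix}_{k}': v for k, v in dictionary.items()}
    return dictionary

assert _prefix_keys({'steps': 4}, 'actor') == {'actor_steps': 4}
assert _prefix_keys({'steps': 4}, '') == {'steps': 4}  # unchanged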
counting.py | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple, hierarchical distributed counter."""
import threading
import time
from typing import Dict, Mapping, Optional, Union
from acme import core
Number = Union[int, float]
class Counter(core.Saveable):
"""A simple counter object that can periodically sync with a parent."""
def __init__(self,
parent: Optional['Counter'] = None,
prefix: str = '',
time_delta: float = 1.0,
return_only_prefixed: bool = False):
"""Initialize the counter.
Args:
parent: a Counter object to cache locally (or None for no caching).
prefix: string prefix to use for all local counts.
time_delta: time difference in seconds between syncing with the parent
counter.
return_only_prefixed: if True, and if `prefix` isn't empty, return counts
restricted to the given `prefix` on each call to `increment` and
`get_counts`. The `prefix` is stripped from returned count names.
"""
self._parent = parent
self._prefix = prefix
self._time_delta = time_delta
# Hold local counts and we'll lock around that.
# These are counts to be synced to the parent and the cache.
self._counts = {}
self._lock = threading.Lock()
# We'll sync periodically (when the last sync was more than self._time_delta
# seconds ago).
self._cache = {}
self._last_sync_time = 0.0
self._return_only_prefixed = return_only_prefixed
def increment(self, **counts: Number) -> Dict[str, Number]:
"""Increment a set of counters.
Args:
**counts: keyword arguments specifying count increments.
Returns:
The [name, value] mapping of all counters stored, i.e. this will also
include counts that were not updated by this call to increment.
"""
with self._lock:
for key, value in counts.items():
self._counts.setdefault(key, 0)
self._counts[key] += value
return self.get_counts()
def get_counts(self) -> Dict[str, Number]:
"""Return all counts tracked by this counter."""
now = time.time()
# TODO(b/144421838): use futures instead of blocking.
if self._parent and (now - self._last_sync_time) > self._time_delta:
with self._lock:
counts = _prefix_keys(self._counts, self._prefix)
# Reset the local counts, as they will be merged into the parent and the
# cache.
self._counts = {}
self._cache = self._parent.increment(**counts)
self._last_sync_time = now
# Potentially prefix the keys in the counts dictionary.
counts = _prefix_keys(self._counts, self._prefix)
# If there's no prefix make a copy of the dictionary so we don't modify the
# internal self._counts.
if not self._prefix:
counts = dict(counts)
# Combine local counts with any parent counts.
for key, value in self._cache.items():
counts[key] = counts.get(key, 0) + value
if self._prefix and self._return_only_prefixed:
counts = dict([(key[len(self._prefix) + 1:], value)
for key, value in counts.items()
if key.startswith(f'{self._prefix}_')])
return counts
def save(self) -> Mapping[str, Mapping[str, Number]]:
return {'counts': self._counts, 'cache': self._cache}
def restore(self, state: Mapping[str, Mapping[str, Number]]):
# Force a sync, if necessary, on the next get_counts call.
self._last_sync_time = 0.
self._counts = state['counts']
self._cache = state['cache']
def | (dictionary: Dict[str, Number], prefix: str):
"""Return a dictionary with prefixed keys.
Args:
dictionary: dictionary to return a copy of.
prefix: string to use as the prefix.
Returns:
Return a copy of the given dictionary whose keys are replaced by
"{prefix}_{key}". If the prefix is the empty string it returns the given
dictionary unchanged.
"""
if prefix:
dictionary = {f'{prefix}_{k}': v for k, v in dictionary.items()}
return dictionary
| _prefix_keys | identifier_name |
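# A minimal usage sketch of the Counter above; it assumes the module is
# importable as acme.utils.counting, and the prefix and count names are
# illustrative rather than prescribed.
from acme.utils.counting import Counter

parent = Counter()
child = Counter(parent, prefix='learner', time_delta=0.0)
child.increment(steps=1)  # forwarded to the parent as 'learner_steps'
assert parent.get_counts() == {'learner_steps': 1}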
counting.py | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple, hierarchical distributed counter."""
import threading
import time
from typing import Dict, Mapping, Optional, Union
from acme import core
Number = Union[int, float]
class Counter(core.Saveable):
"""A simple counter object that can periodically sync with a parent."""
def __init__(self,
parent: Optional['Counter'] = None,
prefix: str = '',
time_delta: float = 1.0,
return_only_prefixed: bool = False):
"""Initialize the counter.
Args:
parent: a Counter object to cache locally (or None for no caching).
prefix: string prefix to use for all local counts.
time_delta: time difference in seconds between syncing with the parent
counter.
return_only_prefixed: if True, and if `prefix` isn't empty, return counts
restricted to the given `prefix` on each call to `increment` and
`get_counts`. The `prefix` is stripped from returned count names.
"""
self._parent = parent
self._prefix = prefix
self._time_delta = time_delta
# Local counts, guarded by the lock below. These are the counts still to be
# synced to the parent and merged into the cache.
self._counts = {}
self._lock = threading.Lock()
# We'll sync periodically (when the last sync was more than self._time_delta
# seconds ago).
self._cache = {}
self._last_sync_time = 0.0
self._return_only_prefixed = return_only_prefixed
def increment(self, **counts: Number) -> Dict[str, Number]:
"""Increment a set of counters.
Args:
**counts: keyword arguments specifying count increments.
Returns:
The [name, value] mapping of all counters stored, i.e. this will also
include counts that were not updated by this call to increment.
"""
with self._lock:
for key, value in counts.items():
self._counts.setdefault(key, 0)
self._counts[key] += value
return self.get_counts()
def get_counts(self) -> Dict[str, Number]:
"""Return all counts tracked by this counter."""
now = time.time()
# TODO(b/144421838): use futures instead of blocking.
if self._parent and (now - self._last_sync_time) > self._time_delta: | with self._lock:
counts = _prefix_keys(self._counts, self._prefix)
# Reset the local counts, as they will be merged into the parent and the
# cache.
self._counts = {}
self._cache = self._parent.increment(**counts)
self._last_sync_time = now
# Potentially prefix the keys in the counts dictionary.
counts = _prefix_keys(self._counts, self._prefix)
# If there's no prefix make a copy of the dictionary so we don't modify the
# internal self._counts.
if not self._prefix:
counts = dict(counts)
# Combine local counts with any parent counts.
for key, value in self._cache.items():
counts[key] = counts.get(key, 0) + value
if self._prefix and self._return_only_prefixed:
counts = dict([(key[len(self._prefix) + 1:], value)
for key, value in counts.items()
if key.startswith(f'{self._prefix}_')])
return counts
def save(self) -> Mapping[str, Mapping[str, Number]]:
return {'counts': self._counts, 'cache': self._cache}
def restore(self, state: Mapping[str, Mapping[str, Number]]):
# Force a sync, if necessary, on the next get_counts call.
self._last_sync_time = 0.
self._counts = state['counts']
self._cache = state['cache']
def _prefix_keys(dictionary: Dict[str, Number], prefix: str):
"""Return a dictionary with prefixed keys.
Args:
dictionary: dictionary to return a copy of.
prefix: string to use as the prefix.
Returns:
Return a copy of the given dictionary whose keys are replaced by
"{prefix}_{key}". If the prefix is the empty string it returns the given
dictionary unchanged.
"""
if prefix:
dictionary = {f'{prefix}_{k}': v for k, v in dictionary.items()}
return dictionary | random_line_split |
|
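# Sketch of the return_only_prefixed option (illustrative names): counts are
# stored in the parent under the prefixed key, but returned with the prefix
# stripped.
from acme.utils.counting import Counter

parent = Counter()
actor = Counter(parent, prefix='actor', time_delta=0.0, return_only_prefixed=True)
counts = actor.increment(episodes=2)
assert counts == {'episodes': 2}                     # prefix stripped locally
assert parent.get_counts() == {'actor_episodes': 2}  # prefixed in the parent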
edwards.js | 'use strict';
var curve = require('../curve');
var elliptic = require('../../elliptic');
var bn = require('bn.js');
var inherits = require('inherits');
var Base = curve.base;
var assert = elliptic.utils.assert;
function EdwardsCurve(conf) {
// NOTE: these flags must be set before Base.call(), since Base.call() creates a point
this.twisted = (conf.a | 0) !== 1;
this.mOneA = this.twisted && (conf.a | 0) === -1;
this.extended = this.mOneA;
Base.call(this, 'edwards', conf);
this.a = new bn(conf.a, 16).mod(this.red.m).toRed(this.red);
this.c = new bn(conf.c, 16).toRed(this.red);
this.c2 = this.c.redSqr();
this.d = new bn(conf.d, 16).toRed(this.red);
this.dd = this.d.redAdd(this.d);
assert(!this.twisted || this.c.fromRed().cmpn(1) === 0);
this.oneC = (conf.c | 0) === 1;
}
inherits(EdwardsCurve, Base);
module.exports = EdwardsCurve;
EdwardsCurve.prototype._mulA = function _mulA(num) {
if (this.mOneA)
return num.redNeg();
else
return this.a.redMul(num);
};
EdwardsCurve.prototype._mulC = function _mulC(num) {
if (this.oneC)
return num;
else
return this.c.redMul(num);
};
// Just for compatibility with Short curve
EdwardsCurve.prototype.jpoint = function jpoint(x, y, z, t) {
return this.point(x, y, z, t);
};
EdwardsCurve.prototype.pointFromX = function pointFromX(odd, x) {
x = new bn(x, 16);
if (!x.red)
x = x.toRed(this.red);
var x2 = x.redSqr();
var rhs = this.c2.redSub(this.a.redMul(x2));
var lhs = this.one.redSub(this.c2.redMul(this.d).redMul(x2));
var y = rhs.redMul(lhs.redInvm()).redSqrt();
var isOdd = y.fromRed().isOdd();
if (odd && !isOdd || !odd && isOdd)
y = y.redNeg();
return this.point(x, y, curve.one);
};
EdwardsCurve.prototype.validate = function validate(point) {
if (point.isInfinity())
return true;
// Curve: A * X^2 + Y^2 = C^2 * (1 + D * X^2 * Y^2)
point.normalize();
var x2 = point.x.redSqr();
var y2 = point.y.redSqr();
var lhs = x2.redMul(this.a).redAdd(y2);
var rhs = this.c2.redMul(this.one.redAdd(this.d.redMul(x2).redMul(y2)));
return lhs.cmp(rhs) === 0;
};
function Point(curve, x, y, z, t) {
Base.BasePoint.call(this, curve, 'projective');
if (x === null && y === null && z === null) {
this.x = this.curve.zero;
this.y = this.curve.one;
this.z = this.curve.one;
this.t = this.curve.zero;
this.zOne = true;
} else {
this.x = new bn(x, 16);
this.y = new bn(y, 16);
this.z = z ? new bn(z, 16) : this.curve.one;
this.t = t && new bn(t, 16);
if (!this.x.red)
this.x = this.x.toRed(this.curve.red);
if (!this.y.red)
this.y = this.y.toRed(this.curve.red);
if (!this.z.red)
this.z = this.z.toRed(this.curve.red);
if (this.t && !this.t.red)
this.t = this.t.toRed(this.curve.red);
this.zOne = this.z === this.curve.one;
// Use extended coordinates
if (this.curve.extended && !this.t) {
this.t = this.x.redMul(this.y);
if (!this.zOne)
this.t = this.t.redMul(this.z.redInvm());
}
}
}
inherits(Point, Base.BasePoint);
EdwardsCurve.prototype.pointFromJSON = function pointFromJSON(obj) {
return Point.fromJSON(this, obj);
};
EdwardsCurve.prototype.point = function point(x, y, z, t) {
return new Point(this, x, y, z, t);
};
Point.fromJSON = function fromJSON(curve, obj) {
return new Point(curve, obj[0], obj[1], obj[2]);
};
Point.prototype.inspect = function inspect() {
if (this.isInfinity())
return '<EC Point Infinity>';
return '<EC Point x: ' + this.x.fromRed().toString(16, 2) +
' y: ' + this.y.fromRed().toString(16, 2) +
' z: ' + this.z.fromRed().toString(16, 2) + '>';
};
Point.prototype.isInfinity = function isInfinity() {
// XXX This code assumes that zero is always zero in red
return this.x.cmpn(0) === 0 &&
this.y.cmp(this.z) === 0;
};
Point.prototype._extDbl = function _extDbl() {
// hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html | // B = Y1^2
var b = this.y.redSqr();
// C = 2 * Z1^2
var c = this.z.redSqr();
c = c.redIAdd(c);
// D = a * A
var d = this.curve._mulA(a);
// E = (X1 + Y1)^2 - A - B
var e = this.x.redAdd(this.y).redSqr().redISub(a).redISub(b);
// G = D + B
var g = d.redAdd(b);
// F = G - C
var f = g.redSub(c);
// H = D - B
var h = d.redSub(b);
// X3 = E * F
var nx = e.redMul(f);
// Y3 = G * H
var ny = g.redMul(h);
// T3 = E * H
var nt = e.redMul(h);
// Z3 = F * G
var nz = f.redMul(g);
return this.curve.point(nx, ny, nz, nt);
};
Point.prototype._projDbl = function _projDbl() {
// hyperelliptic.org/EFD/g1p/auto-twisted-projective.html
// #doubling-dbl-2008-bbjlp
// #doubling-dbl-2007-bl
// and others
// Generally 3M + 4S or 2M + 4S
// B = (X1 + Y1)^2
var b = this.x.redAdd(this.y).redSqr();
// C = X1^2
var c = this.x.redSqr();
// D = Y1^2
var d = this.y.redSqr();
var nx;
var ny;
var nz;
if (this.curve.twisted) {
// E = a * C
var e = this.curve._mulA(c);
// F = E + D
var f = e.redAdd(d);
if (this.zOne) {
// X3 = (B - C - D) * (F - 2)
nx = b.redSub(c).redSub(d).redMul(f.redSub(this.curve.two));
// Y3 = F * (E - D)
ny = f.redMul(e.redSub(d));
// Z3 = F^2 - 2 * F
nz = f.redSqr().redSub(f).redSub(f);
} else {
// H = Z1^2
var h = this.z.redSqr();
// J = F - 2 * H
var j = f.redSub(h).redISub(h);
// X3 = (B-C-D)*J
nx = b.redSub(c).redISub(d).redMul(j);
// Y3 = F * (E - D)
ny = f.redMul(e.redSub(d));
// Z3 = F * J
nz = f.redMul(j);
}
} else {
// E = C + D
var e = c.redAdd(d);
// H = (c * Z1)^2
var h = this.curve._mulC(this.z).redSqr();
// J = E - 2 * H
var j = e.redSub(h).redSub(h);
// X3 = c * (B - E) * J
nx = this.curve._mulC(b.redISub(e)).redMul(j);
// Y3 = c * E * (C - D)
ny = this.curve._mulC(e).redMul(c.redISub(d));
// Z3 = E * J
nz = e.redMul(j);
}
return this.curve.point(nx, ny, nz);
};
Point.prototype.dbl = function dbl() {
if (this.isInfinity())
return this;
// Double in extended coordinates
if (this.curve.extended)
return this._extDbl();
else
return this._projDbl();
};
Point.prototype._extAdd = function _extAdd(p) {
// hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
// #addition-add-2008-hwcd-3
// 8M
// A = (Y1 - X1) * (Y2 - X2)
var a = this.y.redSub(this.x).redMul(p.y.redSub(p.x));
// B = (Y1 + X1) * (Y2 + X2)
var b = this.y.redAdd(this.x).redMul(p.y.redAdd(p.x));
// C = T1 * k * T2
var c = this.t.redMul(this.curve.dd).redMul(p.t);
// D = Z1 * 2 * Z2
var d = this.z.redMul(p.z.redAdd(p.z));
// E = B - A
var e = b.redSub(a);
// F = D - C
var f = d.redSub(c);
// G = D + C
var g = d.redAdd(c);
// H = B + A
var h = b.redAdd(a);
// X3 = E * F
var nx = e.redMul(f);
// Y3 = G * H
var ny = g.redMul(h);
// T3 = E * H
var nt = e.redMul(h);
// Z3 = F * G
var nz = f.redMul(g);
return this.curve.point(nx, ny, nz, nt);
};
Point.prototype._projAdd = function _projAdd(p) {
// hyperelliptic.org/EFD/g1p/auto-twisted-projective.html
// #addition-add-2008-bbjlp
// #addition-add-2007-bl
// 10M + 1S
// A = Z1 * Z2
var a = this.z.redMul(p.z);
// B = A^2
var b = a.redSqr();
// C = X1 * X2
var c = this.x.redMul(p.x);
// D = Y1 * Y2
var d = this.y.redMul(p.y);
// E = d * C * D
var e = this.curve.d.redMul(c).redMul(d);
// F = B - E
var f = b.redSub(e);
// G = B + E
var g = b.redAdd(e);
// X3 = A * F * ((X1 + Y1) * (X2 + Y2) - C - D)
var tmp = this.x.redAdd(this.y).redMul(p.x.redAdd(p.y)).redISub(c).redISub(d);
var nx = a.redMul(f).redMul(tmp);
var ny;
var nz;
if (this.curve.twisted) {
// Y3 = A * G * (D - a * C)
ny = a.redMul(g).redMul(d.redSub(this.curve._mulA(c)));
// Z3 = F * G
nz = f.redMul(g);
} else {
// Y3 = A * G * (D - C)
ny = a.redMul(g).redMul(d.redSub(c));
// Z3 = c * F * G
nz = this.curve._mulC(f).redMul(g);
}
return this.curve.point(nx, ny, nz);
};
Point.prototype.add = function add(p) {
if (this.isInfinity())
return p;
if (p.isInfinity())
return this;
if (this.curve.extended)
return this._extAdd(p);
else
return this._projAdd(p);
};
Point.prototype.mul = function mul(k) {
if (this._hasDoubles(k))
return this.curve._fixedNafMul(this, k);
else
return this.curve._wnafMul(this, k);
};
Point.prototype.mulAdd = function mulAdd(k1, p, k2) {
return this.curve._wnafMulAdd(1, [ this, p ], [ k1, k2 ], 2);
};
Point.prototype.normalize = function normalize() {
if (this.zOne)
return this;
// Normalize coordinates
var zi = this.z.redInvm();
this.x = this.x.redMul(zi);
this.y = this.y.redMul(zi);
if (this.t)
this.t = this.t.redMul(zi);
this.z = this.curve.one;
this.zOne = true;
return this;
};
Point.prototype.neg = function neg() {
return this.curve.point(this.x.redNeg(),
this.y,
this.z,
this.t && this.t.redNeg());
};
Point.prototype.getX = function getX() {
this.normalize();
return this.x.fromRed();
};
Point.prototype.getY = function getY() {
this.normalize();
return this.y.fromRed();
};
// Compatibility with BaseCurve
Point.prototype.toP = Point.prototype.normalize;
Point.prototype.mixedAdd = Point.prototype.add; | // #doubling-dbl-2008-hwcd
// 4M + 4S
// A = X1^2
var a = this.x.redSqr(); | random_line_split |
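// Consumption sketch for the curve code above (illustrative, not a real
// parameter set): dbl()/add() dispatch to the extended-coordinate formulas
// only for twisted curves with a = -1, and to the projective ones otherwise.
//
//   var crv = new EdwardsCurve({ a: -1, c: 1, d: someCurveConstant /* plus base-curve fields */ });
//   var p = crv.pointFromX(false, someXCoordinate);
//   p.dbl();   // uses _extDbl(), since crv.extended === true
//   p.add(p);  // uses _extAdd() for the same reason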
edwards.js | 'use strict';
var curve = require('../curve');
var elliptic = require('../../elliptic');
var bn = require('bn.js');
var inherits = require('inherits');
var Base = curve.base;
var assert = elliptic.utils.assert;
function | (conf) {
// NOTE: these flags must be set before Base.call(), since Base.call() creates a point
this.twisted = (conf.a | 0) !== 1;
this.mOneA = this.twisted && (conf.a | 0) === -1;
this.extended = this.mOneA;
Base.call(this, 'edwards', conf);
this.a = new bn(conf.a, 16).mod(this.red.m).toRed(this.red);
this.c = new bn(conf.c, 16).toRed(this.red);
this.c2 = this.c.redSqr();
this.d = new bn(conf.d, 16).toRed(this.red);
this.dd = this.d.redAdd(this.d);
assert(!this.twisted || this.c.fromRed().cmpn(1) === 0);
this.oneC = (conf.c | 0) === 1;
}
inherits(EdwardsCurve, Base);
module.exports = EdwardsCurve;
EdwardsCurve.prototype._mulA = function _mulA(num) {
if (this.mOneA)
return num.redNeg();
else
return this.a.redMul(num);
};
EdwardsCurve.prototype._mulC = function _mulC(num) {
if (this.oneC)
return num;
else
return this.c.redMul(num);
};
// Just for compatibility with Short curve
EdwardsCurve.prototype.jpoint = function jpoint(x, y, z, t) {
return this.point(x, y, z, t);
};
EdwardsCurve.prototype.pointFromX = function pointFromX(odd, x) {
x = new bn(x, 16);
if (!x.red)
x = x.toRed(this.red);
var x2 = x.redSqr();
var rhs = this.c2.redSub(this.a.redMul(x2));
var lhs = this.one.redSub(this.c2.redMul(this.d).redMul(x2));
var y = rhs.redMul(lhs.redInvm()).redSqrt();
var isOdd = y.fromRed().isOdd();
if (odd && !isOdd || !odd && isOdd)
y = y.redNeg();
return this.point(x, y, curve.one);
};
EdwardsCurve.prototype.validate = function validate(point) {
if (point.isInfinity())
return true;
// Curve: A * X^2 + Y^2 = C^2 * (1 + D * X^2 * Y^2)
point.normalize();
var x2 = point.x.redSqr();
var y2 = point.y.redSqr();
var lhs = x2.redMul(this.a).redAdd(y2);
var rhs = this.c2.redMul(this.one.redAdd(this.d.redMul(x2).redMul(y2)));
return lhs.cmp(rhs) === 0;
};
function Point(curve, x, y, z, t) {
Base.BasePoint.call(this, curve, 'projective');
if (x === null && y === null && z === null) {
this.x = this.curve.zero;
this.y = this.curve.one;
this.z = this.curve.one;
this.t = this.curve.zero;
this.zOne = true;
} else {
this.x = new bn(x, 16);
this.y = new bn(y, 16);
this.z = z ? new bn(z, 16) : this.curve.one;
this.t = t && new bn(t, 16);
if (!this.x.red)
this.x = this.x.toRed(this.curve.red);
if (!this.y.red)
this.y = this.y.toRed(this.curve.red);
if (!this.z.red)
this.z = this.z.toRed(this.curve.red);
if (this.t && !this.t.red)
this.t = this.t.toRed(this.curve.red);
this.zOne = this.z === this.curve.one;
// Use extended coordinates
if (this.curve.extended && !this.t) {
this.t = this.x.redMul(this.y);
if (!this.zOne)
this.t = this.t.redMul(this.z.redInvm());
}
}
}
inherits(Point, Base.BasePoint);
EdwardsCurve.prototype.pointFromJSON = function pointFromJSON(obj) {
return Point.fromJSON(this, obj);
};
EdwardsCurve.prototype.point = function point(x, y, z, t) {
return new Point(this, x, y, z, t);
};
Point.fromJSON = function fromJSON(curve, obj) {
return new Point(curve, obj[0], obj[1], obj[2]);
};
Point.prototype.inspect = function inspect() {
if (this.isInfinity())
return '<EC Point Infinity>';
return '<EC Point x: ' + this.x.fromRed().toString(16, 2) +
' y: ' + this.y.fromRed().toString(16, 2) +
' z: ' + this.z.fromRed().toString(16, 2) + '>';
};
Point.prototype.isInfinity = function isInfinity() {
// XXX This code assumes that zero is always zero in red
return this.x.cmpn(0) === 0 &&
this.y.cmp(this.z) === 0;
};
Point.prototype._extDbl = function _extDbl() {
// hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
// #doubling-dbl-2008-hwcd
// 4M + 4S
// A = X1^2
var a = this.x.redSqr();
// B = Y1^2
var b = this.y.redSqr();
// C = 2 * Z1^2
var c = this.z.redSqr();
c = c.redIAdd(c);
// D = a * A
var d = this.curve._mulA(a);
// E = (X1 + Y1)^2 - A - B
var e = this.x.redAdd(this.y).redSqr().redISub(a).redISub(b);
// G = D + B
var g = d.redAdd(b);
// F = G - C
var f = g.redSub(c);
// H = D - B
var h = d.redSub(b);
// X3 = E * F
var nx = e.redMul(f);
// Y3 = G * H
var ny = g.redMul(h);
// T3 = E * H
var nt = e.redMul(h);
// Z3 = F * G
var nz = f.redMul(g);
return this.curve.point(nx, ny, nz, nt);
};
Point.prototype._projDbl = function _projDbl() {
// hyperelliptic.org/EFD/g1p/auto-twisted-projective.html
// #doubling-dbl-2008-bbjlp
// #doubling-dbl-2007-bl
// and others
// Generally 3M + 4S or 2M + 4S
// B = (X1 + Y1)^2
var b = this.x.redAdd(this.y).redSqr();
// C = X1^2
var c = this.x.redSqr();
// D = Y1^2
var d = this.y.redSqr();
var nx;
var ny;
var nz;
if (this.curve.twisted) {
// E = a * C
var e = this.curve._mulA(c);
// F = E + D
var f = e.redAdd(d);
if (this.zOne) {
// X3 = (B - C - D) * (F - 2)
nx = b.redSub(c).redSub(d).redMul(f.redSub(this.curve.two));
// Y3 = F * (E - D)
ny = f.redMul(e.redSub(d));
// Z3 = F^2 - 2 * F
nz = f.redSqr().redSub(f).redSub(f);
} else {
// H = Z1^2
var h = this.z.redSqr();
// J = F - 2 * H
var j = f.redSub(h).redISub(h);
// X3 = (B-C-D)*J
nx = b.redSub(c).redISub(d).redMul(j);
// Y3 = F * (E - D)
ny = f.redMul(e.redSub(d));
// Z3 = F * J
nz = f.redMul(j);
}
} else {
// E = C + D
var e = c.redAdd(d);
// H = (c * Z1)^2
var h = this.curve._mulC(this.z).redSqr();
// J = E - 2 * H
var j = e.redSub(h).redSub(h);
// X3 = c * (B - E) * J
nx = this.curve._mulC(b.redISub(e)).redMul(j);
// Y3 = c * E * (C - D)
ny = this.curve._mulC(e).redMul(c.redISub(d));
// Z3 = E * J
nz = e.redMul(j);
}
return this.curve.point(nx, ny, nz);
};
Point.prototype.dbl = function dbl() {
if (this.isInfinity())
return this;
// Double in extended coordinates
if (this.curve.extended)
return this._extDbl();
else
return this._projDbl();
};
Point.prototype._extAdd = function _extAdd(p) {
// hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
// #addition-add-2008-hwcd-3
// 8M
// A = (Y1 - X1) * (Y2 - X2)
var a = this.y.redSub(this.x).redMul(p.y.redSub(p.x));
// B = (Y1 + X1) * (Y2 + X2)
var b = this.y.redAdd(this.x).redMul(p.y.redAdd(p.x));
// C = T1 * k * T2
var c = this.t.redMul(this.curve.dd).redMul(p.t);
// D = Z1 * 2 * Z2
var d = this.z.redMul(p.z.redAdd(p.z));
// E = B - A
var e = b.redSub(a);
// F = D - C
var f = d.redSub(c);
// G = D + C
var g = d.redAdd(c);
// H = B + A
var h = b.redAdd(a);
// X3 = E * F
var nx = e.redMul(f);
// Y3 = G * H
var ny = g.redMul(h);
// T3 = E * H
var nt = e.redMul(h);
// Z3 = F * G
var nz = f.redMul(g);
return this.curve.point(nx, ny, nz, nt);
};
Point.prototype._projAdd = function _projAdd(p) {
// hyperelliptic.org/EFD/g1p/auto-twisted-projective.html
// #addition-add-2008-bbjlp
// #addition-add-2007-bl
// 10M + 1S
// A = Z1 * Z2
var a = this.z.redMul(p.z);
// B = A^2
var b = a.redSqr();
// C = X1 * X2
var c = this.x.redMul(p.x);
// D = Y1 * Y2
var d = this.y.redMul(p.y);
// E = d * C * D
var e = this.curve.d.redMul(c).redMul(d);
// F = B - E
var f = b.redSub(e);
// G = B + E
var g = b.redAdd(e);
// X3 = A * F * ((X1 + Y1) * (X2 + Y2) - C - D)
var tmp = this.x.redAdd(this.y).redMul(p.x.redAdd(p.y)).redISub(c).redISub(d);
var nx = a.redMul(f).redMul(tmp);
var ny;
var nz;
if (this.curve.twisted) {
// Y3 = A * G * (D - a * C)
ny = a.redMul(g).redMul(d.redSub(this.curve._mulA(c)));
// Z3 = F * G
nz = f.redMul(g);
} else {
// Y3 = A * G * (D - C)
ny = a.redMul(g).redMul(d.redSub(c));
// Z3 = c * F * G
nz = this.curve._mulC(f).redMul(g);
}
return this.curve.point(nx, ny, nz);
};
Point.prototype.add = function add(p) {
if (this.isInfinity())
return p;
if (p.isInfinity())
return this;
if (this.curve.extended)
return this._extAdd(p);
else
return this._projAdd(p);
};
Point.prototype.mul = function mul(k) {
if (this._hasDoubles(k))
return this.curve._fixedNafMul(this, k);
else
return this.curve._wnafMul(this, k);
};
Point.prototype.mulAdd = function mulAdd(k1, p, k2) {
return this.curve._wnafMulAdd(1, [ this, p ], [ k1, k2 ], 2);
};
Point.prototype.normalize = function normalize() {
if (this.zOne)
return this;
// Normalize coordinates
var zi = this.z.redInvm();
this.x = this.x.redMul(zi);
this.y = this.y.redMul(zi);
if (this.t)
this.t = this.t.redMul(zi);
this.z = this.curve.one;
this.zOne = true;
return this;
};
Point.prototype.neg = function neg() {
return this.curve.point(this.x.redNeg(),
this.y,
this.z,
this.t && this.t.redNeg());
};
Point.prototype.getX = function getX() {
this.normalize();
return this.x.fromRed();
};
Point.prototype.getY = function getY() {
this.normalize();
return this.y.fromRed();
};
// Compatibility with BaseCurve
Point.prototype.toP = Point.prototype.normalize;
Point.prototype.mixedAdd = Point.prototype.add;
| EdwardsCurve | identifier_name |
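// Sketch of the identity that validate() checks above: a point (x, y) lies on
// the curve when a*x^2 + y^2 == c^2 * (1 + d*x^2*y^2) in the reduction
// context, so a round trip through pointFromX should validate (hypothetical
// curve and coordinate):
//
//   var p = someEdwardsCurve.pointFromX(true, someX);
//   someEdwardsCurve.validate(p);  // -> true for a well-formed x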
edwards.js | 'use strict';
var curve = require('../curve');
var elliptic = require('../../elliptic');
var bn = require('bn.js');
var inherits = require('inherits');
var Base = curve.base;
var assert = elliptic.utils.assert;
function EdwardsCurve(conf) {
// NOTE: these flags must be set before Base.call(), since Base.call() creates a point
this.twisted = (conf.a | 0) !== 1;
this.mOneA = this.twisted && (conf.a | 0) === -1;
this.extended = this.mOneA;
Base.call(this, 'edwards', conf);
this.a = new bn(conf.a, 16).mod(this.red.m).toRed(this.red);
this.c = new bn(conf.c, 16).toRed(this.red);
this.c2 = this.c.redSqr();
this.d = new bn(conf.d, 16).toRed(this.red);
this.dd = this.d.redAdd(this.d);
assert(!this.twisted || this.c.fromRed().cmpn(1) === 0);
this.oneC = (conf.c | 0) === 1;
}
inherits(EdwardsCurve, Base);
module.exports = EdwardsCurve;
EdwardsCurve.prototype._mulA = function _mulA(num) {
if (this.mOneA)
return num.redNeg();
else
return this.a.redMul(num);
};
EdwardsCurve.prototype._mulC = function _mulC(num) {
if (this.oneC)
return num;
else
return this.c.redMul(num);
};
// Just for compatibility with Short curve
EdwardsCurve.prototype.jpoint = function jpoint(x, y, z, t) {
return this.point(x, y, z, t);
};
EdwardsCurve.prototype.pointFromX = function pointFromX(odd, x) {
x = new bn(x, 16);
if (!x.red)
x = x.toRed(this.red);
var x2 = x.redSqr();
var rhs = this.c2.redSub(this.a.redMul(x2));
var lhs = this.one.redSub(this.c2.redMul(this.d).redMul(x2));
var y = rhs.redMul(lhs.redInvm()).redSqrt();
var isOdd = y.fromRed().isOdd();
if (odd && !isOdd || !odd && isOdd)
y = y.redNeg();
return this.point(x, y, curve.one);
};
EdwardsCurve.prototype.validate = function validate(point) {
if (point.isInfinity())
return true;
// Curve: A * X^2 + Y^2 = C^2 * (1 + D * X^2 * Y^2)
point.normalize();
var x2 = point.x.redSqr();
var y2 = point.y.redSqr();
var lhs = x2.redMul(this.a).redAdd(y2);
var rhs = this.c2.redMul(this.one.redAdd(this.d.redMul(x2).redMul(y2)));
return lhs.cmp(rhs) === 0;
};
function Point(curve, x, y, z, t) |
inherits(Point, Base.BasePoint);
EdwardsCurve.prototype.pointFromJSON = function pointFromJSON(obj) {
return Point.fromJSON(this, obj);
};
EdwardsCurve.prototype.point = function point(x, y, z, t) {
return new Point(this, x, y, z, t);
};
Point.fromJSON = function fromJSON(curve, obj) {
return new Point(curve, obj[0], obj[1], obj[2]);
};
Point.prototype.inspect = function inspect() {
if (this.isInfinity())
return '<EC Point Infinity>';
return '<EC Point x: ' + this.x.fromRed().toString(16, 2) +
' y: ' + this.y.fromRed().toString(16, 2) +
' z: ' + this.z.fromRed().toString(16, 2) + '>';
};
Point.prototype.isInfinity = function isInfinity() {
// XXX This code assumes that zero is always zero in red
return this.x.cmpn(0) === 0 &&
this.y.cmp(this.z) === 0;
};
Point.prototype._extDbl = function _extDbl() {
// hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
// #doubling-dbl-2008-hwcd
// 4M + 4S
// A = X1^2
var a = this.x.redSqr();
// B = Y1^2
var b = this.y.redSqr();
// C = 2 * Z1^2
var c = this.z.redSqr();
c = c.redIAdd(c);
// D = a * A
var d = this.curve._mulA(a);
// E = (X1 + Y1)^2 - A - B
var e = this.x.redAdd(this.y).redSqr().redISub(a).redISub(b);
// G = D + B
var g = d.redAdd(b);
// F = G - C
var f = g.redSub(c);
// H = D - B
var h = d.redSub(b);
// X3 = E * F
var nx = e.redMul(f);
// Y3 = G * H
var ny = g.redMul(h);
// T3 = E * H
var nt = e.redMul(h);
// Z3 = F * G
var nz = f.redMul(g);
return this.curve.point(nx, ny, nz, nt);
};
Point.prototype._projDbl = function _projDbl() {
// hyperelliptic.org/EFD/g1p/auto-twisted-projective.html
// #doubling-dbl-2008-bbjlp
// #doubling-dbl-2007-bl
// and others
// Generally 3M + 4S or 2M + 4S
// B = (X1 + Y1)^2
var b = this.x.redAdd(this.y).redSqr();
// C = X1^2
var c = this.x.redSqr();
// D = Y1^2
var d = this.y.redSqr();
var nx;
var ny;
var nz;
if (this.curve.twisted) {
// E = a * C
var e = this.curve._mulA(c);
// F = E + D
var f = e.redAdd(d);
if (this.zOne) {
// X3 = (B - C - D) * (F - 2)
nx = b.redSub(c).redSub(d).redMul(f.redSub(this.curve.two));
// Y3 = F * (E - D)
ny = f.redMul(e.redSub(d));
// Z3 = F^2 - 2 * F
nz = f.redSqr().redSub(f).redSub(f);
} else {
// H = Z1^2
var h = this.z.redSqr();
// J = F - 2 * H
var j = f.redSub(h).redISub(h);
// X3 = (B-C-D)*J
nx = b.redSub(c).redISub(d).redMul(j);
// Y3 = F * (E - D)
ny = f.redMul(e.redSub(d));
// Z3 = F * J
nz = f.redMul(j);
}
} else {
// E = C + D
var e = c.redAdd(d);
// H = (c * Z1)^2
var h = this.curve._mulC(this.z).redSqr();
// J = E - 2 * H
var j = e.redSub(h).redSub(h);
// X3 = c * (B - E) * J
nx = this.curve._mulC(b.redISub(e)).redMul(j);
// Y3 = c * E * (C - D)
ny = this.curve._mulC(e).redMul(c.redISub(d));
// Z3 = E * J
nz = e.redMul(j);
}
return this.curve.point(nx, ny, nz);
};
Point.prototype.dbl = function dbl() {
if (this.isInfinity())
return this;
// Double in extended coordinates
if (this.curve.extended)
return this._extDbl();
else
return this._projDbl();
};
Point.prototype._extAdd = function _extAdd(p) {
// hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
// #addition-add-2008-hwcd-3
// 8M
// A = (Y1 - X1) * (Y2 - X2)
var a = this.y.redSub(this.x).redMul(p.y.redSub(p.x));
// B = (Y1 + X1) * (Y2 + X2)
var b = this.y.redAdd(this.x).redMul(p.y.redAdd(p.x));
// C = T1 * k * T2
var c = this.t.redMul(this.curve.dd).redMul(p.t);
// D = Z1 * 2 * Z2
var d = this.z.redMul(p.z.redAdd(p.z));
// E = B - A
var e = b.redSub(a);
// F = D - C
var f = d.redSub(c);
// G = D + C
var g = d.redAdd(c);
// H = B + A
var h = b.redAdd(a);
// X3 = E * F
var nx = e.redMul(f);
// Y3 = G * H
var ny = g.redMul(h);
// T3 = E * H
var nt = e.redMul(h);
// Z3 = F * G
var nz = f.redMul(g);
return this.curve.point(nx, ny, nz, nt);
};
Point.prototype._projAdd = function _projAdd(p) {
// hyperelliptic.org/EFD/g1p/auto-twisted-projective.html
// #addition-add-2008-bbjlp
// #addition-add-2007-bl
// 10M + 1S
// A = Z1 * Z2
var a = this.z.redMul(p.z);
// B = A^2
var b = a.redSqr();
// C = X1 * X2
var c = this.x.redMul(p.x);
// D = Y1 * Y2
var d = this.y.redMul(p.y);
// E = d * C * D
var e = this.curve.d.redMul(c).redMul(d);
// F = B - E
var f = b.redSub(e);
// G = B + E
var g = b.redAdd(e);
// X3 = A * F * ((X1 + Y1) * (X2 + Y2) - C - D)
var tmp = this.x.redAdd(this.y).redMul(p.x.redAdd(p.y)).redISub(c).redISub(d);
var nx = a.redMul(f).redMul(tmp);
var ny;
var nz;
if (this.curve.twisted) {
// Y3 = A * G * (D - a * C)
ny = a.redMul(g).redMul(d.redSub(this.curve._mulA(c)));
// Z3 = F * G
nz = f.redMul(g);
} else {
// Y3 = A * G * (D - C)
ny = a.redMul(g).redMul(d.redSub(c));
// Z3 = c * F * G
nz = this.curve._mulC(f).redMul(g);
}
return this.curve.point(nx, ny, nz);
};
Point.prototype.add = function add(p) {
if (this.isInfinity())
return p;
if (p.isInfinity())
return this;
if (this.curve.extended)
return this._extAdd(p);
else
return this._projAdd(p);
};
Point.prototype.mul = function mul(k) {
if (this._hasDoubles(k))
return this.curve._fixedNafMul(this, k);
else
return this.curve._wnafMul(this, k);
};
Point.prototype.mulAdd = function mulAdd(k1, p, k2) {
return this.curve._wnafMulAdd(1, [ this, p ], [ k1, k2 ], 2);
};
Point.prototype.normalize = function normalize() {
if (this.zOne)
return this;
// Normalize coordinates
var zi = this.z.redInvm();
this.x = this.x.redMul(zi);
this.y = this.y.redMul(zi);
if (this.t)
this.t = this.t.redMul(zi);
this.z = this.curve.one;
this.zOne = true;
return this;
};
Point.prototype.neg = function neg() {
return this.curve.point(this.x.redNeg(),
this.y,
this.z,
this.t && this.t.redNeg());
};
Point.prototype.getX = function getX() {
this.normalize();
return this.x.fromRed();
};
Point.prototype.getY = function getY() {
this.normalize();
return this.y.fromRed();
};
// Compatibility with BaseCurve
Point.prototype.toP = Point.prototype.normalize;
Point.prototype.mixedAdd = Point.prototype.add;
| {
Base.BasePoint.call(this, curve, 'projective');
if (x === null && y === null && z === null) {
this.x = this.curve.zero;
this.y = this.curve.one;
this.z = this.curve.one;
this.t = this.curve.zero;
this.zOne = true;
} else {
this.x = new bn(x, 16);
this.y = new bn(y, 16);
this.z = z ? new bn(z, 16) : this.curve.one;
this.t = t && new bn(t, 16);
if (!this.x.red)
this.x = this.x.toRed(this.curve.red);
if (!this.y.red)
this.y = this.y.toRed(this.curve.red);
if (!this.z.red)
this.z = this.z.toRed(this.curve.red);
if (this.t && !this.t.red)
this.t = this.t.toRed(this.curve.red);
this.zOne = this.z === this.curve.one;
// Use extended coordinates
if (this.curve.extended && !this.t) {
this.t = this.x.redMul(this.y);
if (!this.zOne)
this.t = this.t.redMul(this.z.redInvm());
}
}
} | identifier_body |
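// Affine-readout sketch (illustrative): points stay in projective/extended
// coordinates through arithmetic and are only divided through by Z on demand.
//
//   var q = p.dbl().add(p);  // no inversions yet
//   q.getX();                // normalize() runs once, returning x = X/Z
//   q.getY();                // already normalized, so this is cheap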
IncreaseStat.ts | /// <reference path="../Dialogs/DialogAction.ts" />
@DialogActionClass
class IncreaseStat extends ActionClass
{
public Display(id: number, values: string[], updateFunction?: string): string
{
var html = "";
html += this.Label("Stat");
html += this.OptionList(id, 0, world.Stats.map(c => c.Name).sort(), values[0], updateFunction);
html += this.Label("Quantity");
html += this.Input(id, 1, values[1] = (values[1] || values[1] == "" ? values[1] : "1"), updateFunction);
return html;
}
public Execute(values: string[], env?: CodeEnvironement): void
{
| if (!this.Execute.caller)
{
play.devTools = true;
world.Player.InformServer();
return;
}
if (!values[0])
throw "The action 'Increase Stat' requires a name.";
if (!env)
env = new CodeEnvironement();
var val = 0;
try
{
val = CodeParser.ExecuteStatement(values[1], env.variables).GetNumber();
}
catch (ex)
{
throw "The expression used in 'Increase Stat' for the quantity is invalid.";
}
world.Player.SetStat(values[0], world.Player.GetStat(values[0]) + val);
}
} | identifier_body |
|
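// Sketch of the values[] contract shared by Display/Execute above (the stat
// name and expression are hypothetical): values[0] names the stat to change
// and values[1] is a CodeParser expression evaluated in the environment.
//
//   const action = new IncreaseStat();
//   action.Execute(["Strength", "2 + 3"]);  // adds 5 to the player's Strength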
IncreaseStat.ts | /// <reference path="../Dialogs/DialogAction.ts" />
@DialogActionClass
class IncreaseStat extends ActionClass
{
public Display(id: number, values: string[], updateFunction?: string): string
{
var html = "";
html += this.Label("Stat");
html += this.OptionList(id, 0, world.Stats.map(c => c.Name).sort(), values[0], updateFunction);
html += this.Label("Quantity");
html += this.Input(id, 1, values[1] = (values[1] || values[1] == "" ? values[1] : "1"), updateFunction);
return html;
}
public Execute(values: string[], env?: CodeEnvironement): void
{
if (!this.Execute.caller)
{
play.devTools = true;
world.Player.InformServer();
return;
}
if (!values[0])
throw "The action 'Increase Stat' requires a name.";
| try
{
val = CodeParser.ExecuteStatement(values[1], env.variables).GetNumber();
}
catch (ex)
{
throw "The expression used in 'Increase Stat' for the quantity is invalid.";
}
world.Player.SetStat(values[0], world.Player.GetStat(values[0]) + val);
}
} | if (!env)
env = new CodeEnvironement();
var val = 0; | random_line_split |
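// Display() sketch (illustrative id and callback): the option list edits
// values[0], the input edits values[1], and the quantity defaults to "1" only
// when values[1] is undefined (an empty string is deliberately kept).
//
//   const html = new IncreaseStat().Display(7, ["Strength", ""], "onEdit");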
IncreaseStat.ts | /// <reference path="../Dialogs/DialogAction.ts" />
@DialogActionClass
class IncreaseStat extends ActionClass
{
public Display(id: number, values: string[], updateFunction?: string): string
{
var html = "";
html += this.Label("Stat");
html += this.OptionList(id, 0, world.Stats.map(c => c.Name).sort(), values[0], updateFunction);
html += this.Label("Quantity");
html += this.Input(id, 1, values[1] = (values[1] || values[1] == "" ? values[1] : "1"), updateFunction);
return html;
}
public Ex | alues: string[], env?: CodeEnvironement): void
{
if (!this.Execute.caller)
{
play.devTools = true;
world.Player.InformServer();
return;
}
if (!values[0])
throw "The action 'Increase Stat' requires a name.";
if (!env)
env = new CodeEnvironement();
var val = 0;
try
{
val = CodeParser.ExecuteStatement(values[1], env.variables).GetNumber();
}
catch (ex)
{
throw "The expression used in 'Increase Stat' for the quantity is invalid.";
}
world.Player.SetStat(values[0], world.Player.GetStat(values[0]) + val);
}
} | ecute(v | identifier_name |
0009_auto_20160517_2016.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-05-17 20:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('samaritan', '0008_member_baptismal_place'),
]
operations = [ | migrations.RemoveField(
model_name='membership',
name='type',
),
migrations.RemoveField(
model_name='member',
name='membership',
),
migrations.AddField(
model_name='member',
name='membership_date',
field=models.DateField(blank=True, null=True),
),
migrations.AddField(
model_name='member',
name='type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='samaritan.MembershipType'),
),
migrations.DeleteModel(
name='Membership',
),
] | random_line_split |
|
0009_auto_20160517_2016.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-05-17 20:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
| dependencies = [
('samaritan', '0008_member_baptismal_place'),
]
operations = [
migrations.RemoveField(
model_name='membership',
name='type',
),
migrations.RemoveField(
model_name='member',
name='membership',
),
migrations.AddField(
model_name='member',
name='membership_date',
field=models.DateField(blank=True, null=True),
),
migrations.AddField(
model_name='member',
name='type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='samaritan.MembershipType'),
),
migrations.DeleteModel(
name='Membership',
),
] | identifier_body |
|
0009_auto_20160517_2016.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-05-17 20:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class | (migrations.Migration):
dependencies = [
('samaritan', '0008_member_baptismal_place'),
]
operations = [
migrations.RemoveField(
model_name='membership',
name='type',
),
migrations.RemoveField(
model_name='member',
name='membership',
),
migrations.AddField(
model_name='member',
name='membership_date',
field=models.DateField(blank=True, null=True),
),
migrations.AddField(
model_name='member',
name='type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='samaritan.MembershipType'),
),
migrations.DeleteModel(
name='Membership',
),
]
| Migration | identifier_name |
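# Net effect of this migration, sketched as the resulting model (a hypothetical
# reconstruction of samaritan.Member after it is applied):
#
#   class Member(models.Model):
#       ...
#       membership_date = models.DateField(blank=True, null=True)
#       type = models.ForeignKey('samaritan.MembershipType', null=True,
#                                on_delete=models.CASCADE)
#
# The standalone Membership model is removed entirely.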
be2bill.py | # -*- coding: utf-8 -*-
from decimal import Decimal
from be2bill_sdk import Be2BillForm
from cartridge_external_payment.providers.base import PaymentProvider
class Be2BillProvider(PaymentProvider):
def get_start_payment_form(self, request, order):
total = Decimal(order.total * 100).quantize(Decimal('0'))
fullname = order.billing_detail_first_name + ' ' + \
order.billing_detail_last_name
client_ident = "{} ({})".format(fullname, order.billing_detail_email)
return Be2BillForm(operation_type="payment",
client_ident=client_ident,
description="X",
order_id=order.id,
amount=total,
client_email=order.billing_detail_email,
card_full_name=fullname,
#Save cart id for notification
extra_data=request.cart.id)
def get_order_id(self, notification_request):
|
def get_transaction_id(self, notification_request):
return notification_request.GET.get('TRANSACTIONID', None)
def get_cart_id(self, notification_request):
return notification_request.GET.get('EXTRADATA', None)
| return notification_request.GET.get('ORDERID', None) | identifier_body |
be2bill.py | # -*- coding: utf-8 -*-
from decimal import Decimal
from be2bill_sdk import Be2BillForm
from cartridge_external_payment.providers.base import PaymentProvider
class Be2BillProvider(PaymentProvider):
def get_start_payment_form(self, request, order):
total = Decimal(order.total * 100).quantize(Decimal('0'))
fullname = order.billing_detail_first_name + ' ' + \
order.billing_detail_last_name
client_ident = "{} ({})".format(fullname, order.billing_detail_email)
return Be2BillForm(operation_type="payment",
client_ident=client_ident,
description="X",
order_id=order.id,
amount=total,
client_email=order.billing_detail_email,
card_full_name=fullname,
#Save cart id for notification
extra_data=request.cart.id)
def get_order_id(self, notification_request):
return notification_request.GET.get('ORDERID', None)
def get_transaction_id(self, notification_request):
return notification_request.GET.get('TRANSACTIONID', None)
| def get_cart_id(self, notification_request):
return notification_request.GET.get('EXTRADATA', None) | random_line_split
|
be2bill.py | # -*- coding: utf-8 -*-
from decimal import Decimal
from be2bill_sdk import Be2BillForm
from cartridge_external_payment.providers.base import PaymentProvider
class Be2BillProvider(PaymentProvider):
def | (self, request, order):
total = Decimal(order.total * 100).quantize(Decimal('0'))
fullname = order.billing_detail_first_name + ' ' + \
order.billing_detail_last_name
client_ident = "{} ({})".format(fullname, order.billing_detail_email)
return Be2BillForm(operation_type="payment",
client_ident=client_ident,
description="X",
order_id=order.id,
amount=total,
client_email=order.billing_detail_email,
card_full_name=fullname,
#Save cart id for notification
extra_data=request.cart.id)
def get_order_id(self, notification_request):
return notification_request.GET.get('ORDERID', None)
def get_transaction_id(self, notification_request):
return notification_request.GET.get('TRANSACTIONID', None)
def get_cart_id(self, notification_request):
return notification_request.GET.get('EXTRADATA', None)
| get_start_payment_form | identifier_name |
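# Notification-handling sketch (hypothetical view code): the provider's getters
# pull the relevant identifiers out of the be2bill callback request.
#
#   provider = Be2BillProvider()
#   order_id = provider.get_order_id(request)      # ORDERID parameter
#   txn_id = provider.get_transaction_id(request)  # TRANSACTIONID parameter
#   cart_id = provider.get_cart_id(request)        # EXTRADATA parameter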
quick.rs | //! An example using the Builder pattern API to configure the logger at run-time based on command
//! line arguments.
//!
//! The default output is `module::path: message`, and the "tag", which is the text to the left of
//! the colon, is colorized. This example allows the user to dynamically change the output based
//! on command line arguments.
//!
//! The [clap](https://crates.io/crates/clap) argument parser is used in this example, but loggerv
//! works with any argument parser.
extern crate ansi_term;
#[macro_use] extern crate log;
extern crate loggerv;
extern crate clap;
use clap::{Arg, App};
fn main() | {
// Add the following line near the beginning of the main function for an application to enable
// colorized output on Windows 10.
//
// Based on documentation for the ansi_term crate, Windows 10 supports ANSI escape characters,
// but it must be enabled first using the `ansi_term::enable_ansi_support()` function. It is
// conditionally compiled and only exists for Windows builds. To avoid build errors on
// non-windows platforms, a cfg guard should be put in place.
#[cfg(windows)] ansi_term::enable_ansi_support().unwrap();
let args = App::new("app")
.arg(Arg::with_name("v")
.short("v")
.multiple(true)
.help("Sets the level of verbosity"))
.get_matches();
loggerv::init_with_verbosity(args.occurrences_of("v")).unwrap();
error!("This is always printed to stderr");
warn!("This too is always printed to stderr");
info!("This is optional info printed to stdout"); // for ./app -v or higher
debug!("This is optional debug printed to stdout"); // for ./app -vv or higher
trace!("This is optional trace printed to stdout"); // for ./app -vvv
} | identifier_body |
|
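// Invocation sketch (hypothetical binary name): each extra -v raises the level
// that init_with_verbosity() enables.
//
//   $ app        -> errors + warnings only (stderr)
//   $ app -v     -> + info
//   $ app -vv    -> + debug
//   $ app -vvv   -> + trace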
quick.rs | //! An example using the Builder pattern API to configure the logger at run-time based on command
//! line arguments.
//!
//! The default output is `module::path: message`, and the "tag", which is the text to the left of
//! the colon, is colorized. This example allows the user to dynamically change the output based
//! on command line arguments.
//!
//! The [clap](https://crates.io/crates/clap) argument parser is used in this example, but loggerv
//! works with any argument parser.
extern crate ansi_term;
#[macro_use] extern crate log;
extern crate loggerv;
extern crate clap;
use clap::{Arg, App};
fn | () {
// Add the following line near the beginning of the main function for an application to enable
// colorized output on Windows 10.
//
// Based on documentation for the ansi_term crate, Windows 10 supports ANSI escape characters,
// but it must be enabled first using the `ansi_term::enable_ansi_support()` function. It is
// conditionally compiled and only exists for Windows builds. To avoid build errors on
// non-windows platforms, a cfg guard should be put in place.
#[cfg(windows)] ansi_term::enable_ansi_support().unwrap();
let args = App::new("app")
.arg(Arg::with_name("v")
.short("v")
.multiple(true)
.help("Sets the level of verbosity"))
.get_matches();
loggerv::init_with_verbosity(args.occurrences_of("v")).unwrap();
error!("This is always printed to stderr");
warn!("This too is always printed to stderr");
info!("This is optional info printed to stdout"); // for ./app -v or higher
debug!("This is optional debug printed to stdout"); // for ./app -vv or higher
trace!("This is optional trace printed to stdout"); // for ./app -vvv
}
| main | identifier_name |
quick.rs | //! An example using the Builder pattern API to configure the logger at run-time based on command
//! line arguments.
//!
//! The default output is `module::path: message`, and the "tag", which is the text to the left of
//! the colon, is colorized. This example allows the user to dynamically change the output based
//! on command line arguments.
//!
//! The [clap](https://crates.io/crates/clap) argument parser is used in this example, but loggerv
//! works with any argument parser.
extern crate ansi_term;
#[macro_use] extern crate log;
extern crate loggerv;
extern crate clap;
| //
// Based on documentation for the ansi_term crate, Windows 10 supports ANSI escape characters,
// but it must be enabled first using the `ansi_term::enable_ansi_support()` function. It is
// conditionally compiled and only exists for Windows builds. To avoid build errors on
// non-windows platforms, a cfg guard should be put in place.
#[cfg(windows)] ansi_term::enable_ansi_support().unwrap();
let args = App::new("app")
.arg(Arg::with_name("v")
.short("v")
.multiple(true)
.help("Sets the level of verbosity"))
.get_matches();
loggerv::init_with_verbosity(args.occurrences_of("v")).unwrap();
error!("This is always printed to stderr");
warn!("This too is always printed to stderr");
info!("This is optional info printed to stdout"); // for ./app -v or higher
debug!("This is optional debug printed to stdout"); // for ./app -vv or higher
trace!("This is optional trace printed to stdout"); // for ./app -vvv
} | use clap::{Arg, App};
fn main() {
// Add the following line near the beginning of the main function for an application to enable
// colorized output on Windows 10. | random_line_split |
httpInterceptor.spec.js | import { module, inject } from "angular-mocks";
import "../../../app/auth/auth.module";
(function () {
'use strict';
describe('HttpHeaderInterceptor', function () {
var httpHeaderInterceptor,
CookiesServiceMock;
beforeEach(module('xr.auth'));
beforeEach(module(function ($provide) {
CookiesServiceMock = {
get: function () {}
};
$provide.value('$cookies', CookiesServiceMock);
}));
beforeEach(inject(function (_httpHeaderInterceptor_) {
httpHeaderInterceptor = _httpHeaderInterceptor_; | it('should add Authorization header if token exists', function () {
var token = '1234';
spyOn(CookiesServiceMock, 'get').and.returnValue(token);
var config = httpHeaderInterceptor.request({ headers: {} });
expect(config.headers.Authorization).toBe('Bearer ' + token);
});
it('should not add Authorization header if no token', function () {
var config = httpHeaderInterceptor.request({ headers: {} });
expect(config.headers.Authorization).toBeUndefined();
});
});
});
})(); | }));
describe('request', function () { | random_line_split |
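// Registration sketch (assumes the xr.auth module's config block; the wiring
// shown is the usual Angular pattern rather than code from this spec):
//
//   angular.module('xr.auth').config(function ($httpProvider) {
//     $httpProvider.interceptors.push('httpHeaderInterceptor');
//   });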
CryptoProvider.ts | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
import { ICrypto, PkceCodes } from "@azure/msal-common";
import { GuidGenerator } from "./GuidGenerator";
import { EncodingUtils } from "../utils/EncodingUtils";
import { PkceGenerator } from "./PkceGenerator";
import { HashUtils } from "./HashUtils";
/**
* This class implements MSAL node's crypto interface, which allows it to perform base64 encoding and decoding, generating cryptographically random GUIDs and
* implementing Proof Key for Code Exchange specs for the OAuth Authorization Code Flow using PKCE (rfc here: https://tools.ietf.org/html/rfc7636).
* @public
*/
export class CryptoProvider implements ICrypto {
private pkceGenerator: PkceGenerator;
private hashUtils: HashUtils;
constructor() {
// Browser crypto needs to be validated first before any other classes can be set.
this.pkceGenerator = new PkceGenerator();
this.hashUtils = new HashUtils();
}
/**
* Creates a new random GUID - used to populate state and nonce.
* @returns string (GUID)
*/
createNewGuid(): string {
return GuidGenerator.generateGuid();
}
/**
* Encodes input string to base64.
* @param input - string to be encoded
*/
base64Encode(input: string): string |
/**
* Decodes input string from base64.
* @param input - string to be decoded
*/
base64Decode(input: string): string {
return EncodingUtils.base64Decode(input);
}
/**
* Generates PKCE codes used in Authorization Code Flow.
*/
generatePkceCodes(): Promise<PkceCodes> {
return this.pkceGenerator.generatePkceCodes();
}
/**
* Generates a keypair, stores it and returns a thumbprint - not yet implemented for node
*/
getPublicKeyThumbprint(): Promise<string> {
throw new Error("Method not implemented.");
}
/**
* Removes cryptographic keypair from key store matching the keyId passed in
* @param kid
*/
removeTokenBindingKey(): Promise<boolean> {
throw new Error("Method not implemented.");
}
/**
* Removes all cryptographic keys from Keystore
*/
clearKeystore(): Promise<boolean> {
throw new Error("Method not implemented.");
}
/**
* Signs the given object as a jwt payload with private key retrieved by given kid - currently not implemented for node
*/
signJwt(): Promise<string> {
throw new Error("Method not implemented.");
}
/**
* Returns the SHA-256 hash of an input string
*/
async hashString(plainText: string): Promise<string> {
return EncodingUtils.base64EncodeUrl(
this.hashUtils.sha256(plainText).toString("base64"),
"base64"
);
}
}
| {
return EncodingUtils.base64Encode(input);
} | identifier_body |
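// Usage sketch (illustrative): the provider is normally handed to MSAL Node
// flows, but it can also be driven directly.
//
//   const crypto = new CryptoProvider();
//   const state = crypto.createNewGuid();  // cryptographically random GUID
//   const { verifier, challenge } = await crypto.generatePkceCodes();
//   crypto.base64Decode(crypto.base64Encode("abc"));  // -> "abc"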
CryptoProvider.ts | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
import { ICrypto, PkceCodes } from "@azure/msal-common";
import { GuidGenerator } from "./GuidGenerator";
import { EncodingUtils } from "../utils/EncodingUtils";
import { PkceGenerator } from "./PkceGenerator";
import { HashUtils } from "./HashUtils";
/**
* This class implements MSAL node's crypto interface, which allows it to perform base64 encoding and decoding, generating cryptographically random GUIDs and
* implementing Proof Key for Code Exchange specs for the OAuth Authorization Code Flow using PKCE (rfc here: https://tools.ietf.org/html/rfc7636).
* @public
*/
export class CryptoProvider implements ICrypto {
private pkceGenerator: PkceGenerator;
private hashUtils: HashUtils;
constructor() {
// Browser crypto needs to be validated first before any other classes can be set.
this.pkceGenerator = new PkceGenerator();
this.hashUtils = new HashUtils();
}
/**
* Creates a new random GUID - used to populate state and nonce.
* @returns string (GUID)
*/
createNewGuid(): string {
return GuidGenerator.generateGuid();
}
/**
* Encodes input string to base64.
* @param input - string to be encoded
*/
base64Encode(input: string): string {
return EncodingUtils.base64Encode(input);
}
/**
* Decodes input string from base64.
* @param input - string to be decoded
*/
| (input: string): string {
return EncodingUtils.base64Decode(input);
}
/**
* Generates PKCE codes used in Authorization Code Flow.
*/
generatePkceCodes(): Promise<PkceCodes> {
return this.pkceGenerator.generatePkceCodes();
}
/**
* Generates a keypair, stores it and returns a thumbprint - not yet implemented for node
*/
getPublicKeyThumbprint(): Promise<string> {
throw new Error("Method not implemented.");
}
/**
* Removes cryptographic keypair from key store matching the keyId passed in
* @param kid
*/
removeTokenBindingKey(): Promise<boolean> {
throw new Error("Method not implemented.");
}
/**
* Removes all cryptographic keys from Keystore
*/
clearKeystore(): Promise<boolean> {
throw new Error("Method not implemented.");
}
/**
* Signs the given object as a jwt payload with private key retrieved by given kid - currently not implemented for node
*/
signJwt(): Promise<string> {
throw new Error("Method not implemented.");
}
/**
* Returns the SHA-256 hash of an input string
*/
async hashString(plainText: string): Promise<string> {
return EncodingUtils.base64EncodeUrl(
this.hashUtils.sha256(plainText).toString("base64"),
"base64"
);
}
}
| base64Decode | identifier_name |
CryptoProvider.ts | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
import { ICrypto, PkceCodes } from "@azure/msal-common";
import { GuidGenerator } from "./GuidGenerator";
import { EncodingUtils } from "../utils/EncodingUtils";
import { PkceGenerator } from "./PkceGenerator";
import { HashUtils } from "./HashUtils";
/**
* This class implements MSAL node's crypto interface, which allows it to perform base64 encoding and decoding, generating cryptographically random GUIDs and
* implementing Proof Key for Code Exchange specs for the OAuth Authorization Code Flow using PKCE (rfc here: https://tools.ietf.org/html/rfc7636).
* @public
*/
export class CryptoProvider implements ICrypto {
private pkceGenerator: PkceGenerator;
private hashUtils: HashUtils;
constructor() {
// Browser crypto needs to be validated first before any other classes can be set.
this.pkceGenerator = new PkceGenerator();
this.hashUtils = new HashUtils();
}
/**
* Creates a new random GUID - used to populate state and nonce.
* @returns string (GUID)
*/
createNewGuid(): string {
return GuidGenerator.generateGuid();
}
/**
* Encodes input string to base64.
* @param input - string to be encoded
*/
base64Encode(input: string): string {
return EncodingUtils.base64Encode(input);
}
/**
* Decodes input string from base64.
* @param input - string to be decoded
*/
base64Decode(input: string): string {
return EncodingUtils.base64Decode(input);
}
/**
* Generates PKCE codes used in Authorization Code Flow.
*/
generatePkceCodes(): Promise<PkceCodes> {
return this.pkceGenerator.generatePkceCodes();
}
/**
* Generates a keypair, stores it and returns a thumbprint - not yet implemented for node
*/
getPublicKeyThumbprint(): Promise<string> {
throw new Error("Method not implemented.");
}
| throw new Error("Method not implemented.");
}
/**
* Removes all cryptographic keys from Keystore
*/
clearKeystore(): Promise<boolean> {
throw new Error("Method not implemented.");
}
/**
* Signs the given object as a jwt payload with private key retrieved by given kid - currently not implemented for node
*/
signJwt(): Promise<string> {
throw new Error("Method not implemented.");
}
/**
* Returns the SHA-256 hash of an input string
*/
async hashString(plainText: string): Promise<string> {
return EncodingUtils.base64EncodeUrl(
this.hashUtils.sha256(plainText).toString("base64"),
"base64"
);
}
} | /**
* Removes cryptographic keypair from key store matching the keyId passed in
* @param kid
*/
removeTokenBindingKey(): Promise<boolean> { | random_line_split |
deserialization.rs | use alias::Alias;
use config::Config;
use consts::*;
use itertools::Itertools;
use family::Family;
use glib::functions;
use pango::Context;
use pango::ContextExt;
use pango::FontMapExt;
use pango::FontFamilyExt;
use range::Range;
use sxd_document::dom::Comment;
use sxd_document::dom::ChildOfElement;
use sxd_document::dom::Element;
use sxd_document::dom::Text;
use sxd_document::parser;
use std::cell::RefCell;
use std::fs::File;
use std::i32;
use std::io::Read;
use std::ops::Deref;
use std::ops::DerefMut;
pub fn list_families(context: &Context) -> Vec<RefCell<Family>> {
match context.get_font_map() {
Some(map) => {
map.list_families()
.iter()
.filter_map(|x| x.get_name())
.filter(|x| !["Sans", "Serif", "Monospace"].contains(&x.as_str()))
.map(|x| {
RefCell::new(Family {
name: x,
stripped_ranges: vec![],
})
})
.collect()
}
None => vec![],
}
}
pub fn parse_or_default<'a>(families: &'a Vec<RefCell<Family>>) -> Config<'a> {
let fc_config_path = functions::get_user_config_dir()
.expect("$XDG_CONFIG_HOME not set!")
.join("fontconfig/fonts.conf");
let config_parse = match File::open(fc_config_path.as_path()) {
Ok(mut f) => {
let mut buffer = String::new();
f.read_to_string(&mut buffer).expect(
"Failed to read your fonts.conf file",
);
parser::parse(&buffer)
}
_ => parser::parse(DEFAULT_FONTS_CONF),
};
let config_package = match config_parse {
Ok(package) => package,
Err((_, errors)) => panic!("Error parsing fonts.conf!\n{}", errors.iter().join("\n")),
};
// scan matches collection
let mut scan_matches: Vec<&'a RefCell<Family>> = vec![];
// aliases collection
let mut aliases: Vec<Alias<'a>> = vec![];
{
let doc = config_package.as_document();
let old_root_element = doc.root().children()[0].element().expect(INVALID_CONFIG);
// rest of dom collection
let new_root_element = doc.create_element(old_root_element.name());
for attr in old_root_element.attributes() {
new_root_element.set_attribute_value(attr.name(), attr.value());
}
// group children to correct collections
for child in old_root_element.children() {
match child {
ChildOfElement::Comment(x) if is_typeholder_comment(x) => {}
ChildOfElement::Element(x) if prev_is_typeholder_comment(x) => {
if x.name().local_part() == "alias" {
aliases.push(parse_alias(x, families));
} else if x.name().local_part() == "match" &&
x.attribute_value("target").unwrap_or("") == "scan"
{
match update_family(x, families) {
Some(y) => scan_matches.push(y),
_ => {}
}
}
}
x => new_root_element.append_child(x),
}
}
// replace old_root_element with new_root_element
doc.root().append_child(new_root_element);
}
Config {
scan_matches: scan_matches,
aliases: aliases,
residue: config_package,
}
}
fn prev_is_typeholder_comment(x: Element) -> bool {
match x.preceding_siblings().last() {
Some(y) => {
match y.comment() {
Some(z) => is_typeholder_comment(z),
None => false,
}
}
None => false,
}
}
fn is_typeholder_comment(x: Comment) -> bool {
x.text().starts_with(TYPEHOLDER_COMMENT_PREFIX)
}
fn update_family<'a>(
e: Element,
families: &'a Vec<RefCell<Family>>,
) -> Option<&'a RefCell<Family>> {
let family_name = checked_text(checked_child_element(
"string",
checked_child_element("test", e),
)).text();
let matched_family = families.iter().find(|x| x.borrow().name == family_name);
if matched_family.is_some() {
let nil_range_template = ("nil", "Custom");
let mut current_range_templates = nil_range_template;
let charset_elem = checked_child_element(
"charset",
checked_child_element("minus", checked_child_element("edit", e)),
);
let ranges = charset_elem
.children()
.into_iter()
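            // Each stripped range is preceded by a comment encoding "name, kind";
            // group the <range> elements under the most recent such comment.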
.group_by(|x| match x {
&ChildOfElement::Comment(y) => {
current_range_templates = y.text()
.splitn(2, ',')
.map(str::trim)
.next_tuple::<(_, _)>()
.expect(INVALID_CONFIG);
current_range_templates
}
&ChildOfElement::Element(y) if y.name().local_part() == "range" => {
current_range_templates
}
_ => nil_range_template,
})
.into_iter()
.map(|(k, group)| {
(
k,
group
.filter_map(|child| child.element())
.filter(|elem| elem.name().local_part() == "range")
.map(|range_elem| {
children_element("int", range_elem)
.map(|int_elem| {
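                        // Code points are serialized as 0x-prefixed hex; skip
                        // the prefix and parse the rest as base 16.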
i32::from_str_radix(&checked_text(int_elem).text()[2..], 16)
.expect(INVALID_CONFIG)
})
.next_tuple::<(_, _)>()
.expect(INVALID_CONFIG)
})
.collect_vec(),
)
})
.filter(|&(_, ref code_points)| !code_points.is_empty())
.coalesce(|mut r0, mut r1| if r0.0 == r1.0 {
r0.1.append(&mut r1.1);
Ok(r0)
} else {
Err((r0, r1))
})
.map(|(k, code_points)| match k.1 {
"Block" => Range::Block {
name: String::from(k.0),
code_points: code_points[0],
},
"Script" => Range::Script {
name: String::from(k.0),
code_points: code_points,
},
_ => Range::Custom {
name: String::from(k.0),
code_points: code_points[0],
},
})
.collect_vec();
matched_family
.unwrap()
.borrow_mut()
.deref_mut()
.stripped_ranges = ranges;
}
matched_family
}
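// Converts an <alias> element into an Alias, resolving each <prefer> entry
// against the list of known families.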
fn parse_alias<'a>(e: Element, families: &'a Vec<RefCell<Family>>) -> Alias<'a> {
let alias_name = checked_text(checked_child_element("family", e)).text();
let p_list = children_element("family", checked_child_element("prefer", e))
.filter_map(|x| {
families.iter().find(|y| {
y.borrow().deref().name == checked_text(x).text()
})
})
.collect_vec();
Alias {
name: String::from(alias_name),
prefer_list: p_list,
}
}
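// DOM helpers: the checked_* variants panic with a descriptive message when
// the expected child element or text node is missing.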
fn checked_child_element<'a: 'd, 'd>(name: &'a str, e: Element<'d>) -> Element<'d> {
    child_element(name, e).expect(&format!(
        "Element {} has no {} child!",
        e.name().local_part(),
        name
    ))
}
fn child_element<'a: 'd, 'd>(name: &'a str, e: Element<'d>) -> Option<Element<'d>> {
    children_element(name, e).next()
}
fn children_element<'a: 'd, 'd>(
    name: &'a str,
    e: Element<'d>,
) -> impl Iterator<Item = Element<'d>> + 'd {
    e.children()
        .into_iter()
        .filter_map(|x| x.element())
        .filter(move |x| x.name().local_part() == name)
}
fn checked_text<'d>(e: Element<'d>) -> Text<'d> {
    text(e).expect(&format!("Element {} has no text!", e.name().local_part()))
}
fn text<'d>(e: Element<'d>) -> Option<Text<'d>> {
    e.children().into_iter().filter_map(|x| x.text()).next()
}
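For orientation, here is a minimal sketch of how these entry points could be
wired together by a caller; the driver function, its name, and the println!
reporting are illustrative assumptions, not part of this file:

// Hypothetical driver; assumes a pango Context obtained elsewhere (e.g. from a
// GTK widget) and the Config/Family types defined in this crate.
fn load(context: &Context) {
    let families = list_families(context);     // every concrete family Pango knows
    let config = parse_or_default(&families);  // fonts.conf split into managed parts
    for family in &config.scan_matches {
        let family = family.borrow();
        println!("{}: {} stripped ranges", family.name, family.stripped_ranges.len());
    }
}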
email_test.py

import bountyfunding
from bountyfunding.core.const import *
from bountyfunding.core.data import clean_database
from test import to_object
from nose.tools import *
USER = "bountyfunding"
class Email_Test:
def setup(self):
self.app = bountyfunding.app.test_client()
clean_database()
def test_email(self):
eq_(len(self.get_emails()), 0)
r = self.app.post('/issues', data=dict(ref=1, status='READY',
title='Title', link='/issue/1'))
eq_(r.status_code, 200)
r = self.app.post('/issue/1/sponsorships',
data=dict(user=USER, amount=10))
        eq_(r.status_code, 200)
r = self.app.get("/issue/1")
eq_(r.status_code, 200)
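        # Moving the issue to STARTED is what should trigger the notification
        # email to the sponsor.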
r = self.app.put('/issue/1', data=dict(
status=IssueStatus.to_string(IssueStatus.STARTED)))
eq_(r.status_code, 200)
emails = self.get_emails()
eq_(len(emails), 1)
email = emails[0]
eq_(email.recipient, USER)
ok_(email.issue_id)
ok_(email.body)
r = self.app.delete("/email/%s" % email.id)
eq_(r.status_code, 200)
def get_emails(self):
r = self.app.get("/emails")
eq_(r.status_code, 200)
        return to_object(r).data
|