prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>6aab1ac34c3_.py<|end_file_name|><|fim▁begin|>"""Initial Structure (Creating test and page tables)
Revision ID: 6aab1ac34c3
Revises: None
Create Date: 2015-07-19 12:58:04.968533
"""
# revision identifiers, used by Alembic.
revision = '6aab1ac34c3'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():<|fim▁hole|> sa.Column('id', sa.Integer(), nullable=False),
sa.Column('data', sa.Text(), nullable=False),
sa.Column('run_date', sa.DateTime(), nullable=True),
sa.Column('browser_name', sa.String(length=20), nullable=True),
sa.Column('browser_version', sa.String(length=10), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('page',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('test_id', sa.Integer(), nullable=False),
sa.Column('page_id', sa.String(length=64), nullable=False),
sa.Column('fqdn', sa.String(length=256), nullable=True),
sa.Column('time_to_first_byte', sa.Float(), nullable=True),
sa.Column('html_load_time', sa.Float(), nullable=True),
sa.Column('video_load_time', sa.Float(), nullable=True),
sa.Column('audio_load_time', sa.Float(), nullable=True),
sa.Column('js_load_time', sa.Float(), nullable=True),
sa.Column('css_load_time', sa.Float(), nullable=True),
sa.Column('image_load_time', sa.Float(), nullable=True),
sa.Column('page_load_time', sa.Float(), nullable=True),
sa.Column('page_size', sa.Float(), nullable=True),
sa.Column('image_size', sa.Float(), nullable=True),
sa.Column('css_size', sa.Float(), nullable=True),
sa.Column('text_size', sa.Float(), nullable=True),
sa.Column('js_size', sa.Float(), nullable=True),
sa.Column('audio_size', sa.Float(), nullable=True),
sa.Column('video_size', sa.Float(), nullable=True),
sa.ForeignKeyConstraint(['test_id'], ['test.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('page')
op.drop_table('test')
### end Alembic commands ###<|fim▁end|> | ### commands auto generated by Alembic - please adjust! ###
op.create_table('test', |
<|file_name|>traceback-example-1.py<|end_file_name|><|fim▁begin|>'''
ÏÂÀý չʾÁË traceback Ä£¿éÔÊÐíÄãÔÚ³ÌÐòÀï´òÓ¡Òì³£µÄ¸ú×Ù·µ»Ø(Traceback)ÐÅÏ¢, ÀàËÆÎ´²¶»ñÒ쳣ʱ½âÊÍÆ÷Ëù×öµÄ.<|fim▁hole|>
# ×¢Òâ! µ¼Èë traceback »áÇåÀíµôÒ쳣״̬, ËùÒÔ
# ×îºÃ±ðÔÚÒì³£´¦Àí´úÂëÖе¼Èë¸ÃÄ£¿é
import traceback
try:
raise SyntaxError, "example"
except:
traceback.print_exc()<|fim▁end|> | ''' |
<|file_name|>simplified_data_tasks.py<|end_file_name|><|fim▁begin|>## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Basic simplified data test functions - NOT FOR XML """
from invenio.bibworkflow_config import CFG_OBJECT_STATUS<|fim▁hole|>
def task_a(a):
def _task_a(obj, eng):
"""Function task_a docstring"""
eng.log.info("executing task a " + str(a))
obj.data += a
return _task_a
def task_b(obj, eng):
"""Function task_b docstring"""
eng.log.info("executing task b")
if obj.data < 20:
obj.change_status(CFG_OBJECT_STATUS.ERROR)
eng.log.info("Object status %s" % (obj.db_obj.status,))
eng.log.info("data < 20")
obj.add_task_result("task_b", {'a': 12, 'b': 13, 'c': 14})
eng.halt("Value of filed: data in object is too small.")<|fim▁end|> | |
<|file_name|>test_v3_protection.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at<|fim▁hole|>#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tempfile
import uuid
from keystone import config
from keystone import exception
from keystone.openstack.common import jsonutils
from keystone.policy.backends import rules
from keystone import tests
from keystone.tests import test_v3
CONF = config.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
class IdentityTestProtectedCase(test_v3.RestfulTestCase):
"""Test policy enforcement on the v3 Identity API."""
def setUp(self):
"""Setup for Identity Protection Test Cases.
As well as the usual housekeeping, create a set of domains,
users, roles and projects for the subsequent tests:
- Three domains: A,B & C. C is disabled.
- DomainA has user1, DomainB has user2 and user3
- DomainA has group1 and group2, DomainB has group3
- User1 has two roles on DomainA
- User2 has one role on DomainA
Remember that there will also be a fourth domain in existence,
the default domain.
"""
# Ensure that test_v3.RestfulTestCase doesn't load its own
# sample data, which would make checking the results of our
# tests harder
super(IdentityTestProtectedCase, self).setUp()
# Initialize the policy engine and allow us to write to a temp
# file in each test to create the policies
self.addCleanup(rules.reset)
rules.reset()
_unused, self.tmpfilename = tempfile.mkstemp()
self.config_fixture.config(policy_file=self.tmpfilename)
# A default auth request we can use - un-scoped user token
self.auth = self.build_authentication_request(
user_id=self.user1['id'],
password=self.user1['password'])
def load_sample_data(self):
# Start by creating a couple of domains
self.domainA = self.new_domain_ref()
self.assignment_api.create_domain(self.domainA['id'], self.domainA)
self.domainB = self.new_domain_ref()
self.assignment_api.create_domain(self.domainB['id'], self.domainB)
self.domainC = self.new_domain_ref()
self.domainC['enabled'] = False
self.assignment_api.create_domain(self.domainC['id'], self.domainC)
# Now create some users, one in domainA and two of them in domainB
self.user1 = self.new_user_ref(domain_id=self.domainA['id'])
self.user1['password'] = uuid.uuid4().hex
self.identity_api.create_user(self.user1['id'], self.user1)
self.user2 = self.new_user_ref(domain_id=self.domainB['id'])
self.user2['password'] = uuid.uuid4().hex
self.identity_api.create_user(self.user2['id'], self.user2)
self.user3 = self.new_user_ref(domain_id=self.domainB['id'])
self.user3['password'] = uuid.uuid4().hex
self.identity_api.create_user(self.user3['id'], self.user3)
self.group1 = self.new_group_ref(domain_id=self.domainA['id'])
self.identity_api.create_group(self.group1['id'], self.group1)
self.group2 = self.new_group_ref(domain_id=self.domainA['id'])
self.identity_api.create_group(self.group2['id'], self.group2)
self.group3 = self.new_group_ref(domain_id=self.domainB['id'])
self.identity_api.create_group(self.group3['id'], self.group3)
self.role = self.new_role_ref()
self.assignment_api.create_role(self.role['id'], self.role)
self.role1 = self.new_role_ref()
self.assignment_api.create_role(self.role1['id'], self.role1)
self.assignment_api.create_grant(self.role['id'],
user_id=self.user1['id'],
domain_id=self.domainA['id'])
self.assignment_api.create_grant(self.role['id'],
user_id=self.user2['id'],
domain_id=self.domainA['id'])
self.assignment_api.create_grant(self.role1['id'],
user_id=self.user1['id'],
domain_id=self.domainA['id'])
def _get_id_list_from_ref_list(self, ref_list):
result_list = []
for x in ref_list:
result_list.append(x['id'])
return result_list
def _set_policy(self, new_policy):
with open(self.tmpfilename, "w") as policyfile:
policyfile.write(jsonutils.dumps(new_policy))
def test_list_users_unprotected(self):
"""GET /users (unprotected)
Test Plan:
- Update policy so api is unprotected
- Use an un-scoped token to make sure we can get back all
the users independent of domain
"""
self._set_policy({"identity:list_users": []})
r = self.get('/users', auth=self.auth)
id_list = self._get_id_list_from_ref_list(r.result.get('users'))
self.assertIn(self.user1['id'], id_list)
self.assertIn(self.user2['id'], id_list)
self.assertIn(self.user3['id'], id_list)
def test_list_users_filtered_by_domain(self):
"""GET /users?domain_id=mydomain (filtered)
Test Plan:
- Update policy so api is unprotected
- Use an un-scoped token to make sure we can filter the
users by domainB, getting back the 2 users in that domain
"""
self._set_policy({"identity:list_users": []})
url_by_name = '/users?domain_id=%s' % self.domainB['id']
r = self.get(url_by_name, auth=self.auth)
# We should get back two users, those in DomainB
id_list = self._get_id_list_from_ref_list(r.result.get('users'))
self.assertIn(self.user2['id'], id_list)
self.assertIn(self.user3['id'], id_list)
def test_get_user_protected_match_id(self):
"""GET /users/{id} (match payload)
Test Plan:
- Update policy to protect api by user_id
- List users with user_id of user1 as filter, to check that
this will correctly match user_id in the flattened
payload
"""
# TODO(henry-nash, ayoung): It would be good to expand this
# test for further test flattening, e.g. protect on, say, an
# attribute of an object being created
new_policy = {"identity:get_user": [["user_id:%(user_id)s"]]}
self._set_policy(new_policy)
url_by_name = '/users/%s' % self.user1['id']
r = self.get(url_by_name, auth=self.auth)
self.assertEqual(self.user1['id'], r.result['user']['id'])
def test_get_user_protected_match_target(self):
"""GET /users/{id} (match target)
Test Plan:
- Update policy to protect api by domain_id
- Try and read a user who is in DomainB with a token scoped
to Domain A - this should fail
- Retry this for a user who is in Domain A, which should succeed.
- Finally, try getting a user that does not exist, which should
still return UserNotFound
"""
new_policy = {'identity:get_user':
[["domain_id:%(target.user.domain_id)s"]]}
self._set_policy(new_policy)
self.auth = self.build_authentication_request(
user_id=self.user1['id'],
password=self.user1['password'],
domain_id=self.domainA['id'])
url_by_name = '/users/%s' % self.user2['id']
r = self.get(url_by_name, auth=self.auth,
expected_status=exception.ForbiddenAction.code)
url_by_name = '/users/%s' % self.user1['id']
r = self.get(url_by_name, auth=self.auth)
self.assertEqual(self.user1['id'], r.result['user']['id'])
url_by_name = '/users/%s' % uuid.uuid4().hex
r = self.get(url_by_name, auth=self.auth,
expected_status=exception.UserNotFound.code)
def test_revoke_grant_protected_match_target(self):
"""DELETE /domains/{id}/users/{id}/roles/{id} (match target)
Test Plan:
- Update policy to protect api by domain_id of entities in
the grant
- Try and delete the existing grant that has a user who is
from a different domain - this should fail.
- Retry this for a user who is in Domain A, which should succeed.
"""
new_policy = {'identity:revoke_grant':
[["domain_id:%(target.user.domain_id)s"]]}
self._set_policy(new_policy)
collection_url = (
'/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': self.domainA['id'],
'user_id': self.user2['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role['id']}
self.auth = self.build_authentication_request(
user_id=self.user1['id'],
password=self.user1['password'],
domain_id=self.domainA['id'])
self.delete(member_url, auth=self.auth,
expected_status=exception.ForbiddenAction.code)
collection_url = (
'/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': self.domainA['id'],
'user_id': self.user1['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role1['id']}
self.delete(member_url, auth=self.auth)
def test_list_users_protected_by_domain(self):
"""GET /users?domain_id=mydomain (protected)
Test Plan:
- Update policy to protect api by domain_id
- List groups using a token scoped to domainA with a filter
specifying domainA - we should only get back the one user
that is in domainA.
- Try and read the users from domainB - this should fail since
we don't have a token scoped for domainB
"""
new_policy = {"identity:list_users": ["domain_id:%(domain_id)s"]}
self._set_policy(new_policy)
self.auth = self.build_authentication_request(
user_id=self.user1['id'],
password=self.user1['password'],
domain_id=self.domainA['id'])
url_by_name = '/users?domain_id=%s' % self.domainA['id']
r = self.get(url_by_name, auth=self.auth)
# We should only get back one user, the one in DomainA
id_list = self._get_id_list_from_ref_list(r.result.get('users'))
self.assertEqual(1, len(id_list))
self.assertIn(self.user1['id'], id_list)
# Now try for domainB, which should fail
url_by_name = '/users?domain_id=%s' % self.domainB['id']
r = self.get(url_by_name, auth=self.auth,
expected_status=exception.ForbiddenAction.code)
def test_list_groups_protected_by_domain(self):
"""GET /groups?domain_id=mydomain (protected)
Test Plan:
- Update policy to protect api by domain_id
- List groups using a token scoped to domainA and make sure
we only get back the two groups that are in domainA
- Try and read the groups from domainB - this should fail since
we don't have a token scoped for domainB
"""
new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
self._set_policy(new_policy)
self.auth = self.build_authentication_request(
user_id=self.user1['id'],
password=self.user1['password'],
domain_id=self.domainA['id'])
url_by_name = '/groups?domain_id=%s' % self.domainA['id']
r = self.get(url_by_name, auth=self.auth)
# We should only get back two groups, the ones in DomainA
id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
self.assertEqual(2, len(id_list))
self.assertIn(self.group1['id'], id_list)
self.assertIn(self.group2['id'], id_list)
# Now try for domainB, which should fail
url_by_name = '/groups?domain_id=%s' % self.domainB['id']
r = self.get(url_by_name, auth=self.auth,
expected_status=exception.ForbiddenAction.code)
def test_list_groups_protected_by_domain_and_filtered(self):
"""GET /groups?domain_id=mydomain&name=myname (protected)
Test Plan:
- Update policy to protect api by domain_id
- List groups using a token scoped to domainA with a filter
specifying both domainA and the name of group.
- We should only get back the group in domainA that matches
the name
"""
new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
self._set_policy(new_policy)
self.auth = self.build_authentication_request(
user_id=self.user1['id'],
password=self.user1['password'],
domain_id=self.domainA['id'])
url_by_name = '/groups?domain_id=%s&name=%s' % (
self.domainA['id'], self.group2['name'])
r = self.get(url_by_name, auth=self.auth)
# We should only get back one user, the one in DomainA that matches
# the name supplied
id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
self.assertEqual(1, len(id_list))
self.assertIn(self.group2['id'], id_list)
class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase):
"""Test policy enforcement of the sample v3 cloud policy file."""
def setUp(self):
"""Setup for v3 Cloud Policy Sample Test Cases.
The following data is created:
- Three domains: domainA, domainB and admin_domain
- One project, which name is 'project'
- domainA has three users: domain_admin_user, project_admin_user and
just_a_user:
- domain_admin_user has role 'admin' on domainA,
- project_admin_user has role 'admin' on the project,
- just_a_user has a non-admin role on both domainA and the project.
- admin_domain has user cloud_admin_user, with an 'admin' role
on admin_domain.
We test various api protection rules from the cloud sample policy
file to make sure the sample is valid and that we correctly enforce it.
"""
# Ensure that test_v3.RestfulTestCase doesn't load its own
# sample data, which would make checking the results of our
# tests harder
super(IdentityTestv3CloudPolicySample, self).setUp()
# Finally, switch to the v3 sample policy file
self.addCleanup(rules.reset)
rules.reset()
self.config_fixture.config(
policy_file=tests.dirs.etc('policy.v3cloudsample.json'))
def load_sample_data(self):
# Start by creating a couple of domains
self.domainA = self.new_domain_ref()
self.assignment_api.create_domain(self.domainA['id'], self.domainA)
self.domainB = self.new_domain_ref()
self.assignment_api.create_domain(self.domainB['id'], self.domainB)
self.admin_domain = {'id': 'admin_domain_id', 'name': 'Admin_domain'}
self.assignment_api.create_domain(self.admin_domain['id'],
self.admin_domain)
# And our users
self.cloud_admin_user = self.new_user_ref(
domain_id=self.admin_domain['id'])
self.cloud_admin_user['password'] = uuid.uuid4().hex
self.identity_api.create_user(self.cloud_admin_user['id'],
self.cloud_admin_user)
self.just_a_user = self.new_user_ref(domain_id=self.domainA['id'])
self.just_a_user['password'] = uuid.uuid4().hex
self.identity_api.create_user(self.just_a_user['id'], self.just_a_user)
self.domain_admin_user = self.new_user_ref(
domain_id=self.domainA['id'])
self.domain_admin_user['password'] = uuid.uuid4().hex
self.identity_api.create_user(self.domain_admin_user['id'],
self.domain_admin_user)
self.project_admin_user = self.new_user_ref(
domain_id=self.domainA['id'])
self.project_admin_user['password'] = uuid.uuid4().hex
self.identity_api.create_user(self.project_admin_user['id'],
self.project_admin_user)
# The admin role and another plain role
self.admin_role = {'id': uuid.uuid4().hex, 'name': 'admin'}
self.assignment_api.create_role(self.admin_role['id'], self.admin_role)
self.role = self.new_role_ref()
self.assignment_api.create_role(self.role['id'], self.role)
# The cloud admin just gets the admin role
self.assignment_api.create_grant(self.admin_role['id'],
user_id=self.cloud_admin_user['id'],
domain_id=self.admin_domain['id'])
# Assign roles to the domain
self.assignment_api.create_grant(self.admin_role['id'],
user_id=self.domain_admin_user['id'],
domain_id=self.domainA['id'])
self.assignment_api.create_grant(self.role['id'],
user_id=self.just_a_user['id'],
domain_id=self.domainA['id'])
# Create a assign roles to the project
self.project = self.new_project_ref(domain_id=self.domainA['id'])
self.assignment_api.create_project(self.project['id'], self.project)
self.assignment_api.create_grant(self.admin_role['id'],
user_id=self.project_admin_user['id'],
project_id=self.project['id'])
self.assignment_api.create_grant(self.role['id'],
user_id=self.just_a_user['id'],
project_id=self.project['id'])
def _stati(self, expected_status):
# Return the expected return codes for APIs with and without data
# with any specified status overriding the normal values
if expected_status is None:
return (200, 201, 204)
else:
return (expected_status, expected_status, expected_status)
def _test_user_management(self, domain_id, expected=None):
status_OK, status_created, status_no_data = self._stati(expected)
entity_url = '/users/%s' % self.just_a_user['id']
list_url = '/users?domain_id=%s' % domain_id
self.get(entity_url, auth=self.auth,
expected_status=status_OK)
self.get(list_url, auth=self.auth,
expected_status=status_OK)
user = {'description': 'Updated'}
self.patch(entity_url, auth=self.auth, body={'user': user},
expected_status=status_OK)
self.delete(entity_url, auth=self.auth,
expected_status=status_no_data)
user_ref = self.new_user_ref(domain_id=domain_id)
self.post('/users', auth=self.auth, body={'user': user_ref},
expected_status=status_created)
def _test_project_management(self, domain_id, expected=None):
status_OK, status_created, status_no_data = self._stati(expected)
entity_url = '/projects/%s' % self.project['id']
list_url = '/projects?domain_id=%s' % domain_id
self.get(entity_url, auth=self.auth,
expected_status=status_OK)
self.get(list_url, auth=self.auth,
expected_status=status_OK)
project = {'description': 'Updated'}
self.patch(entity_url, auth=self.auth, body={'project': project},
expected_status=status_OK)
self.delete(entity_url, auth=self.auth,
expected_status=status_no_data)
proj_ref = self.new_project_ref(domain_id=domain_id)
self.post('/projects', auth=self.auth, body={'project': proj_ref},
expected_status=status_created)
def _test_domain_management(self, expected=None):
status_OK, status_created, status_no_data = self._stati(expected)
entity_url = '/domains/%s' % self.domainB['id']
list_url = '/domains'
self.get(entity_url, auth=self.auth,
expected_status=status_OK)
self.get(list_url, auth=self.auth,
expected_status=status_OK)
domain = {'description': 'Updated', 'enabled': False}
self.patch(entity_url, auth=self.auth, body={'domain': domain},
expected_status=status_OK)
self.delete(entity_url, auth=self.auth,
expected_status=status_no_data)
domain_ref = self.new_domain_ref()
self.post('/domains', auth=self.auth, body={'domain': domain_ref},
expected_status=status_created)
def _test_grants(self, target, entity_id, expected=None):
status_OK, status_created, status_no_data = self._stati(expected)
a_role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.assignment_api.create_role(a_role['id'], a_role)
collection_url = (
'/%(target)s/%(target_id)s/users/%(user_id)s/roles' % {
'target': target,
'target_id': entity_id,
'user_id': self.just_a_user['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': a_role['id']}
self.put(member_url, auth=self.auth,
expected_status=status_no_data)
self.head(member_url, auth=self.auth,
expected_status=status_no_data)
self.get(collection_url, auth=self.auth,
expected_status=status_OK)
self.delete(member_url, auth=self.auth,
expected_status=status_no_data)
def test_user_management(self):
# First, authenticate with a user that does not have the domain
# admin role - shouldn't be able to do much.
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'],
domain_id=self.domainA['id'])
self._test_user_management(
self.domainA['id'], expected=exception.ForbiddenAction.code)
# Now, authenticate with a user that does have the domain admin role
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
self._test_user_management(self.domainA['id'])
def test_user_management_by_cloud_admin(self):
# Test users management with a cloud admin. This user should
# be able to manage users in any domain.
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
domain_id=self.admin_domain['id'])
self._test_user_management(self.domainA['id'])
def test_project_management(self):
# First, authenticate with a user that does not have the project
# admin role - shouldn't be able to do much.
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'],
domain_id=self.domainA['id'])
self._test_project_management(
self.domainA['id'], expected=exception.ForbiddenAction.code)
# ...but should still be able to list projects of which they are
# a member
url = '/users/%s/projects' % self.just_a_user['id']
self.get(url, auth=self.auth)
# Now, authenticate with a user that does have the domain admin role
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
self._test_project_management(self.domainA['id'])
def test_domain_grants(self):
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'],
domain_id=self.domainA['id'])
self._test_grants('domains', self.domainA['id'],
expected=exception.ForbiddenAction.code)
# Now, authenticate with a user that does have the domain admin role
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
self._test_grants('domains', self.domainA['id'])
# Check that with such a token we cannot modify grants on a
# different domain
self._test_grants('domains', self.domainB['id'],
expected=exception.ForbiddenAction.code)
def test_domain_grants_by_cloud_admin(self):
# Test domain grants with a cloud admin. This user should be
# able to manage roles on any domain.
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
domain_id=self.admin_domain['id'])
self._test_grants('domains', self.domainA['id'])
def test_project_grants(self):
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
password=self.just_a_user['password'],
project_id=self.project['id'])
self._test_grants('projects', self.project['id'],
expected=exception.ForbiddenAction.code)
# Now, authenticate with a user that does have the project
# admin role
self.auth = self.build_authentication_request(
user_id=self.project_admin_user['id'],
password=self.project_admin_user['password'],
project_id=self.project['id'])
self._test_grants('projects', self.project['id'])
def test_project_grants_by_domain_admin(self):
# Test project grants with a domain admin. This user should be
# able to manage roles on any project in its own domain.
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
self._test_grants('projects', self.project['id'])
def test_cloud_admin(self):
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
self._test_domain_management(
expected=exception.ForbiddenAction.code)
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
domain_id=self.admin_domain['id'])
self._test_domain_management()<|fim▁end|> | #
# http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>intoto_provenance.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import any_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package="grafeas.v1",
manifest={
"Recipe",
"Completeness",
"Metadata",
"BuilderConfig",
"InTotoProvenance",
},
)
class Recipe(proto.Message):
r"""Steps taken to build the artifact.
For a TaskRun, typically each container corresponds to one step
in the recipe.
Attributes:
type_ (str):
URI indicating what type of recipe was
performed. It determines the meaning of
recipe.entryPoint, recipe.arguments,
recipe.environment, and materials.
defined_in_material (int):
Index in materials containing the recipe
steps that are not implied by recipe.type. For
example, if the recipe type were "make", then
this would point to the source containing the
Makefile, not the make program itself. Set to -1<|fim▁hole|> entry_point (str):
String identifying the entry point into the
build. This is often a path to a configuration
file and/or a target label within that file. The
syntax and meaning are defined by recipe.type.
For example, if the recipe type were "make",
then this would reference the directory in which
to run make as well as which target to use.
arguments (Sequence[google.protobuf.any_pb2.Any]):
Collection of all external inputs that
influenced the build on top of
recipe.definedInMaterial and recipe.entryPoint.
For example, if the recipe type were "make",
then this might be the flags passed to make
aside from the target, which is captured in
recipe.entryPoint. Since the arguments field can
greatly vary in structure, depending on the
builder and recipe type, this is of form "Any".
environment (Sequence[google.protobuf.any_pb2.Any]):
Any other builder-controlled inputs necessary
for correctly evaluating the recipe. Usually
only needed for reproducing the build but not
evaluated as part of policy. Since the
environment field can greatly vary in structure,
depending on the builder and recipe type, this
is of form "Any".
"""
type_ = proto.Field(proto.STRING, number=1,)
defined_in_material = proto.Field(proto.INT64, number=2,)
entry_point = proto.Field(proto.STRING, number=3,)
arguments = proto.RepeatedField(proto.MESSAGE, number=4, message=any_pb2.Any,)
environment = proto.RepeatedField(proto.MESSAGE, number=5, message=any_pb2.Any,)
class Completeness(proto.Message):
r"""Indicates that the builder claims certain fields in this
message to be complete.
Attributes:
arguments (bool):
If true, the builder claims that
recipe.arguments is complete, meaning that all
external inputs are properly captured in the
recipe.
environment (bool):
If true, the builder claims that
recipe.environment is claimed to be complete.
materials (bool):
If true, the builder claims that materials
are complete, usually through some controls to
prevent network access. Sometimes called
"hermetic".
"""
arguments = proto.Field(proto.BOOL, number=1,)
environment = proto.Field(proto.BOOL, number=2,)
materials = proto.Field(proto.BOOL, number=3,)
class Metadata(proto.Message):
r"""Other properties of the build.
Attributes:
build_invocation_id (str):
Identifies the particular build invocation,
which can be useful for finding associated logs
or other ad-hoc analysis. The value SHOULD be
globally unique, per in-toto Provenance spec.
build_started_on (google.protobuf.timestamp_pb2.Timestamp):
The timestamp of when the build started.
build_finished_on (google.protobuf.timestamp_pb2.Timestamp):
The timestamp of when the build completed.
completeness (grafeas.grafeas_v1.types.Completeness):
Indicates that the builder claims certain
fields in this message to be complete.
reproducible (bool):
If true, the builder claims that running the
recipe on materials will produce bit-for-bit
identical output.
"""
build_invocation_id = proto.Field(proto.STRING, number=1,)
build_started_on = proto.Field(
proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,
)
build_finished_on = proto.Field(
proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,
)
completeness = proto.Field(proto.MESSAGE, number=4, message="Completeness",)
reproducible = proto.Field(proto.BOOL, number=5,)
class BuilderConfig(proto.Message):
r"""
Attributes:
id (str):
"""
id = proto.Field(proto.STRING, number=1,)
class InTotoProvenance(proto.Message):
r"""
Attributes:
builder_config (grafeas.grafeas_v1.types.BuilderConfig):
required
recipe (grafeas.grafeas_v1.types.Recipe):
Identifies the configuration used for the
build. When combined with materials, this SHOULD
fully describe the build, such that re-running
this recipe results in bit-for-bit identical
output (if the build is reproducible).
metadata (grafeas.grafeas_v1.types.Metadata):
materials (Sequence[str]):
The collection of artifacts that influenced
the build including sources, dependencies, build
tools, base images, and so on. This is
considered to be incomplete unless
metadata.completeness.materials is true. Unset
or null is equivalent to empty.
"""
builder_config = proto.Field(proto.MESSAGE, number=1, message="BuilderConfig",)
recipe = proto.Field(proto.MESSAGE, number=2, message="Recipe",)
metadata = proto.Field(proto.MESSAGE, number=3, message="Metadata",)
materials = proto.RepeatedField(proto.STRING, number=4,)
__all__ = tuple(sorted(__protobuf__.manifest))<|fim▁end|> | if the recipe doesn't come from a material, as
zero is default unset value for int64. |
<|file_name|>advent1.rs<|end_file_name|><|fim▁begin|>// advent1.rs
// Manhattan lengths and segment intersection
use std::io;
use std::collections::HashSet;
type Vec2 = [i32; 2];
// unit vectors for the 4 cardinal directions
const NORTH: Vec2 = [0, 1];
#[allow(dead_code)]
const SOUTH: Vec2 = [0, -1];
#[allow(dead_code)]
const EAST: Vec2 = [1, 0];
#[allow(dead_code)]
const WEST: Vec2 = [-1, 0];
fn main() {
let mut input = String::new();
io::stdin().read_line(&mut input).expect("Failed to read line");
println!("part 1 distance: {}", calc_manhattan_length(&input));
println!("part 2 distance: {}", find_first_revisited_distance(&input));
}
<|fim▁hole|>
for turn in turns.trim().split(", ") {
let dist = parse_turn(&mut dir, turn);
position[0] += dir[0] * dist;
position[1] += dir[1] * dist;
}
position[0].abs() + position[1].abs()
}
// returns new distance, updates direction
// We're making the assumption that turn starts with L or R followed by a number.
// This function probably will panic if given garbage
fn parse_turn(dir: &mut Vec2, turn: &str) -> i32 {
let (dir_str, dist_str) = turn.split_at(1); // could panic
*dir = match dir_str.chars().nth(0) {
Some('R') => turn_right(*dir),
Some('L') => turn_left(*dir),
_ => panic!("unexpected character in input"), // could panic
};
dist_str.parse::<i32>().unwrap() // could panic
}
// rotate clockwise 90 degrees
fn turn_right(dir: Vec2) -> Vec2 {
[dir[1], -dir[0]]
}
// rotate counter-clockwise 90 degrees
fn turn_left(dir: Vec2) -> Vec2 {
[-dir[1], dir[0]]
}
// ////////
// Part 2
fn find_first_revisited_distance(turns: &str) -> i32 {
let mut dir = NORTH;
let mut position = [0, 0];
let mut visited = HashSet::new();
visited.insert(position);
'outer: for turn in turns.trim().split(", ") {
let distance = parse_turn(&mut dir, turn);
for _ in 0..distance {
position[0] += dir[0];
position[1] += dir[1];
if !visited.insert(position) {
// already in set, we've been here before
break 'outer;
}
}
}
position[0].abs() + position[1].abs()
}
// //////
// Tests
#[test]
fn test_turn_right() {
assert_eq!(EAST, turn_right(NORTH));
assert_eq!(SOUTH, turn_right(EAST));
assert_eq!(WEST, turn_right(SOUTH));
assert_eq!(NORTH, turn_right(WEST));
}
#[test]
fn test_turn_left() {
assert_eq!(WEST, turn_left(NORTH));
assert_eq!(NORTH, turn_left(EAST));
assert_eq!(EAST, turn_left(SOUTH));
assert_eq!(SOUTH, turn_left(WEST));
}
#[test]
fn test_parse_turn() {
let mut dir = NORTH;
assert_eq!(3, parse_turn(&mut dir, "R3"));
assert_eq!(EAST, dir);
assert_eq!(23, parse_turn(&mut dir, "L23"));
assert_eq!(NORTH, dir);
}
#[test]
fn test_calc_manhattan_length() {
assert_eq!(5, calc_manhattan_length("R2, L3"));
assert_eq!(2, calc_manhattan_length("R2, R2, R2"));
assert_eq!(12, calc_manhattan_length("R5, L5, R5, R3"));
}
// part 2
#[test]
fn test_find_first_revisited_distance() {
assert_eq!(4, find_first_revisited_distance("R8, R4, R4, R8"));
}<|fim▁end|> | // split the input into individual turns, add up all the distance vectors, and return the distance
fn calc_manhattan_length(turns: &str) -> i32 {
let mut dir = NORTH;
let mut position = [0, 0]; |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate coreaudio;
extern crate coremidi;
extern crate time;
extern crate synthesizer_io_core;
use coreaudio::audio_unit::{AudioUnit, IOType, SampleFormat, Scope};
use coreaudio::audio_unit::render_callback::{self, data};
use synthesizer_io_core::modules;
use synthesizer_io_core::worker::Worker;
use synthesizer_io_core::queue::Sender;
use synthesizer_io_core::graph::{Node, Message, SetParam, Note};
use synthesizer_io_core::module::N_SAMPLES_PER_CHUNK;
struct Midi {
tx: Sender<Message>,
cur_note: Option<u8>,
}
impl Midi {
fn new(tx: Sender<Message>) -> Midi {
Midi {
tx: tx,
cur_note: None,
}
}
fn send(&self, msg: Message) {
self.tx.send(msg);
}
fn set_ctrl_const(&mut self, value: u8, lo: f32, hi: f32, ix: usize, ts: u64) {
let value = lo + value as f32 * (1.0/127.0) * (hi - lo);
let param = SetParam {
ix: ix,
param_ix: 0,
val: value,
timestamp: ts,
};
self.send(Message::SetParam(param));
}
fn send_note(&mut self, ixs: Vec<usize>, midi_num: f32, velocity: f32, on: bool,
ts: u64)
{
let note = Note {
ixs: ixs.into_boxed_slice(),
midi_num: midi_num,
velocity: velocity,
on: on,
timestamp: ts,
};
self.send(Message::Note(note));
}
fn dispatch_midi(&mut self, data: &[u8], ts: u64) {
let mut i = 0;
while i < data.len() {
if data[i] == 0xb0 {
let controller = data[i + 1];
let value = data[i + 2];
match controller {
1 => self.set_ctrl_const(value, 0.0, 22_000f32.log2(), 3, ts),
2 => self.set_ctrl_const(value, 0.0, 0.995, 4, ts),
3 => self.set_ctrl_const(value, 0.0, 22_000f32.log2(), 5, ts),
5 => self.set_ctrl_const(value, 0.0, 10.0, 11, ts),
6 => self.set_ctrl_const(value, 0.0, 10.0, 12, ts),
7 => self.set_ctrl_const(value, 0.0, 6.0, 13, ts),
8 => self.set_ctrl_const(value, 0.0, 10.0, 14, ts),
_ => println!("don't have handler for controller {}", controller),
}
i += 3;
} else if data[i] == 0x90 || data[i] == 0x80 {
let midi_num = data[i + 1];
let velocity = data[i + 2];
let on = data[i] == 0x90 && velocity > 0;
if on || self.cur_note == Some(midi_num) {
self.send_note(vec![5, 7], midi_num as f32, velocity as f32, on, ts);
self.cur_note = if on { Some(midi_num) } else { None }
}
i += 3;
} else {
break;
}
}
}
}
fn main() {
let (mut worker, tx, rx) = Worker::create(1024);
/*
let module = Box::new(modules::ConstCtrl::new(440.0f32.log2()));
worker.handle_node(Node::create(module, 1, [], []));
let module = Box::new(modules::Sin::new(44_100.0));
worker.handle_node(Node::create(module, 2, [], [(1, 0)]));
let module = Box::new(modules::ConstCtrl::new(880.0f32.log2()));
worker.handle_node(Node::create(module, 3, [], []));
let module = Box::new(modules::Sin::new(44_100.0));
worker.handle_node(Node::create(module, 4, [], [(3, 0)]));
let module = Box::new(modules::Sum);
worker.handle_node(Node::create(module, 0, [(2, 0), (4, 0)], []));
*/
let module = Box::new(modules::Saw::new(44_100.0));
worker.handle_node(Node::create(module, 1, [], [(5, 0)]));
let module = Box::new(modules::SmoothCtrl::new(880.0f32.log2()));
worker.handle_node(Node::create(module, 3, [], []));
let module = Box::new(modules::SmoothCtrl::new(0.5));
worker.handle_node(Node::create(module, 4, [], []));
let module = Box::new(modules::NotePitch::new());
worker.handle_node(Node::create(module, 5, [], []));
let module = Box::new(modules::Biquad::new(44_100.0));
worker.handle_node(Node::create(module, 6, [(1,0)], [(3, 0), (4, 0)]));
let module = Box::new(modules::Adsr::new());
worker.handle_node(Node::create(module, 7, [], vec![(11, 0), (12, 0), (13, 0), (14, 0)]));
let module = Box::new(modules::Gain::new());
worker.handle_node(Node::create(module, 0, [(6, 0)], [(7, 0)]));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 11, [], []));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 12, [], []));
let module = Box::new(modules::SmoothCtrl::new(4.0));
worker.handle_node(Node::create(module, 13, [], []));
let module = Box::new(modules::SmoothCtrl::new(5.0));
worker.handle_node(Node::create(module, 14, [], []));
let _audio_unit = run(worker).unwrap();
let source_index = 0;
if let Some(source) = coremidi::Source::from_index(source_index) {
println!("Listening for midi from {}", source.display_name().unwrap());
let client = coremidi::Client::new("synthesizer-client").unwrap();
let mut last_ts = 0;
let mut last_val = 0;
let mut midi = Midi::new(tx);
let callback = move |packet_list: &coremidi::PacketList| {
for packet in packet_list.iter() {
let data = packet.data();
let delta_t = packet.timestamp() - last_ts;
let speed = 1e9 * (data[2] as f64 - last_val as f64) / delta_t as f64;
println!("{} {:3.3} {} {}", speed, delta_t as f64 * 1e-6, data[2],
time::precise_time_ns() - packet.timestamp());
last_val = data[2];
last_ts = packet.timestamp();
midi.dispatch_midi(&data, last_ts);
}
};
let input_port = client.input_port("synthesizer-port", callback).unwrap();
input_port.connect_source(&source).unwrap();
println!("Press Enter to exit.");
let mut line = String::new();
::std::io::stdin().read_line(&mut line).unwrap();
input_port.disconnect_source(&source).unwrap();
} else {<|fim▁hole|> println!("No midi available");
}
}
fn run(mut worker: Worker) -> Result<AudioUnit, coreaudio::Error> {
// Construct an Output audio unit that delivers audio to the default output device.
let mut audio_unit = AudioUnit::new(IOType::DefaultOutput)?;
let stream_format = audio_unit.stream_format(Scope::Output)?;
//println!("{:#?}", &stream_format);
// We expect `f32` data.
assert!(SampleFormat::F32 == stream_format.sample_format);
type Args = render_callback::Args<data::NonInterleaved<f32>>;
audio_unit.set_render_callback(move |args| {
let Args { num_frames, mut data, .. }: Args = args;
assert!(num_frames % N_SAMPLES_PER_CHUNK == 0);
let mut i = 0;
let mut timestamp = time::precise_time_ns();
while i < num_frames {
// should let the graph generate stereo
let buf = worker.work(timestamp)[0].get();
for j in 0..N_SAMPLES_PER_CHUNK {
for channel in data.channels_mut() {
channel[i + j] = buf[j];
}
}
timestamp += 1451247; // 64 * 1e9 / 44_100
i += N_SAMPLES_PER_CHUNK;
}
Ok(())
})?;
audio_unit.start()?;
Ok(audio_unit)
}<|fim▁end|> | |
<|file_name|>IConcurrentSet.java<|end_file_name|><|fim▁begin|>package ninja.egg82.concurrent;<|fim▁hole|> //functions
int getRemainingCapacity();
int getCapacity();
}<|fim▁end|> |
import java.util.Set;
public interface IConcurrentSet<T> extends Set<T> { |
<|file_name|>rc4random.js<|end_file_name|><|fim▁begin|>Rc4Random = function(seed)
{
var keySchedule = [];
var keySchedule_i = 0;
var keySchedule_j = 0;
function init(seed) {
for (var i = 0; i < 256; i++) {
keySchedule[i] = i;
}
var j = 0;
for (var i = 0; i < 256; i++) {
j = (j + keySchedule[i] + seed.charCodeAt(i % seed.length)) % 256;
var t = keySchedule[i];
keySchedule[i] = keySchedule[j];
keySchedule[j] = t;
}
}
init(seed);
function getRandomByte() {
keySchedule_i = (keySchedule_i + 1) % 256;
keySchedule_j = (keySchedule_j + keySchedule[keySchedule_i]) % 256;
var t = keySchedule[keySchedule_i];
keySchedule[keySchedule_i] = keySchedule[keySchedule_j];
keySchedule[keySchedule_j] = t;
return keySchedule[(keySchedule[keySchedule_i] + keySchedule[keySchedule_j]) % 256];
}<|fim▁hole|> this.getRandomNumber = function() {
var number = 0;
var multiplier = 1;
for (var i = 0; i < 8; i++) {
number += getRandomByte() * multiplier;
multiplier *= 256;
}
return number / 18446744073709551616;
}
}<|fim▁end|> | |
<|file_name|>test_settings.py<|end_file_name|><|fim▁begin|># flake8: noqa
"""Settings to be used for running tests."""
from settings import *
INSTALLED_APPS.append('integration_tests')
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
}
}
EMAIL_SUBJECT_PREFIX = '[test] '
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'<|fim▁hole|><|fim▁end|> | SOUTH_TESTS_MIGRATE = False |
<|file_name|>_management_locks_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._management_locks_operations import build_create_or_update_at_resource_group_level_request, build_create_or_update_at_resource_level_request, build_create_or_update_at_subscription_level_request, build_create_or_update_by_scope_request, build_delete_at_resource_group_level_request, build_delete_at_resource_level_request, build_delete_at_subscription_level_request, build_delete_by_scope_request, build_get_at_resource_group_level_request, build_get_at_resource_level_request, build_get_at_subscription_level_request, build_get_by_scope_request, build_list_at_resource_group_level_request, build_list_at_resource_level_request, build_list_at_subscription_level_request, build_list_by_scope_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ManagementLocksOperations:
"""ManagementLocksOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.resource.locks.v2016_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def create_or_update_at_resource_group_level(
self,
resource_group_name: str,
lock_name: str,
parameters: "_models.ManagementLockObject",
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Creates or updates a management lock at the resource group level.
When you apply a lock at a parent scope, all child resources inherit the same lock. To create
management locks, you must have access to Microsoft.Authorization/\ * or
Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
Administrator are granted those actions.
:param resource_group_name: The name of the resource group to lock.
:type resource_group_name: str
:param lock_name: The lock name. The lock name can be a maximum of 260 characters. It cannot
contain <, > %, &, :, \, ?, /, or any control characters.
:type lock_name: str
:param parameters: The management lock parameters.
:type parameters: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'ManagementLockObject')
request = build_create_or_update_at_resource_group_level_request(
resource_group_name=resource_group_name,
lock_name=lock_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.create_or_update_at_resource_group_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_at_resource_group_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def delete_at_resource_group_level(
self,
resource_group_name: str,
lock_name: str,
**kwargs: Any
) -> None:
"""Deletes a management lock at the resource group level.
To delete management locks, you must have access to Microsoft.Authorization/\ * or
Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
Administrator are granted those actions.
:param resource_group_name: The name of the resource group containing the lock.
:type resource_group_name: str
:param lock_name: The name of lock to delete.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_at_resource_group_level_request(
resource_group_name=resource_group_name,
lock_name=lock_name,
subscription_id=self._config.subscription_id,
template_url=self.delete_at_resource_group_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_at_resource_group_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def get_at_resource_group_level(
self,
resource_group_name: str,
lock_name: str,
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Gets a management lock at the resource group level.
:param resource_group_name: The name of the locked resource group.
:type resource_group_name: str
:param lock_name: The name of the lock to get.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_at_resource_group_level_request(
resource_group_name=resource_group_name,
lock_name=lock_name,
subscription_id=self._config.subscription_id,
template_url=self.get_at_resource_group_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_at_resource_group_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def create_or_update_by_scope(
self,
scope: str,
lock_name: str,
parameters: "_models.ManagementLockObject",
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Create or update a management lock by scope.
:param scope: The scope for the lock. When providing a scope for the assignment, use
'/subscriptions/{subscriptionId}' for subscriptions,
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}' for resource groups, and
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePathIfPresent}/{resourceType}/{resourceName}'
for resources.
:type scope: str
:param lock_name: The name of lock.
:type lock_name: str
:param parameters: Create or update management lock parameters.
:type parameters: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'ManagementLockObject')
request = build_create_or_update_by_scope_request(
scope=scope,
lock_name=lock_name,
content_type=content_type,
json=_json,
template_url=self.create_or_update_by_scope.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def delete_by_scope(
self,
scope: str,
lock_name: str,
**kwargs: Any
) -> None:
"""Delete a management lock by scope.
:param scope: The scope for the lock.
:type scope: str
:param lock_name: The name of lock.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_by_scope_request(
scope=scope,
lock_name=lock_name,
template_url=self.delete_by_scope.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def get_by_scope(
self,
scope: str,
lock_name: str,
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Get a management lock by scope.
:param scope: The scope for the lock.
:type scope: str
:param lock_name: The name of lock.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_by_scope_request(
scope=scope,
lock_name=lock_name,
template_url=self.get_by_scope.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def create_or_update_at_resource_level(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
lock_name: str,
parameters: "_models.ManagementLockObject",
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Creates or updates a management lock at the resource level or any level below the resource.
When you apply a lock at a parent scope, all child resources inherit the same lock. To create
management locks, you must have access to Microsoft.Authorization/\ * or
Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
Administrator are granted those actions.
:param resource_group_name: The name of the resource group containing the resource to lock.
:type resource_group_name: str
:param resource_provider_namespace: The resource provider namespace of the resource to lock.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity.
:type parent_resource_path: str
:param resource_type: The resource type of the resource to lock.
:type resource_type: str
:param resource_name: The name of the resource to lock.
:type resource_name: str
:param lock_name: The name of lock. The lock name can be a maximum of 260 characters. It cannot
contain <, > %, &, :, \, ?, /, or any control characters.
:type lock_name: str
:param parameters: Parameters for creating or updating a management lock.
:type parameters: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'ManagementLockObject')
request = build_create_or_update_at_resource_level_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
lock_name=lock_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.create_or_update_at_resource_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
<|fim▁hole|> map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_at_resource_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def delete_at_resource_level(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    lock_name: str,
    **kwargs: Any
) -> None:
    """Deletes the management lock of a resource or any level below the resource.

    To delete management locks, you must have access to Microsoft.Authorization/\ * or
    Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
    Administrator are granted those actions.

    :param resource_group_name: The name of the resource group containing the resource with the
     lock to delete.
    :type resource_group_name: str
    :param resource_provider_namespace: The resource provider namespace of the resource with the
     lock to delete.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource with the lock to delete.
    :type resource_type: str
    :param resource_name: The name of the resource with the lock to delete.
    :type resource_name: str
    :param lock_name: The name of the lock to delete.
    :type lock_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map well-known ARM error statuses to typed exceptions; callers may
    # extend or override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    request = build_delete_at_resource_level_request(
        resource_group_name=resource_group_name,
        resource_provider_namespace=resource_provider_namespace,
        parent_resource_path=parent_resource_path,
        resource_type=resource_type,
        resource_name=resource_name,
        lock_name=lock_name,
        subscription_id=self._config.subscription_id,
        template_url=self.delete_at_resource_level.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Both 200 and 204 are accepted as successful deletions.
    if response.status_code not in [200, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

delete_at_resource_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks/{lockName}'}  # type: ignore
@distributed_trace_async
async def get_at_resource_level(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    lock_name: str,
    **kwargs: Any
) -> "_models.ManagementLockObject":
    """Get the management lock of a resource or any level below resource.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: An extra path parameter needed in some services, like SQL
     Databases.
    :type parent_resource_path: str
    :param resource_type: The type of the resource.
    :type resource_type: str
    :param resource_name: The name of the resource.
    :type resource_name: str
    :param lock_name: The name of lock.
    :type lock_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ManagementLockObject, or the result of cls(response)
    :rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ManagementLockObject"]
    # Map well-known ARM error statuses to typed exceptions; callers may
    # extend or override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    request = build_get_at_resource_level_request(
        resource_group_name=resource_group_name,
        resource_provider_namespace=resource_provider_namespace,
        parent_resource_path=parent_resource_path,
        resource_type=resource_type,
        resource_name=resource_name,
        lock_name=lock_name,
        subscription_id=self._config.subscription_id,
        template_url=self.get_at_resource_level.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Only 200 carries a lock payload; anything else is surfaced as an error.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('ManagementLockObject', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

get_at_resource_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks/{lockName}'}  # type: ignore
@distributed_trace_async
async def create_or_update_at_subscription_level(
    self,
    lock_name: str,
    parameters: "_models.ManagementLockObject",
    **kwargs: Any
) -> "_models.ManagementLockObject":
    """Creates or updates a management lock at the subscription level.

    When you apply a lock at a parent scope, all child resources inherit the same lock. To create
    management locks, you must have access to Microsoft.Authorization/\ * or
    Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
    Administrator are granted those actions.

    :param lock_name: The name of lock. The lock name can be a maximum of 260 characters. It cannot
     contain <, > %, &, :, \, ?, /, or any control characters.
    :type lock_name: str
    :param parameters: The management lock parameters.
    :type parameters: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ManagementLockObject, or the result of cls(response)
    :rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    custom_cls = kwargs.pop('cls', None)  # type: ClsType["_models.ManagementLockObject"]
    # Well-known ARM error statuses mapped to typed exceptions; callers may
    # extend the mapping through the 'error_map' keyword argument.
    errors = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    errors.update(kwargs.pop('error_map', {}))

    content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
    body_json = self._serialize.body(parameters, 'ManagementLockObject')

    request = build_create_or_update_at_subscription_level_request(
        lock_name=lock_name,
        subscription_id=self._config.subscription_id,
        content_type=content_type,
        json=body_json,
        template_url=self.create_or_update_at_subscription_level.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200 and 201 are the only successful responses.
    if response.status_code not in (200, 201):
        map_error(status_code=response.status_code, response=response, error_map=errors)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Both success codes carry the same payload shape, so a single
    # deserialization path replaces the per-status branches.
    deserialized = self._deserialize('ManagementLockObject', pipeline_response)

    if custom_cls:
        return custom_cls(pipeline_response, deserialized, {})
    return deserialized

create_or_update_at_subscription_level.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks/{lockName}'}  # type: ignore
@distributed_trace_async
async def delete_at_subscription_level(
    self,
    lock_name: str,
    **kwargs: Any
) -> None:
    """Deletes the management lock at the subscription level.

    To delete management locks, you must have access to Microsoft.Authorization/\ * or
    Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
    Administrator are granted those actions.

    :param lock_name: The name of lock to delete.
    :type lock_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    custom_cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Well-known ARM error statuses mapped to typed exceptions; callers may
    # extend the mapping through the 'error_map' keyword argument.
    errors = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    errors.update(kwargs.pop('error_map', {}))

    request = build_delete_at_subscription_level_request(
        lock_name=lock_name,
        subscription_id=self._config.subscription_id,
        template_url=self.delete_at_subscription_level.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_response = pipeline_response.http_response

    # Both 200 and 204 are accepted as successful deletions.
    if http_response.status_code not in (200, 204):
        map_error(status_code=http_response.status_code, response=http_response, error_map=errors)
        raise HttpResponseError(response=http_response, error_format=ARMErrorFormat)

    if custom_cls:
        return custom_cls(pipeline_response, None, {})

delete_at_subscription_level.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks/{lockName}'}  # type: ignore
@distributed_trace_async
async def get_at_subscription_level(
    self,
    lock_name: str,
    **kwargs: Any
) -> "_models.ManagementLockObject":
    """Gets a management lock at the subscription level.

    :param lock_name: The name of the lock to get.
    :type lock_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ManagementLockObject, or the result of cls(response)
    :rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ManagementLockObject"]
    # Map well-known ARM error statuses to typed exceptions; callers may
    # extend or override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    request = build_get_at_subscription_level_request(
        lock_name=lock_name,
        subscription_id=self._config.subscription_id,
        template_url=self.get_at_subscription_level.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Only 200 carries a lock payload; anything else is surfaced as an error.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('ManagementLockObject', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

get_at_subscription_level.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks/{lockName}'}  # type: ignore
@distributed_trace
def list_at_resource_group_level(
    self,
    resource_group_name: str,
    filter: Optional[str] = None,
    **kwargs: Any
) -> AsyncIterable["_models.ManagementLockListResult"]:
    """Gets all the management locks for a resource group.

    :param resource_group_name: The name of the resource group containing the locks to get.
    :type resource_group_name: str
    :param filter: The filter to apply on the operation.
    :type filter: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either ManagementLockListResult or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ManagementLockListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    def prepare_request(next_link=None):
        # First page: build from the operation's URL template.
        # Later pages: the service-provided next_link becomes the URL.
        if not next_link:
            request = build_list_at_resource_group_level_request(
                resource_group_name=resource_group_name,
                subscription_id=self._config.subscription_id,
                filter=filter,
                template_url=self.list_at_resource_group_level.metadata['url'],
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
        else:
            request = build_list_at_resource_group_level_request(
                resource_group_name=resource_group_name,
                subscription_id=self._config.subscription_id,
                filter=filter,
                template_url=next_link,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            # next_link pages are always fetched with GET.
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        deserialized = self._deserialize("ManagementLockListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_at_resource_group_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks'}  # type: ignore
@distributed_trace
def list_at_resource_level(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    filter: Optional[str] = None,
    **kwargs: Any
) -> AsyncIterable["_models.ManagementLockListResult"]:
    """Gets all the management locks for a resource or any level below resource.

    :param resource_group_name: The name of the resource group containing the locked resource. The
     name is case insensitive.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the locked resource.
    :type resource_type: str
    :param resource_name: The name of the locked resource.
    :type resource_name: str
    :param filter: The filter to apply on the operation.
    :type filter: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either ManagementLockListResult or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ManagementLockListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    def prepare_request(next_link=None):
        # First page: build from the operation's URL template.
        # Later pages: the service-provided next_link becomes the URL.
        if not next_link:
            request = build_list_at_resource_level_request(
                resource_group_name=resource_group_name,
                resource_provider_namespace=resource_provider_namespace,
                parent_resource_path=parent_resource_path,
                resource_type=resource_type,
                resource_name=resource_name,
                subscription_id=self._config.subscription_id,
                filter=filter,
                template_url=self.list_at_resource_level.metadata['url'],
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
        else:
            request = build_list_at_resource_level_request(
                resource_group_name=resource_group_name,
                resource_provider_namespace=resource_provider_namespace,
                parent_resource_path=parent_resource_path,
                resource_type=resource_type,
                resource_name=resource_name,
                subscription_id=self._config.subscription_id,
                filter=filter,
                template_url=next_link,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            # next_link pages are always fetched with GET.
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        deserialized = self._deserialize("ManagementLockListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_at_resource_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks'}  # type: ignore
@distributed_trace
def list_at_subscription_level(
    self,
    filter: Optional[str] = None,
    **kwargs: Any
) -> AsyncIterable["_models.ManagementLockListResult"]:
    """Gets all the management locks for a subscription.

    :param filter: The filter to apply on the operation.
    :type filter: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either ManagementLockListResult or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ManagementLockListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    def prepare_request(next_link=None):
        # First page: build from the operation's URL template.
        # Later pages: the service-provided next_link becomes the URL.
        if not next_link:
            request = build_list_at_subscription_level_request(
                subscription_id=self._config.subscription_id,
                filter=filter,
                template_url=self.list_at_subscription_level.metadata['url'],
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
        else:
            request = build_list_at_subscription_level_request(
                subscription_id=self._config.subscription_id,
                filter=filter,
                template_url=next_link,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            # next_link pages are always fetched with GET.
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        deserialized = self._deserialize("ManagementLockListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_at_subscription_level.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks'}  # type: ignore
@distributed_trace
def list_by_scope(
    self,
    scope: str,
    filter: Optional[str] = None,
    **kwargs: Any
) -> AsyncIterable["_models.ManagementLockListResult"]:
    """Gets all the management locks for a scope.

    :param scope: The scope for the lock. When providing a scope for the assignment, use
     '/subscriptions/{subscriptionId}' for subscriptions,
     '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}' for resource groups, and
     '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePathIfPresent}/{resourceType}/{resourceName}'
     for resources.
    :type scope: str
    :param filter: The filter to apply on the operation.
    :type filter: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either ManagementLockListResult or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ManagementLockListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    def prepare_request(next_link=None):
        # First page: build from the operation's URL template.
        # Later pages: the service-provided next_link becomes the URL.
        if not next_link:
            request = build_list_by_scope_request(
                scope=scope,
                filter=filter,
                template_url=self.list_by_scope.metadata['url'],
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
        else:
            request = build_list_by_scope_request(
                scope=scope,
                filter=filter,
                template_url=next_link,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            # next_link pages are always fetched with GET.
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        deserialized = self._deserialize("ManagementLockListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/locks'}  # type: ignore
<|file_name|>aggregates_client.py<|end_file_name|><|fim▁begin|># Copyright 2013 NEC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from tempest.lib.api_schema.response.compute.v2_1 import aggregates as schema
from tempest.lib.common import rest_client
from tempest.lib import exceptions as lib_exc
class AggregatesClient(rest_client.RestClient):
def list_aggregates(self):
    """Get aggregate list."""
    resp, raw = self.get("os-aggregates")
    parsed = json.loads(raw)
    self.validate_response(schema.list_aggregates, resp, parsed)
    return rest_client.ResponseBody(resp, parsed)
def show_aggregate(self, aggregate_id):
    """Get details of the given aggregate."""
    resp, body = self.get("os-aggregates/%s" % aggregate_id)
    body = json.loads(body)
    # Response shape is checked against the published v2.1 schema.
    self.validate_response(schema.get_aggregate, resp, body)
    return rest_client.ResponseBody(resp, body)
def create_aggregate(self, **kwargs):
    """Create a new aggregate.

    Available params: see http://developer.openstack.org/
    api-ref-compute-v2.1.html#createaggregate
    """
    # kwargs are forwarded verbatim as the 'aggregate' request body.
    post_body = json.dumps({'aggregate': kwargs})
    resp, body = self.post('os-aggregates', post_body)
    body = json.loads(body)
    self.validate_response(schema.create_aggregate, resp, body)
    return rest_client.ResponseBody(resp, body)
def update_aggregate(self, aggregate_id, **kwargs):
    """Update an aggregate.

    Available params: see http://developer.openstack.org/
    api-ref-compute-v2.1.html#updateaggregate
    """
    # kwargs are forwarded verbatim as the 'aggregate' request body.
    put_body = json.dumps({'aggregate': kwargs})
    resp, body = self.put('os-aggregates/%s' % aggregate_id, put_body)
    body = json.loads(body)
    self.validate_response(schema.update_aggregate, resp, body)
    return rest_client.ResponseBody(resp, body)
def delete_aggregate(self, aggregate_id):
    """Delete the given aggregate."""
    resp, body = self.delete("os-aggregates/%s" % aggregate_id)
    self.validate_response(schema.delete_aggregate, resp, body)
    return rest_client.ResponseBody(resp, body)
def is_resource_deleted(self, id):
    """Return True once the aggregate can no longer be fetched."""
    try:
        self.show_aggregate(id)
    except lib_exc.NotFound:
        # A 404 from the API is the definitive "deleted" signal.
        return True
    else:
        return False
@property
def resource_type(self):
    """Return the primary type of resource this client works with."""
    # NOTE(review): presumably consumed by generic cleanup/wait helpers
    # alongside is_resource_deleted — confirm against callers.
    return 'aggregate'
def add_host(self, aggregate_id, **kwargs):
    """Add a host to the given aggregate.

    Available params: see http://developer.openstack.org/
    api-ref-compute-v2.1.html#addhost
    """
    action_body = json.dumps({'add_host': kwargs})
    resp, raw = self.post('os-aggregates/%s/action' % aggregate_id,
                          action_body)
    parsed = json.loads(raw)
    self.validate_response(schema.aggregate_add_remove_host, resp, parsed)
    return rest_client.ResponseBody(resp, parsed)
def remove_host(self, aggregate_id, **kwargs):
    """Remove a host from the given aggregate.

    Available params: see http://developer.openstack.org/
    api-ref-compute-v2.1.html#removehost
    """
    # Host removal is expressed as an action POST on the aggregate.
    post_body = json.dumps({'remove_host': kwargs})
    resp, body = self.post('os-aggregates/%s/action' % aggregate_id,
                           post_body)
    body = json.loads(body)
    self.validate_response(schema.aggregate_add_remove_host, resp, body)
    return rest_client.ResponseBody(resp, body)
def set_metadata(self, aggregate_id, **kwargs):
    """Replace the aggregate's existing metadata with new metadata."""
    post_body = json.dumps({'set_metadata': kwargs})
    resp, body = self.post('os-aggregates/%s/action' % aggregate_id,
                           post_body)
    body = json.loads(body)
    self.validate_response(schema.aggregate_set_metadata, resp, body)
    return rest_client.ResponseBody(resp, body)
"""Add a host to the given aggregate.
Available params: see http://developer.openstack.org/ |
<|file_name|>BtnUpLevel.js<|end_file_name|><|fim▁begin|>import React from 'react';
import Tap from '../hahoo/Tap';
class BtnUpLevel extends React.Component {
static propTypes = {
onItemClick: React.PropTypes.func
}
<|fim▁hole|> const { onItemClick, ...rest } = this.props;
return (<Tap
onTap={onItemClick}
className="btn btn-default"
{...rest}
><i className="fa fa-arrow-circle-up fa-fw" /> 上级</Tap>);
}
}
export default BtnUpLevel;<|fim▁end|> | state = {}
render() { |
<|file_name|>strain.go<|end_file_name|><|fim▁begin|>package strain
// Ints is a filterable collection of integers.
type Ints []int

// Lists is a filterable collection of integer slices.
type Lists [][]int

// Strings is a filterable collection of strings.
type Strings []string
// Keep returns a new Ints containing only the elements for which the
// predicate is true. A nil receiver yields nil; an empty result is an
// empty (non-nil) slice.
func (i Ints) Keep(filter func(int) bool) Ints {
	if i == nil {
		return nil
	}
	kept := Ints{}
	for _, value := range i {
		if filter(value) {
			kept = append(kept, value)
		}
	}
	return kept
}
func (i Ints) Discard(filter func(int) bool) Ints {
if i == nil {
return nil
}<|fim▁hole|> })
}
// Keep returns a new Lists containing only the inner slices for which
// the predicate is true. A nil receiver yields nil; an empty result is
// an empty (non-nil) slice.
func (l Lists) Keep(filter func([]int) bool) Lists {
	if l == nil {
		return nil
	}
	kept := Lists{}
	for _, inner := range l {
		if filter(inner) {
			kept = append(kept, inner)
		}
	}
	return kept
}
func (s Strings) Keep(filter func(string) bool) Strings {
if s == nil {
return nil
}
filtered := []string{}
for _, v := range s {
if filter(v) {
filtered = append(filtered, v)
}
}
s = filtered
return s
}<|fim▁end|> | return i.Keep(func(i int) bool {
return !filter(i) |
<|file_name|>create_table_metaDatasetsUsed.py<|end_file_name|><|fim▁begin|>import ReviewHelper
import pandas as pd
df = ReviewHelper.get_pandas_data_frame_created_from_bibtex_file()
# find problematic ones
df[df.metaDatasetsUsed.isnull()]
list1 = df.metaDatasetsUsed.str.split(",").tolist()
df1 = pd.DataFrame(list1)
for i in range(df1.columns.size):
df1[i] = df1[i].str.strip()
stacked = df1.stack()
stacked_value_counts = stacked.value_counts()
greater_than = stacked_value_counts[stacked_value_counts > 3]
table_content_inside=""
list_ids_dataset_names = ["KDD99","NSL-KDD","DARPA","Kyoto","ISCX"]
table_content_inside=""
for dataset_name in greater_than.index:
dataset_count = greater_than[dataset_name]
dataset_name_in_table = dataset_name
dataset_name_in_table = dataset_name
if(dataset_name in list_ids_dataset_names):
dataset_name_in_table = "\\rowcolor{Gray}\n" + dataset_name + "* "
line = "{dataset_name} & {dataset_count} \\\\ \n".format(
dataset_name = dataset_name_in_table
,dataset_count = dataset_count
)
table_content_inside = table_content_inside + line
table_content_start = """
\\begin{table}[!ht]
\\centering
\\caption{ \\textbf{Most used Datasets}. * denotes IDS datasets. Datasets that are used less than three is not included.}
\\label{table-metaDatasetsUsed}
\\begin{tabular}{ll}
\\toprule
\\textbf{Dataset Name } & \\textbf{Article Count} \\\\
\\midrule
"""
table_content_end = """
\\bottomrule
\\end{tabular}
\\end{table}
"""<|fim▁hole|>
#print table_content_full
filename = "../latex/table-metaDatasetsUsed.tex"
target = open(filename, 'w')
target.write(table_content_full)
target.close()<|fim▁end|> |
table_content_full = table_content_start + table_content_inside + table_content_end |
<|file_name|>en_mod_rw.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import os,sys,re
#Check the OS Version
RELEASE_FILE = "/etc/redhat-release"
RWM_FILE = "/etc/httpd/conf.modules.d/00-base.conf"
if os.path.isfile(RELEASE_FILE):
f=open(RELEASE_FILE,"r")
rel_list = f.read().split()
if rel_list[2] == "release" and tuple(rel_list[3].split(".")) < ('8','5'):<|fim▁hole|> raise("Unable to find the OS version")
#Check Apache installed
#TODO
#
#Test if the rewrite module file present
if os.path.isfile(RWM_FILE):
print("re write")
##print sys.version_info
##if sys.version_info < (2,7):
## print "This programm works only with the Python 2.7"###<|fim▁end|> | print("so far good")
else: |
<|file_name|>timer.py<|end_file_name|><|fim▁begin|>import contextlib
from time import time
from .meter import Meter
from .stats import Stat
from .histogram import Histogram
class Timer(Stat):
def __init__(self):
self.count = 0
self.meter = Meter()
self.histogram = Histogram()
super(Timer, self).__init__()
@contextlib.contextmanager
def time(self):
start_time = time()
try:
yield
finally:
self.update(time() - start_time)
<|fim▁hole|> def update(self, value):
self.meter.mark()
self.histogram.update(value)
def get_values(self):
values = self.meter.get_values()
values.update(self.histogram.get_values())
return values<|fim▁end|> | |
<|file_name|>RegisterableBinder.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2011 Christopher Pheby
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<|fim▁hole|> */
package org.jadira.bindings.core.binder;
import java.lang.annotation.Annotation;
import java.net.URL;
import org.jadira.bindings.core.api.Binding;
import org.jadira.bindings.core.api.Converter;
import org.jadira.bindings.core.api.FromUnmarshaller;
import org.jadira.bindings.core.api.ToMarshaller;
public interface RegisterableBinder {
/**
* Register the configuration file (bindings.xml) at the given URL
* @param nextLocation The URL to register
*/
void registerConfiguration(URL nextLocation);
/**
* Register a Binding with the given source and target class.
* A binding unifies a marshaller and an unmarshaller and both must be available to resolve a binding.
*
* The source class is considered the owning class of the binding. The source can be marshalled
* into the target class. Similarly, the target can be unmarshalled to produce an instance of the source type.
* @param key The converter key
* @param converter The binding to be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerBinding(ConverterKey<S,T> key, Binding<S, T> converter);
/**
* Register an UnMarshaller with the given source and target class.
* The unmarshaller is used as follows: Instances of the source can be marshalled into the target class.
* @param key The converter key
* @param converter The FromUnmarshaller to be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerUnmarshaller(ConverterKey<S,T> key, FromUnmarshaller<S, T> converter);
/**
* Register a Marshaller with the given source and target class.
* The marshaller is used as follows: Instances of the source can be marshalled into the target class.
* @param key The converter key
* @param converter The ToMarshaller to be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerMarshaller(ConverterKey<S,T> key, ToMarshaller<S, T> converter);
/**
* Register a Converter with the given input and output classes. Instances of the input class can be converted into
* instances of the output class
* @param key The converter key
* @param converter The Converter to be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerConverter(ConverterKey<S,T> key, Converter<S, T> converter);
/**
* Register a Binding with the given source and target class.
* A binding unifies a marshaller and an unmarshaller and both must be available to resolve a binding.
*
* The source class is considered the owning class of the binding. The source can be marshalled
* into the target class. Similarly, the target can be unmarshalled to produce an instance of the source type.
* @param sourceClass The source (owning) class
* @param targetClass The target (foreign) class
* @param converter The binding to be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerBinding(final Class<S> sourceClass, Class<T> targetClass, Binding<S, T> converter);
/**
* Register an UnMarshaller with the given source and target class.
* The unmarshaller is used as follows: Instances of the source can be marshalled into the target class.
* @param sourceClass The source (input) class
* @param targetClass The target (output) class
* @param converter The FromUnmarshaller to be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerUnmarshaller(Class<S> sourceClass, Class<T> targetClass, FromUnmarshaller<S, T> converter);
/**
* Register a Marshaller with the given source and target class.
* The marshaller is used as follows: Instances of the source can be marshalled into the target class.
* @param sourceClass The source (input) class
* @param targetClass The target (output) class
* @param converter The ToMarshaller to be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerMarshaller(Class<S> sourceClass, Class<T> targetClass, ToMarshaller<S, T> converter);
/**
* Register a Converter with the given input and output classes. Instances of the input class can be converted into
* instances of the output class
* @param sourceClass The source (input) class
* @param targetClass The target (output) class
* @param converter The Converter to be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerConverter(final Class<S> sourceClass, Class<T> targetClass, Converter<S, T> converter);
/**
* Register a Binding with the given source and target class.
* A binding unifies a marshaller and an unmarshaller and both must be available to resolve a binding.
*
* The source class is considered the owning class of the binding. The source can be marshalled
* into the target class. Similarly, the target can be unmarshalled to produce an instance of the source type.
* @param sourceClass The source (owning) class
* @param targetClass The target (foreign) class
* @param converter The binding to be registered
* @param qualifier The qualifier for which the binding must be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerBinding(final Class<S> sourceClass, Class<T> targetClass, Binding<S, T> converter, Class<? extends Annotation> qualifier);
/**
* Register an UnMarshaller with the given source and target class.
* The unmarshaller is used as follows: Instances of the source can be marshalled into the target class.
* @param sourceClass The source (input) class
* @param targetClass The target (output) class
* @param converter The FromUnmarshaller to be registered
* @param qualifier The qualifier for which the unmarshaller must be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerUnmarshaller(Class<S> sourceClass, Class<T> targetClass, FromUnmarshaller<S, T> converter, Class<? extends Annotation> qualifier);
/**
* Register a Marshaller with the given source and target class.
* The marshaller is used as follows: Instances of the source can be marshalled into the target class.
* @param sourceClass The source (input) class
* @param targetClass The target (output) class
* @param converter The ToMarshaller to be registered
* @param qualifier The qualifier for which the marshaller must be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerMarshaller(Class<S> sourceClass, Class<T> targetClass, ToMarshaller<S, T> converter, Class<? extends Annotation> qualifier);
/**
* Register a Converter with the given input and output classes. Instances of the input class can be converted into
* instances of the output class
* @param sourceClass The source (input) class
* @param targetClass The target (output) class
* @param converter The Converter to be registered
* @param qualifier The qualifier for which the converter must be registered
* @param <S> Source type
* @param <T> Target type
*/
<S, T> void registerConverter(final Class<S> sourceClass, Class<T> targetClass, Converter<S, T> converter, Class<? extends Annotation> qualifier);
/**
* Inspect each of the supplied classes, processing any of the annotated methods found
* @param classesToInspect
*/
void registerAnnotatedClasses(Class<?>... classesToInspect);
/**
* Return an iterable collection of ConverterKeys, one for each currently registered conversion
*/
Iterable<ConverterKey<?, ?>> getConverterEntries();
}<|fim▁end|> | * See the License for the specific language governing permissions and
* limitations under the License.
|
<|file_name|>hostkeys.py<|end_file_name|><|fim▁begin|># Copyright (C) 2006-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distrubuted in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
L{HostKeys}
"""
import base64
import binascii
from Crypto.Hash import SHA, HMAC
try:
    # Python 2: DictMixin lives in the UserDict module.
    # (The old ``import DictMixin`` line named a module that never existed,
    # so this branch could not succeed on any interpreter.)
    import UserDict
    from UserDict import DictMixin
except ImportError:
    # Python 3: UserDict moved to collections; Mapping moved to
    # collections.abc in 3.3 and the collections alias was removed in 3.10.
    # Catch only ImportError -- the previous bare ``except:`` hid real errors.
    from collections import UserDict
    #from collections import MutableMapping as DictMixin
    from collections.abc import Mapping as DictMixin
from paramiko.common import *
from paramiko.dsskey import DSSKey
from paramiko.rsakey import RSAKey
from paramiko.ecdsakey import ECDSAKey
from paramiko.util import get_logger
class InvalidHostKey(Exception):
    """Raised when a known_hosts line cannot be parsed into a host key."""

    def __init__(self, line, exc):
        # Exception.__init__ stores (line, exc) as self.args for us,
        # matching the tuple the original assigned by hand.
        super(InvalidHostKey, self).__init__(line, exc)
        self.line = line
        self.exc = exc
class HostKeyEntry:
    """
    Representation of a line in an OpenSSH-style "known hosts" file.
    """
    def __init__(self, hostnames=None, key=None):
        # An entry is only "valid" (serializable via to_line) when it carries
        # both a hostname list and a parsed key object.
        self.valid = (hostnames is not None) and (key is not None)
        self.hostnames = hostnames
        self.key = key
    def from_line(cls, line, lineno=None):
        """
        Parses the given line of text to find the names for the host,
        the type of key, and the key data. The line is expected to be in the
        format used by the openssh known_hosts file.
        Lines are expected to not have leading or trailing whitespace.
        We don't bother to check for comments or empty lines. All of
        that should be taken care of before sending the line to us.
        @param line: a line from an OpenSSH known_hosts file
        @type line: str
        """
        log = get_logger('paramiko.hostkeys')
        fields = line.split(' ')
        if len(fields) < 3:
            # Bad number of fields
            log.info("Not enough fields found in known_hosts in line %s (%r)" %
                     (lineno, line))
            return None
        # Extra trailing fields (e.g. comments) are ignored.
        fields = fields[:3]
        names, keytype, key = fields
        # The first field may list several comma-separated hostnames.
        names = names.split(',')
        # Decide what kind of key we're looking at and create an object
        # to hold it accordingly.
        try:
            if keytype == 'ssh-rsa':
                key = RSAKey(data=base64.decodebytes(key.encode()))
            elif keytype == 'ssh-dss':
                key = DSSKey(data=base64.decodebytes(key.encode()))
            elif keytype == 'ecdsa-sha2-nistp256':
                key = ECDSAKey(data=base64.decodebytes(key.encode()))
            else:
                log.info("Unable to handle key of type %s" % (keytype,))
                return None
        except binascii.Error as e:
            # Malformed base64 payload: surface the offending line to callers.
            raise InvalidHostKey(line, e)
        return cls(names, key)
    from_line = classmethod(from_line)
    def to_line(self):
        """
        Returns a string in OpenSSH known_hosts file format, or None if
        the object is not in a valid state. A trailing newline is
        included.
        """
        if self.valid:
            return '%s %s %s\n' % (','.join(self.hostnames), self.key.get_name(),
                                   self.key.get_base64())
        return None
    def __repr__(self):
        return '<HostKeyEntry %r: %r>' % (self.hostnames, self.key)
class HostKeys (DictMixin):
"""
Representation of an openssh-style "known hosts" file. Host keys can be
read from one or more files, and then individual hosts can be looked up to
verify server keys during SSH negotiation.
A HostKeys object can be treated like a dict; any dict lookup is equivalent
to calling L{lookup}.
@since: 1.5.3
"""
    def __init__(self, filename=None):
        """
        Create a new HostKeys object, optionally loading keys from an openssh
        style host-key file.
        @param filename: filename to load host keys from, or C{None}
        @type filename: str
        """
        # emulate a dict of { hostname: { keytype: PKey } }
        self._entries = []  # ordered list of HostKeyEntry objects
        if filename is not None:
            self.load(filename)
def add(self, hostname, keytype, key):
"""
Add a host key entry to the table. Any existing entry for a
C{(hostname, keytype)} pair will be replaced.
@param hostname: the hostname (or IP) to add
@type hostname: str
@param keytype: key type (C{"ssh-rsa"} or C{"ssh-dss"})
@type keytype: str
@param key: the key to add
@type key: L{PKey}
"""
for e in self._entries:
if (hostname in e.hostnames) and (e.key.get_name() == keytype):
e.key = key
return
self._entries.append(HostKeyEntry([hostname], key))
def load(self, filename):
"""
Read a file of known SSH host keys, in the format used by openssh.
This type of file unfortunately doesn't exist on Windows, but on
posix, it will usually be stored in
C{os.path.expanduser("~/.ssh/known_hosts")}.
If this method is called multiple times, the host keys are merged,
not cleared. So multiple calls to C{load} will just call L{add},
replacing any existing entries and adding new ones.
@param filename: name of the file to read host keys from
@type filename: str
@raise IOError: if there was an error reading the file
"""
f = open(filename, 'r')
for lineno, line in enumerate(f):
line = line.strip()
if (len(line) == 0) or (line[0] == '#'):
continue
e = HostKeyEntry.from_line(line, lineno)
if e is not None:
_hostnames = e.hostnames
for h in _hostnames:
if self.check(h, e.key):
e.hostnames.remove(h)
if len(e.hostnames):
self._entries.append(e)
f.close()
def save(self, filename):
"""
Save host keys into a file, in the format used by openssh. The order of
keys in the file will be preserved when possible (if these keys were
loaded from a file originally). The single exception is that combined
lines will be split into individual key lines, which is arguably a bug.
@param filename: name of the file to write
@type filename: str
@raise IOError: if there was an error writing the file
@since: 1.6.1
"""
f = open(filename, 'w')
for e in self._entries:
line = e.to_line()
if line:
f.write(line)
f.close()
def lookup(self, hostname):
"""
Find a hostkey entry for a given hostname or IP. If no entry is found,
C{None} is returned. Otherwise a dictionary of keytype to key is
returned. The keytype will be either C{"ssh-rsa"} or C{"ssh-dss"}.
@param hostname: the hostname (or IP) to lookup
@type hostname: str
@return: keys associated with this host (or C{None})
@rtype: dict(str, L{PKey})<|fim▁hole|> self._entries = entries
self._hostkeys = hostkeys
def __len__(self):
return len(self.keys())
def __iter__(self):
return self.keys().__iter__()
def __getitem__(self, key):
for e in self._entries:
if e.key.get_name() == key:
return e.key
raise KeyError(key)
def __setitem__(self, key, val):
for e in self._entries:
if e.key is None:
continue
if e.key.get_name() == key:
# replace
e.key = val
break
else:
# add a new one
e = HostKeyEntry([hostname], val)
self._entries.append(e)
self._hostkeys._entries.append(e)
def keys(self):
return [e.key.get_name() for e in self._entries if e.key is not None]
entries = []
for e in self._entries:
for h in e.hostnames:
if (h.startswith('|1|') and (self.hash_host(hostname, h) == h)) or (h == hostname):
entries.append(e)
if len(entries) == 0:
return None
return SubDict(hostname, entries, self)
def check(self, hostname, key):
"""
Return True if the given key is associated with the given hostname
in this dictionary.
@param hostname: hostname (or IP) of the SSH server
@type hostname: str
@param key: the key to check
@type key: L{PKey}
@return: C{True} if the key is associated with the hostname; C{False}
if not
@rtype: bool
"""
k = self.lookup(hostname)
if k is None:
return False
host_key = k.get(key.get_name(), None)
if host_key is None:
return False
return str(host_key) == str(key)
    def clear(self):
        """
        Remove all host keys from the dictionary.
        """
        # Rebinds (rather than mutates) the list, matching __init__.
        self._entries = []
    def __len__(self):
        # Number of distinct hostnames (not entries): delegates to keys().
        return len(self.keys())
    def __iter__(self):
        # Iterate over distinct hostnames, in first-seen order.
        return self.keys().__iter__()
def __getitem__(self, key):
ret = self.lookup(key)
if ret is None:
raise KeyError(key)
return ret
def __setitem__(self, hostname, entry):
# don't use this please.
if len(entry) == 0:
self._entries.append(HostKeyEntry([hostname], None))
return
for key_type in entry.keys():
found = False
for e in self._entries:
if (hostname in e.hostnames) and (e.key.get_name() == key_type):
# replace
e.key = entry[key_type]
found = True
if not found:
self._entries.append(HostKeyEntry([hostname], entry[key_type]))
def keys(self):
# python 2.4 sets would be nice here.
ret = []
for e in self._entries:
for h in e.hostnames:
if h not in ret:
ret.append(h)
return ret
def values(self):
ret = []
for k in self.keys():
ret.append(self.lookup(k))
return ret
def hash_host(hostname, salt=None):
"""
Return a "hashed" form of the hostname, as used by openssh when storing
hashed hostnames in the known_hosts file.
@param hostname: the hostname to hash
@type hostname: str
@param salt: optional salt to use when hashing (must be 20 bytes long)
@type salt: str
@return: the hashed hostname
@rtype: str
"""
if salt is None:
salt = rng.read(SHA.digest_size)
else:
if salt.startswith('|1|'):
salt = salt.split('|')[2]
salt = base64.decodebytes(salt.encode())
assert len(salt) == SHA.digest_size
hmac = HMAC.HMAC(salt, hostname.encode(), SHA).digest()
hostkey = '|1|%s|%s' % (base64.encodestring(salt).decode(), base64.encodestring(hmac).decode())
return hostkey.replace('\n', '')
hash_host = staticmethod(hash_host)<|fim▁end|> | """
class SubDict (DictMixin):
def __init__(self, hostname, entries, hostkeys):
self._hostname = hostname |
<|file_name|>Captcha.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import with_statement
import os
import time
from module.plugins.internal.Plugin import Plugin
from module.plugins.internal.utils import encode
class Captcha(Plugin):
    """
    Base anti-captcha plugin: loads a captcha image, tries OCR, and falls
    back to pyLoad's captchaManager (user / remote service) for a result.

    Fix: stray FIM corruption markers removed; Python-2-only
    ``except OSError, e`` changed to ``as e`` (valid on 2.6+ and 3.x).
    """
    __name__ = "Captcha"
    __type__ = "captcha"
    __version__ = "0.47"
    __status__ = "stable"

    __description__ = """Base anti-captcha plugin"""
    __license__ = "GPLv3"
    __authors__ = [("Walter Purcaro", "[email protected]")]

    def __init__(self, plugin): #@TODO: Pass pyfile instead plugin, so store plugin's html in its associated pyfile as data
        self._init(plugin.pyload)
        self.plugin = plugin
        self.task = None #: captchaManager task
        self.init()

    def _log(self, level, plugintype, pluginname, messages):
        # Prefix messages with this plugin's name, then delegate to the
        # owning plugin's logger.
        messages = (self.__name__,) + messages
        return self.plugin._log(level, plugintype, self.plugin.__name__, messages)

    def recognize(self, image):
        """
        Extend to build your custom anti-captcha ocr
        """
        pass

    def decrypt(self, url, get={}, post={}, ref=False, cookies=True, decode=False, req=None,
                input_type='jpg', output_type='textual', ocr=True, timeout=120):
        # NOTE(review): mutable default arguments (get/post) kept for
        # interface compatibility; callers must not mutate them.
        img = self.load(url, get=get, post=post, ref=ref, cookies=cookies, decode=decode, req=req or self.plugin.req)
        return self.decrypt_image(img, input_type, output_type, ocr, timeout)

    def decrypt_image(self, data, input_type='jpg', output_type='textual', ocr=False, timeout=120):
        """
        Loads a captcha and decrypts it with ocr, plugin, user input
        :param data: image raw data
        :param get: get part for request
        :param post: post part for request
        :param cookies: True if cookies should be enabled
        :param input_type: Type of the Image
        :param output_type: 'textual' if text is written on the captcha\
        or 'positional' for captcha where the user have to click\
        on a specific region on the captcha
        :param ocr: if True, ocr is not used
        :return: result of decrypting
        """
        result = ""
        time_ref = ("%.2f" % time.time())[-6:].replace(".", "")

        # Persist the raw image so OCR plugins / the UI can read it.
        with open(os.path.join("tmp", "captcha_image_%s_%s.%s" % (self.plugin.__name__, time_ref, input_type)), "wb") as tmp_img:
            tmp_img.write(encode(data))

        if ocr:
            if isinstance(ocr, basestring):
                OCR = self.pyload.pluginManager.loadClass("captcha", ocr) #: Rename `captcha` to `ocr` in 0.4.10
                result = OCR(self.plugin).recognize(tmp_img.name)
            else:
                result = self.recognize(tmp_img.name)

        if not result:
            # OCR failed or was disabled: hand the captcha to the manager
            # (interactive user or captcha-solving service).
            captchaManager = self.pyload.captchaManager
            try:
                self.task = captchaManager.newTask(data, input_type, tmp_img.name, output_type)
                captchaManager.handleCaptcha(self.task)
                self.task.setWaiting(max(timeout, 50)) #@TODO: Move to `CaptchaManager` in 0.4.10
                while self.task.isWaiting():
                    self.plugin.check_status()
                    time.sleep(1)
            finally:
                captchaManager.removeTask(self.task)

            if self.task.error:
                self.fail(self.task.error)
            elif not self.task.result:
                self.plugin.retry_captcha(msg=_("No captcha result obtained in appropriate time"))

            result = self.task.result

        if not self.pyload.debug:
            try:
                os.remove(tmp_img.name)
            except OSError as e:
                self.log_warning(_("Error removing `%s`") % tmp_img.name, e)

        # self.log_info(_("Captcha result: ") + result) #@TODO: Remove from here?
        return result

    def invalid(self):
        """Report the last captcha result as wrong to the solver."""
        if not self.task:
            return
        self.log_warning(_("Invalid captcha"))
        self.task.invalid()

    def correct(self):
        """Report the last captcha result as correct to the solver."""
        if not self.task:
            return
        self.log_info(_("Correct captcha"))
        self.task.correct()
<|file_name|>test_wiringfastapi_py36.py<|end_file_name|><|fim▁begin|>from httpx import AsyncClient
# Runtime import to avoid syntax errors in samples on Python < 3.5 and reach top-dir
import os
# Repository root (one level up): makes `asyncutils` importable.
_TOP_DIR = os.path.abspath(
    os.path.sep.join((
        os.path.dirname(__file__),
        '../',
    )),
)
# Samples directory: makes `wiringfastapi` importable.
_SAMPLES_DIR = os.path.abspath(
    os.path.sep.join((
        os.path.dirname(__file__),
        '../samples/',
    )),
)
import sys
sys.path.append(_TOP_DIR)
sys.path.append(_SAMPLES_DIR)
from asyncutils import AsyncTestCase
from wiringfastapi import web
class WiringFastAPITest(AsyncTestCase):
    """Wiring tests for the FastAPI sample app.

    Fix: a stray FIM corruption marker glued to the last test's ``def``
    line made the file unparseable; removed.
    """

    client: AsyncClient

    def setUp(self) -> None:
        super().setUp()
        self.client = AsyncClient(app=web.app, base_url='http://test')

    def tearDown(self) -> None:
        # AsyncClient must be closed on the test's event loop.
        self._run(self.client.aclose())
        super().tearDown()

    def test_depends_marker_injection(self):
        class ServiceMock:
            async def process(self):
                return 'Foo'

        with web.container.service.override(ServiceMock()):
            response = self._run(self.client.get('/'))

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {'result': 'Foo'})

    def test_depends_injection(self):
        response = self._run(self.client.get('/auth', auth=('john_smith', 'secret')))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {'username': 'john_smith', 'password': 'secret'})
<|file_name|>MLAlertController.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.xiaomi.smarthome.common.ui.dialog;
import static android.view.ViewGroup.LayoutParams.MATCH_PARENT;
import java.lang.ref.WeakReference;
import android.content.Context;
import android.content.DialogInterface;
import android.database.Cursor;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckedTextView;
import android.widget.CursorAdapter;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.ScrollView;
import android.widget.SimpleCursorAdapter;
import android.widget.TextView;
import com.xiaomi.common.R;
public class MLAlertController {
private static final int BIT_BUTTON_POSITIVE = 1;
private static final int BIT_BUTTON_NEGATIVE = 2;
private static final int BIT_BUTTON_NEUTRAL = 4;
private final Context mContext;
private final DialogInterface mDialogInterface;
private final Window mWindow;
private CharSequence mTitle;
private CharSequence mMessage;
private ListView mListView;
private View mView;
private int mViewSpacingLeft;
private int mViewSpacingTop;
private int mViewSpacingRight;
private int mViewSpacingBottom;
private boolean mViewSpacingSpecified = false;
private Button mButtonPositive;
private CharSequence mButtonPositiveText;
private Message mButtonPositiveMessage;
private Button mButtonNegative;
private CharSequence mButtonNegativeText;
private Message mButtonNegativeMessage;
private Button mButtonNeutral;
private CharSequence mButtonNeutralText;
private Message mButtonNeutralMessage;
private ScrollView mScrollView;
private int mIconId = -1;
private Drawable mIcon;
private ImageView mIconView;
private TextView mTitleView;
private TextView mMessageView;
private View mCustomTitleView;
private boolean mForceInverseBackground;
private ListAdapter mAdapter;
private int mCheckedItem = -1;
private int mAlertDialogLayout;
private int mListLayout;
private int mListLayoutWithTitle;
private int mMultiChoiceItemLayout;
private int mSingleChoiceItemLayout;
private int mListItemLayout;
// add by afei for progressDialog Top and normal is Bottom
private int mGravity;
private Handler mHandler;
private boolean mTransplantBg = false;
private boolean mAutoDismiss = true; // 对话框在点击按钮之后是否自动消失
private boolean mCustomBgTransplant = false;
    // Shared click listener for the three dialog buttons: forwards the click
    // to the Message registered for that button, then optionally dismisses.
    View.OnClickListener mButtonHandler = new View.OnClickListener() {
        public void onClick(View v) {
            Message m = null;
            if (v == mButtonPositive && mButtonPositiveMessage != null) {
                m = Message.obtain(mButtonPositiveMessage);
            } else if (v == mButtonNegative && mButtonNegativeMessage != null) {
                m = Message.obtain(mButtonNegativeMessage);
            } else if (v == mButtonNeutral && mButtonNeutralMessage != null) {
                m = Message.obtain(mButtonNeutralMessage);
            }
            if (m != null) {
                m.sendToTarget();
            }
            if (mAutoDismiss) {
                // Post a message so we dismiss after the above handlers are
                // executed
                mHandler.obtainMessage(ButtonHandler.MSG_DISMISS_DIALOG, mDialogInterface)
                        .sendToTarget();
            }
        }
    };
    // Delivers button callbacks and dismiss requests on the handler's thread.
    // Holds the dialog only via a WeakReference so the Handler cannot leak it.
    private static final class ButtonHandler extends Handler {
        // Button clicks have Message.what as the BUTTON{1,2,3} constant
        private static final int MSG_DISMISS_DIALOG = 1;
        private WeakReference<DialogInterface> mDialog;
        public ButtonHandler(DialogInterface dialog) {
            mDialog = new WeakReference<DialogInterface>(dialog);
        }
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case DialogInterface.BUTTON_POSITIVE:
                case DialogInterface.BUTTON_NEGATIVE:
                case DialogInterface.BUTTON_NEUTRAL:
                    // msg.obj is the caller-supplied OnClickListener.
                    ((DialogInterface.OnClickListener) msg.obj).onClick(mDialog.get(), msg.what);
                    break;
                case MSG_DISMISS_DIALOG:
                    // msg.obj is the DialogInterface to dismiss.
                    ((DialogInterface) msg.obj).dismiss();
            }
        }
    }
    /** Asynchronously dismisses the dialog via the button handler's queue. */
    public void sendDismissMessage() {
        mHandler.obtainMessage(ButtonHandler.MSG_DISMISS_DIALOG, mDialogInterface).sendToTarget();
    }
    /** Creates a controller with the default bottom gravity. */
    public MLAlertController(Context context, DialogInterface di, Window window) {
        this(context, di, window, Gravity.BOTTOM);
    }
    /**
     * @param gravity window anchor for the dialog (per the field comment:
     *            Gravity.TOP for progress dialogs, Gravity.BOTTOM otherwise).
     */
    public MLAlertController(Context context, DialogInterface di, Window window, int gravity) {
        mContext = context;
        mDialogInterface = di;
        mWindow = window;
        mHandler = new ButtonHandler(di);
        // Pre-resolve all layout resources used by setupView()/list dialogs.
        mAlertDialogLayout = R.layout.ml_alert_dialog;
        mListLayout = R.layout.ml_select_dialog;
        mListLayoutWithTitle = R.layout.ml_select_dialog_center;
        mMultiChoiceItemLayout = R.layout.ml_select_dialog_multichoice;
        mSingleChoiceItemLayout = R.layout.ml_select_dialog_singlechoice;
        mListItemLayout = R.layout.ml_select_dialog_item;
        mGravity = gravity;
    }
static boolean canTextInput(View v) {
if (v.onCheckIsTextEditor()) {
return true;
}
if (!(v instanceof ViewGroup)) {
return false;
}
ViewGroup vg = (ViewGroup) v;<|fim▁hole|> v = vg.getChildAt(i);
if (canTextInput(v)) {
return true;
}
}
return false;
}
    /** Configures the window (no title, gravity, IME flags) and inflates the dialog layout. */
    public void installContent() {
        /* We use a custom title so never request a window title */
        mWindow.requestFeature(Window.FEATURE_NO_TITLE);
        mWindow.setGravity(mGravity);
        if (mView == null || !canTextInput(mView)) {
            // No editable text anywhere in the custom view: keep the IME away.
            mWindow.setFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM,
                    WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
        }
        mWindow.setContentView(mAlertDialogLayout);
        setupView();
    }
    /** Sets the dialog title; updates the title view if already inflated. */
    public void setTitle(CharSequence title) {
        mTitle = title;
        if (mTitleView != null) {
            mTitleView.setText(title);
        }
    }
    /**
     * @see android.app.AlertDialog.Builder#setCustomTitle(View)
     */
    public void setCustomTitle(View customTitleView) {
        // Installed into the top panel by setupTitle(); replaces the template.
        mCustomTitleView = customTitleView;
    }
    /** Sets whether the dialog auto-dismisses after any button click.
     *  (Name typo "Audo" kept for source compatibility with callers.) */
    public void setAudoDismiss(boolean autoDismiss) {
        mAutoDismiss = autoDismiss;
    }
    /** Sets the message body; updates the message view if already inflated. */
    public void setMessage(CharSequence message) {
        mMessage = message;
        if (mMessageView != null) {
            mMessageView.setText(message);
        }
    }
    /**
     * Set the view to display in the dialog.
     */
    public void setView(View view) {
        mView = view;
        // No explicit spacing: setupView() will not pad the custom panel.
        mViewSpacingSpecified = false;
    }
    /** Requests a transparent parent background for spacing-specified custom views (see setupView). */
    public void setCustomTransplant(boolean b) {
        mCustomBgTransplant = b;
    }
    /**
     * Set the view to display in the dialog along with the spacing around that
     * view
     */
    public void setView(View view, int viewSpacingLeft, int viewSpacingTop, int viewSpacingRight,
            int viewSpacingBottom) {
        mView = view;
        mViewSpacingSpecified = true;
        mViewSpacingLeft = viewSpacingLeft;
        mViewSpacingTop = viewSpacingTop;
        mViewSpacingRight = viewSpacingRight;
        mViewSpacingBottom = viewSpacingBottom;
    }
    /**
     * Sets a click listener or a message to be sent when the button is clicked.
     * You only need to pass one of {@code listener} or {@code msg}.
     *
     * @param whichButton Which button, can be one of
     *            {@link DialogInterface#BUTTON_POSITIVE},
     *            {@link DialogInterface#BUTTON_NEGATIVE}, or
     *            {@link DialogInterface#BUTTON_NEUTRAL}
     * @param text The text to display in positive button.
     * @param listener The
     *            {@link DialogInterface.OnClickListener} to
     *            use.
     * @param msg The {@link Message} to be sent when clicked.
     */
    public void setButton(int whichButton, CharSequence text,
            DialogInterface.OnClickListener listener, Message msg) {
        if (msg == null && listener != null) {
            // Wrap the listener in a Message targeted at our ButtonHandler.
            msg = mHandler.obtainMessage(whichButton, listener);
        }
        switch (whichButton) {
            case DialogInterface.BUTTON_POSITIVE:
                mButtonPositiveText = text;
                mButtonPositiveMessage = msg;
                break;
            case DialogInterface.BUTTON_NEGATIVE:
                mButtonNegativeText = text;
                mButtonNegativeMessage = msg;
                break;
            case DialogInterface.BUTTON_NEUTRAL:
                mButtonNeutralText = text;
                mButtonNeutralMessage = msg;
                break;
            default:
                throw new IllegalArgumentException("Button does not exist");
        }
    }
    /**
     * Set resId to 0 if you don't want an icon.
     *
     * @param resId the resourceId of the drawable to use as the icon or 0 if
     *            you don't want an icon.
     */
    public void setIcon(int resId) {
        mIconId = resId;
        if (mIconView != null) {
            if (resId > 0) {
                mIconView.setImageResource(mIconId);
            } else if (resId == 0) {
                // Explicit 0 hides the icon; negative values (the -1 sentinel)
                // intentionally change nothing.
                mIconView.setVisibility(View.GONE);
            }
        }
    }
    /** Sets the icon from a Drawable; applied immediately if the view exists. */
    public void setIcon(Drawable icon) {
        mIcon = icon;
        if ((mIconView != null) && (mIcon != null)) {
            mIconView.setImageDrawable(icon);
        }
    }
    /** Forces the inverse (dark-on-light) background; consumed by setBackground(). */
    public void setInverseBackgroundForced(boolean forceInverseBackground) {
        mForceInverseBackground = forceInverseBackground;
    }
    /** @return the list view used for choice items, or null for non-list dialogs. */
    public ListView getListView() {
        return mListView;
    }
    /** @return the custom content view set via setView(), or null. */
    public View getView() {
        return mView;
    }
    /** @return the Button for the given DialogInterface constant, or null if unknown. */
    public Button getButton(int whichButton) {
        switch (whichButton) {
            case DialogInterface.BUTTON_POSITIVE:
                return mButtonPositive;
            case DialogInterface.BUTTON_NEGATIVE:
                return mButtonNegative;
            case DialogInterface.BUTTON_NEUTRAL:
                return mButtonNeutral;
            default:
                return null;
        }
    }
    @SuppressWarnings({
            "UnusedDeclaration"
    })
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // MENU while a choice list is visible dismisses the dialog.
        if (keyCode == KeyEvent.KEYCODE_MENU && mListView != null
                && mListView.getVisibility() == View.VISIBLE) {
            this.mDialogInterface.dismiss();
        }
        // Give the message scroll view a chance to consume the key.
        return mScrollView != null && mScrollView.executeKeyEvent(event);
    }
    @SuppressWarnings({
            "UnusedDeclaration"
    })
    public boolean onKeyUp(int keyCode, KeyEvent event) {
        // Mirror onKeyDown: forward key-up events to the scroll view.
        return mScrollView != null && mScrollView.executeKeyEvent(event);
    }
    // Assembles the dialog after setContentView(): fills the content, button,
    // title and custom panels, then hides whichever panels are unused.
    private void setupView() {
        LinearLayout contentPanel = (LinearLayout) mWindow.findViewById(R.id.contentPanel);
        setupContent(contentPanel);
        boolean hasButtons = setupButtons();
        LinearLayout topPanel = (LinearLayout) mWindow.findViewById(R.id.topPanel);
        boolean hasTitle = setupTitle(topPanel);
        View buttonPanel = mWindow.findViewById(R.id.buttonPanel);
        if (!hasButtons) {
            buttonPanel.setVisibility(View.GONE);
        }
        FrameLayout customPanel = (FrameLayout) mWindow.findViewById(R.id.customPanel);
        if (mView != null) {
            // Custom dialog: transparent background (translated from Chinese)
            // mWindow.findViewById(R.id.parentPanel).setBackgroundColor(mContext.getResources().getColor(android.R.color.transparent));
            FrameLayout custom = (FrameLayout) mWindow.findViewById(R.id.custom);
            custom.addView(mView);
            if (mViewSpacingSpecified) {
                custom.setPadding(mViewSpacingLeft, mViewSpacingTop, mViewSpacingRight,
                        mViewSpacingBottom);
                if (mCustomBgTransplant)
                    mTransplantBg = true;
            }
            if (mListView != null) {
                // A list is present too: don't let the custom panel stretch.
                ((LinearLayout.LayoutParams) customPanel.getLayoutParams()).weight = 0;
            }
        } else {
            customPanel.setVisibility(View.GONE);
        }
        if (mTransplantBg) {
            mWindow.findViewById(R.id.parentPanel).setBackgroundColor(
                    mContext.getResources().getColor(android.R.color.transparent));
        } else {
            // mWindow.findViewById(R.id.parentPanel).setBackgroundColor(0xffffffff);
        }
        if (mListView != null) {
            // The ListView draws its own dividers, so also show the separator
            // lines between the header and the list (translated from Chinese).
            mWindow.findViewById(R.id.title_divider_line).setVisibility(View.VISIBLE);
            mWindow.findViewById(R.id.title_divider_line_bottom).setVisibility(View.VISIBLE);
        } else {
            mWindow.findViewById(R.id.title_divider_line).setVisibility(View.GONE);
            mWindow.findViewById(R.id.title_divider_line_bottom).setVisibility(View.GONE);
        }
        /**
         * Add margin top for the button panel if we have not any panel
         */
        if (topPanel.getVisibility() == View.GONE && contentPanel.getVisibility() == View.GONE
                && customPanel.getVisibility() == View.GONE && hasButtons) {
            buttonPanel.setPadding(buttonPanel.getPaddingLeft(), buttonPanel.getPaddingBottom(),
                    buttonPanel.getPaddingRight(), buttonPanel.getPaddingBottom());
        }
        /*
         * Only display the divider if we have a title and a custom view or a
         * message.
         */
        if (hasTitle) {
            // View divider = null;
            // if (mMessage != null || mView != null || mListView != null) {
            // divider = mWindow.findViewById(R.id.titleDivider);
            // } else {
            // divider = mWindow.findViewById(R.id.titleDividerTop);
            // }
            //
            // if (divider != null) {
            // divider.setVisibility(View.VISIBLE);
            // }
        }
        setBackground(topPanel, contentPanel, customPanel, hasButtons, hasTitle, buttonPanel);
        if (TextUtils.isEmpty(mTitle) && TextUtils.isEmpty(mMessage)) {
            mWindow.findViewById(R.id.empty_view).setVisibility(View.GONE);
        }
    }
/**
 * Populates the title area: either installs the caller-supplied custom title
 * view, or fills in the standard title text and icon, or hides the whole
 * panel when there is nothing to show.
 *
 * @param topPanel the panel that hosts the title template
 * @return true if any title (custom or text) is displayed
 */
private boolean setupTitle(LinearLayout topPanel) {
    boolean hasTitle = true;
    if (mCustomTitleView != null) {
        // Add the custom title view directly to the topPanel layout
        LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(
                LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
        topPanel.addView(mCustomTitleView, 0, lp);
        // Hide the title template
        View titleTemplate = mWindow.findViewById(R.id.title_template);
        titleTemplate.setVisibility(View.GONE);
    } else {
        final boolean hasTextTitle = !TextUtils.isEmpty(mTitle);
        mIconView = (ImageView) mWindow.findViewById(R.id.icon);
        if (hasTextTitle) {
            /* Display the title if a title is supplied, else hide it */
            mTitleView = (TextView) mWindow.findViewById(R.id.alertTitle);
            mTitleView.setText(mTitle);
            /*
             * Do this last so that if the user has supplied any icons we
             * use them instead of the default ones. If the user has
             * specified 0 then make it disappear.
             */
            if (mIconId > 0) {
                mIconView.setImageResource(mIconId);
            } else if (mIcon != null) {
                mIconView.setImageDrawable(mIcon);
            } else if (mIconId == 0) {
                /*
                 * Apply the padding from the icon to ensure the title is
                 * aligned correctly.
                 */
                mTitleView.setPadding(mIconView.getPaddingLeft(),
                        mIconView.getPaddingTop(),
                        mIconView.getPaddingRight(),
                        mIconView.getPaddingBottom());
                mIconView.setVisibility(View.GONE);
            }
        } else {
            // Hide the title template
            View titleTemplate = mWindow.findViewById(R.id.title_template);
            titleTemplate.setVisibility(View.GONE);
            mIconView.setVisibility(View.GONE);
            topPanel.setVisibility(View.GONE);
            hasTitle = false;
        }
    }
    return hasTitle;
}
/**
 * Populates the content panel: shows the plain-text message if one was set,
 * otherwise swaps the message ScrollView out for the list view, or hides the
 * panel entirely when there is neither.
 */
private void setupContent(LinearLayout contentPanel) {
    mScrollView = (ScrollView) mWindow.findViewById(R.id.scrollView);
    mScrollView.setFocusable(false);
    // Special case for users that only want to display a String
    mMessageView = (TextView) mWindow.findViewById(R.id.message);
    if (mMessageView == null) {
        return;
    }
    if (mMessage != null) {
        mMessageView.setText(mMessage);
    } else {
        // No message: remove the text widgets and, if a list was configured,
        // let it occupy the content slot with full weight.
        mMessageView.setVisibility(View.GONE);
        mScrollView.removeView(mMessageView);
        if (mListView != null) {
            contentPanel.removeView(mWindow.findViewById(R.id.scrollView));
            contentPanel.addView(mListView,
                    new LinearLayout.LayoutParams(MATCH_PARENT, MATCH_PARENT));
            contentPanel.setLayoutParams(new LinearLayout.LayoutParams(MATCH_PARENT, 0, 1.0f));
        } else {
            contentPanel.setVisibility(View.GONE);
        }
    }
}
/**
 * Binds the three standard buttons (positive/negative/neutral), hiding any
 * whose label is empty, and centers the button when exactly one is visible.
 *
 * @return true if at least one button is shown
 */
private boolean setupButtons() {
    int whichButtons = 0;
    mButtonPositive = (Button) mWindow.findViewById(R.id.button1);
    if (configureDialogButton(mButtonPositive, mButtonPositiveText)) {
        whichButtons = whichButtons | BIT_BUTTON_POSITIVE;
    }
    mButtonNegative = (Button) mWindow.findViewById(R.id.button2);
    if (configureDialogButton(mButtonNegative, mButtonNegativeText)) {
        whichButtons = whichButtons | BIT_BUTTON_NEGATIVE;
    }
    mButtonNeutral = (Button) mWindow.findViewById(R.id.button3);
    if (configureDialogButton(mButtonNeutral, mButtonNeutralText)) {
        whichButtons = whichButtons | BIT_BUTTON_NEUTRAL;
    }
    // A lone button is centered instead of being packed to one side.
    if (shouldCenterSingleButton(whichButtons)) {
        if (whichButtons == BIT_BUTTON_POSITIVE) {
            centerButton(mButtonPositive);
        } else if (whichButtons == BIT_BUTTON_NEGATIVE) {
            centerButton(mButtonNegative);
        } else if (whichButtons == BIT_BUTTON_NEUTRAL) {
            centerButton(mButtonNeutral);
        }
    }
    return whichButtons != 0;
}

/**
 * Wires a single dialog button: attaches the shared click handler, then
 * either hides the button (empty label) or applies the label and shows it.
 * Extracted because the identical sequence was repeated for all three buttons.
 *
 * @return true if the button has a label and was made visible
 */
private boolean configureDialogButton(Button button, CharSequence text) {
    button.setOnClickListener(mButtonHandler);
    if (TextUtils.isEmpty(text)) {
        button.setVisibility(View.GONE);
        return false;
    }
    button.setText(text);
    button.setVisibility(View.VISIBLE);
    return true;
}
/**
 * Returns true when the visible-button bitmask holds exactly one of the three
 * standard button flags, i.e. a single button should be centered.
 */
private static boolean shouldCenterSingleButton(int whichButton) {
    switch (whichButton) {
        case BIT_BUTTON_POSITIVE:
        case BIT_BUTTON_NEGATIVE:
        case BIT_BUTTON_NEUTRAL:
            return true;
        default:
            return false;
    }
}
/** Centers a lone button within the button bar and applies the common button background. */
private void centerButton(TextView button) {
    final LinearLayout.LayoutParams lp =
            (LinearLayout.LayoutParams) button.getLayoutParams();
    lp.weight = 0.5f;
    lp.gravity = Gravity.CENTER_HORIZONTAL;
    button.setLayoutParams(lp);
    button.setBackgroundResource(R.drawable.common_button);
}
/**
 * Applies backgrounds to whichever panels are visible (top/center/bottom
 * variants depending on position) and, as an unconditional final step, binds
 * the prepared adapter to the list view.
 *
 * NOTE(review): all nine "colors" below resolve to
 * android.R.color.transparent, so the full/top/center/bottom and dark/bright
 * distinctions are currently vestigial.
 */
private void setBackground(LinearLayout topPanel, LinearLayout contentPanel,
        View customPanel, boolean hasButtons, boolean hasTitle,
        View buttonPanel) {
    if (mTransplantBg) {
        /* Get all the different background required */
        int fullDark = mContext.getResources().getColor(android.R.color.transparent);
        int topDark = mContext.getResources().getColor(android.R.color.transparent);
        int centerDark = mContext.getResources().getColor(android.R.color.transparent);
        int bottomDark = mContext.getResources().getColor(android.R.color.transparent);
        int fullBright = mContext.getResources().getColor(android.R.color.transparent);
        int topBright = mContext.getResources().getColor(android.R.color.transparent);
        int centerBright = mContext.getResources().getColor(android.R.color.transparent);
        int bottomBright = mContext.getResources().getColor(android.R.color.transparent);
        int bottomMedium = mContext.getResources().getColor(android.R.color.transparent);
        /*
         * We now set the background of all of the sections of the alert.
         * First collect together each section that is being displayed along
         * with whether it is on a light or dark background, then run
         * through them setting their backgrounds. This is complicated
         * because we need to correctly use the full, top, middle, and
         * bottom graphics depending on how many views they are and where
         * they appear.
         */
        View[] views = new View[4];
        boolean[] light = new boolean[4];
        View lastView = null;
        boolean lastLight = false;
        int pos = 0;
        if (hasTitle) {
            views[pos] = topPanel;
            light[pos] = false;
            pos++;
        }
        /*
         * The contentPanel displays either a custom text message or a
         * ListView. If it's text we should use the dark background for
         * ListView we should use the light background. If neither are there
         * the contentPanel will be hidden so set it as null.
         */
        views[pos] = (contentPanel.getVisibility() == View.GONE)
                ? null : contentPanel;
        light[pos] = mListView != null;
        pos++;
        if (customPanel != null) {
            views[pos] = customPanel;
            light[pos] = mForceInverseBackground;
            pos++;
        }
        // Last slot: no pos++ needed, the array holds at most four entries.
        if (hasButtons) {
            views[pos] = buttonPanel;
            light[pos] = true;
        }
        // First pass: every view except the last gets a top/center background.
        boolean setView = false;
        for (pos = 0; pos < views.length; pos++) {
            View v = views[pos];
            if (v == null) {
                continue;
            }
            if (lastView != null) {
                if (!setView) {
                    lastView.setBackgroundResource(lastLight ? topBright : topDark);
                } else {
                    lastView.setBackgroundResource(lastLight ? centerBright : centerDark);
                }
                setView = true;
            }
            lastView = v;
            lastLight = light[pos];
        }
        // The final visible view gets the bottom (or full, if alone) background.
        if (lastView != null) {
            if (setView) {
                /*
                 * ListViews will use the Bright background but buttons use
                 * the Medium background.
                 */
                lastView.setBackgroundResource(
                        lastLight ? (hasButtons ? bottomMedium : bottomBright) : bottomDark);
            } else {
                lastView.setBackgroundResource(lastLight ? fullBright : fullDark);
            }
        }
    }
    // Runs regardless of mTransplantBg: hook the adapter up and restore the
    // checked selection, if any.
    if ((mListView != null) && (mAdapter != null)) {
        mListView.setAdapter(mAdapter);
        if (mCheckedItem > -1) {
            mListView.setItemChecked(mCheckedItem, true);
            mListView.setSelection(mCheckedItem);
        }
    }
}
/**
 * ListView subclass whose recycle-on-measure behavior can be switched off;
 * AlertParams toggles {@code mRecycleOnMeasure} when it builds the list.
 */
public static class RecycleListView extends ListView {
    // Whether children may be recycled during measurement; set from
    // AlertParams.mRecycleOnMeasure when the dialog list is created.
    boolean mRecycleOnMeasure = true;

    public RecycleListView(Context context) {
        super(context);
    }

    public RecycleListView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public RecycleListView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    // Exposes the flag to the framework's measurement path -- presumably
    // overriding ListView's hidden recycleOnMeasure() hook; confirm against
    // the platform version in use.
    protected boolean recycleOnMeasure() {
        return mRecycleOnMeasure;
    }
}
public static class AlertParams {
public final Context mContext;
public final LayoutInflater mInflater;
public int mIconId = 0;
public Drawable mIcon;
public CharSequence mTitle;
public View mCustomTitleView;
public CharSequence mMessage;
public CharSequence mPositiveButtonText;
public DialogInterface.OnClickListener mPositiveButtonListener;
public CharSequence mNegativeButtonText;
public DialogInterface.OnClickListener mNegativeButtonListener;
public CharSequence mNeutralButtonText;
public DialogInterface.OnClickListener mNeutralButtonListener;
public boolean mCancelable;
public DialogInterface.OnCancelListener mOnCancelListener;
public DialogInterface.OnKeyListener mOnKeyListener;
public CharSequence[] mItems;
public ListAdapter mAdapter;
public DialogInterface.OnClickListener mOnClickListener;
public View mView;
public int mViewSpacingLeft;
public int mViewSpacingTop;
public int mViewSpacingRight;
public int mViewSpacingBottom;
public boolean mViewSpacingSpecified = false;
public boolean[] mCheckedItems;
public boolean mIsMultiChoice;
public boolean mIsSingleChoice;
public int mCheckedItem = -1;
public DialogInterface.OnMultiChoiceClickListener mOnCheckboxClickListener;
public Cursor mCursor;
public String mLabelColumn;
public String mIsCheckedColumn;
public boolean mForceInverseBackground;
public AdapterView.OnItemSelectedListener mOnItemSelectedListener;
public OnPrepareListViewListener mOnPrepareListViewListener;
public boolean mRecycleOnMeasure = true;
public boolean mAutoDismiss = true;
public MLAlertDialog.DismissCallBack mDismissCallBack;
public CharSequence mCustomTitle;
public boolean mCustomBgTransplant = false;
/**
* Interface definition for a callback to be invoked before the ListView
* will be bound to an adapter.
*/
public interface OnPrepareListViewListener {
/**
* Called before the ListView is bound to an adapter.
*
* @param listView The ListView that will be shown in the dialog.
*/
void onPrepareListView(ListView listView);
}
/** Captures the context, enables cancelation by default, and caches a LayoutInflater. */
public AlertParams(Context context) {
    mContext = context;
    mCancelable = true;
    mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
}
/**
 * Copies every configured parameter onto the given controller: title/icon,
 * message, the three buttons, the list content (centered or standard layout
 * depending on dialog gravity), and the optional custom view.
 */
public void apply(MLAlertController dialog) {
    if (mCustomTitleView != null) {
        dialog.setCustomTitle(mCustomTitleView);
    } else {
        if (mTitle != null) {
            dialog.setTitle(mTitle);
        }
        if (mIcon != null) {
            dialog.setIcon(mIcon);
        }
        // NOTE(review): mIconId defaults to 0, so setIcon(0) is also invoked
        // when no icon resource was supplied -- confirm the controller treats
        // 0 as "no icon".
        if (mIconId >= 0) {
            dialog.setIcon(mIconId);
        }
    }
    if (mMessage != null) {
        dialog.setMessage(mMessage);
    }
    if (mPositiveButtonText != null) {
        dialog.setButton(DialogInterface.BUTTON_POSITIVE, mPositiveButtonText,
                mPositiveButtonListener, null);
    }
    if (mNegativeButtonText != null) {
        dialog.setButton(DialogInterface.BUTTON_NEGATIVE, mNegativeButtonText,
                mNegativeButtonListener, null);
    }
    if (mNeutralButtonText != null) {
        dialog.setButton(DialogInterface.BUTTON_NEUTRAL, mNeutralButtonText,
                mNeutralButtonListener, null);
    }
    if (mForceInverseBackground) {
        dialog.setInverseBackgroundForced(true);
    }
    // For a list, the client can either supply an array of items or an
    // adapter or a cursor
    dialog.mTransplantBg = false;
    if ((mItems != null) || (mCursor != null) || (mAdapter != null)) {
        if (dialog.mGravity == Gravity.CENTER) {
            createCenterListView(dialog);
        } else {
            createListView(dialog);
        }
    }
    if (mView != null) {
        if (mViewSpacingSpecified) {
            dialog.setView(mView, mViewSpacingLeft, mViewSpacingTop, mViewSpacingRight,
                    mViewSpacingBottom);
        } else {
            dialog.setView(mView);
        }
    }
    dialog.setAudoDismiss(mAutoDismiss);
    dialog.setCustomTransplant(mCustomBgTransplant);
}
/**
 * Builds the centered list layout (list plus optional header title), wires
 * click/selection callbacks, and installs the result as the dialog's custom
 * view with a transparent ("transplant") background.
 *
 * Fix: the OnItemSelectedListener registration was duplicated verbatim
 * (two identical if-blocks back to back); the redundant second call is removed.
 */
private void createCenterListView(final MLAlertController dialog) {
    final LinearLayout customView = (LinearLayout)
            mInflater.inflate(dialog.mListLayoutWithTitle, null);
    final RecycleListView listView = (RecycleListView) customView
            .findViewById(R.id.select_dialog_listview);
    ListAdapter adapter;
    int layout = R.layout.ml_center_item;
    // Adapter priority: caller-supplied adapter, else items array, else cursor.
    if (mCursor == null) {
        adapter = (mAdapter != null) ? mAdapter
                : new ArrayAdapter<CharSequence>(mContext, layout, R.id.text1, mItems);
    } else {
        adapter = new SimpleCursorAdapter(mContext, layout,
                mCursor, new String[] {
                        mLabelColumn
                }, new int[] {
                        R.id.text1
                });
    }
    if (mCustomTitle != null) {
        ((TextView) (customView.findViewById(R.id.title))).setText(mCustomTitle);
    }
    if (mOnPrepareListViewListener != null) {
        mOnPrepareListViewListener.onPrepareListView(listView);
    }
    /*
     * Don't directly set the adapter on the ListView as we might want
     * to add a footer to the ListView later.
     */
    dialog.mAdapter = adapter;
    listView.setAdapter(adapter);
    dialog.mCheckedItem = mCheckedItem;
    if (mOnClickListener != null) {
        listView.setOnItemClickListener(new OnItemClickListener() {
            public void onItemClick(AdapterView parent, View v, int position, long id) {
                mOnClickListener.onClick(dialog.mDialogInterface, position);
                // Single-choice dialogs stay open so the user sees the selection.
                if (!mIsSingleChoice) {
                    dialog.mDialogInterface.dismiss();
                }
            }
        });
    } else if (mOnCheckboxClickListener != null) {
        listView.setOnItemClickListener(new OnItemClickListener() {
            public void onItemClick(AdapterView parent, View v, int position, long id) {
                if (mCheckedItems != null) {
                    mCheckedItems[position] = listView.isItemChecked(position);
                }
                mOnCheckboxClickListener.onClick(
                        dialog.mDialogInterface, position, listView.isItemChecked(position));
            }
        });
    }
    // Attach a given OnItemSelectedListener to the ListView
    if (mOnItemSelectedListener != null) {
        listView.setOnItemSelectedListener(mOnItemSelectedListener);
    }
    if (mIsSingleChoice) {
        listView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
    } else if (mIsMultiChoice) {
        listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
    }
    listView.mRecycleOnMeasure = mRecycleOnMeasure;
    dialog.mView = customView;
    dialog.mTransplantBg = true;
    dialog.setCustomTransplant(mCustomBgTransplant);
}
/**
 * Builds the standard (non-centered) list content: picks an adapter based on
 * the multi-choice / cursor / items configuration, wires click and selection
 * callbacks, and hands the finished list to the controller via
 * {@code dialog.mListView}.
 */
private void createListView(final MLAlertController dialog) {
    final RecycleListView listView = (RecycleListView)
            mInflater.inflate(dialog.mListLayout, null);
    ListAdapter adapter;
    if (mIsMultiChoice) {
        if (mCursor == null) {
            // Array-backed multi-choice: seed each row's checked state from mCheckedItems.
            adapter = new ArrayAdapter<CharSequence>(
                    mContext, dialog.mMultiChoiceItemLayout, R.id.text1, mItems) {
                @Override
                public View getView(int position, View convertView, ViewGroup parent) {
                    View view = super.getView(position, convertView, parent);
                    if (mCheckedItems != null) {
                        boolean isItemChecked = mCheckedItems[position];
                        if (isItemChecked) {
                            listView.setItemChecked(position, true);
                        }
                    }
                    return view;
                }
            };
        } else {
            // Cursor-backed multi-choice: label text and checked state are read
            // from the configured label/checked columns.
            adapter = new CursorAdapter(mContext, mCursor, false) {
                private final int mLabelIndex;
                private final int mIsCheckedIndex;
                {
                    final Cursor cursor = getCursor();
                    mLabelIndex = cursor.getColumnIndexOrThrow(mLabelColumn);
                    mIsCheckedIndex = cursor.getColumnIndexOrThrow(mIsCheckedColumn);
                }
                @Override
                public void bindView(View view, Context context, Cursor cursor) {
                    CheckedTextView text = (CheckedTextView) view.findViewById(R.id.text1);
                    text.setText(cursor.getString(mLabelIndex));
                    listView.setItemChecked(cursor.getPosition(),
                            cursor.getInt(mIsCheckedIndex) == 1);
                }
                @Override
                public View newView(Context context, Cursor cursor, ViewGroup parent) {
                    return mInflater.inflate(dialog.mMultiChoiceItemLayout,
                            parent, false);
                }
            };
        }
    } else {
        // Single-choice or plain list: adapter priority is caller-supplied
        // adapter, else items array, else cursor.
        int layout = mIsSingleChoice
                ? dialog.mSingleChoiceItemLayout : dialog.mListItemLayout;
        if (mCursor == null) {
            adapter = (mAdapter != null) ? mAdapter
                    : new ArrayAdapter<CharSequence>(mContext, layout, R.id.text1, mItems);
        } else {
            adapter = new SimpleCursorAdapter(mContext, layout,
                    mCursor, new String[] {
                            mLabelColumn
                    }, new int[] {
                            R.id.text1
                    });
        }
    }
    if (mOnPrepareListViewListener != null) {
        mOnPrepareListViewListener.onPrepareListView(listView);
    }
    /*
     * Don't directly set the adapter on the ListView as we might want
     * to add a footer to the ListView later.
     */
    dialog.mAdapter = adapter;
    dialog.mCheckedItem = mCheckedItem;
    if (mOnClickListener != null) {
        listView.setOnItemClickListener(new OnItemClickListener() {
            public void onItemClick(AdapterView parent, View v, int position, long id) {
                mOnClickListener.onClick(dialog.mDialogInterface, position);
                // Single-choice dialogs stay open so the user sees the selection.
                if (!mIsSingleChoice) {
                    dialog.mDialogInterface.dismiss();
                }
            }
        });
    } else if (mOnCheckboxClickListener != null) {
        listView.setOnItemClickListener(new OnItemClickListener() {
            public void onItemClick(AdapterView parent, View v, int position, long id) {
                if (mCheckedItems != null) {
                    mCheckedItems[position] = listView.isItemChecked(position);
                }
                mOnCheckboxClickListener.onClick(
                        dialog.mDialogInterface, position, listView.isItemChecked(position));
            }
        });
    }
    // Attach a given OnItemSelectedListener to the ListView
    if (mOnItemSelectedListener != null) {
        listView.setOnItemSelectedListener(mOnItemSelectedListener);
    }
    if (mIsSingleChoice) {
        listView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
    } else if (mIsMultiChoice) {
        listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
    }
    listView.mRecycleOnMeasure = mRecycleOnMeasure;
    dialog.mListView = listView;
    dialog.setCustomTransplant(mCustomBgTransplant);
}
}
}<|fim▁end|> | int i = vg.getChildCount();
while (i > 0) {
i--; |
<|file_name|>URLInputType.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2010 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT<|fim▁hole|> * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "core/html/forms/URLInputType.h"
#include "core/InputTypeNames.h"
#include "core/html/HTMLInputElement.h"
#include "core/html/parser/HTMLParserIdioms.h"
#include "platform/text/PlatformLocale.h"
namespace blink {
// Factory: creates the url input-type handler bound to |element|.
InputType* URLInputType::create(HTMLInputElement& element) {
  return new URLInputType(element);
}
// Records an InputTypeURL use-counter metric (only when the element is visible).
void URLInputType::countUsage() {
  countUsageIfVisible(UseCounter::InputTypeURL);
}
// Returns the canonical "url" form-control type name.
const AtomicString& URLInputType::formControlType() const {
  return InputTypeNames::url;
}
// A non-empty value that does not parse as a valid URL is a type mismatch;
// the empty string never mismatches.
bool URLInputType::typeMismatchFor(const String& value) const {
  if (value.isEmpty())
    return false;
  return !KURL(KURL(), value).isValid();
}
// Validity check against the element's current value.
bool URLInputType::typeMismatch() const {
  return typeMismatchFor(element().value());
}
// Localized validation message shown for a URL type mismatch.
String URLInputType::typeMismatchText() const {
  return locale().queryString(WebLocalizedString::ValidationTypeMismatchForURL);
}
// Strips leading/trailing HTML whitespace before the base sanitization.
String URLInputType::sanitizeValue(const String& proposedValue) const {
  return BaseTextInputType::sanitizeValue(
      stripLeadingAndTrailingHTMLSpaces(proposedValue));
}
// Sanitization applied while the user is typing.
String URLInputType::sanitizeUserInputValue(const String& proposedValue) const {
  // Do not call URLInputType::sanitizeValue.
  // (Presumably so whitespace is not stripped out from under the user
  // mid-edit -- stripping happens only on the final value; confirm.)
  return BaseTextInputType::sanitizeValue(proposedValue);
}
} // namespace blink<|fim▁end|> | * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
<|file_name|>instr_vmovdqa64.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
// VMOVDQA64 xmm7{k3}{z}, xmm3 -- register-to-register form in 32-bit mode.
// Expected bytes: 0x62 (98) EVEX prefix ... 0x6F (111) opcode, 0xFB ModRM.
#[test]
fn vmovdqa64_1() {
    run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 253, 139, 111, 251], OperandSize::Dword)
}
// VMOVDQA64 xmm2{k6}{z}, xmmword [edi+ebx*8] -- memory-source form in 32-bit
// mode; the trailing bytes 20,223 encode the ModRM/SIB for the scaled index.
#[test]
fn vmovdqa64_2() {
    run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(XMM2)), operand2: Some(IndirectScaledIndexed(EDI, EBX, Eight, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 241, 253, 142, 111, 20, 223], OperandSize::Dword)
}
#[test]
fn vmovdqa64_3() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM23)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 177, 253, 142, 111, 215], OperandSize::Qword)
}
#[test]
fn vmovdqa64_4() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(XMM21)), operand2: Some(IndirectDisplaced(RBX, 122436272, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 225, 253, 143, 111, 171, 176, 58, 76, 7], OperandSize::Qword)
}
#[test]<|fim▁hole|> run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 241, 253, 174, 111, 231], OperandSize::Dword)
}
#[test]
fn vmovdqa64_6() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(YMM5)), operand2: Some(Indirect(EAX, Some(OperandSize::Ymmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 253, 171, 111, 40], OperandSize::Dword)
}
#[test]
fn vmovdqa64_7() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(YMM28)), operand2: Some(Direct(YMM11)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 65, 253, 175, 111, 227], OperandSize::Qword)
}
#[test]
fn vmovdqa64_8() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(YMM12)), operand2: Some(IndirectScaledIndexedDisplaced(RDI, RBX, Four, 1809334581, Some(OperandSize::Ymmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 113, 253, 169, 111, 164, 159, 53, 65, 216, 107], OperandSize::Qword)
}
#[test]
fn vmovdqa64_9() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(ZMM4)), operand2: Some(Direct(ZMM0)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 241, 253, 204, 111, 224], OperandSize::Dword)
}
#[test]
fn vmovdqa64_10() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(ZMM4)), operand2: Some(IndirectDisplaced(EDI, 761628869, Some(OperandSize::Zmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 241, 253, 204, 111, 167, 197, 136, 101, 45], OperandSize::Dword)
}
#[test]
fn vmovdqa64_11() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(ZMM29)), operand2: Some(Direct(ZMM12)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 65, 253, 207, 111, 236], OperandSize::Qword)
}
#[test]
fn vmovdqa64_12() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(ZMM6)), operand2: Some(IndirectScaledIndexedDisplaced(RSI, RSI, Four, 360649149, Some(OperandSize::Zmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 241, 253, 201, 111, 180, 182, 189, 17, 127, 21], OperandSize::Qword)
}
#[test]
fn vmovdqa64_13() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 241, 253, 137, 111, 243], OperandSize::Dword)
}
#[test]
fn vmovdqa64_14() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(IndirectDisplaced(ESI, 1415020857, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(XMM5)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 241, 253, 8, 127, 174, 57, 129, 87, 84], OperandSize::Dword)
}
#[test]
fn vmovdqa64_15() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(XMM31)), operand2: Some(Direct(XMM28)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 1, 253, 140, 111, 252], OperandSize::Qword)
}
#[test]
fn vmovdqa64_16() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(IndirectScaledIndexed(RDI, RDX, Two, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(XMM23)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 225, 253, 8, 127, 60, 87], OperandSize::Qword)
}
#[test]
fn vmovdqa64_17() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM2)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 241, 253, 174, 111, 226], OperandSize::Dword)
}
#[test]
fn vmovdqa64_18() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(IndirectDisplaced(EDI, 537258592, Some(OperandSize::Ymmword), None)), operand2: Some(Direct(YMM2)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 241, 253, 40, 127, 151, 96, 234, 5, 32], OperandSize::Dword)
}
#[test]
fn vmovdqa64_19() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(YMM6)), operand2: Some(Direct(YMM22)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 177, 253, 175, 111, 246], OperandSize::Qword)
}
#[test]
fn vmovdqa64_20() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(IndirectScaledIndexedDisplaced(RBX, RBX, Eight, 773870247, Some(OperandSize::Ymmword), None)), operand2: Some(Direct(YMM21)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 225, 253, 40, 127, 172, 219, 167, 82, 32, 46], OperandSize::Qword)
}
#[test]
fn vmovdqa64_21() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(ZMM2)), operand2: Some(Direct(ZMM0)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 241, 253, 206, 111, 208], OperandSize::Dword)
}
#[test]
fn vmovdqa64_22() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(IndirectDisplaced(ECX, 1377197729, Some(OperandSize::Zmmword), None)), operand2: Some(Direct(ZMM0)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 241, 253, 72, 127, 129, 161, 94, 22, 82], OperandSize::Dword)
}
#[test]
fn vmovdqa64_23() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(Direct(ZMM15)), operand2: Some(Direct(ZMM30)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 17, 253, 207, 111, 254], OperandSize::Qword)
}
#[test]
fn vmovdqa64_24() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQA64, operand1: Some(IndirectScaledIndexed(RCX, RDX, Eight, Some(OperandSize::Zmmword), None)), operand2: Some(Direct(ZMM24)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 97, 253, 72, 127, 4, 209], OperandSize::Qword)
}<|fim▁end|> | fn vmovdqa64_5() { |
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>import jps
import json
import time
class MessageHolder(object):
    """Callable message sink: records every message it is invoked with.

    ``get_msg`` returns the live internal list (not a copy), oldest first.
    """

    def __init__(self):
        # Messages in arrival order; get_msg exposes this same list object.
        self._saved_msg = []

    def __call__(self, msg):
        self._saved_msg += [msg]

    def get_msg(self):
        """Return the list of messages received so far."""
        return self._saved_msg
def test_multi_pubsub_once():
    """One JsonMultiplePublisher publish fans out to three topic subscribers.

    Publishes a single JSON object whose keys are topic names; each
    subscriber must receive exactly one message holding the value stored
    under its own topic key.
    NOTE(review): relies on fixed 0.1 s sleeps for connection setup and
    delivery -- may be flaky on slow machines.
    """
    holder1 = MessageHolder()
    holder2 = MessageHolder()
    holder3 = MessageHolder()
    sub1 = jps.Subscriber('test_utils1', holder1)
    sub2 = jps.Subscriber('test_utils2', holder2)
    sub3 = jps.Subscriber('test_utils3', holder3)
    pub = jps.utils.JsonMultiplePublisher()
    time.sleep(0.1)  # allow subscribers to connect before publishing
    pub.publish(
        '{"test_utils1": "hoge", "test_utils2": {"x": 3}, "test_utils3": 5}')
    time.sleep(0.1)  # allow delivery before draining the queues
    sub1.spin_once()
    sub2.spin_once()
    sub3.spin_once()
    assert len(holder1.get_msg()) == 1
    assert json.loads(holder1.get_msg()[0]) == 'hoge'
    assert len(holder2.get_msg()) == 1
    obj = json.loads(holder2.get_msg()[0])
    assert obj['x'] == 3
    assert len(holder3.get_msg()) == 1
    assert json.loads(holder3.get_msg()[0]) == 5
def test_to_obj():
    """jps.utils.to_obj turns a JSON object into attribute-style access.

    Nested objects become nested attributes; arrays keep index access.
    """
    msg = '{"aa": 1, "bb": ["hoge", "hogi"], "cc": {"cc1" : 50}}'
    converted = jps.utils.to_obj(msg)
    assert converted.aa == 1
    assert converted.bb[0] == 'hoge'
    assert converted.bb[1] == 'hogi'
    assert len(converted.bb) == 2
    assert converted.cc.cc1 == 50
    # todo: do
    # json = converted.to_json()
    # assert json == msg
# todo
def test_to_obj_list_mixed():
    """to_obj on a JSON array mixing strings and an object.

    Renamed from ``test_to_obj_list``: the next test redefined the exact same
    name, so this one was silently shadowed and never ran. The length
    assertion is also fixed -- the array has three elements (``bb[2]`` is
    dereferenced two lines below), not two.
    """
    msg = '["hoge", "hogi", {"atr1": "val2", "atr2": 1.0}]'
    bb = jps.utils.to_obj(msg)
    assert len(bb) == 3
    assert bb[0] == 'hoge'
    assert bb[1] == 'hogi'
    assert bb[2].atr1 == 'val2'
    assert bb[2].atr2 == 1.0
    # json = bb.to_json()
    # assert json == msg
def test_to_obj_list():
msg = '[{"hoge": 1}, {"hogi": 2}]'
bb = jps.utils.to_obj(msg)
assert len(bb) == 2
assert bb[0].hoge == 1
assert bb[1].hogi == 2
# todo: list support
# json = bb.to_json()
# assert json == msg
def test_to_obj_simple():
msg = '{"aa": 1, "cc": 3, "bb": 2}'
converted = jps.utils.to_obj(msg)
assert converted.aa == 1
assert converted.bb == 2
assert converted.cc == 3
# works only super simple case
json1 = converted.to_json()
assert json1 == msg<|fim▁end|> | |
<|file_name|>0029_auto_20151028_1609.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
<|fim▁hole|> dependencies = [
('core', '0028_auto_20151026_0926'),
]
operations = [
migrations.AlterField(
model_name='partner',
name='logo',
field=models.ImageField(default=None, upload_to='partners', verbose_name='Logo', help_text="Envoyez le logo du partenaire ici.<br />Il doit faire 150x150px. Si la largeur est différente de la hauteur, l'image apparaitra déformée."),
),
]<|fim▁end|> | class Migration(migrations.Migration):
|
<|file_name|>parser_config.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# flake8: noqa
import argparse
from aria_cli import commands as aria
from aria_cli.config import argument_utils
from argcomplete import completers
yaml_files_completer = completers.FilesCompleter(['*.yml', '*.yaml'])
archive_files_completer = completers.FilesCompleter(
['*.zip', '*.tar', '*.tar.gz', '*.tar.bz2'])
FORMAT_INPUT_AS_YAML_OR_DICT = 'formatted as YAML or as "key1=value1;key2=value2"'
def workflow_id_argument(hlp):
return {
'metavar': 'WORKFLOW',
'dest': 'workflow_id',
'type': str,
'required': True,
'help': hlp,
}
def parser_config():
return {
'description': 'Manages ARIA in different Cloud Environments',
'arguments': {
'--version': {
'help': 'show version information and exit',
'action': aria.version
}
},
'commands': {
'validate': {
'arguments': {
'-p,--blueprint-path': {
'metavar': 'BLUEPRINT_FILE',
'type': argparse.FileType(),
'dest': 'blueprint_path',
'required': True,
'help': "Path to the application's blueprint file",
'completer': yaml_files_completer
}
},
'help': 'command for validating a blueprint',
'handler': aria.local.validate
},
'init': {
'help': 'Init a local workflow execution environment in '
'in the current working directory',
'arguments': {
'-p,--blueprint-path': {
'dest': 'blueprint_path',
'metavar': 'BLUEPRINT_PATH',
'type': str,
'required': True,
'help': 'Path to a blueprint'
},
'-i,--inputs': {
'metavar': 'INPUTS',
'dest': 'inputs',
'required': False,
'help': 'Inputs file/string for the local workflow creation ({0})'
.format(FORMAT_INPUT_AS_YAML_OR_DICT)
},
'--install-plugins': {
'dest': 'install_plugins_',
'action': 'store_true',
'default': False,
'help': 'Install necessary plugins of the given blueprint.'
}
},
'handler': aria.local.init
},
'install-plugins': {
'help': 'Installs the necessary plugins for a given blueprint',
'arguments': {
'-p,--blueprint-path': {
'dest': 'blueprint_path',
'metavar': 'BLUEPRINT_PATH',
'type': str,
'required': True,
'help': 'Path to a blueprint'
}
},
'handler': aria.local.install_plugins
},
'create-requirements': {
'help': 'Creates a PIP compliant requirements file for the given blueprint',
'arguments': {
'-p,--blueprint-path': {
'dest': 'blueprint_path',
'metavar': 'BLUEPRINT_PATH',
'type': str,
'required': True,
'help': 'Path to a blueprint'
},
'-o,--output': {
'metavar': 'REQUIREMENTS_OUTPUT',
'dest': 'output',
'required': False,
'help': 'Path to a file that will hold the '
'requirements of the blueprint'
}
},
'handler': aria.local.create_requirements
},
'execute': {
'help': 'Execute a workflow locally',
'arguments': {
'-w,--workflow':
argument_utils.remove_completer(
workflow_id_argument(
hlp='The workflow to execute locally')),
'-p,--parameters': {
'metavar': 'PARAMETERS',
'dest': 'parameters',
'default': {},
'type': str,
'required': False,
'help': 'Parameters for the workflow execution ({0})'
.format(FORMAT_INPUT_AS_YAML_OR_DICT)
},
'--allow-custom-parameters': {
'dest': 'allow_custom_parameters',
'action': 'store_true',
'default': False,
'help': 'A flag for allowing the passing of custom parameters ('
"parameters which were not defined in the workflow's schema in "
'the blueprint) to the execution'
},
'--task-retries': {
'metavar': 'TASK_RETRIES',
'dest': 'task_retries',
'default': 0,
'type': int,
'help': 'How many times should a task be retried in case '
'it fails'
},
'--task-retry-interval': {
'metavar': 'TASK_RETRY_INTERVAL',
'dest': 'task_retry_interval',
'default': 1,
'type': int,
'help': 'How many seconds to wait before each task is retried'
},
'--task-thread-pool-size': {
'metavar': 'TASK_THREAD_POOL_SIZE',
'dest': 'task_thread_pool_size',
'default': 1,
'type': int,
'help': 'The size of the thread pool size to execute tasks in'
}
},
'handler': aria.local.execute
},
'outputs': {
'help': 'Display outputs',
'arguments': {},
'handler': aria.local.outputs
},
'instances': {
'help': 'Display node instances',
'arguments': {
'--node-id': {
'metavar': 'NODE_ID',
'dest': 'node_id',
'default': None,<|fim▁hole|> 'required': False,
'help': 'Only display node instances of this node id'
}
},
'handler': aria.local.instances
}
}
}<|fim▁end|> | 'type': str, |
<|file_name|>delayed_commits.js<|end_file_name|><|fim▁begin|>// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
couchTests.delayed_commits = function(debug) {
var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"false"});
db.deleteDb();
db.createDb();
if (debug) debugger;
run_on_modified_server(
[{section: "couchdb",
key: "delayed_commits",
value: "true"}],
function () {
// By default, couchdb doesn't fully commit documents to disk right away,
// it waits about a second to batch the full commit flush along with any
// other updates. If it crashes or is restarted you may lose the most
// recent commits.
T(db.save({_id:"1",a:2,b:4}).ok);
T(db.open("1") != null);
restartServer();
T(db.open("1") == null); // lost the update.
// note if we waited > 1 sec before the restart, the doc would likely
// commit.
// Retry the same thing but with full commits on.
var db2 = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"true"});
T(db2.save({_id:"1",a:2,b:4}).ok);
T(db2.open("1") != null);
restartServer();
T(db2.open("1") != null);<|fim▁hole|>
T(db.save({_id:"2",a:2,b:4}).ok);
T(db.open("2") != null);
T(db.ensureFullCommit().ok);
restartServer();
T(db.open("2") != null);
// However, it's possible even when flushed, that the server crashed between
// the update and the commit, and you don't want to check to make sure
// every doc you updated actually made it to disk. So record the instance
// start time of the database before the updates and then check it again
// after the flush (the instance start time is returned by the flush
// operation). if they are the same, we know everything was updated
// safely.
// First try it with a crash.
var instanceStartTime = db.info().instance_start_time;
T(db.save({_id:"3",a:2,b:4}).ok);
T(db.open("3") != null);
restartServer();
var commitResult = db.ensureFullCommit();
T(commitResult.ok && commitResult.instance_start_time != instanceStartTime);
// start times don't match, meaning the server lost our change
T(db.open("3") == null); // yup lost it
// retry with no server restart
var instanceStartTime = db.info().instance_start_time;
T(db.save({_id:"4",a:2,b:4}).ok);
T(db.open("4") != null);
var commitResult = db.ensureFullCommit();
T(commitResult.ok && commitResult.instance_start_time == instanceStartTime);
// Successful commit, start times match!
restartServer();
T(db.open("4") != null);
});
// Now test that when we exceed the max_dbs_open, pending commits are safely
// written.
T(db.save({_id:"5",foo:"bar"}).ok);
var max = 2;
run_on_modified_server(
[{section: "couchdb",
key: "delayed_commits",
value: "true"},
{section: "couchdb",
key: "max_dbs_open",
value: max.toString()}],
function () {
for(var i=0; i<max; i++) {
var dbi = new CouchDB("test_suite_db" + i);
dbi.deleteDb();
dbi.createDb();
}
T(db.open("5").foo=="bar");
for(var i=0; i<max+1; i++) {
var dbi = new CouchDB("test_suite_db" + i);
dbi.deleteDb();
}
});
};<|fim▁end|> |
// You can update but without committing immediately, and then ensure
// everything is commited in the last step. |
<|file_name|>VMMRC.cpp<|end_file_name|><|fim▁begin|>/* $Id$ */
/** @file
* VMM - Raw-mode Context.
*/
/*
* Copyright (C) 2006-2012 Oracle Corporation
*
* This file is part of VirtualBox Open Source Edition (OSE), as
* available from http://www.virtualbox.org. This file is free software;
* you can redistribute it and/or modify it under the terms of the GNU
* General Public License (GPL) as published by the Free Software
* Foundation, in version 2 as it comes in the "COPYING" file of the
* VirtualBox OSE distribution. VirtualBox OSE is distributed in the
* hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
*/
/*******************************************************************************
* Header Files *
*******************************************************************************/
#define LOG_GROUP LOG_GROUP_VMM
#include <iprt/asm-amd64-x86.h> /* for SUPGetCpuHzFromGIP */
#include <VBox/vmm/vmm.h>
#include <VBox/vmm/trpm.h>
#include <VBox/vmm/pgm.h>
#include "VMMInternal.h"
#include <VBox/vmm/vm.h>
#include <VBox/sup.h>
#include <VBox/err.h>
#include <VBox/log.h>
#include <iprt/assert.h>
#include <iprt/initterm.h>
/*******************************************************************************
* Global Variables *
*******************************************************************************/
/** Default logger instance. */
extern "C" DECLIMPORT(RTLOGGERRC) g_Logger;
extern "C" DECLIMPORT(RTLOGGERRC) g_RelLogger;
/*******************************************************************************
* Internal Functions *
*******************************************************************************/
static int vmmGCTest(PVM pVM, unsigned uOperation, unsigned uArg);
static DECLCALLBACK(int) vmmGCTestTmpPFHandler(PVM pVM, PCPUMCTXCORE pRegFrame);
static DECLCALLBACK(int) vmmGCTestTmpPFHandlerCorruptFS(PVM pVM, PCPUMCTXCORE pRegFrame);
/**
* The GC entry point.
*
* @returns VBox status code.
* @param pVM Pointer to the VM.
* @param uOperation Which operation to execute (VMMGCOPERATION).
* @param uArg Argument to that operation.
*/
VMMRCDECL(int) VMMGCEntry(PVM pVM, unsigned uOperation, unsigned uArg, ...)
{<|fim▁hole|> {
/*
* Init RC modules.
*/
case VMMGC_DO_VMMGC_INIT:
{
/*
* Validate the svn revision (uArg) and build type (ellipsis).
*/
if (uArg != VMMGetSvnRev())
return VERR_VMM_RC_VERSION_MISMATCH;
va_list va;
va_start(va, uArg);
uint32_t uBuildType = va_arg(va, uint32_t);
if (uBuildType != vmmGetBuildType())
return VERR_VMM_RC_VERSION_MISMATCH;
/*
* Initialize the runtime.
*/
uint64_t u64TS = va_arg(va, uint64_t);
va_end(va);
int rc = RTRCInit(u64TS);
Log(("VMMGCEntry: VMMGC_DO_VMMGC_INIT - uArg=%u (svn revision) u64TS=%RX64; rc=%Rrc\n", uArg, u64TS, rc));
AssertRCReturn(rc, rc);
rc = PGMRegisterStringFormatTypes();
AssertRCReturn(rc, rc);
rc = PGMRCDynMapInit(pVM);
AssertRCReturn(rc, rc);
return VINF_SUCCESS;
}
/*
* Testcase which is used to test interrupt forwarding.
* It spins for a while with interrupts enabled.
*/
case VMMGC_DO_TESTCASE_HYPER_INTERRUPT:
{
uint32_t volatile i = 0;
ASMIntEnable();
while (i < _2G32)
i++;
ASMIntDisable();
return 0;
}
/*
* Testcase which simply returns, this is used for
* profiling of the switcher.
*/
case VMMGC_DO_TESTCASE_NOP:
return 0;
/*
* Testcase executes a privileged instruction to force a world switch. (in both SVM & VMX)
*/
case VMMGC_DO_TESTCASE_HM_NOP:
ASMRdMsr_Low(MSR_IA32_SYSENTER_CS);
return 0;
/*
* Delay for ~100us.
*/
case VMMGC_DO_TESTCASE_INTERRUPT_MASKING:
{
uint64_t u64MaxTicks = (SUPGetCpuHzFromGIP(g_pSUPGlobalInfoPage) != ~(uint64_t)0
? SUPGetCpuHzFromGIP(g_pSUPGlobalInfoPage)
: _2G)
/ 10000;
uint64_t u64StartTSC = ASMReadTSC();
uint64_t u64TicksNow;
uint32_t volatile i = 0;
do
{
/* waste some time and protect against getting stuck. */
for (uint32_t volatile j = 0; j < 1000; j++, i++)
if (i > _2G32)
return VERR_GENERAL_FAILURE;
/* check if we're done.*/
u64TicksNow = ASMReadTSC() - u64StartTSC;
} while (u64TicksNow < u64MaxTicks);
return VINF_SUCCESS;
}
/*
* Trap testcases and unknown operations.
*/
default:
if ( uOperation >= VMMGC_DO_TESTCASE_TRAP_FIRST
&& uOperation < VMMGC_DO_TESTCASE_TRAP_LAST)
return vmmGCTest(pVM, uOperation, uArg);
return VERR_INVALID_PARAMETER;
}
}
/**
* Internal RC logger worker: Flush logger.
*
* @returns VINF_SUCCESS.
* @param pLogger The logger instance to flush.
* @remark This function must be exported!
*/
VMMRCDECL(int) vmmGCLoggerFlush(PRTLOGGERRC pLogger)
{
PVM pVM = &g_VM;
NOREF(pLogger);
if (pVM->vmm.s.fRCLoggerFlushingDisabled)
return VINF_SUCCESS; /* fail quietly. */
return VMMRZCallRing3NoCpu(pVM, VMMCALLRING3_VMM_LOGGER_FLUSH, 0);
}
/**
* Flush logger if almost full.
*
* @param pVM Pointer to the VM.
*/
VMMRCDECL(void) VMMGCLogFlushIfFull(PVM pVM)
{
if ( pVM->vmm.s.pRCLoggerRC
&& pVM->vmm.s.pRCLoggerRC->offScratch >= (sizeof(pVM->vmm.s.pRCLoggerRC->achScratch)*3/4))
{
if (pVM->vmm.s.fRCLoggerFlushingDisabled)
return; /* fail quietly. */
VMMRZCallRing3NoCpu(pVM, VMMCALLRING3_VMM_LOGGER_FLUSH, 0);
}
}
/**
* Switches from guest context to host context.
*
* @param pVM Pointer to the VM.
* @param rc The status code.
*/
VMMRCDECL(void) VMMGCGuestToHost(PVM pVM, int rc)
{
pVM->vmm.s.pfnRCToHost(rc);
}
/**
* Calls the ring-0 host code.
*
* @param pVM Pointer to the VM.
*/
DECLASM(void) vmmRCProbeFireHelper(PVM pVM)
{
pVM->vmm.s.pfnRCToHost(VINF_VMM_CALL_TRACER);
}
/**
* Execute the trap testcase.
*
* There is some common code here, that's why we're collecting them
* like this. Odd numbered variation (uArg) are executed with write
* protection (WP) enabled.
*
* @returns VINF_SUCCESS if it was a testcase setup up to continue and did so successfully.
* @returns VERR_NOT_IMPLEMENTED if the testcase wasn't implemented.
* @returns VERR_GENERAL_FAILURE if the testcase continued when it shouldn't.
*
* @param pVM Pointer to the VM.
* @param uOperation The testcase.
* @param uArg The variation. See function description for odd / even details.
*
* @remark Careful with the trap 08 testcase and WP, it will triple
* fault the box if the TSS, the Trap8 TSS and the fault TSS
* GDTE are in pages which are read-only.
* See bottom of SELMR3Init().
*/
static int vmmGCTest(PVM pVM, unsigned uOperation, unsigned uArg)
{
/*
* Set up the testcase.
*/
#if 0
switch (uOperation)
{
default:
break;
}
#endif
/*
* Enable WP if odd variation.
*/
if (uArg & 1)
vmmGCEnableWP();
/*
* Execute the testcase.
*/
int rc = VERR_NOT_IMPLEMENTED;
switch (uOperation)
{
//case VMMGC_DO_TESTCASE_TRAP_0:
//case VMMGC_DO_TESTCASE_TRAP_1:
//case VMMGC_DO_TESTCASE_TRAP_2:
case VMMGC_DO_TESTCASE_TRAP_3:
{
if (uArg <= 1)
rc = vmmGCTestTrap3();
break;
}
//case VMMGC_DO_TESTCASE_TRAP_4:
//case VMMGC_DO_TESTCASE_TRAP_5:
//case VMMGC_DO_TESTCASE_TRAP_6:
//case VMMGC_DO_TESTCASE_TRAP_7:
case VMMGC_DO_TESTCASE_TRAP_8:
{
#ifndef DEBUG_bird /** @todo dynamic check that this won't triple fault... */
if (uArg & 1)
break;
#endif
if (uArg <= 1)
rc = vmmGCTestTrap8();
break;
}
//VMMGC_DO_TESTCASE_TRAP_9,
//VMMGC_DO_TESTCASE_TRAP_0A,
//VMMGC_DO_TESTCASE_TRAP_0B,
//VMMGC_DO_TESTCASE_TRAP_0C,
case VMMGC_DO_TESTCASE_TRAP_0D:
{
if (uArg <= 1)
rc = vmmGCTestTrap0d();
break;
}
case VMMGC_DO_TESTCASE_TRAP_0E:
{
if (uArg <= 1)
rc = vmmGCTestTrap0e();
else if (uArg == 2 || uArg == 4)
{
/*
* Test the use of a temporary #PF handler.
*/
rc = TRPMGCSetTempHandler(pVM, X86_XCPT_PF, uArg != 4 ? vmmGCTestTmpPFHandler : vmmGCTestTmpPFHandlerCorruptFS);
if (RT_SUCCESS(rc))
{
rc = vmmGCTestTrap0e();
/* in case it didn't fire. */
int rc2 = TRPMGCSetTempHandler(pVM, X86_XCPT_PF, NULL);
if (RT_FAILURE(rc2) && RT_SUCCESS(rc))
rc = rc2;
}
}
break;
}
}
/*
* Re-enable WP.
*/
if (uArg & 1)
vmmGCDisableWP();
return rc;
}
/**
* Temporary \#PF trap handler for the \#PF test case.
*
* @returns VBox status code (appropriate for GC return).
* In this context RT_SUCCESS means to restart the instruction.
* @param pVM Pointer to the VM.
* @param pRegFrame Trap register frame.
*/
static DECLCALLBACK(int) vmmGCTestTmpPFHandler(PVM pVM, PCPUMCTXCORE pRegFrame)
{
if (pRegFrame->eip == (uintptr_t)vmmGCTestTrap0e_FaultEIP)
{
pRegFrame->eip = (uintptr_t)vmmGCTestTrap0e_ResumeEIP;
return VINF_SUCCESS;
}
NOREF(pVM);
return VERR_INTERNAL_ERROR;
}
/**
* Temporary \#PF trap handler for the \#PF test case, this one messes up the fs
* selector.
*
* @returns VBox status code (appropriate for GC return).
* In this context RT_SUCCESS means to restart the instruction.
* @param pVM Pointer to the VM.
* @param pRegFrame Trap register frame.
*/
static DECLCALLBACK(int) vmmGCTestTmpPFHandlerCorruptFS(PVM pVM, PCPUMCTXCORE pRegFrame)
{
int rc = vmmGCTestTmpPFHandler(pVM, pRegFrame);
pRegFrame->fs.Sel = 0x30;
return rc;
}<|fim▁end|> | /* todo */
switch (uOperation) |
<|file_name|>performancetiming.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use dom::bindings::codegen::Bindings::PerformanceTimingBinding;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::window::Window;
#[deriving(Encodable)]
pub struct PerformanceTiming {
reflector_: Reflector,
navigationStart: u64,
navigationStartPrecise: f64,
}
impl PerformanceTiming {
pub fn new_inherited(navStart: u64, navStartPrecise: f64)
-> PerformanceTiming {
PerformanceTiming {
reflector_: Reflector::new(),
navigationStart: navStart,
navigationStartPrecise: navStartPrecise,
}
}
pub fn new(window: &JSRef<Window>) -> Temporary<PerformanceTiming> {
let timing = PerformanceTiming::new_inherited(window.navigationStart,
window.navigationStartPrecise);
reflect_dom_object(box timing, window, PerformanceTimingBinding::Wrap)
}
}
pub trait PerformanceTimingMethods {
fn NavigationStart(&self) -> u64;
fn NavigationStartPrecise(&self) -> f64;
}
impl<'a> PerformanceTimingMethods for JSRef<'a, PerformanceTiming> {
fn NavigationStart(&self) -> u64 {
self.navigationStart
}
fn NavigationStartPrecise(&self) -> f64 {
self.navigationStartPrecise
}
}
impl Reflectable for PerformanceTiming {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}<|fim▁end|> | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
<|file_name|>sccache_cargo.rs<|end_file_name|><|fim▁begin|>//! System tests for compiling Rust code with cargo.
//!
//! Any copyright is dedicated to the Public Domain.
//! http://creativecommons.org/publicdomain/zero/1.0/
#![deny(rust_2018_idioms)]
#[cfg(all(not(target_os = "windows"), not(target_os = "macos")))]
#[macro_use]
extern crate log;
/// Test that building a simple Rust crate with cargo using sccache results in a cache hit
/// when built a second time.
#[test]
#[cfg(all(not(target_os = "windows"), not(target_os = "macos")))]
fn test_rust_cargo() {
test_rust_cargo_cmd("check");
test_rust_cargo_cmd("build");
}
#[cfg(all(not(target_os = "windows"), not(target_os = "macos")))]
fn test_rust_cargo_cmd(cmd: &str) {
use assert_cmd::prelude::*;
use chrono::Local;
use predicates::prelude::*;
use std::env;
use std::ffi::OsStr;
use std::fs;
use std::io::Write;
use std::path::Path;
use std::process::{Command, Stdio};
fn sccache_command() -> Command {
Command::new(assert_cmd::cargo::cargo_bin("sccache"))
}
fn stop() {
trace!("sccache --stop-server");
drop(
sccache_command()
.arg("--stop-server")
.stdout(Stdio::null())
.stderr(Stdio::null())
.status(),
);
}
drop(
env_logger::Builder::new()
.format(|f, record| {
write!(
f,
"{} [{}] - {}",
Local::now().format("%Y-%m-%dT%H:%M:%S%.3f"),
record.level(),
record.args()
)
})
.parse_env("RUST_LOG")
.try_init(),
);
let cargo = env!("CARGO");
debug!("cargo: {}", cargo);
let sccache = assert_cmd::cargo::cargo_bin("sccache");
debug!("sccache: {:?}", sccache);
let crate_dir = Path::new(file!()).parent().unwrap().join("test-crate");
// Ensure there's no existing sccache server running.
stop();
// Create a temp directory to use for the disk cache.
let tempdir = tempfile::Builder::new()
.prefix("sccache_test_rust_cargo")
.tempdir()
.unwrap();
let cache_dir = tempdir.path().join("cache");
fs::create_dir(&cache_dir).unwrap();
let cargo_dir = tempdir.path().join("cargo");
fs::create_dir(&cargo_dir).unwrap();
// Start a new sccache server.
trace!("sccache --start-server");
sccache_command()
.arg("--start-server")
.env("SCCACHE_DIR", &cache_dir)
.assert()
.success();
// `cargo clean` first, just to be sure there's no leftover build objects.<|fim▁hole|> ("RUSTC_WRAPPER", sccache.as_ref()),
("CARGO_TARGET_DIR", cargo_dir.as_ref()),
// Explicitly disable incremental compilation because sccache is unable
// to cache it at the time of writing.
("CARGO_INCREMENTAL", OsStr::new("0")),
];
Command::new(&cargo)
.args(&["clean"])
.envs(envs.iter().copied())
.current_dir(&crate_dir)
.assert()
.success();
// Now build the crate with cargo.
Command::new(&cargo)
.args(&[cmd, "--color=never"])
.envs(envs.iter().copied())
.current_dir(&crate_dir)
.assert()
.stderr(predicates::str::contains("\x1b[").from_utf8().not())
.success();
// Clean it so we can build it again.
Command::new(&cargo)
.args(&["clean"])
.envs(envs.iter().copied())
.current_dir(&crate_dir)
.assert()
.success();
Command::new(&cargo)
.args(&[cmd, "--color=always"])
.envs(envs.iter().copied())
.current_dir(&crate_dir)
.assert()
.stderr(predicates::str::contains("\x1b[").from_utf8())
.success();
// Now get the stats and ensure that we had a cache hit for the second build.
// The test crate has one dependency (itoa) so there are two separate
// compilations.
trace!("sccache --show-stats");
sccache_command()
.args(&["--show-stats", "--stats-format=json"])
.assert()
.stdout(predicates::str::contains(r#""cache_hits":{"counts":{"Rust":2}}"#).from_utf8())
.success();
stop();
}<|fim▁end|> | let envs = vec![ |
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Various utilities to glue JavaScript and the DOM implementation together.
use dom::bindings::codegen::PrototypeList;
use dom::bindings::codegen::PrototypeList::MAX_PROTO_CHAIN_LENGTH;
use dom::bindings::conversions::{native_from_reflector_jsmanaged, is_dom_class};
use dom::bindings::error::throw_type_error;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{Temporary, Root};
use dom::browsercontext;
use dom::window;
use libc;
use libc::c_uint;
use std::boxed;
use std::cell::Cell;
use std::ffi::CString;
use std::ptr;
use js::glue::UnwrapObject;
use js::glue::{IsWrapper, RUST_JSID_IS_INT, RUST_JSID_TO_INT};
use js::jsapi::{JS_AlreadyHasOwnProperty, JS_NewFunction};
use js::jsapi::{JS_DefineProperties, JS_ForwardGetPropertyTo};
use js::jsapi::{JS_GetClass, JS_LinkConstructorAndPrototype, JS_GetStringCharsAndLength};
use js::jsapi::JSHandleObject;
use js::jsapi::JS_GetFunctionObject;
use js::jsapi::{JS_HasPropertyById, JS_GetPrototype};
use js::jsapi::{JS_GetProperty, JS_HasProperty};
use js::jsapi::{JS_DefineFunctions, JS_DefineProperty};
use js::jsapi::{JS_ValueToString, JS_GetReservedSlot, JS_SetReservedSlot};
use js::jsapi::{JSContext, JSObject, JSBool, jsid, JSClass};
use js::jsapi::{JSFunctionSpec, JSPropertySpec};
use js::jsapi::{JS_NewGlobalObject, JS_InitStandardClasses};
use js::jsapi::JS_DeletePropertyById2;
use js::jsfriendapi::JS_ObjectToOuterObject;
use js::jsfriendapi::bindgen::JS_NewObjectWithUniqueType;
use js::jsval::JSVal;
use js::jsval::{PrivateValue, ObjectValue, NullValue};
use js::jsval::{Int32Value, UInt32Value, DoubleValue, BooleanValue, UndefinedValue};
use js::rust::with_compartment;
use js::{JSPROP_ENUMERATE, JSPROP_READONLY, JSPROP_PERMANENT};
use js::JSFUN_CONSTRUCTOR;
use js;
/// Proxy handler for a WindowProxy. Newtype over a raw pointer to the
/// underlying proxy handler, which is opaque to Rust (obtained from
/// `browsercontext::new_window_proxy_handler`).
pub struct WindowProxyHandler(pub *const libc::c_void);
#[allow(raw_pointer_derive)]
#[jstraceable]
/// Static data associated with a global object. Created once per global
/// (see `GlobalStaticData::new`).
pub struct GlobalStaticData {
    /// The WindowProxy proxy handler for this global.
    pub windowproxy_handler: WindowProxyHandler,
}
impl GlobalStaticData {
    /// Creates a new GlobalStaticData, allocating a fresh WindowProxy
    /// proxy handler for the global.
    pub fn new() -> GlobalStaticData {
        let handler = browsercontext::new_window_proxy_handler();
        GlobalStaticData {
            windowproxy_handler: handler,
        }
    }
}
// NOTE: This is baked into the Ion JIT as 0 in codegen for LGetDOMProperty and
// LSetDOMProperty. Those constants need to be changed accordingly if this value
// changes.
/// Reserved slot on interface prototype objects holding a `*const DOMClass`
/// (stored as a private value in `do_create_interface_objects`).
const DOM_PROTO_INSTANCE_CLASS_SLOT: u32 = 0;

/// The index of the slot that contains a reference to the ProtoOrIfaceArray.
// All DOM globals must have a slot at DOM_PROTOTYPE_SLOT; it is populated by
// `initialize_global` and read back by `get_proto_or_iface_array`.
pub const DOM_PROTOTYPE_SLOT: u32 = js::JSCLASS_GLOBAL_SLOT_COUNT;

/// The flag set on the `JSClass`es for DOM global objects.
// NOTE: This is baked into the Ion JIT as 0 in codegen for LGetDOMProperty and
// LSetDOMProperty. Those constants need to be changed accordingly if this value
// changes.
pub const JSCLASS_DOM_GLOBAL: u32 = js::JSCLASS_USERBIT1;
/// Representation of an IDL constant value, covering every WebIDL constant
/// type convertible to a `JSVal` (see `ConstantSpec::get_value`).
#[derive(Clone)]
pub enum ConstantVal {
    /// `long` constant.
    IntVal(i32),
    /// `unsigned long` constant.
    UintVal(u32),
    /// `double` constant.
    DoubleVal(f64),
    /// `boolean` constant.
    BoolVal(bool),
    /// `null` constant.
    NullVal,
}
/// Representation of an IDL constant: a name/value pair defined on interface
/// objects and interface prototype objects (see `define_constants`).
#[derive(Clone)]
pub struct ConstantSpec {
    /// name of the constant.
    // NOTE: passed to JSAPI as a C string pointer in `define_constants`;
    // presumably NUL-terminated by the code generator — not enforced here.
    pub name: &'static [u8],
    /// value of the constant.
    pub value: ConstantVal
}
impl ConstantSpec {
    /// Converts this constant's value into the equivalent `JSVal`.
    pub fn get_value(&self) -> JSVal {
        match self.value {
            ConstantVal::IntVal(value) => Int32Value(value),
            ConstantVal::UintVal(value) => UInt32Value(value),
            ConstantVal::DoubleVal(value) => DoubleValue(value),
            ConstantVal::BoolVal(value) => BooleanValue(value),
            ConstantVal::NullVal => NullValue(),
        }
    }
}
/// Helper structure for cross-origin wrappers for DOM binding objects.
pub struct NativePropertyHooks {
    /// The property arrays for this interface.
    pub native_properties: &'static NativeProperties,
    /// The NativePropertyHooks instance for the parent interface, if any
    /// (`None` at the root of the inheritance chain).
    pub proto_hooks: Option<&'static NativePropertyHooks>,
}
/// The struct that holds inheritance information for DOM object reflectors.
/// A pointer to this is stashed in `DOM_PROTO_INSTANCE_CLASS_SLOT` on the
/// interface prototype object (see `do_create_interface_objects`).
#[derive(Copy)]
pub struct DOMClass {
    /// A list of interfaces that this object implements, in order of decreasing
    /// derivedness.
    pub interface_chain: [PrototypeList::ID; MAX_PROTO_CHAIN_LENGTH],
    /// The NativePropertyHooks for the interface associated with this class.
    pub native_hooks: &'static NativePropertyHooks,
}
// Contains only 'static references and plain data, so sharing across
// threads introduces no interior mutability.
unsafe impl Sync for DOMClass {}
/// The JSClass used for DOM object reflectors: the SpiderMonkey class plus
/// the DOM-specific metadata, laid out so the `base` field comes first.
#[derive(Copy)]
pub struct DOMJSClass {
    /// The actual JSClass.
    pub base: js::Class,
    /// Associated data for DOM object reflectors.
    pub dom_class: DOMClass
}
// Same justification as for DOMClass: immutable static data only.
unsafe impl Sync for DOMJSClass {}
/// Returns the ProtoOrIfaceArray for the given global object.
/// Fails if `global` is not a DOM global object.
pub fn get_proto_or_iface_array(global: *mut JSObject) -> *mut *mut JSObject {
    unsafe {
        // DOM globals are identified by the JSCLASS_DOM_GLOBAL flag on
        // their JSClass.
        assert!(((*JS_GetClass(global)).flags & JSCLASS_DOM_GLOBAL) != 0);
        // The array pointer was stored as a private value in the reserved
        // slot by `initialize_global`.
        JS_GetReservedSlot(global, DOM_PROTOTYPE_SLOT).to_private() as *mut *mut JSObject
    }
}
/// Contains references to lists of methods, attributes, and constants for a
/// given interface. Each list is `None` when the interface defines no
/// members of that kind.
pub struct NativeProperties {
    /// Instance methods for the interface.
    pub methods: Option<&'static [JSFunctionSpec]>,
    /// Instance attributes for the interface.
    pub attrs: Option<&'static [JSPropertySpec]>,
    /// Constants for the interface.
    pub consts: Option<&'static [ConstantSpec]>,
    /// Static methods for the interface.
    pub static_methods: Option<&'static [JSFunctionSpec]>,
    /// Static attributes for the interface.
    pub static_attrs: Option<&'static [JSPropertySpec]>,
}
// Static, immutable spec tables only; safe to share between threads.
unsafe impl Sync for NativeProperties {}
/// A JSNative that cannot be null. (Bare `fn` pointers have no null value,
/// unlike the `Option<fn ...>` form JSAPI normally uses.)
pub type NonNullJSNative =
    unsafe extern "C" fn (arg1: *mut JSContext, arg2: c_uint, arg3: *mut JSVal) -> JSBool;
/// Creates the *interface prototype object* and, when `constructor` is
/// supplied, the *interface object* as well. Returns the prototype object.
/// Fails on JSAPI failure.
pub fn do_create_interface_objects(cx: *mut JSContext, global: *mut JSObject,
                                   receiver: *mut JSObject,
                                   proto_proto: *mut JSObject,
                                   proto_class: &'static JSClass,
                                   constructor: Option<(NonNullJSNative, &'static str, u32)>,
                                   dom_class: *const DOMClass,
                                   members: &'static NativeProperties)
                                   -> *mut JSObject {
    // Build the prototype first; the interface object (if any) links to it.
    let proto = create_interface_prototype_object(cx, global, proto_proto,
                                                  proto_class, members);

    // Stash the DOMClass pointer on the prototype so it can be recovered
    // from DOM_PROTO_INSTANCE_CLASS_SLOT later.
    unsafe {
        JS_SetReservedSlot(proto, DOM_PROTO_INSTANCE_CLASS_SLOT,
                           PrivateValue(dom_class as *const libc::c_void));
    }

    if let Some((native, name, nargs)) = constructor {
        // The CString must outlive the JSAPI calls that consume the pointer.
        let ctor_name = CString::new(name).unwrap();
        create_interface_object(cx, global, receiver, native, nargs, proto,
                                members, ctor_name.as_ptr());
    }

    proto
}
/// Creates the *interface object* (the constructor function), populates its
/// static members, links it to `proto`, and defines it on `receiver` under
/// `name` if not already present.
/// Fails on JSAPI failure.
fn create_interface_object(cx: *mut JSContext, global: *mut JSObject,
                           receiver: *mut JSObject,
                           constructor_native: NonNullJSNative,
                           ctor_nargs: u32, proto: *mut JSObject,
                           members: &'static NativeProperties,
                           name: *const libc::c_char) {
    unsafe {
        let function = JS_NewFunction(cx, Some(constructor_native), ctor_nargs,
                                      JSFUN_CONSTRUCTOR, global, name);
        assert!(!function.is_null());
        let interface_object = JS_GetFunctionObject(function);
        assert!(!interface_object.is_null());

        // Static members live on the interface object itself, not the
        // prototype.
        if let Some(static_methods) = members.static_methods {
            define_methods(cx, interface_object, static_methods);
        }
        if let Some(static_properties) = members.static_attrs {
            define_properties(cx, interface_object, static_properties);
        }
        if let Some(constants) = members.consts {
            define_constants(cx, interface_object, constants);
        }

        if !proto.is_null() {
            assert!(JS_LinkConstructorAndPrototype(cx, interface_object, proto) != 0);
        }

        // Only define the interface object on the receiver when it does not
        // already have an own property with this name.
        let mut found = 0;
        assert!(JS_AlreadyHasOwnProperty(cx, receiver, name, &mut found) != 0);
        if found == 0 {
            assert!(JS_DefineProperty(cx, receiver, name,
                                      ObjectValue(&*interface_object),
                                      None, None, 0) != 0);
        }
    }
}
/// Defines constants on `obj`.
/// Fails on JSAPI failure.
fn define_constants(cx: *mut JSContext, obj: *mut JSObject,
                    constants: &'static [ConstantSpec]) {
    // Constants are defined as enumerable, read-only, and non-configurable.
    let flags = JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT;
    for spec in constants {
        unsafe {
            assert!(JS_DefineProperty(cx,
                                      obj,
                                      spec.name.as_ptr() as *const libc::c_char,
                                      spec.get_value(),
                                      None,
                                      None,
                                      flags) != 0);
        }
    }
}
/// Defines methods on `obj`. The last entry of `methods` must contain zeroed
/// memory (JSAPI uses a zero-filled sentinel to terminate the array).
/// Fails on JSAPI failure.
fn define_methods(cx: *mut JSContext, obj: *mut JSObject,
                  methods: &'static [JSFunctionSpec]) {
    unsafe {
        assert!(JS_DefineFunctions(cx, obj, methods.as_ptr()) != 0);
    }
}
/// Defines attributes on `obj`. The last entry of `properties` must contain
/// zeroed memory (JSAPI uses a zero-filled sentinel to terminate the array).
/// Fails on JSAPI failure.
fn define_properties(cx: *mut JSContext, obj: *mut JSObject,
                     properties: &'static [JSPropertySpec]) {
    unsafe {
        assert!(JS_DefineProperties(cx, obj, properties.as_ptr()) != 0);
    }
}
/// Creates the *interface prototype object*: a fresh object of class
/// `proto_class` whose prototype is `parent_proto`, populated with the
/// interface's (non-static) methods, attributes and constants.
/// Fails on JSAPI failure.
fn create_interface_prototype_object(cx: *mut JSContext, global: *mut JSObject,
                                     parent_proto: *mut JSObject,
                                     proto_class: &'static JSClass,
                                     members: &'static NativeProperties)
                                     -> *mut JSObject {
    unsafe {
        let our_proto = JS_NewObjectWithUniqueType(cx, proto_class,
                                                   &*parent_proto, &*global);
        assert!(!our_proto.is_null());
        // Regular (prototype) members, as opposed to the statics that
        // create_interface_object puts on the interface object.
        if let Some(methods) = members.methods {
            define_methods(cx, our_proto, methods);
        }
        if let Some(properties) = members.attrs {
            define_properties(cx, our_proto, properties);
        }
        if let Some(constants) = members.consts {
            define_constants(cx, our_proto, constants);
        }
        return our_proto;
    }
}
/// A throwing constructor, for those interfaces that have neither
/// `NoInterfaceObject` nor `Constructor`.
/// Sets a pending TypeError on `cx` and returns 0 (false) so JSAPI treats
/// the call as failed and propagates the exception.
pub unsafe extern fn throwing_constructor(cx: *mut JSContext, _argc: c_uint,
                                          _vp: *mut JSVal) -> JSBool {
    throw_type_error(cx, "Illegal constructor.");
    return 0;
}
/// Per-global cache with one slot per `PrototypeList::ID`, holding that
/// interface's prototype object or interface object (initially null; see
/// `initialize_global`).
type ProtoOrIfaceArray = [*mut JSObject; PrototypeList::ID::Count as usize];
/// Construct and cache the ProtoOrIfaceArray for the given global.
/// Fails if the argument is not a DOM global.
pub fn initialize_global(global: *mut JSObject) {
    // One slot per interface, all initially null.
    let proto_array: Box<ProtoOrIfaceArray> = box ()
        ([0 as *mut JSObject; PrototypeList::ID::Count as usize]);
    unsafe {
        // Only DOM globals carry the reserved slot written below.
        assert!(((*JS_GetClass(global)).flags & JSCLASS_DOM_GLOBAL) != 0);
        // Ownership of the boxed array is transferred into the JS object's
        // reserved slot; `finalize_global` reconstructs the Box to free it.
        let box_ = boxed::into_raw(proto_array);
        JS_SetReservedSlot(global,
                           DOM_PROTOTYPE_SLOT,
                           PrivateValue(box_ as *const libc::c_void));
    }
}
/// A trait to provide access to the `Reflector` for a DOM object.
/// Implemented by every native DOM type that is paired with a JS reflector.
pub trait Reflectable {
    /// Returns the receiver's reflector.
    fn reflector<'a>(&'a self) -> &'a Reflector;
}
/// Create the reflector for a new DOM object and yield ownership to the
/// reflector.
/// `wrap_fn` is the interface-specific wrap function; it consumes `obj`,
/// creates the JS reflector in `global`'s context, and returns a rooted
/// `Temporary<T>` handle to the pair.
pub fn reflect_dom_object<T: Reflectable>
        (obj:     Box<T>,
         global:  GlobalRef,
         wrap_fn: extern "Rust" fn(*mut JSContext, GlobalRef, Box<T>) -> Temporary<T>)
         -> Temporary<T> {
    wrap_fn(global.get_cx(), global, obj)
}
/// A struct to store a reference to the reflector of a DOM object.
// Allowing unused_attribute because the lint sometimes doesn't run in order
#[allow(raw_pointer_derive, unrooted_must_root, unused_attributes)]
#[derive(PartialEq)]
#[must_root]
#[servo_lang = "reflector"]
// If you're renaming or moving this field, update the path in plugins::reflector as well
pub struct Reflector {
    /// Pointer to the JS reflector object; null until `set_jsobject` is
    /// called. `Cell` gives the interior mutability needed for that one-time
    /// initialization through a shared reference.
    object: Cell<*mut JSObject>,
}
impl Reflector {
    /// Get the reflector (null if `set_jsobject` has not run yet).
    #[inline]
    pub fn get_jsobject(&self) -> *mut JSObject {
        self.object.get()
    }

    /// Initialize the reflector. (May be called only once.)
    pub fn set_jsobject(&self, object: *mut JSObject) {
        // Enforce exactly-once initialization with a non-null object.
        assert!(self.object.get().is_null());
        assert!(!object.is_null());
        self.object.set(object);
    }

    /// Return a pointer to the memory location at which the JS reflector
    /// object is stored. Used by Temporary values to root the reflector, as
    /// required by the JSAPI rooting APIs.
    pub unsafe fn rootable(&self) -> *mut *mut JSObject {
        // Reaches inside the Cell to expose the slot itself; callers must
        // uphold the JSAPI rooting discipline, hence `unsafe`.
        self.object.as_unsafe_cell().get()
    }

    /// Create an uninitialized `Reflector` (null reflector pointer).
    pub fn new() -> Reflector {
        Reflector {
            object: Cell::new(ptr::null_mut()),
        }
    }
}
/// Gets the property `id` on `proxy`'s prototype. If it exists, `*found` is
/// set to true and `*vp` to the value, otherwise `*found` is set to false.
///
/// Returns false on JSAPI failure.
pub fn get_property_on_prototype(cx: *mut JSContext, proxy: *mut JSObject,
id: jsid, found: *mut bool, vp: *mut JSVal)
-> bool {
unsafe {
//let proto = GetObjectProto(proxy);
let proto = JS_GetPrototype(proxy);
if proto.is_null() {
*found = false;
return true;
}
let mut has_property = 0;<|fim▁hole|> }
*found = has_property != 0;
let no_output = vp.is_null();
if has_property == 0 || no_output {
return true;
}
JS_ForwardGetPropertyTo(cx, proto, id, proxy, vp) != 0
}
}
/// Get an array index from the given `jsid`. Returns `None` if the given
/// `jsid` is not an integer.
pub fn get_array_index_from_id(_cx: *mut JSContext, id: jsid) -> Option<u32> {
    unsafe {
        // Only integer jsids are treated as array indices.
        if RUST_JSID_IS_INT(id) != 0 {
            return Some(RUST_JSID_TO_INT(id) as u32);
        }
        return None;
    }
    // NOTE: disabled port of the C++ atom-based fallback, kept for reference.
    // if id is length atom, -1, otherwise
    /*return if JSID_IS_ATOM(id) {
        let atom = JSID_TO_ATOM(id);
        //let s = *GetAtomChars(id);
        if s > 'a' && s < 'z' {
            return -1;
        }

        let i = 0;
        let str = AtomToLinearString(JSID_TO_ATOM(id));
        return if StringIsArray(str, &mut i) != 0 { i } else { -1 }
    } else {
        IdToInt32(cx, id);
    }*/
}
/// Find the index of a string given by `v` in `values`.
/// Returns `Err(())` on JSAPI failure (there is a pending exception), and
/// `Ok(None)` if there was no matching string.
pub fn find_enum_string_index(cx: *mut JSContext,
                              v: JSVal,
                              values: &[&'static str])
                              -> Result<Option<usize>, ()> {
    unsafe {
        // Coerce the JS value to a string first; this can run JS and fail.
        let jsstr = JS_ValueToString(cx, v);
        if jsstr.is_null() {
            return Err(());
        }

        let mut length = 0;
        let chars = JS_GetStringCharsAndLength(cx, jsstr, &mut length);
        if chars.is_null() {
            return Err(());
        }

        // Compare the UTF-16 units of the JS string against the bytes of
        // each candidate. NOTE(review): the byte-to-u16 comparison is only
        // correct when the candidate strings are ASCII/Latin-1, which WebIDL
        // enum values presumably are.
        Ok(values.iter().position(|value| {
            value.len() == length as usize &&
            range(0, length as usize).all(|j| {
                value.as_bytes()[j] as u16 == *chars.offset(j as isize)
            })
        }))
    }
}
/// Returns whether `obj` is a platform object
/// http://heycam.github.io/webidl/#dfn-platform-object
pub fn is_platform_object(obj: *mut JSObject) -> bool {
    unsafe {
        // Fast-path the common case: the object itself is a DOM object.
        let mut clasp = JS_GetClass(obj);
        if is_dom_class(&*clasp) {
            return true;
        }
        // Now for simplicity check for security wrappers before anything else
        if IsWrapper(obj) == 1 {
            let unwrapped_obj = UnwrapObject(obj, /* stopAtOuter = */ 0, ptr::null_mut());
            if unwrapped_obj.is_null() {
                return false;
            }
            // Re-read the class from the *unwrapped* object. The previous
            // code read it from the still-wrapped `obj` again, which made
            // the unwrap useless and the check below always fail for
            // wrapped DOM objects.
            clasp = JS_GetClass(unwrapped_obj);
        }
        // TODO also check if JS_IsArrayBufferObject
        return is_dom_class(&*clasp);
    }
}
/// Get the property with name `property` from `object`.
/// Returns `Err(())` on JSAPI failure (there is a pending exception), and
/// `Ok(None)` if there was no property with the given name (or `object` is
/// null, which stands for an absent dictionary).
pub fn get_dictionary_property(cx: *mut JSContext,
                               object: *mut JSObject,
                               property: &str) -> Result<Option<JSVal>, ()> {
    use std::ffi::CString;
    // Thin safe-ish wrappers converting the JSAPI JSBool results to `bool`.
    fn has_property(cx: *mut JSContext, object: *mut JSObject, property: &CString,
                    found: &mut JSBool) -> bool {
        unsafe {
            JS_HasProperty(cx, object, property.as_ptr(), found) != 0
        }
    }
    fn get_property(cx: *mut JSContext, object: *mut JSObject, property: &CString,
                    value: &mut JSVal) -> bool {
        unsafe {
            JS_GetProperty(cx, object, property.as_ptr(), value) != 0
        }
    }

    // The property name must not contain interior NULs.
    let property = CString::new(property).unwrap();
    if object.is_null() {
        return Ok(None);
    }

    // Probe for existence first so that an absent property yields Ok(None)
    // rather than `undefined`.
    let mut found: JSBool = 0;
    if !has_property(cx, object, &property, &mut found) {
        return Err(());
    }

    if found == 0 {
        return Ok(None);
    }

    let mut value = NullValue();
    if !get_property(cx, object, &property, &mut value) {
        return Err(());
    }

    Ok(Some(value))
}
/// Returns whether `proxy` has a property `id` on its prototype.
/// Note: a JSAPI failure inside the lookup also yields `true` (the `!... ||`
/// below), so callers treat "couldn't tell" the same as "present".
pub fn has_property_on_prototype(cx: *mut JSContext, proxy: *mut JSObject,
                                 id: jsid) -> bool {
    //  MOZ_ASSERT(js::IsProxy(proxy) && js::GetProxyHandler(proxy) == handler);
    // Passing a null `vp` asks only for existence, not the value.
    let mut found = false;
    return !get_property_on_prototype(cx, proxy, id, &mut found, ptr::null_mut()) || found;
}
/// Create a DOM global object with the given class, initialize the standard
/// JS classes inside its compartment, and cache its ProtoOrIfaceArray.
/// Returns null on JSAPI failure.
pub fn create_dom_global(cx: *mut JSContext, class: *const JSClass)
                         -> *mut JSObject {
    unsafe {
        let obj = JS_NewGlobalObject(cx, class, ptr::null_mut());
        if obj.is_null() {
            return ptr::null_mut();
        }
        // Standard classes must be set up while entered into the new
        // global's compartment.
        with_compartment(cx, obj, || {
            JS_InitStandardClasses(cx, obj);
        });
        initialize_global(obj);
        obj
    }
}
/// Drop the resources held by reserved slots of a global object.
/// Reconstructs the Box stored by `initialize_global` so the
/// ProtoOrIfaceArray is freed when the binding goes out of scope.
pub unsafe fn finalize_global(obj: *mut JSObject) {
    let _: Box<ProtoOrIfaceArray> =
        Box::from_raw(get_proto_or_iface_array(obj) as *mut ProtoOrIfaceArray);
}
/// Callback to outerize windows when wrapping.
/// Delegates to JSAPI to swap an inner window object for its outer object.
pub unsafe extern fn wrap_for_same_compartment(cx: *mut JSContext, obj: *mut JSObject) -> *mut JSObject {
    JS_ObjectToOuterObject(cx, obj)
}
/// Callback to outerize windows before wrapping.
/// The destination scope and wrap flags are ignored; only the outer-object
/// substitution matters here.
pub unsafe extern fn pre_wrap(cx: *mut JSContext, _scope: *mut JSObject,
                              obj: *mut JSObject, _flags: c_uint) -> *mut JSObject {
    JS_ObjectToOuterObject(cx, obj)
}
/// Callback to outerize windows: given a handle to a (global) window
/// reflector, return the window proxy of its browser context.
pub extern fn outerize_global(_cx: *mut JSContext, obj: JSHandleObject) -> *mut JSObject {
    unsafe {
        debug!("outerizing");
        // Dereference the handle to get the raw reflector object.
        let obj = *obj.unnamed_field1;
        // Recover the native Window behind the reflector; panics if `obj`
        // is not a Window reflector.
        let win: Root<window::Window> = native_from_reflector_jsmanaged(obj).unwrap().root();
        // FIXME(https://github.com/rust-lang/rust/issues/23338)
        let win = win.r();
        let context = win.browser_context();
        context.as_ref().unwrap().window_proxy()
    }
}
/// Deletes the property `id` from `object`.
/// Returns false on JSAPI failure; otherwise stores the deletion result
/// (as reported by JSAPI in `value`) into `bp` and returns true.
pub unsafe fn delete_property_by_id(cx: *mut JSContext, object: *mut JSObject,
                                    id: jsid, bp: &mut bool) -> bool {
    let mut value = UndefinedValue();
    if JS_DeletePropertyById2(cx, object, id, &mut value) == 0 {
        return false;
    }

    *bp = value.to_boolean();
    return true;
}
/// Results of `xml_name_type`.
#[derive(PartialEq)]
#[allow(missing_docs)]
pub enum XMLName {
    /// A valid qualified name (at most one colon, used as prefix separator).
    QName,
    /// A valid XML name that is not a qualified name.
    Name,
    /// Not a valid XML name at all.
    InvalidXMLName
}
/// Check if an element name is valid. See http://www.w3.org/TR/xml/#NT-Name
/// for details.
pub fn xml_name_type(name: &str) -> XMLName {
fn is_valid_start(c: char) -> bool {
match c {
':' |
'A' ... 'Z' |
'_' |
'a' ... 'z' |
'\u{C0}' ... '\u{D6}' |
'\u{D8}' ... '\u{F6}' |
'\u{F8}' ... '\u{2FF}' |
'\u{370}' ... '\u{37D}' |
'\u{37F}' ... '\u{1FFF}' |
'\u{200C}' ... '\u{200D}' |
'\u{2070}' ... '\u{218F}' |
'\u{2C00}' ... '\u{2FEF}' |
'\u{3001}' ... '\u{D7FF}' |
'\u{F900}' ... '\u{FDCF}' |
'\u{FDF0}' ... '\u{FFFD}' |
'\u{10000}' ... '\u{EFFFF}' => true,
_ => false,
}
}
fn is_valid_continuation(c: char) -> bool {
is_valid_start(c) || match c {
'-' |
'.' |
'0' ... '9' |
'\u{B7}' |
'\u{300}' ... '\u{36F}' |
'\u{203F}' ... '\u{2040}' => true,
_ => false,
}
}
let mut iter = name.chars();
let mut non_qname_colons = false;
let mut seen_colon = false;
match iter.next() {
None => return XMLName::InvalidXMLName,
Some(c) => {
if !is_valid_start(c) {
return XMLName::InvalidXMLName;
}
if c == ':' {
non_qname_colons = true;
}
}
}
for c in name.chars() {
if !is_valid_continuation(c) {
return XMLName::InvalidXMLName;
}
if c == ':' {
match seen_colon {
true => non_qname_colons = true,
false => seen_colon = true
}
}
}
match non_qname_colons {
false => XMLName::QName,
true => XMLName::Name
}
}<|fim▁end|> | if JS_HasPropertyById(cx, proto, id, &mut has_property) == 0 {
return false; |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![crate_name = "skia"]
#![crate_type = "rlib"]
#![feature(libc)]
extern crate libc;<|fim▁hole|> SkiaGrGLSharedSurfaceRef,
SkiaGrGLNativeContextRef,
SkiaSkNativeSharedGLContextCreate,
SkiaSkNativeSharedGLContextRetain,
SkiaSkNativeSharedGLContextRelease,
SkiaSkNativeSharedGLContextGetFBOID,
SkiaSkNativeSharedGLContextStealSurface,
SkiaSkNativeSharedGLContextGetGrContext,
SkiaSkNativeSharedGLContextMakeCurrent,
SkiaSkNativeSharedGLContextFlush,
};
pub mod skia;<|fim▁end|> |
pub use skia::{
SkiaSkNativeSharedGLContextRef,
SkiaGrContextRef, |
<|file_name|>validators.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
# Copyright (c) 2011-2015 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import re
from rest_framework.serializers import ValidationError
def name(value):
    '''Validates names of people, countries, and other named things.

    Raises ValidationError when *value* contains anything outside the
    allowed set: letters, whitespace, periods, hyphens, apostrophes, and
    a few accented vowels.
    '''
    allowed = re.compile(r'^[A-Za-z\s\.\-\'àèéìòóôù]+$')
    if not allowed.match(value):
        raise ValidationError('This field contains invalid characters.')
def address(value):
    '''Validates street addresses.

    Raises ValidationError when *value* contains anything other than word
    characters, whitespace, periods, hyphens, apostrophes, or a few
    accented vowels.
    '''
    allowed = re.compile(r'^[\w\s\.\-\'àèéìòóôù]+$')
    if not allowed.match(value):
        raise ValidationError('This field contains invalid characters.')
def numeric(value):
    '''Validates strings made up solely of digits and whitespace.

    Raises ValidationError for anything else.
    '''
    allowed = re.compile(r'^[\d\s]+$')
    if not allowed.match(value):
        raise ValidationError('This field can only contain numbers and spaces.')
def email(value):
    '''Loosely validates email addresses (local@domain.tld shape).

    Raises ValidationError when *value* does not match.
    '''
    allowed = re.compile(r'^[\w_.+-]+@[\w-]+\.[\w\-.]+$')
    if not allowed.match(value):
        raise ValidationError('This is an invalid email address.')
def phone_international(value):
    '''Loosely validates phone numbers: digits plus common punctuation
    (hyphens, spaces, plus signs, parentheses, and "x" for extensions).

    Raises ValidationError for anything else.
    '''
    allowed = re.compile(r'^[\d\-x\s\+\(\)]+$')
    if not allowed.match(value):
        raise ValidationError('This is an invalid phone number.')
def phone_domestic(value):
    '''Validates domestic phone numbers of the form "(NNN) NNN-NNNN"
    (parentheses optional) with an optional " xNNNNN" extension.

    Raises ValidationError when *value* does not match.
    '''
    allowed = re.compile(r'^\(?(\d{3})\)?\s(\d{3})-(\d{4})(\sx\d{1,5})?$')
    if not allowed.match(value):
        raise ValidationError('This is an invalid phone number.')
def nonempty(value):
    '''Requires a truthy (non-empty) value.

    Raises ValidationError for falsy values ("", None, 0, empty containers).
    '''
    if value:
        return
    raise ValidationError('This field is required.')
<|file_name|>route_lifecycle_reflector.js<|end_file_name|><|fim▁begin|>import { Type } from 'angular2/src/core/facade/lang';
import { CanActivate } from './lifecycle_annotations_impl';
import { reflector } from 'angular2/src/core/reflection/reflection';
// Returns whether `type` declares the lifecycle hook described by `e`.
// Non-`Type` values (e.g. plain objects) never have hooks.
export function hasLifecycleHook(e, type) {
    if (!(type instanceof Type))
        return false;
    // `e.name` is presumably the hook's method name (from the lifecycle
    // annotations); the hook exists if the type's prototype declares a
    // member of that name — TODO confirm against lifecycle_annotations_impl.
    return e.name in type.prototype;
}
export function getCanActivateHook(type) {
var annotations = reflector.annotations(type);
for (let i = 0; i < annotations.length; i += 1) {
let annotation = annotations[i];<|fim▁hole|> }
}
return null;
}
//# sourceMappingURL=route_lifecycle_reflector.js.map<|fim▁end|> | if (annotation instanceof CanActivate) {
return annotation.fn; |
<|file_name|>resource.js<|end_file_name|><|fim▁begin|>'use strict';
define([
'jquery', 'underscore', 'angular',
'angular-resource'
],
function ($, _, angular) {
var resourceApp = angular.module('resourceApp', [ 'ngResource' ]);
resourceApp.factory('NodeFactory', function ($resource) {
// $resource(url[, paramDefaults][, actions]);
return $resource('/document/node/:nid', {}, {
// save: { method: 'PUT', params: { nid: '@nid'}, transformResponse: function () {
// console.log('transformResponse', arguments);
// }, transformRequest: function () {
// console.log('transformRequest', arguments);
// } },
// get: { ... }
create: { method: 'POST' },
read: { method: 'GET', isArray: true },
update: { method: 'PUT', params: { nid: '@nid'} },
delete: { method: 'DELETE', params: { nid: '@nid'}, query: {} }
});
});
resourceApp.controller('ResourceDemoCtrl', function ($scope, NodeFactory) {
// We can retrieve a collection from the server
$scope.nodelist = NodeFactory.read(function readSuccess (value, responseHeaders) {
// console.log(nodelist);
// GET: /document/node
// server returns: [ {...}, {...}, ... ];<|fim▁hole|> var node = value[0];
node.ipAddr = '211.114.0.250';
node.alive = false;
delete node.os;
node.$update();
// PUT: /document/node/160
// server returns: { ... };
});
$scope.setCurr = function (node) {
// clone
$scope.nodeData = {
nid: node.nid,
ipAddr: node.ipAddr,
port: node.port,
alive: node.alive
};
// ref
// $scope.nodeData = node;
};
$scope.requestByFactory = function (method, node) {
switch (method) {
case 'create':
var newNode = NodeFactory.create(node, function (value, responseHeaders) {
$scope.nodelist.push(newNode);
}, function (httpResponse) {
alert(httpResponse.data);
});
// POST: /document/node
// server returns: { ... };
break;
case 'update':
var updated = NodeFactory.update(node, function (value, responseHeaders) {
var selected = _.findWhere($scope.nodelist, { nid: node.nid });
if (selected) {
updated = _.extend(selected, updated);
}
}, function (httpResponse) {
alert(httpResponse.data);
});
// PUT: /document/node/:nid
// server returns: { ... };
break;
case 'getOne':
$scope.nodeData = NodeFactory.get(node);
break;
case 'delete':
var nid = NodeFactory.delete(node, function (value, responseHeaders) {
var selected = _.findWhere($scope.nodelist, { nid: node.nid });
if (selected) {
$scope.nodelist.splice(_.indexOf($scope.nodelist, selected), 1);
}
}, function (httpResponse) {
alert(httpResponse.data);
});
// DELETE: /document/node/:nid
// server returns: nid
break;
}
};
$scope.requestByInstence = function (method, node) {
switch (method) {
case 'update':
node.$update(function (value, responseHeaders) {
node = value;
}, function (httpResponse) {
alert(httpResponse.data);
});
// PUT: /document/node/:nid
// server returns: { ... };
break;
// case 'getOne':
// node = node.$get(node);
// break;
case 'delete':
node.$delete(function (value, responseHeaders) {
$scope.nodelist.splice(_.indexOf($scope.nodelist, node), 1);
}, function (httpResponse) {
alert(httpResponse.data);
});
// DELETE: /document/node/:nid
// server returns: nid
break;
}
};
});
});<|fim▁end|> | |
<|file_name|>sequence_fr.ts<|end_file_name|><|fim▁begin|><TS language="fr" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Cliquer à droite pour modifier l'adresse ou l'étiquette</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Créer une nouvelle adresse</translation>
</message>
<message>
<source>&New</source>
<translation>&Nouveau</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copier l'adresse courante sélectionnée dans le presse-papiers</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Copier</translation>
</message>
<message>
<source>C&lose</source>
<translation>&Fermer</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Copier l'adresse</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Supprimer l'adresse actuellement sélectionnée de la liste</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exporter les données de l'onglet courant vers un fichier</translation>
</message>
<message>
<source>&Export</source>
<translation>&Exporter</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Supprimer</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Choisir l'adresse à laquelle envoyer des pièces</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Choisir l'adresse avec laquelle recevoir des pièces</translation>
</message>
<message>
<source>C&hoose</source>
<translation>C&hoisir</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Adresses d'envoi</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Adresses de réception</translation>
</message>
<message>
<source>These are your Sequence addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Voici vos adresses Sequence pour envoyer des paiements. Vérifiez toujours le montant et l'adresse du destinataire avant d'envoyer des pièces.</translation>
</message>
<message>
<source>These are your Sequence addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Voici vos adresses Sequence pour recevoir des paiements. Il est recommandé d'utiliser une nouvelle adresse de réception pour chaque transaction.</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Copier l'é&tiquette</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Modifier</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Exporter la liste d'adresses</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Valeurs séparées par des virgules (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>L'exportation a échoué</translation>
</message>
<message>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<translation>Une erreur est survenue lors de l'enregistrement de la liste d'adresses vers %1. Veuillez réessayer plus tard.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Étiquette</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>(no label)</source>
<translation>(aucune étiquette)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Fenêtre de dialogue de la phrase de passe</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Saisir la phrase de passe</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Nouvelle phrase de passe</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Répéter la phrase de passe</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Chiffrer le portefeuille</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Cette opération nécessite votre phrase de passe pour déverrouiller le portefeuille.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Déverrouiller le portefeuille</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Cette opération nécessite votre phrase de passe pour déchiffrer le portefeuille.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Déchiffrer le portefeuille</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Changer la phrase de passe</translation>
</message>
<message>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Saisir l’ancienne phrase de passe pour le portefeuille ainsi que la nouvelle.</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Confirmer le chiffrement du portefeuille</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR SEQUENCES</b>!</source>
<translation>Avertissement : si vous chiffrez votre portefeuille et perdez votre phrase de passe, vous <b>PERDREZ TOUS VOS SEQUENCES</b> !</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Êtes-vous sûr de vouloir chiffrer votre portefeuille ?</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANT : Toute sauvegarde précédente de votre fichier de portefeuille devrait être remplacée par le nouveau fichier de portefeuille chiffré. Pour des raisons de sécurité, les sauvegardes précédentes de votre fichier de portefeuille non chiffré deviendront inutilisables dès que vous commencerez à utiliser le nouveau portefeuille chiffré.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Avertissement : la touche Verr. Maj. est activée !</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Portefeuille chiffré</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Saisissez une nouvelle phrase de passe pour le portefeuille.<br/>Veuillez utiliser une phrase composée de <b>dix caractères aléatoires ou plus</b>, ou bien de <b>huit mots ou plus</b>.</translation>
</message>
<message>
<source>Sequence will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your sequences from being stolen by malware infecting your computer.</source>
<translation>Sequence va à présent se fermer pour terminer le chiffrement. N'oubliez pas que le chiffrement de votre portefeuille n'est pas une protection totale contre le vol par des logiciels malveillants qui infecteraient votre ordinateur.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Le chiffrement du portefeuille a échoué</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Le chiffrement du portefeuille a échoué en raison d'une erreur interne. Votre portefeuille n'a pas été chiffré.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>Les phrases de passe saisies ne correspondent pas.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>Le déverrouillage du portefeuille a échoué</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>La phrase de passe saisie pour déchiffrer le portefeuille était incorrecte.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>Le déchiffrage du portefeuille a échoué</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>La phrase de passe du portefeuille a été modifiée avec succès.</translation>
</message>
</context>
<context>
<name>SequenceGUI</name>
<message>
<source>Sign &message...</source>
<translation>&Signer le message...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Synchronisation avec le réseau en cours…</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Vue d'ensemble</translation>
</message>
<message>
<source>Node</source>
<translation>Nœud</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Afficher une vue d’ensemble du portefeuille</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transactions</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Parcourir l'historique des transactions</translation>
</message>
<message>
<source>E&xit</source>
<translation>Q&uitter</translation>
</message>
<message>
<source>Quit application</source>
<translation>Quitter l’application</translation>
</message>
<message>
<source>About &Qt</source>
<translation>À propos de &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Afficher des informations sur Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Options…</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&Chiffrer le portefeuille...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>Sauvegarder le &portefeuille...</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&Changer la phrase de passe...</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>Adresses d'&envoi...</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>Adresses de &réception...</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>Ouvrir un &URI...</translation>
</message>
<message>
<source>Sequence client</source>
<translation>Client Sequence</translation>
</message>
<message>
<source>Importing blocks from disk...</source>
<translation>Importation des blocs à partir du disque...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Réindexation des blocs sur le disque...</translation>
</message>
<message>
<source>Send coins to a Sequence address</source>
<translation>Envoyer des pièces à une adresse Sequence</translation>
</message>
<message>
<source>Modify configuration options for Sequence</source>
<translation>Modifier les options de configuration de Sequence</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Sauvegarder le portefeuille vers un autre emplacement</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Modifier la phrase de passe utilisée pour le chiffrement du portefeuille</translation>
</message>
<message>
<source>&Debug window</source>
<translation>Fenêtre de &débogage</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>Ouvrir une console de débogage et de diagnostic</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>&Vérifier un message...</translation>
</message>
<message>
<source>Sequence</source>
<translation>Sequence</translation>
</message>
<message>
<source>Wallet</source>
<translation>Portefeuille</translation>
</message>
<message>
<source>&Send</source>
<translation>&Envoyer</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Recevoir</translation>
</message>
<message>
<source>Show information about Sequence</source>
<translation>Montrer des informations à propos de Sequence</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Afficher / Cacher</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Afficher ou masquer la fenêtre principale</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Chiffrer les clefs privées de votre portefeuille</translation>
</message>
<message>
<source>Sign messages with your Sequence addresses to prove you own them</source>
<translation>Signer les messages avec vos adresses Sequence pour prouver que vous les détenez</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified Sequence addresses</source>
<translation>Vérifier les messages pour vous assurer qu'ils ont été signés avec les adresses Sequence spécifiées</translation>
</message>
<message>
<source>&File</source>
<translation>&Fichier</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Réglages</translation>
</message>
<message>
<source>&Help</source>
<translation>&Aide</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Barre d'outils des onglets</translation>
</message>
<message>
<source>Sequence</source>
<translation>Sequence</translation>
</message>
<message>
<source>Request payments (generates QR codes and Sequence: URIs)</source>
<translation>Demander des paiements (génère des codes QR et des URIs Sequence:)</translation>
</message>
<message>
<source>&About Sequence</source>
<translation>À &propos de Sequence</translation>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation>Afficher la liste d'adresses d'envoi et d'étiquettes utilisées</translation>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation>Afficher la liste d'adresses de réception et d'étiquettes utilisées</translation>
</message>
<message>
<source>Open a Sequence: URI or payment request</source>
<translation>Ouvrir un URI Sequence: ou une demande de paiement</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>Options de ligne de &commande</translation>
</message>
<message>
<source>Show the Sequence help message to get a list with possible Sequence command-line options</source>
<translation>Afficher le message d'aide de Sequence pour obtenir une liste des options de ligne de commande Sequence possibles.</translation>
</message>
<message numerus="yes">
<source>%n active connection(s) to Sequence network</source>
<translation><numerusform>%n connexion active avec le réseau Sequence</numerusform><numerusform>%n connexions actives avec le réseau Sequence</numerusform></translation>
</message>
<message>
<source>No block source available...</source>
<translation>Aucune source de blocs disponible...</translation>
</message>
<message numerus="yes">
<source>%n hour(s)</source>
<translation><numerusform>%n heure</numerusform><numerusform>%n heures</numerusform></translation>
</message>
<message numerus="yes">
<source>%n day(s)</source>
<translation><numerusform>%n jour</numerusform><numerusform>%n jours</numerusform></translation>
</message>
<message numerus="yes">
<source>%n week(s)</source>
<translation><numerusform>%n semaine</numerusform><numerusform>%n semaines</numerusform></translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 et %2</translation>
</message>
<message numerus="yes">
<source>%n year(s)</source>
<translation><numerusform>%n an</numerusform><numerusform>%n ans</numerusform></translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 en retard</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>Le dernier bloc reçu avait été généré il y a %1.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Les transactions après ceci ne sont pas encore visibles.</translation>
</message>
<message>
<source>Error</source>
<translation>Erreur</translation>
</message>
<message>
<source>Warning</source>
<translation>Avertissement</translation>
</message>
<message>
<source>Information</source>
<translation>Information</translation>
</message>
<message>
<source>Up to date</source>
<translation>À jour</translation>
</message>
<message numerus="yes">
<source>Processed %n blocks of transaction history.</source>
<translation><numerusform>%n bloc de l'historique transactionnel a été traité</numerusform><numerusform>%n blocs de l'historique transactionnel ont été traités</numerusform></translation>
</message>
<message>
<source>Catching up...</source>
<translation>Rattrapage en cours…</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Transaction envoyée</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Transaction entrante</translation>
</message>
<message>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Date : %1
Montant : %2
Type : %3
Adresse : %4
</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Le portefeuille est <b>chiffré</b> et est actuellement <b>déverrouillé</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Le portefeuille est <b>chiffré</b> et actuellement <b>verrouillé</b></translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<source>Network Alert</source>
<translation>Alerte réseau</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Coin Selection</source>
<translation>Sélection des pièces</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Quantité :</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Octets :</translation>
</message>
<message>
<source>Amount:</source>
<translation>Montant :</translation>
</message>
<message>
<source>Priority:</source>
<translation>Priorité :</translation>
</message>
<message>
<source>Fee:</source>
<translation>Frais :</translation>
</message>
<message>
<source>Dust:</source>
<translation>Poussière :</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Après les frais :</translation>
</message>
<message>
<source>Change:</source>
<translation>Monnaie :</translation>
</message>
<message>
<source>(un)select all</source>
<translation>Tout (dé)sélectionner</translation>
</message>
<message>
<source>Tree mode</source>
<translation>Mode arborescence</translation>
</message>
<message>
<source>List mode</source>
<translation>Mode liste</translation>
</message>
<message>
<source>Amount</source>
<translation>Montant</translation>
</message>
<message>
<source>Received with label</source>
<translation>Reçu avec une étiquette</translation>
</message>
<message>
<source>Received with address</source>
<translation>Reçu avec une adresse</translation>
</message>
<message>
<source>Date</source>
<translation>Date</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Confirmations</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Confirmée</translation>
</message>
<message>
<source>Priority</source>
<translation>Priorité</translation>
</message>
<message>
<source>Copy address</source>
<translation>Copier l’adresse</translation>
</message>
<message>
<source>Copy label</source>
<translation>Copier l’étiquette</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copier le montant</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Copier l'ID de la transaction</translation>
</message>
<message>
<source>Lock unspent</source>
<translation>Verrouiller ce qui n'est pas dépensé</translation>
</message>
<message>
<source>Unlock unspent</source>
<translation>Déverrouiller ce qui n'est pas dépensé</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Copier la quantité</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Copier les frais</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Copier le montant après les frais</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Copier les octets</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Copier la priorité</translation>
</message>
<message>
<source>Copy dust</source>
<translation>Copier la poussière</translation>
</message>
<message>
<source>Copy change</source>
<translation>Copier la monnaie</translation>
</message>
<message>
<source>highest</source>
<translation>la plus élevée</translation>
</message>
<message>
<source>higher</source>
<translation>plus élevée</translation>
</message>
<message>
<source>high</source>
<translation>élevée</translation>
</message>
<message>
<source>medium-high</source>
<translation>moyennement-élevée</translation>
</message>
<message>
<source>medium</source>
<translation>moyenne</translation>
</message>
<message>
<source>low-medium</source>
<translation>moyennement-basse</translation>
</message>
<message>
<source>low</source>
<translation>basse</translation>
</message>
<message>
<source>lower</source>
<translation>plus basse</translation>
</message>
<message>
<source>lowest</source>
<translation>la plus basse</translation>
</message>
<message>
<source>(%1 locked)</source>
<translation>(%1 verrouillé)</translation>
</message>
<message>
<source>none</source>
<translation>aucun</translation>
</message>
<message>
<source>Can vary +/- %1 satoshi(s) per input.</source>
<translation>Peut varier +/- %1 satoshi(s) par entrée.</translation>
</message>
<message>
<source>yes</source>
<translation>oui</translation>
</message>
<message>
<source>no</source>
<translation>non</translation>
</message>
<message>
<source>This label turns red, if the transaction size is greater than 1000 bytes.</source>
<translation>Cette étiquette devient rouge si la taille de la transaction est plus grande que 1 000 octets.</translation>
</message>
<message>
<source>This means a fee of at least %1 per kB is required.</source>
<translation>Ceci signifie que des frais d'au moins %1 par ko sont exigés.</translation>
</message>
<message>
<source>Can vary +/- 1 byte per input.</source>
<translation>Peut varier +/- 1 octet par entrée.</translation>
</message>
<message>
<source>Transactions with higher priority are more likely to get included into a block.</source>
<translation>Les transactions à priorité plus haute sont plus à même d'être incluses dans un bloc.</translation>
</message>
<message>
<source>This label turns red, if the priority is smaller than "medium".</source>
      <translation>Cette étiquette devient rouge si la priorité est plus basse que « moyenne ».</translation>
</message>
<message>
<source>This label turns red, if any recipient receives an amount smaller than %1.</source>
<translation>Cette étiquette devient rouge si un destinataire reçoit un montant inférieur à %1.</translation>
</message>
<message>
<source>(no label)</source>
<translation>(aucune étiquette)</translation>
</message>
<message>
<source>change from %1 (%2)</source>
<translation>monnaie de %1 (%2)</translation>
</message>
<message>
<source>(change)</source>
<translation>(monnaie)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Modifier l'adresse</translation>
</message>
<message>
<source>&Label</source>
<translation>&Étiquette</translation>
</message>
<message>
<source>The label associated with this address list entry</source>
<translation>L'étiquette associée à cette entrée de la liste d'adresses</translation>
</message>
<message>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>L'adresse associée à cette entrée de la liste d'adresses. Ceci ne peut être modifié que pour les adresses d'envoi.</translation>
</message>
<message>
<source>&Address</source>
<translation>&Adresse</translation>
</message>
<message>
<source>New receiving address</source>
<translation>Nouvelle adresse de réception</translation>
</message>
<message>
<source>New sending address</source>
<translation>Nouvelle adresse d’envoi</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>Modifier l’adresse de réception</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>Modifier l’adresse d'envoi</translation>
</message>
<message>
<source>The entered address "%1" is already in the address book.</source>
<translation>L’adresse fournie « %1 » est déjà présente dans le carnet d'adresses.</translation>
</message>
<message>
<source>The entered address "%1" is not a valid Sequence address.</source>
<translation>L'adresse fournie « %1 » n'est pas une adresse Sequence valide.</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>Impossible de déverrouiller le portefeuille.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>Échec de génération de la nouvelle clef.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>Un nouveau répertoire de données sera créé.</translation>
</message>
<message>
<source>name</source>
<translation>nom</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>Le répertoire existe déjà. Ajoutez %1 si vous voulez créer un nouveau répertoire ici.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>Le chemin existe déjà et n'est pas un répertoire.</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>Impossible de créer un répertoire de données ici.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>Sequence</source>
<translation>Sequence</translation>
</message>
<message>
<source>version</source>
<translation>version</translation>
</message>
<message>
<source>(%1-bit)</source>
<translation>(%1-bit)</translation>
</message>
<message>
<source>About Sequence</source>
<translation>À propos de Sequence</translation>
</message>
<message>
<source>Command-line options</source>
<translation>Options de ligne de commande</translation>
</message>
<message>
<source>Usage:</source>
<translation>Utilisation :</translation>
</message>
<message>
<source>command-line options</source>
<translation>options de ligne de commande</translation>
</message>
<message>
<source>UI options</source>
<translation>Options de l'interface utilisateur</translation>
</message>
<message>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Définir la langue, par exemple « fr_CA » (par défaut : la langue du système)</translation>
</message>
<message>
<source>Start minimized</source>
<translation>Démarrer minimisé</translation>
</message>
<message>
<source>Set SSL root certificates for payment request (default: -system-)</source>
<translation>Définir les certificats SSL racine pour les requêtes de paiement (par défaut : -système-)</translation>
</message>
<message>
<source>Show splash screen on startup (default: 1)</source>
<translation>Afficher l'écran d'accueil au démarrage (par défaut : 1)</translation>
</message>
<message>
<source>Choose data directory on startup (default: 0)</source>
<translation>Choisir un répertoire de données au démarrage (par défaut : 0)</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Bienvenue</translation>
</message>
<message>
<source>Welcome to Sequence.</source>
<translation>Bienvenue à Sequence.</translation>
</message>
<message>
<source>As this is the first time the program is launched, you can choose where Sequence will store its data.</source>
<translation>Comme c'est la première fois que le logiciel est lancé, vous pouvez choisir où Sequence stockera ses données.</translation>
</message>
<message>
<source>Sequence will download and store a copy of the Sequence block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation>Sequence va télécharger et stocker une copie de la chaîne de blocs Sequence. Au moins %1Go de données seront stockées dans ce répertoire et cela augmentera avec le temps. Le portefeuille sera également stocké dans ce répertoire.</translation>
</message>
<message>
<source>Use the default data directory</source>
<translation>Utiliser le répertoire de données par défaut</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>Utiliser un répertoire de données personnalisé :</translation>
</message>
<message>
<source>Sequence</source>
<translation>Sequence</translation>
</message>
<message>
<source>Error: Specified data directory "%1" cannot be created.</source>
<translation>Erreur : le répertoire de données spécifié « %1 » ne peut pas être créé.</translation>
</message>
<message>
<source>Error</source>
<translation>Erreur</translation>
</message>
<message numerus="yes">
<source>%n GB of free space available</source>
<translation><numerusform>%n Go d'espace libre disponible</numerusform><numerusform>%n Go d'espace libre disponibles</numerusform></translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
<translation>Ouvrir un URI</translation>
</message>
<message>
<source>Open payment request from URI or file</source>
<translation>Ouvrir une demande de paiement à partir d'un URI ou d'un fichier</translation>
</message>
<message>
<source>URI:</source>
<translation>URI :</translation>
</message>
<message>
<source>Select payment request file</source>
<translation>Choisir le fichier de demande de paiement</translation>
</message>
<message>
<source>Select payment request file to open</source>
<translation>Choisir le fichier de demande de paiement à ouvrir</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Options</translation>
</message>
<message>
<source>&Main</source>
<translation>Réglages &principaux</translation>
</message>
<message>
<source>Automatically start Sequence after logging in to the system.</source>
<translation>Démarrer Sequence automatiquement après avoir ouvert une session sur le système.</translation>
</message>
<message>
<source>&Start Sequence on system login</source>
<translation>&Démarrer Sequence lors de l'ouverture d'une session</translation>
</message>
<message>
<source>Size of &database cache</source>
<translation>Taille du cache de la base de &données</translation>
</message>
<message>
<source>MB</source>
<translation>Mo</translation>
</message>
<message>
<source>Number of script &verification threads</source>
<translation>Nombre d'exétrons de &vérification de script</translation>
</message>
<message>
<source>Accept connections from outside</source>
<translation>Accepter les connexions provenant de l'extérieur</translation>
</message>
<message>
<source>Allow incoming connections</source>
      <translation>Permettre les connexions entrantes</translation>
</message>
<message>
<source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source>
<translation>Adresse IP du mandataire (par ex. IPv4 : 127.0.0.1 / IPv6 : ::1)</translation>
</message>
<message>
<source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source>
<translation>URL de tiers (par ex. un explorateur de blocs) apparaissant dans l'onglet des transactions comme éléments du menu contextuel. %s dans l'URL est remplacé par le hachage de la transaction. Les URL multiples sont séparées par une barre verticale |.</translation>
</message>
<message>
<source>Third party transaction URLs</source>
<translation>URL de transaction d'un tiers</translation>
</message>
<message>
<source>Active command-line options that override above options:</source>
<translation>Options actives de ligne de commande qui annulent les options ci-dessus :</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Réinitialiser toutes les options du client aux valeurs par défaut.</translation>
</message>
<message>
<source>&Reset Options</source>
<translation>&Réinitialisation des options</translation>
</message>
<message>
<source>&Network</source>
<translation>&Réseau</translation>
</message>
<message>
<source>(0 = auto, <0 = leave that many cores free)</source>
<translation>(0 = auto, < 0 = laisser ce nombre de cœurs inutilisés)</translation>
</message>
<message>
<source>W&allet</source>
<translation>&Portefeuille</translation>
</message>
<message>
<source>Expert</source>
<translation>Expert</translation>
</message>
<message>
<source>Enable coin &control features</source>
      <translation>Activer les fonctions de &contrôle des pièces</translation>
</message>
<message>
<source>If you disable the spending of unconfirmed change, the change from a transaction cannot be used until that transaction has at least one confirmation. This also affects how your balance is computed.</source>
      <translation>Si vous désactivez la dépense de la monnaie non confirmée, la monnaie d'une transaction ne peut pas être utilisée tant que cette transaction n'a pas reçu au moins une confirmation. Ceci affecte aussi la manière dont votre solde est calculé.</translation>
</message>
<message>
<source>&Spend unconfirmed change</source>
<translation>&Dépenser la monnaie non confirmée</translation>
</message>
<message>
<source>Automatically open the Sequence client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Ouvrir le port du client Sequence automatiquement sur le routeur. Ceci ne fonctionne que si votre routeur supporte l'UPnP et si la fonctionnalité est activée.</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>Mapper le port avec l'&UPnP</translation>
</message>
<message>
<source>Connect to the Sequence network through a SOCKS5 proxy.</source>
<translation>Se connecter au réseau Sequence par un mandataire SOCKS5.</translation>
</message>
<message>
<source>&Connect through SOCKS5 proxy (default proxy):</source>
<translation>Se &connecter par un mandataire SOCKS5 (mandataire par défaut) :</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>&IP du serveur mandataire :</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Port :</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Port du serveur mandataire (par ex. 9050)</translation>
</message>
<message>
<source>&Window</source>
<translation>&Fenêtre</translation>
</message>
<message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Afficher uniquement une icône système après minimisation.</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimiser dans la barre système au lieu de la barre des tâches</translation>
</message>
<message>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimiser au lieu de quitter l'application lorsque la fenêtre est fermée. Si cette option est activée, l'application ne pourra être fermée qu'en sélectionnant Quitter dans le menu.</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>M&inimiser lors de la fermeture</translation>
</message>
<message>
<source>&Display</source>
<translation>&Affichage</translation>
</message>
<message>
<source>User Interface &language:</source>
<translation>&Langue de l'interface utilisateur :</translation>
</message>
<message>
<source>The user interface language can be set here. This setting will take effect after restarting Sequence.</source>
<translation>La langue de l'interface utilisateur peut être définie ici. Ce réglage sera pris en compte après redémarrage de Sequence.</translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>&Unité d'affichage des montants :</translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Choisissez la sous-unité par défaut pour l'affichage dans l'interface et lors de l'envoi de pièces.</translation>
</message>
<message>
<source>Whether to show coin control features or not.</source>
<translation>Afficher ou non les fonctions de contrôle des pièces.</translation>
</message>
<message>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<source>&Cancel</source>
<translation>A&nnuler</translation>
</message>
<message>
<source>default</source>
<translation>par défaut</translation>
</message>
<message>
<source>none</source>
<translation>aucune</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Confirmer la réinitialisation des options</translation>
</message>
<message>
<source>Client restart required to activate changes.</source>
<translation>Le redémarrage du client est nécessaire pour activer les changements.</translation>
</message>
<message>
<source>Client will be shutdown, do you want to proceed?</source>
<translation>Le client sera arrêté, voulez-vous continuer ?</translation>
</message>
<message>
<source>This change would require a client restart.</source>
<translation>Ce changement demanderait un redémarrage du client.</translation>
</message>
<message>
<source>The supplied proxy address is invalid.</source>
<translation>L'adresse de serveur mandataire fournie est invalide.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Formulaire</translation>
</message>
<message>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Sequence network after a connection is established, but this process has not completed yet.</source>
<translation>Les informations affichées peuvent être obsolètes. Votre portefeuille est automatiquement synchronisé avec le réseau Sequence lorsque la connexion s'établit, or ce processus n'est pas encore terminé.</translation>
</message>
<message>
<source>Watch-only:</source>
<translation>Juste-regarder :</translation>
</message>
<message>
<source>Available:</source>
<translation>Disponible :</translation>
</message>
<message>
<source>Your current spendable balance</source>
<translation>Votre solde actuel pouvant être dépensé</translation>
</message>
<message>
<source>Pending:</source>
<translation>En attente :</translation>
</message>
<message>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>Total des transactions qui doivent encore être confirmées et qu'il n'est pas encore possible de dépenser</translation>
</message>
<message>
<source>Immature:</source>
<translation>Immature :</translation>
</message>
<message>
<source>Mined balance that has not yet matured</source>
      <translation>Solde miné qui n'est pas encore mûr</translation>
</message>
<message>
<source>Balances</source>
<translation>Soldes</translation>
</message>
<message>
<source>Total:</source>
<translation>Total :</translation>
</message>
<message>
<source>Your current total balance</source>
<translation>Votre solde total actuel</translation>
</message>
<message>
<source>Your current balance in watch-only addresses</source>
      <translation>Votre solde actuel en adresses juste-regarder</translation>
</message>
<message>
<source>Spendable:</source>
<translation>Disponible :</translation>
</message>
<message>
<source>Recent transactions</source>
<translation>Transactions récentes</translation>
</message>
<message>
<source>Unconfirmed transactions to watch-only addresses</source>
<translation>Transactions non confirmées vers des adresses juste-regarder</translation>
</message>
<message>
<source>Mined balance in watch-only addresses that has not yet matured</source>
<translation>Le solde miné dans des adresses juste-regarder, qui n'est pas encore mûr</translation>
</message>
<message>
<source>Current total balance in watch-only addresses</source>
<translation>Solde total actuel dans des adresses juste-regarder</translation>
</message>
<message>
<source>out of sync</source>
<translation>désynchronisé</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<source>URI handling</source>
<translation>Gestion des URIs</translation>
</message>
<message>
<source>Invalid payment address %1</source>
<translation>Adresse de paiement invalide %1</translation>
</message>
<message>
<source>Payment request rejected</source>
<translation>La demande de paiement est rejetée</translation>
</message>
<message>
<source>Payment request network doesn't match client network.</source>
<translation>Le réseau de la demande de paiement ne correspond pas au réseau du client.</translation>
</message>
<message>
<source>Payment request has expired.</source>
<translation>La demande de paiement est expirée.</translation>
</message>
<message>
<source>Payment request is not initialized.</source>
<translation>La demande de paiement n'est pas initialisée.</translation>
</message>
<message>
<source>Requested payment amount of %1 is too small (considered dust).</source>
<translation>Le paiement demandé d'un montant de %1 est trop faible (considéré comme de la poussière).</translation>
</message>
<message>
<source>Payment request error</source>
<translation>Erreur de demande de paiement</translation>
</message>
<message>
<source>Cannot start Sequence: click-to-pay handler</source>
      <translation>Impossible de démarrer le gestionnaire de cliquer-pour-payer Sequence</translation>
</message>
<message>
<source>Payment request fetch URL is invalid: %1</source>
<translation>L'URL de récupération de la demande de paiement est invalide : %1</translation>
</message>
<message>
<source>URI cannot be parsed! This can be caused by an invalid Sequence address or malformed URI parameters.</source>
<translation>L'URI ne peut pas être analysé ! Ceci peut être causé par une adresse Sequence invalide ou par des paramètres d'URI mal formés.</translation>
</message>
<message>
<source>Payment request file handling</source>
<translation>Gestion des fichiers de demande de paiement</translation>
</message>
<message>
<source>Payment request file cannot be read! This can be caused by an invalid payment request file.</source>
<translation>Le fichier de demande de paiement ne peut pas être lu ! Ceci peut être causé par un fichier de demande de paiement invalide.</translation>
</message>
<message>
<source>Unverified payment requests to custom payment scripts are unsupported.</source>
<translation>Les demandes de paiements non vérifiées à des scripts de paiement personnalisés ne sont pas prises en charge.</translation>
</message>
<message>
<source>Refund from %1</source>
<translation>Remboursement de %1</translation>
</message>
<message>
<source>Payment request %1 is too large (%2 bytes, allowed %3 bytes).</source>
<translation>La demande de paiement %1 est trop grande (%2 octets, %3 octets permis).</translation>
</message>
<message>
<source>Payment request DoS protection</source>
<translation>Protection DdS des demandes de paiement</translation>
</message>
<message>
<source>Error communicating with %1: %2</source>
<translation>Erreur de communication avec %1 : %2</translation>
</message>
<message>
<source>Payment request cannot be parsed!</source>
<translation>La demande de paiement ne peut pas être analysée !</translation>
</message>
<message>
<source>Bad response from server %1</source>
<translation>Mauvaise réponse du serveur %1</translation>
</message>
<message>
<source>Payment acknowledged</source>
<translation>Le paiement a été confirmé</translation>
</message>
<message>
<source>Network request error</source>
<translation>Erreur de demande réseau</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
<message>
<source>User Agent</source>
<translation>Agent utilisateur</translation>
</message>
<message>
<source>Address/Hostname</source>
<translation>Adresse/nom d'hôte</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Temps de ping</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Montant</translation>
</message>
<message>
<source>Enter a Sequence address (e.g. %1)</source>
<translation>Saisir une adresse Sequence (p. ex. %1)</translation>
</message>
<message>
<source>Enter a Sequence address or name registered on the dDNS.</source>
<translation>Entrez une adresse Sequence ou le nom inscrit sur le dDNS.</translation>
</message>
<message>
<source>%1 d</source>
<translation>%1 d</translation>
</message>
<message>
<source>%1 h</source>
<translation>%1 h</translation>
</message>
<message>
<source>%1 m</source>
<translation>%1 min</translation>
</message>
<message>
<source>%1 s</source>
<translation>%1 s</translation>
</message>
<message>
<source>NETWORK</source>
<translation>RÉSEAU</translation>
</message>
<message>
<source>UNKNOWN</source>
<translation>INCONNU</translation>
</message>
<message>
<source>None</source>
<translation>Aucun</translation>
</message>
<message>
<source>N/A</source>
<translation>N.D.</translation>
</message>
<message>
<source>%1 ms</source>
<translation>%1 ms</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>&Sauvegarder l'image...</translation>
</message>
<message>
<source>&Copy Image</source>
<translation>&Copier l'image</translation>
</message>
<message>
<source>Save QR Code</source>
<translation>Sauvegarder le code QR</translation>
</message>
<message>
<source>PNG Image (*.png)</source>
<translation>Image PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>Client name</source>
<translation>Nom du client</translation>
</message>
<message>
<source>N/A</source>
<translation>N.D.</translation>
</message>
<message>
<source>Client version</source>
<translation>Version du client</translation>
</message>
<message>
<source>&Information</source>
<translation>&Informations</translation>
</message>
<message>
<source>Debug window</source>
<translation>Fenêtre de débogage</translation>
</message>
<message>
<source>General</source>
<translation>Général</translation>
</message>
<message>
<source>Using OpenSSL version</source>
<translation>Version d'OpenSSL utilisée</translation>
</message>
<message>
<source>Using BerkeleyDB version</source>
<translation>Version BerkeleyDB utilisée</translation>
</message>
<message>
<source>Startup time</source>
<translation>Heure de démarrage</translation>
</message>
<message>
<source>Network</source>
<translation>Réseau</translation>
</message>
<message>
<source>Name</source>
<translation>Nom</translation><|fim▁hole|> <message>
<source>Number of connections</source>
<translation>Nombre de connexions</translation>
</message>
<message>
<source>Block chain</source>
<translation>Chaîne de blocs</translation>
</message>
<message>
<source>Current number of blocks</source>
<translation>Nombre actuel de blocs</translation>
</message>
<message>
<source>Received</source>
<translation>Reçu</translation>
</message>
<message>
<source>Sent</source>
<translation>Envoyé</translation>
</message>
<message>
<source>&Peers</source>
<translation>&Pairs</translation>
</message>
<message>
<source>Select a peer to view detailed information.</source>
<translation>Choisir un pair pour voir l'information détaillée.</translation>
</message>
<message>
<source>Direction</source>
<translation>Direction</translation>
</message>
<message>
<source>Version</source>
<translation>Version</translation>
</message>
<message>
<source>User Agent</source>
<translation>Agent utilisateur</translation>
</message>
<message>
<source>Services</source>
<translation>Services</translation>
</message>
<message>
<source>Starting Height</source>
<translation>Hauteur de démarrage</translation>
</message>
<message>
<source>Sync Height</source>
<translation>Hauteur de synchro</translation>
</message>
<message>
<source>Ban Score</source>
<translation>Pointage des bannissements</translation>
</message>
<message>
<source>Connection Time</source>
<translation>Temps de connexion</translation>
</message>
<message>
<source>Last Send</source>
<translation>Dernier envoi</translation>
</message>
<message>
<source>Last Receive</source>
<translation>Dernière réception</translation>
</message>
<message>
<source>Bytes Sent</source>
<translation>Octets envoyés</translation>
</message>
<message>
<source>Bytes Received</source>
<translation>Octets reçus</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Temps de ping</translation>
</message>
<message>
<source>Last block time</source>
<translation>Horodatage du dernier bloc</translation>
</message>
<message>
<source>&Open</source>
<translation>&Ouvrir</translation>
</message>
<message>
<source>&Console</source>
<translation>&Console</translation>
</message>
<message>
<source>&Network Traffic</source>
<translation>Trafic &réseau</translation>
</message>
<message>
<source>&Clear</source>
<translation>&Nettoyer</translation>
</message>
<message>
<source>Totals</source>
<translation>Totaux</translation>
</message>
<message>
<source>In:</source>
<translation>Entrant :</translation>
</message>
<message>
<source>Out:</source>
<translation>Sortant :</translation>
</message>
<message>
<source>Build date</source>
<translation>Date de compilation</translation>
</message>
<message>
<source>Debug log file</source>
<translation>Journal de débogage</translation>
</message>
<message>
<source>Open the Sequence debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Ouvrir le journal de débogage de Sequence depuis le répertoire de données actuel. Ceci peut prendre quelques secondes pour les journaux de grande taille.</translation>
</message>
<message>
<source>Clear console</source>
<translation>Nettoyer la console</translation>
</message>
<message>
<source>Welcome to the Sequence RPC console.</source>
<translation>Bienvenue sur la console RPC de Sequence.</translation>
</message>
<message>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Utiliser les touches de curseur pour naviguer dans l'historique et <b>Ctrl-L</b> pour effacer l'écran.</translation>
</message>
<message>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Taper <b>help</b> pour afficher une vue générale des commandes proposées.</translation>
</message>
<message>
<source>%1 B</source>
<translation>%1 o</translation>
</message>
<message>
<source>%1 KB</source>
<translation>%1 Ko</translation>
</message>
<message>
<source>%1 MB</source>
<translation>%1 Mo</translation>
</message>
<message>
<source>%1 GB</source>
<translation>%1 Go</translation>
</message>
<message>
<source>via %1</source>
<translation>par %1</translation>
</message>
<message>
<source>never</source>
<translation>jamais</translation>
</message>
<message>
<source>Inbound</source>
<translation>Entrant</translation>
</message>
<message>
<source>Outbound</source>
<translation>Sortant</translation>
</message>
<message>
<source>Unknown</source>
<translation>Inconnu</translation>
</message>
<message>
<source>Fetching...</source>
<translation>Récupération...</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>&Montant :</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Étiquette :</translation>
</message>
<message>
<source>&Message:</source>
<translation>M&essage :</translation>
</message>
<message>
<source>Reuse one of the previously used receiving addresses. Reusing addresses has security and privacy issues. Do not use this unless re-generating a payment request made before.</source>
<translation>Réutilise une adresse de réception précédemment utilisée. Réutiliser une adresse pose des problèmes de sécurité et de vie privée. N'utilisez pas cette option sauf si vous générez à nouveau une demande de paiement déjà faite.</translation>
</message>
<message>
<source>R&euse an existing receiving address (not recommended)</source>
<translation>Ré&utiliser une adresse de réception existante (non recommandé)</translation>
</message>
<message>
<source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the Sequence network.</source>
<translation>Un message optionnel à joindre à la demande de paiement qui sera affiché à l'ouverture de celle-ci. Note : le message ne sera pas envoyé avec le paiement par le réseau Sequence.</translation>
</message>
<message>
<source>An optional label to associate with the new receiving address.</source>
        <translation>Une étiquette optionnelle à associer à la nouvelle adresse de réception.</translation>
</message>
<message>
<source>Use this form to request payments. All fields are <b>optional</b>.</source>
<translation>Utiliser ce formulaire pour demander des paiements. Tous les champs sont <b>optionnels</b>.</translation>
</message>
<message>
<source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source>
<translation>Un montant optionnel à demander. Laisser ceci vide ou à zéro pour ne pas demander de montant spécifique.</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Effacer tous les champs du formulaire.</translation>
</message>
<message>
<source>Clear</source>
<translation>Effacer</translation>
</message>
<message>
<source>Requested payments history</source>
<translation>Historique des paiements demandés</translation>
</message>
<message>
<source>&Request payment</source>
<translation>&Demande de paiement</translation>
</message>
<message>
<source>Show the selected request (does the same as double clicking an entry)</source>
<translation>Afficher la demande choisie (identique à un double-clic sur une entrée)</translation>
</message>
<message>
<source>Show</source>
<translation>Afficher</translation>
</message>
<message>
<source>Remove the selected entries from the list</source>
<translation>Enlever les entrées sélectionnées de la liste</translation>
</message>
<message>
<source>Remove</source>
<translation>Enlever</translation>
</message>
<message>
<source>Copy label</source>
<translation>Copier l’étiquette</translation>
</message>
<message>
<source>Copy message</source>
<translation>Copier le message</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copier le montant</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>Code QR</translation>
</message>
<message>
<source>Copy &URI</source>
<translation>Copier l'&URI</translation>
</message>
<message>
<source>Copy &Address</source>
<translation>Copier l'&adresse</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>&Sauvegarder l'image...</translation>
</message>
<message>
<source>Request payment to %1</source>
<translation>Demande de paiement à %1</translation>
</message>
<message>
<source>Payment information</source>
<translation>Informations de paiement</translation>
</message>
<message>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>Amount</source>
<translation>Montant</translation>
</message>
<message>
<source>Label</source>
<translation>Étiquette</translation>
</message>
<message>
<source>Message</source>
<translation>Message</translation>
</message>
<message>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>L'URI résultant est trop long, essayez de réduire le texte d'étiquette / de message.</translation>
</message>
<message>
<source>Error encoding URI into QR Code.</source>
<translation>Erreur d'encodage de l'URI en code QR.</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Date</translation>
</message>
<message>
<source>Label</source>
<translation>Étiquette</translation>
</message>
<message>
<source>Message</source>
<translation>Message</translation>
</message>
<message>
<source>Amount</source>
<translation>Montant</translation>
</message>
<message>
<source>(no label)</source>
<translation>(pas d'étiquette)</translation>
</message>
<message>
<source>(no message)</source>
<translation>(pas de message)</translation>
</message>
<message>
<source>(no amount)</source>
<translation>(aucun montant)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Envoyer des pièces</translation>
</message>
<message>
<source>Coin Control Features</source>
<translation>Fonctions de contrôle des pièces</translation>
</message>
<message>
<source>Inputs...</source>
<translation>Entrants...</translation>
</message>
<message>
<source>automatically selected</source>
<translation>choisi automatiquement</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Fonds insuffisants !</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Quantité :</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Octets :</translation>
</message>
<message>
<source>Amount:</source>
<translation>Montant :</translation>
</message>
<message>
<source>Priority:</source>
<translation>Priorité :</translation>
</message>
<message>
<source>Fee:</source>
<translation>Frais :</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Après les frais :</translation>
</message>
<message>
<source>Change:</source>
<translation>Monnaie :</translation>
</message>
<message>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
<translation>Si ceci est actif mais l'adresse de monnaie rendue est vide ou invalide, la monnaie sera envoyée vers une adresse nouvellement générée.</translation>
</message>
<message>
<source>Custom change address</source>
<translation>Adresse personnalisée de monnaie rendue</translation>
</message>
<message>
<source>Transaction Fee:</source>
<translation>Frais de transaction :</translation>
</message>
<message>
<source>Choose...</source>
<translation>Choisir...</translation>
</message>
<message>
<source>collapse fee-settings</source>
<translation>réduire les paramètres des frais</translation>
</message>
<message>
<source>Minimize</source>
<translation>Minimiser</translation>
</message>
<message>
<source>If the custom fee is set to 1000 satoshis and the transaction is only 250 bytes, then "per kilobyte" only pays 250 satoshis in fee, while "at least" pays 1000 satoshis. For transactions bigger than a kilobyte both pay by kilobyte.</source>
<translation>Si les frais personnalisés sont définis à 1 000 satoshis et que la transaction est seulement de 250 octets, donc le « par kilo-octet » ne paiera que 250 satoshis de frais, alors que le « au moins » paiera 1 000 satoshis. Pour des transactions supérieures à un kilo-octet, les deux paieront par kilo-octets.</translation>
</message>
<message>
<source>per kilobyte</source>
<translation>par kilo-octet</translation>
</message>
<message>
<source>If the custom fee is set to 1000 satoshis and the transaction is only 250 bytes, then "per kilobyte" only pays 250 satoshis in fee, while "total at least" pays 1000 satoshis. For transactions bigger than a kilobyte both pay by kilobyte.</source>
<translation>Si les frais personnalisés sont définis à 1 000 satoshis et que la transaction est seulement de 250 octets, donc le « par kilo-octet » ne paiera que 250 satoshis de frais, alors que le « total au moins » paiera 1 000 satoshis. Pour des transactions supérieures à un kilo-octet, les deux paieront par kilo-octets.</translation>
</message>
<message>
<source>total at least</source>
<translation>total au moins</translation>
</message>
<message>
<source>Paying only the minimum fee is just fine as long as there is less transaction volume than space in the blocks. But be aware that this can end up in a never confirming transaction once there is more demand for sequence transactions than the network can process.</source>
<translation>Il est correct de payer les frais minimum tant que le volume transactionnel est inférieur à l'espace dans les blocs. Mais soyez conscient que ceci pourrait résulter en une transaction n'étant jamais confirmée une fois qu'il y aura plus de transactions que le réseau ne pourra en traiter.</translation>
</message>
<message>
<source>(read the tooltip)</source>
<translation>(lire l'infobulle)</translation>
</message>
<message>
<source>Recommended:</source>
<translation>Recommandés :</translation>
</message>
<message>
<source>Custom:</source>
<translation>Personnalisés : </translation>
</message>
<message>
<source>(Smart fee not initialized yet. This usually takes a few blocks...)</source>
<translation>(Les frais intelligents ne sont pas encore initialisés. Ceci prend habituellement quelques blocs...)</translation>
</message>
<message>
<source>Confirmation time:</source>
<translation>Temps de confirmation :</translation>
</message>
<message>
<source>normal</source>
<translation>normal</translation>
</message>
<message>
<source>fast</source>
<translation>rapide</translation>
</message>
<message>
<source>Send as zero-fee transaction if possible</source>
<translation>Envoyer si possible une transaction sans frais</translation>
</message>
<message>
<source>(confirmation may take longer)</source>
<translation>(la confirmation pourrait prendre plus longtemps)</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Envoyer à plusieurs destinataires à la fois</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>Ajouter un &destinataire</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Effacer tous les champs du formulaire.</translation>
</message>
<message>
<source>Dust:</source>
<translation>Poussière :</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Tout nettoyer</translation>
</message>
<message>
<source>Balance:</source>
<translation>Solde :</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Confirmer l’action d'envoi</translation>
</message>
<message>
<source>S&end</source>
<translation>E&nvoyer</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>Confirmer l’envoi des pièces</translation>
</message>
<message>
<source>%1 to %2</source>
<translation>%1 à %2</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Copier la quantité</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copier le montant</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Copier les frais</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Copier le montant après les frais</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Copier les octets</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Copier la priorité</translation>
</message>
<message>
<source>Copy change</source>
<translation>Copier la monnaie</translation>
</message>
<message>
<source>Total Amount %1 (= %2)</source>
<translation>Montant total %1 (= %2)</translation>
</message>
<message>
<source>or</source>
<translation>ou</translation>
</message>
<message>
<source>The recipient address is not valid, please recheck.</source>
<translation>L'adresse du destinataire n’est pas valide, veuillez la vérifier.</translation>
</message>
<message>
<source>The amount to pay must be larger than 0.</source>
<translation>Le montant à payer doit être supérieur à 0.</translation>
</message>
<message>
<source>The amount exceeds your balance.</source>
<translation>Le montant dépasse votre solde.</translation>
</message>
<message>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Le montant dépasse votre solde lorsque les frais de transaction de %1 sont inclus.</translation>
</message>
<message>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Adresse identique trouvée. Il n'est possible d'envoyer qu'une fois à chaque adresse, par opération d'envoi.</translation>
</message>
<message>
<source>Transaction creation failed!</source>
<translation>La création de la transaction a échoué !</translation>
</message>
<message>
<source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>La transaction a été rejetée ! Ceci peut arriver si certaines pièces de votre portefeuille étaient déjà dépensées, par exemple si vous avez utilisé une copie de wallet.dat et que des pièces ont été dépensées dans la copie sans être marquées comme telles ici.</translation>
</message>
<message>
<source>A fee higher than %1 is considered an insanely high fee.</source>
<translation>Des frais supérieurs à %1 sont considérés comme follement élevés.</translation>
</message>
<message>
<source>Pay only the minimum fee of %1</source>
<translation>Payer seulement les frais minimum de %1</translation>
</message>
<message>
<source>Estimated to begin confirmation within %1 block(s).</source>
<translation>Début de confirmation estimé à %1 bloc(s).</translation>
</message>
<message>
<source>Warning: Invalid Sequence address</source>
<translation>Avertissement : adresse Sequence invalide</translation>
</message>
<message>
<source>(no label)</source>
<translation>(pas d'étiquette)</translation>
</message>
<message>
<source>Warning: Unknown change address</source>
<translation>Avertissement : adresse de monnaie rendue inconnue</translation>
</message>
<message>
<source>Copy dust</source>
<translation>Copier la poussière</translation>
</message>
<message>
<source>Are you sure you want to send?</source>
<translation>Êtes-vous sûr de vouloir envoyer ?</translation>
</message>
<message>
<source>added as transaction fee</source>
<translation>ajouté en tant que frais de transaction</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>&Montant :</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>&Payer à :</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>Saisir une étiquette pour cette adresse afin de l’ajouter à votre carnet d’adresses</translation>
</message>
<message>
<source>&Label:</source>
<translation>É&tiquette :</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Choisir une adresse déjà utilisée</translation>
</message>
<message>
<source>This is a normal payment.</source>
<translation>Ceci est un paiement normal.</translation>
</message>
<message>
<source>The Sequence address to send the payment to</source>
<translation>L'adresse Sequence à laquelle envoyer le paiement</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Coller l'adresse depuis le presse-papiers</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Remove this entry</source>
<translation>Enlever cette entrée</translation>
</message>
<message>
<source>Message:</source>
<translation>Message :</translation>
</message>
<message>
<source>This is a verified payment request.</source>
<translation>Ceci est une demande de paiement vérifiée.</translation>
</message>
<message>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Saisir une étiquette pour cette adresse afin de l'ajouter à la liste d'adresses utilisées</translation>
</message>
<message>
<source>A message that was attached to the Sequence: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the Sequence network.</source>
<translation>Un message qui était joint à l'URI Sequence et qui sera stocké avec la transaction pour référence. Note : ce message ne sera pas envoyé par le réseau Sequence.</translation>
</message>
<message>
<source>This is an unverified payment request.</source>
<translation>Ceci est une demande de paiement non vérifiée.</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Payer à :</translation>
</message>
<message>
<source>Memo:</source>
<translation>Mémo :</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<source>Sequence is shutting down...</source>
<translation>Arrêt de Sequence...</translation>
</message>
<message>
<source>Do not shut down the computer until this window disappears.</source>
<translation>Ne pas fermer l'ordinateur jusqu'à la disparition de cette fenêtre.</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Signatures - Sign / Verify a Message</source>
<translation>Signatures - Signer / Vérifier un message</translation>
</message>
<message>
<source>&Sign Message</source>
<translation>&Signer un message</translation>
</message>
<message>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Vous pouvez signer des messages avec vos adresses pour prouver que vous les détenez. Faites attention de ne rien signer de vague, car des attaques d'hameçonnage pourraient essayer de vous faire signer avec votre identité afin de l'usurper. Ne signez que des déclarations entièrement détaillées et avec lesquelles vous êtes d'accord.</translation>
</message>
<message>
<source>The Sequence address to sign the message with</source>
<translation>L'adresse Sequence avec laquelle signer le message</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Choisir une adresse précédemment utilisée</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Coller une adresse depuis le presse-papiers</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>Saisir ici le message que vous désirez signer</translation>
</message>
<message>
<source>Signature</source>
<translation>Signature</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Copier la signature actuelle dans le presse-papiers</translation>
</message>
<message>
<source>Sign the message to prove you own this Sequence address</source>
<translation>Signer le message pour prouver que vous détenez cette adresse Sequence</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>Signer le &message</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>Réinitialiser tous les champs de signature de message</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Tout nettoyer</translation>
</message>
<message>
<source>&Verify Message</source>
<translation>&Vérifier un message</translation>
</message>
<message>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Saisir ci-dessous l'adresse de signature, le message (assurez-vous d'avoir copié exactement les retours à la ligne, les espaces, tabulations etc.) et la signature pour vérifier le message. Faire attention à ne pas déduire davantage de la signature que ce qui est contenu dans le message signé même, pour éviter d'être trompé par une attaque d'homme du milieu.</translation>
</message>
<message>
<source>The Sequence address the message was signed with</source>
<translation>L'adresse Sequence avec laquelle le message a été signé</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified Sequence address</source>
<translation>Vérifier le message pour vous assurer qu'il a bien été signé par l'adresse Sequence spécifiée</translation>
</message>
<message>
<source>Verify &Message</source>
<translation>Vérifier le &message</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>Réinitialiser tous les champs de vérification de message</translation>
</message>
<message>
<source>Click "Sign Message" to generate signature</source>
<translation>Cliquez sur « Signer le message » pour générer la signature</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>L'adresse saisie est invalide.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Veuillez vérifier l'adresse et réessayer.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>L'adresse saisie ne fait pas référence à une clef.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>Le déverrouillage du portefeuille a été annulé.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>La clef privée n'est pas disponible pour l'adresse indiquée.</translation>
</message>
<message>
<source>Message signing failed.</source>
<translation>La signature du message a échoué.</translation>
</message>
<message>
<source>Message signed.</source>
<translation>Le message a été signé.</translation>
</message>
<message>
<source>The signature could not be decoded.</source>
<translation>La signature n'a pu être décodée.</translation>
</message>
<message>
<source>Please check the signature and try again.</source>
<translation>Veuillez vérifier la signature et réessayer.</translation>
</message>
<message>
<source>The signature did not match the message digest.</source>
<translation>La signature ne correspond pas à l'empreinte du message.</translation>
</message>
<message>
<source>Message verification failed.</source>
<translation>Échec de la vérification du message.</translation>
</message>
<message>
<source>Message verified.</source>
<translation>Message vérifié.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>Sequence</source>
<translation>Sequence</translation>
</message>
<message>
<source>The Emercoin Developers</source>
        <translation>Les développeurs Emercoin</translation>
</message>
<message>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation>Ko/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Open until %1</source>
<translation>Ouvert jusqu'à %1</translation>
</message>
<message>
<source>conflicted</source>
<translation>en conflit</translation>
</message>
<message>
<source>%1/offline</source>
<translation>%1/hors ligne</translation>
</message>
<message>
<source>%1/unconfirmed</source>
<translation>%1/non confirmée</translation>
</message>
<message>
<source>%1 confirmations</source>
<translation>%1 confirmations</translation>
</message>
<message>
<source>Status</source>
<translation>État</translation>
</message>
<message numerus="yes">
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, diffusée à travers %n nœud</numerusform><numerusform>, diffusée à travers %n nœuds</numerusform></translation>
</message>
<message>
<source>Date</source>
<translation>Date</translation>
</message>
<message>
<source>Source</source>
<translation>Source</translation>
</message>
<message>
<source>Generated</source>
<translation>Généré</translation>
</message>
<message>
<source>From</source>
<translation>De</translation>
</message>
<message>
<source>To</source>
<translation>À</translation>
</message>
<message>
<source>own address</source>
<translation>votre propre adresse</translation>
</message>
<message>
<source>watch-only</source>
<translation>juste-regarder</translation>
</message>
<message>
<source>label</source>
<translation>étiquette</translation>
</message>
<message>
<source>Credit</source>
<translation>Crédit</translation>
</message>
<message numerus="yes">
<source>matures in %n more block(s)</source>
<translation><numerusform>arrive à maturité dans %n bloc de plus</numerusform><numerusform>arrive à maturité dans %n blocs de plus</numerusform></translation>
</message>
<message>
<source>not accepted</source>
<translation>refusé</translation>
</message>
<message>
<source>Debit</source>
<translation>Débit</translation>
</message>
<message>
<source>Total debit</source>
<translation>Débit total</translation>
</message>
<message>
<source>Total credit</source>
<translation>Crédit total</translation>
</message>
<message>
<source>Transaction fee</source>
<translation>Frais de transaction</translation>
</message>
<message>
<source>Net amount</source>
<translation>Montant net</translation>
</message>
<message>
<source>Message</source>
<translation>Message</translation>
</message>
<message>
<source>Comment</source>
<translation>Commentaire</translation>
</message>
<message>
<source>Transaction ID</source>
<translation>ID de la transaction</translation>
</message>
<message>
<source>Merchant</source>
<translation>Marchand</translation>
</message>
<message>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Les pièces générées doivent mûrir pendant %1 blocs avant de pouvoir être dépensées. Lorsque vous avez généré ce bloc, il a été diffusé sur le réseau pour être ajouté à la chaîne de blocs. S’il échoue a intégrer la chaîne, son état sera modifié en « non accepté » et il ne sera pas possible de le dépenser. Ceci peut arriver occasionnellement si un autre nœud génère un bloc à quelques secondes du votre.</translation>
</message>
<message>
<source>Debug information</source>
<translation>Informations de débogage</translation>
</message>
<message>
<source>Transaction</source>
<translation>Transaction</translation>
</message>
<message>
<source>Inputs</source>
<translation>Entrants</translation>
</message>
<message>
<source>Amount</source>
<translation>Montant</translation>
</message>
<message>
<source>true</source>
<translation>vrai</translation>
</message>
<message>
<source>false</source>
<translation>faux</translation>
</message>
<message>
<source>, has not been successfully broadcast yet</source>
<translation>, n’a pas encore été diffusée avec succès</translation>
</message>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>Ouvert pour %n bloc de plus</numerusform><numerusform>Ouvert pour %n blocs de plus</numerusform></translation>
</message>
<message>
<source>unknown</source>
<translation>inconnu</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>Transaction details</source>
<translation>Détails de la transaction</translation>
</message>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Ce panneau affiche une description détaillée de la transaction</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Date</translation>
</message>
<message>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation>Immature (%1 confirmations, sera disponible après %2)</translation>
</message>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>Ouvert pour %n bloc de plus</numerusform><numerusform>Ouvert pour %n blocs de plus</numerusform></translation>
</message>
<message>
<source>Open until %1</source>
<translation>Ouvert jusqu'à %1</translation>
</message>
<message>
<source>Confirmed (%1 confirmations)</source>
<translation>Confirmée (%1 confirmations)</translation>
</message>
<message>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Ce bloc n’a été reçu par aucun autre nœud et ne sera probablement pas accepté !</translation>
</message>
<message>
<source>Generated but not accepted</source>
<translation>Généré mais pas accepté</translation>
</message>
<message>
<source>Offline</source>
<translation>Hors ligne</translation>
</message>
<message>
<source>Unconfirmed</source>
<translation>Non confirmé</translation>
</message>
<message>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation>Confirmation (%1 sur %2 confirmations recommandées)</translation>
</message>
<message>
<source>Conflicted</source>
<translation>En conflit</translation>
</message>
<message>
<source>Received with</source>
<translation>Reçue avec</translation>
</message>
<message>
<source>Received from</source>
<translation>Reçue de</translation>
</message>
<message>
<source>Sent to</source>
<translation>Envoyée à</translation>
</message>
<message>
<source>Payment to yourself</source>
<translation>Paiement à vous-même</translation>
</message>
<message>
<source>Mined</source>
<translation>Miné</translation>
</message>
<message>
<source>watch-only</source>
<translation>juste-regarder</translation>
</message>
<message>
<source>(n/a)</source>
<translation>(n.d)</translation>
</message>
<message>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>État de la transaction. Laissez le pointeur de la souris sur ce champ pour voir le nombre de confirmations.</translation>
</message>
<message>
<source>Date and time that the transaction was received.</source>
<translation>Date et heure de réception de la transaction.</translation>
</message>
<message>
<source>Type of transaction.</source>
<translation>Type de transaction.</translation>
</message>
<message>
<source>Whether or not a watch-only address is involved in this transaction.</source>
<translation>Une adresse juste-regarder est-elle impliquée dans cette transaction.</translation>
</message>
<message>
<source>Destination address of transaction.</source>
<translation>L’adresse de destination de la transaction.</translation>
</message>
<message>
<source>Amount removed from or added to balance.</source>
<translation>Montant ajouté ou enlevé au solde.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>All</source>
<translation>Toutes</translation>
</message>
<message>
<source>Today</source>
<translation>Aujourd’hui</translation>
</message>
<message>
<source>This week</source>
<translation>Cette semaine</translation>
</message>
<message>
<source>This month</source>
<translation>Ce mois-ci</translation>
</message>
<message>
<source>Last month</source>
<translation>Le mois dernier</translation>
</message>
<message>
<source>This year</source>
<translation>Cette année</translation>
</message>
<message>
<source>Range...</source>
<translation>Intervalle…</translation>
</message>
<message>
<source>Received with</source>
<translation>Reçue avec</translation>
</message>
<message>
<source>Sent to</source>
<translation>Envoyée à</translation>
</message>
<message>
<source>To yourself</source>
<translation>À vous-même</translation>
</message>
<message>
<source>Mined</source>
<translation>Miné</translation>
</message>
<message>
<source>Other</source>
<translation>Autres</translation>
</message>
<message>
<source>Enter address or label to search</source>
<translation>Saisir une adresse ou une étiquette à rechercher</translation>
</message>
<message>
<source>Min amount</source>
<translation>Montant min.</translation>
</message>
<message>
<source>Copy address</source>
<translation>Copier l’adresse</translation>
</message>
<message>
<source>Copy label</source>
<translation>Copier l’étiquette</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copier le montant</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Copier l'ID de la transaction</translation>
</message>
<message>
<source>Edit label</source>
<translation>Modifier l’étiquette</translation>
</message>
<message>
<source>Show transaction details</source>
<translation>Afficher les détails de la transaction</translation>
</message>
<message>
<source>Export Transaction History</source>
<translation>Exporter l'historique des transactions</translation>
</message>
<message>
<source>Watch-only</source>
        <translation>Juste-regarder</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>L'exportation a échoué</translation>
</message>
<message>
<source>There was an error trying to save the transaction history to %1.</source>
<translation>Une erreur est survenue lors de l'enregistrement de l'historique des transactions vers %1.</translation>
</message>
<message>
<source>Exporting Successful</source>
<translation>Exportation réussie</translation>
</message>
<message>
<source>The transaction history was successfully saved to %1.</source>
<translation>L'historique des transactions a été sauvegardée avec succès vers %1.</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Valeurs séparées par des virgules (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Confirmée</translation>
</message>
<message>
<source>Date</source>
<translation>Date</translation>
</message>
<message>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<source>Label</source>
<translation>Étiquette</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<source>Range:</source>
<translation>Intervalle :</translation>
</message>
<message>
<source>to</source>
<translation>à</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
<message>
<source>Unit to show amounts in. Click to select another unit.</source>
<translation>Unité d'affichage des montants. Cliquer pour choisir une autre unité.</translation>
</message>
</context>
<context>
<name>WalletFrame</name>
<message>
<source>No wallet has been loaded.</source>
        <translation>Aucun portefeuille n'a été chargé.</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>Envoyer des pièces</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&Exporter</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exporter les données de l'onglet courant vers un fichier</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation>Sauvegarder le portefeuille</translation>
</message>
<message>
<source>Wallet Data (*.dat)</source>
<translation>Données de portefeuille (*.dat)</translation>
</message>
<message>
<source>Backup Failed</source>
<translation>Échec de la sauvegarde</translation>
</message>
<message>
<source>There was an error trying to save the wallet data to %1.</source>
<translation>Une erreur est survenue lors de l'enregistrement des données de portefeuille vers %1.</translation>
</message>
<message>
<source>The wallet data was successfully saved to %1.</source>
<translation>Les données de portefeuille ont été enregistrées avec succès vers %1</translation>
</message>
<message>
<source>Backup Successful</source>
<translation>Sauvegarde réussie</translation>
</message>
</context>
<context>
<name>sequence</name>
<message>
<source>Options:</source>
<translation>Options :</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>Spécifier le répertoire de données</translation>
</message>
<message>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Se connecter à un nœud pour obtenir des adresses de pairs puis se déconnecter</translation>
</message>
<message>
<source>Specify your own public address</source>
<translation>Spécifier votre propre adresse publique</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>Accepter les commandes de JSON-RPC et de la ligne de commande</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>Fonctionner en arrière-plan en tant que démon et accepter les commandes</translation>
</message>
<message>
<source>Use the test network</source>
<translation>Utiliser le réseau de test</translation>
</message>
<message>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Accepter les connexions entrantes (par défaut : 1 si aucun -proxy ou -connect )</translation>
</message>
<message>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Se lier à l'adresse donnée et toujours l'écouter. Utilisez la notation [host]:port pour l'IPv6</translation>
</message>
<message>
<source>Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</source>
<translation>Supprimer toutes les transactions du portefeuille et ne récupérer que ces parties de la chaîne de bloc avec -rescan au démarrage</translation>
</message>
<message>
<source>Distributed under the MIT software license, see the accompanying file COPYING or <http://www.opensource.org/licenses/mit-license.php>.</source>
<translation>Distribué sous la licence MIT d'utilisation d'un logiciel. Consultez le fichier joint COPYING ou <http://www.opensource.org/licenses/mit-license.php>.</translation>
</message>
<message>
<source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source>
<translation>Passer en mode de test de régression qui utilise une chaîne spéciale dans laquelle les blocs sont résolus instantanément.</translation>
</message>
<message>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Exécuter la commande lorsqu'une transaction de portefeuille change (%s dans la commande est remplacée par TxID)</translation>
</message>
<message>
<source>In this mode -genproclimit controls how many blocks are generated immediately.</source>
<translation>Dans ce mode -genproclimit contrôle combien de blocs sont générés immédiatement.</translation>
</message>
<message>
<source>Set the number of script verification threads (%u to %d, 0 = auto, <0 = leave that many cores free, default: %d)</source>
<translation>Définir le nombre d'exétrons de vérification des scripts (%u à %d, 0 = auto, < 0 = laisser ce nombre de cœurs inutilisés, par défaut : %d)</translation>
</message>
<message>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Ceci est une pré-version de test - l'utiliser à vos risques et périls - ne pas l'utiliser pour miner ou pour des applications marchandes</translation>
</message>
<message>
<source>Unable to bind to %s on this computer. Sequence is probably already running.</source>
<translation>Impossible de se lier à %s sur cet ordinateur. Sequence fonctionne probablement déjà.</translation>
</message>
<message>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Avertissement : -paytxfee est réglé sur un montant très élevé ! Il s'agit des frais de transaction que vous payerez si vous envoyez une transaction.</translation>
</message>
<message>
<source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source>
<translation>Avertissement : le réseau ne semble pas totalement d'accord ! Quelques mineurs semblent éprouver des difficultés.</translation>
</message>
<message>
<source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Avertissement : nous ne semblons pas être en accord complet avec nos pairs ! Vous pourriez avoir besoin d'effectuer une mise à niveau, ou d'autres nœuds du réseau pourraient avoir besoin d'effectuer une mise à niveau.</translation>
</message>
<message>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Avertissement : une erreur est survenue lors de la lecture de wallet.dat ! Toutes les clefs ont été lues correctement mais les données de transaction ou les entrées du carnet d'adresses sont peut-être incorrectes ou manquantes.</translation>
</message>
<message>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Avertissement : wallet.dat corrompu, données récupérées ! Le fichier wallet.dat original a été enregistré en tant que wallet.{timestamp}.bak dans %s ; si votre solde ou transactions sont incorrects vous devriez effectuer une restauration depuis une sauvegarde.</translation>
</message>
<message>
<source>Whitelist peers connecting from the given netmask or IP address. Can be specified multiple times.</source>
<translation>Pairs de la liste blanche se connectant à partir du masque réseau ou de l'IP donné. Peut être spécifié plusieurs fois.</translation>
</message>
<message>
<source>(default: 1)</source>
<translation>(par défaut : 1)</translation>
</message>
<message>
<source><category> can be:</source>
<translation><category> peut être :</translation>
</message>
<message>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Tenter de récupérer les clefs privées d'un wallet.dat corrompu</translation>
</message>
<message>
<source>Block creation options:</source>
<translation>Options de création de bloc :</translation>
</message>
<message>
<source>Connect only to the specified node(s)</source>
<translation>Ne se connecter qu'au(x) nœud(s) spécifié(s)</translation>
</message>
<message>
<source>Connection options:</source>
<translation>Options de connexion :</translation>
</message>
<message>
<source>Corrupted block database detected</source>
<translation>Base corrompue de données des blocs détectée</translation>
</message>
<message>
<source>Debugging/Testing options:</source>
<translation>Options de test/de débogage :</translation>
</message>
<message>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Découvrir sa propre adresse IP (par défaut : 1 lors de l'écoute et si aucun -externalip)</translation>
</message>
<message>
<source>Do not load the wallet and disable wallet RPC calls</source>
<translation>Ne pas charger le portefeuille et désactiver les appels RPC</translation>
</message>
<message>
<source>Do you want to rebuild the block database now?</source>
<translation>Voulez-vous reconstruire la base de données des blocs maintenant ?</translation>
</message>
<message>
<source>Error initializing block database</source>
<translation>Erreur lors de l'initialisation de la base de données des blocs</translation>
</message>
<message>
<source>Error initializing wallet database environment %s!</source>
<translation>Erreur lors de l'initialisation de l'environnement de la base de données du portefeuille %s !</translation>
</message>
<message>
<source>Error loading block database</source>
<translation>Erreur du chargement de la base de données des blocs</translation>
</message>
<message>
<source>Error opening block database</source>
<translation>Erreur lors de l'ouverture de la base de données des blocs</translation>
</message>
<message>
<source>Error: A fatal internal error occured, see debug.log for details</source>
<translation>Erreur : une erreur interne fatale s'est produite. Voir debug.log pour des détails</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Erreur : l'espace disque est faible !</translation>
</message>
<message>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Échec de l'écoute sur un port quelconque. Utilisez -listen=0 si vous voulez ceci.</translation>
</message>
<message>
<source>If <category> is not supplied, output all debugging information.</source>
<translation>Si <category> n'est pas indiqué, extraire toutes les données de débogage.</translation>
</message>
<message>
<source>Importing...</source>
<translation>Importation...</translation>
</message>
<message>
<source>Incorrect or no genesis block found. Wrong datadir for network?</source>
<translation>Bloc de genèse incorrect ou introuvable. Mauvais répertoire de données pour le réseau ?</translation>
</message>
<message>
<source>Invalid -onion address: '%s'</source>
<translation>Adresse -onion invalide : « %s »</translation>
</message>
<message>
<source>Not enough file descriptors available.</source>
<translation>Pas assez de descripteurs de fichiers proposés.</translation>
</message>
<message>
<source>Only connect to nodes in network <net> (ipv4, ipv6 or onion)</source>
<translation>Seulement se connecter aux nœuds du réseau <net> (IPv4, IPv6 ou oignon)</translation>
</message>
<message>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Reconstruire l'index de la chaîne de blocs à partir des fichiers blk000??.dat courants</translation>
</message>
<message>
<source>Set database cache size in megabytes (%d to %d, default: %d)</source>
        <translation>Définir la taille du cache de la base de données en mégaoctets (%d à %d, par défaut : %d)</translation>
</message>
<message>
<source>Set maximum block size in bytes (default: %d)</source>
        <translation>Définir la taille maximale de bloc en octets (par défaut : %d)</translation>
</message>
<message>
<source>Specify wallet file (within data directory)</source>
<translation>Spécifiez le fichier de portefeuille (dans le répertoire de données)</translation>
</message>
<message>
<source>This is intended for regression testing tools and app development.</source>
<translation>Ceci est à l'intention des outils de test de régression et du développement applicatif.</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: %u)</source>
<translation>Utiliser l'UPnP pour mapper le port d'écoute (par défaut : %u)</translation>
</message>
<message>
<source>Verifying blocks...</source>
<translation>Vérification des blocs en cours...</translation>
</message>
<message>
<source>Verifying wallet...</source>
<translation>Vérification du portefeuille en cours...</translation>
</message>
<message>
<source>Wallet %s resides outside data directory %s</source>
<translation>Le portefeuille %s réside en dehors du répertoire de données %s</translation>
</message>
<message>
<source>Wallet options:</source>
<translation>Options du portefeuille :</translation>
</message>
<message>
<source>You need to rebuild the database using -reindex to change -txindex</source>
<translation>Vous devez reconstruire la base de données en utilisant -reindex afin de modifier -txindex</translation>
</message>
<message>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Importe des blocs depuis un fichier blk000??.dat externe</translation>
</message>
<message>
<source>Allow JSON-RPC connections from specified source. Valid for <ip> are a single IP (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). This option can be specified multiple times</source>
        <translation>Permettre les connexions JSON-RPC de sources spécifiques. Valide pour &lt;ip&gt; qui sont une IP simple (p. ex. 1.2.3.4), un réseau/masque réseau (p. ex. 1.2.3.4/255.255.255.0) ou un réseau/CIDR (p. ex. 1.2.3.4/24). Cette option peut être spécifiée plusieurs fois</translation>
</message>
<message>
<source>An error occurred while setting up the RPC address %s port %u for listening: %s</source>
<translation>Une erreur est survenue lors de la mise en place de l'adresse %s port %u d'écoute RPC : %s</translation>
</message>
<message>
<source>Bind to given address and whitelist peers connecting to it. Use [host]:port notation for IPv6</source>
        <translation>Se lier à l'adresse donnée et mettre en liste blanche les pairs s'y connectant. Utiliser la notation [host]:port pour l'IPv6</translation>
</message>
<message>
<source>Bind to given address to listen for JSON-RPC connections. Use [host]:port notation for IPv6. This option can be specified multiple times (default: bind to all interfaces)</source>
<translation>Se lier à l'adresse donnée pour écouter des connexions JSON-RPC. Utiliser la notation [host]:port pour l'IPv6. Cette option peut être spécifiée plusieurs fois (par défaut : se lier à toutes les interfaces)</translation>
</message>
<message>
<source>Cannot obtain a lock on data directory %s. Sequence is probably already running.</source>
<translation>Impossible d’obtenir un verrou sur le répertoire de données %s. Sequence fonctionne probablement déjà.</translation>
</message>
<message>
<source>Continuously rate-limit free transactions to <n>*1000 bytes per minute (default:%u)</source>
<translation>Limiter continuellement les transactions gratuites à <n>*1000 octets par minute (par défaut : %u)</translation>
</message>
<message>
<source>Create new files with system default permissions, instead of umask 077 (only effective with disabled wallet functionality)</source>
<translation>Créer de nouveaux fichiers avec les permissions système par défaut, au lieu de umask 077 (effectif seulement avec la fonction du portefeuille désactivée)</translation>
</message>
<message>
<source>Error: Listening for incoming connections failed (listen returned error %s)</source>
<translation>Erreur : l'écoute des connexions entrantes a échoué (l'écoute a retourné l'erreur %s)</translation>
</message>
<message>
<source>Error: Unsupported argument -socks found. Setting SOCKS version isn't possible anymore, only SOCKS5 proxies are supported.</source>
<translation>Erreur : l'argument non pris en charge -socks a été trouvé. Il n'est plus possible de définir la version de SOCKS, seuls les serveurs mandataires SOCKS5 sont pris en charge.</translation>
</message>
<message>
<source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source>
<translation>Exécuter une commande lorsqu'une alerte pertinente est reçue ou si nous voyons une bifurcation vraiment étendue (%s dans la commande est remplacé par le message)</translation>
</message>
<message>
<source>Fees (in SEQ/Kb) smaller than this are considered zero fee for relaying (default: %s)</source>
<translation>Les frais (en SEQ/Ko) inférieurs à ce seuil sont considérés comme étant nuls pour le relayage (par défaut : %s)</translation>
</message>
<message>
<source>Fees (in SEQ/Kb) smaller than this are considered zero fee for transaction creation (default: %s)</source>
<translation>Les frais (en SEQ/Ko) inférieurs à ce seuil sont considérés comme étant nuls pour la création de transactions (par défaut : %s)</translation>
</message>
<message>
<source>If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)</source>
<translation>Si paytxfee n'est pas défini, inclure suffisamment de frais afin que les transactions commencent la confirmation en moyenne avant n blocs (par défaut : %u)</translation>
</message>
<message>
<source>Invalid amount for -maxtxfee=<amount>: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</source>
<translation>Montant invalide pour -maxtxfee=<amount> : « %s » (doit être au moins les frais minrelay de %s pour prévenir le blocage des transactions)</translation>
</message>
<message>
<source>Maximum size of data in data carrier transactions we relay and mine (default: %u)</source>
<translation>Quantité maximale de données dans les transactions du porteur de données que nous relayons et minons (par défaut : %u)</translation>
</message>
<message>
<source>Maximum total fees to use in a single wallet transaction, setting too low may abort large transactions (default: %s)</source>
<translation>Total maximal des frais à utiliser en une seule transaction de portefeuille. Le définir trop bas pourrait interrompre les grosses transactions (par défaut : %s)</translation>
</message>
<message>
<source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source>
<translation>Demander les adresses des pairs par recherche DNS si l'on manque d'adresses (par défaut : 1 sauf si -connect)</translation>
</message>
<message>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source>
<translation>Définir la taille maximale en octets des transactions prioritaires/à frais modiques (par défaut : %d)</translation>
</message>
<message>
<source>Set the number of threads for coin generation if enabled (-1 = all cores, default: %d)</source>
<translation>Définir le nombre de fils de génération de pièces, si elle est activée (-1 = tous les cœurs, par défaut : %d)</translation>
</message>
<message>
<source>This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit <https://www.openssl.org/> and cryptographic software written by Eric Young and UPnP software written by Thomas Bernard.</source>
<translation>Ce produit comprend des logiciels développés par le projet OpenSSL pour être utilisés dans la boîte à outils OpenSSL <https://www.openssl.org/> et un logiciel cryptographique écrit par Eric Young, ainsi qu'un logiciel UPnP écrit par Thomas Bernard.</translation>
</message>
<message>
<source>To use sequenced, or the -server option to sequence-qt, you must set an rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=sequencerpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Sequence Alert" [email protected]
</source>
<translation>Pour utiliser sequenced, ou l'option -server de sequence-qt, vous devez définir un mot de passe rpc dans le fichier de configuration :
%s
Il est recommandé d'utiliser le mot de passe aléatoire suivant :
rpcuser=sequencerpc
rpcpassword=%s
(vous n'avez pas à mémoriser ce mot de passe)
Le nom d'utilisateur et le mot de passe NE DOIVENT PAS être identiques.
Si le fichier n'existe pas, créez-le avec la permission lecture-seule-par-le-propriétaire.
Il est aussi recommandé de définir alertnotify afin que les problèmes vous soient signalés ;
par exemple : alertnotify=echo %%s | mail -s "Alerte Sequence" [email protected]
</translation>
</message>
<message>
<source>Warning: -maxtxfee is set very high! Fees this large could be paid on a single transaction.</source>
        <translation>Avertissement : -maxtxfee est défini très haut ! Des frais aussi élevés pourraient être payés sur une seule transaction.</translation>
</message>
<message>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Sequence will not work properly.</source>
<translation>Avertissement : veuillez vérifier que l'heure et la date de votre ordinateur sont correctes ! Si votre horloge n'est pas à l'heure, Sequence ne fonctionnera pas correctement.</translation>
</message>
<message>
<source>Whitelisted peers cannot be DoS banned and their transactions are always relayed, even if they are already in the mempool, useful e.g. for a gateway</source>
<translation>Les pairs de la liste blanche ne peuvent pas être bannis DoS et leurs transactions sont toujours relayées, même si elles sont déjà dans le mempool, utile p. ex. pour une passerelle</translation>
</message>
<message>
<source>Accept public REST requests (default: %u)</source>
<translation>Accepter les demandes REST publiques (par défaut : %u)</translation>
</message>
<message>
<source>Cannot resolve -whitebind address: '%s'</source>
<translation>Impossible de résoudre l'adresse -whitebind : « %s »</translation>
</message>
<message>
<source>Connect through SOCKS5 proxy</source>
<translation>Se connecter par un mandataire SOCKS5</translation>
</message>
<message>
<source>Copyright (c) 2016-%i Duality Blockchain Solutions Developers</source>
<translation>Copyright © 2013-%i Les développeurs de Sequence</translation>
</message>
<message>
<source>Could not parse -rpcbind value %s as network address</source>
<translation>Impossible d'analyser la valeur -rpcbind %s comme adresse réseau</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet requires newer version of Sequence</source>
<translation>Erreur lors du chargement de wallet.dat : le portefeuille exige une version plus récente de Sequence</translation>
</message>
<message>
<source>Error reading from database, shutting down.</source>
<translation>Erreur de lecture de la base de données, fermeture en cours.</translation>
</message>
<message>
<source>Error: Unsupported argument -tor found, use -onion.</source>
<translation>Erreur : argument non pris en charge -tor trouvé, utiliser -onion.</translation>
</message>
<message>
<source>Fee (in SEQ/Kb) to add to transactions you send (default: %s)</source>
<translation>Les frais (en SEQ/ko) à ajouter aux transactions que vous envoyez (par défaut : %s)</translation>
</message>
<message>
<source>Information</source>
<translation>Informations</translation>
</message>
<message>
<source>Initialization sanity check failed. Sequence is shutting down.</source>
<translation>L'initialisation du test de cohérence a échoué. Sequence est en cours de fermeture. </translation>
</message>
<message>
<source>Invalid amount for -maxtxfee=<amount>: '%s'</source>
<translation>Montant invalide pour -maxtxfee=<amount> : « %s »</translation>
</message>
<message>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>Montant invalide pour -minrelayfee=<montant> : « %s »</translation>
</message>
<message>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>Montant invalide pour -mintxfee=<montant> : « %s »</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s' (must be at least %s)</source>
<translation>Montant invalide pour -paytxfee=<montant> : « %s » (doit être au moins %s)</translation>
</message>
<message>
<source>Invalid netmask specified in -whitelist: '%s'</source>
<translation>Masque réseau invalide spécifié dans -whitelist : « %s »</translation>
</message>
<message>
<source>Keep at most <n> unconnectable transactions in memory (default: %u)</source>
<translation>Garder au plus <n> transactions non connectables en mémoire (par défaut : %u)</translation>
</message>
<message>
<source>Need to specify a port with -whitebind: '%s'</source>
<translation>Un port doit être spécifié avec -whitebind : « %s »</translation>
</message>
<message>
<source>Node relay options:</source>
<translation>Options de relais du nœud :</translation>
</message>
<message>
<source>RPC SSL options: (see the sequence Wiki for SSL setup instructions)</source>
<translation>Options RPC SSL : (voir le wiki Sequence pour les instructions de configuration de SSL)</translation>
</message>
<message>
<source>RPC server options:</source>
<translation>Options du serveur RPC :</translation>
</message>
<message>
<source>RPC support for HTTP persistent connections (default: %d)</source>
<translation>Prise en charge de RPC pour les connexions persistantes HTTP (par défaut : %d)</translation>
</message>
<message>
<source>Randomly drop 1 of every <n> network messages</source>
<translation>Abandonner aléatoirement 1 message du réseau sur <n></translation>
</message>
<message>
<source>Randomly fuzz 1 of every <n> network messages</source>
<translation>Tester aléatoirement 1 message du réseau sur <n></translation>
</message>
<message>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Envoyer les informations de débogage/trace à la console au lieu du fichier debug.log</translation>
</message>
<message>
<source>Send transactions as zero-fee transactions if possible (default: %u)</source>
<translation>Envoyer si possible les transactions comme étant sans frais (par défaut : %u)</translation>
</message>
<message>
<source>Show all debugging options (usage: --help -help-debug)</source>
<translation>Montrer toutes les options de débogage (utilisation : --help --help-debug)</translation>
</message>
<message>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Réduire le fichier debug.log lors du démarrage du client (par défaut : 1 lorsque -debug n'est pas présent)</translation>
</message>
<message>
<source>Signing transaction failed</source>
<translation>La signature de la transaction a échoué</translation>
</message>
<message>
<source>This is experimental software.</source>
<translation>Ceci est un logiciel expérimental.</translation>
</message>
<message>
<source>Transaction amount too small</source>
<translation>Montant de la transaction trop bas</translation>
</message>
<message>
<source>Transaction amounts must be positive</source>
<translation>Les montants de transaction doivent être positifs</translation>
</message>
<message>
<source>Transaction too large for fee policy</source>
<translation>La transaction est trop grosse pour la politique de frais</translation>
</message>
<message>
<source>Transaction too large</source>
<translation>Transaction trop volumineuse</translation>
</message>
<message>
<source>Unable to bind to %s on this computer (bind returned error %s)</source>
<translation>Impossible de se lier à %s sur cet ordinateur (bind a retourné l'erreur %s)</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Utiliser l'UPnP pour mapper le port d'écoute (par défaut : 1 lors de l'écoute)</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
<translation>Nom d'utilisateur pour les connexions JSON-RPC</translation>
</message>
<message>
<source>Wallet needed to be rewritten: restart Sequence to complete</source>
<translation>Le portefeuille avait besoin d'être réécrit : veuillez redémarrer Sequence pour terminer</translation>
</message>
<message>
<source>Warning</source>
<translation>Avertissement</translation>
</message>
<message>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Avertissement : cette version est obsolète, une mise à niveau est nécessaire !</translation>
</message>
<message>
<source>Warning: Unsupported argument -benchmark ignored, use -debug=bench.</source>
<translation>Avertissement : l'argument -benchmark non pris en charge a été ignoré, utiliser -debug=bench.</translation>
</message>
<message>
<source>Warning: Unsupported argument -debugnet ignored, use -debug=net.</source>
<translation>Avertissement : l'argument -debugnet non pris en charge a été ignoré, utiliser -debug=net.</translation>
</message>
<message>
<source>Zapping all transactions from wallet...</source>
<translation>Supprimer toutes les transactions du portefeuille...</translation>
</message>
<message>
<source>on startup</source>
<translation>au démarrage</translation>
</message>
<message>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat corrompu, la récupération a échoué</translation>
</message>
<message>
<source>Password for JSON-RPC connections</source>
<translation>Mot de passe pour les connexions JSON-RPC</translation>
</message>
<message>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Exécuter la commande lorsque le meilleur bloc change (%s dans cmd est remplacé par le hachage du bloc)</translation>
</message>
<message>
<source>Upgrade wallet to latest format</source>
<translation>Mettre à niveau le portefeuille vers le format le plus récent</translation>
</message>
<message>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Réanalyser la chaîne de blocs pour les transactions de portefeuille manquantes</translation>
</message>
<message>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Utiliser OpenSSL (https) pour les connexions JSON-RPC</translation>
</message>
<message>
<source>This help message</source>
<translation>Ce message d'aide</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Autoriser les recherches DNS pour -addnode, -seednode et -connect</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>Chargement des adresses…</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Erreur lors du chargement de wallet.dat : portefeuille corrompu</translation>
</message>
<message>
<source>(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)</source>
<translation>(1 = conserver les métadonnées de transmission, par ex. les informations du propriétaire du compte et de la demande de paiement, 2 = abandonner les métadonnées de transmission)</translation>
</message>
<message>
<source>Flush database activity from memory pool to disk log every <n> megabytes (default: %u)</source>
<translation>Purger l’activité de la base de données de la zone de mémoire vers le journal sur disque tous les <n> mégaoctets (par défaut : %u)</translation>
</message>
<message>
<source>How thorough the block verification of -checkblocks is (0-4, default: %u)</source>
<translation>Degré de profondeur de la vérification des blocs -checkblocks (0-4, par défaut : %u)</translation>
</message>
<message>
<source>Log transaction priority and fee per kB when mining blocks (default: %u)</source>
<translation>Lors du minage, journaliser la priorité des transactions et les frais par ko (par défaut : %u) </translation>
</message>
<message>
<source>Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</source>
<translation>Maintenir un index complet des transactions, utilisé par l'appel RPC getrawtransaction (obtenir la transaction brute) (par défaut : %u)</translation>
</message>
<message>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: %u)</source>
<translation>Délai en secondes de refus de reconnexion pour les pairs présentant un mauvais comportement (par défaut : %u)</translation>
</message>
<message>
<source>Output debugging information (default: %u, supplying <category> is optional)</source>
<translation>Extraire les informations de débogage (par défaut : %u, fournir <category> est optionnel)</translation>
</message>
<message>
<source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)</source>
<translation>Utiliser un serveur mandataire SOCKS5 séparé pour atteindre les pairs par les services cachés de Tor (par défaut : %s)</translation>
</message>
<message>
<source>(default: %s)</source>
<translation>(par défaut : %s)</translation>
</message>
<message>
<source>Acceptable ciphers (default: %s)</source>
<translation>Chiffrements acceptables (par défaut : %s)</translation>
</message>
<message>
<source>Always query for peer addresses via DNS lookup (default: %u)</source>
<translation>Toujours demander les adresses des pairs par recherche DNS (par défaut : %u)</translation>
</message>
<message>
<source>Disable safemode, override a real safe mode event (default: %u)</source>
<translation>Désactiver le mode sans échec, passer outre un événement sans échec réel (par défaut : %u)</translation>
</message>
<message>
<source>Error loading wallet.dat</source>
<translation>Erreur lors du chargement de wallet.dat</translation>
</message>
<message>
<source>Force safe mode (default: %u)</source>
<translation>Forcer le mode sans échec (par défaut : %u)</translation>
</message>
<message>
<source>Generate coins (default: %u)</source>
<translation>Générer des pièces (défaut : %u)</translation>
</message>
<message>
<source>How many blocks to check at startup (default: %u, 0 = all)</source>
<translation>Nombre de blocs à vérifier au démarrage (par défaut : %u, 0 = tous)</translation>
</message>
<message>
<source>Include IP addresses in debug output (default: %u)</source>
<translation>Inclure les adresses IP à la sortie de débogage (par défaut : %u)</translation>
</message>
<message>
<source>Invalid -proxy address: '%s'</source>
<translation>Adresse -proxy invalide : « %s »</translation>
</message>
<message>
<source>Limit size of signature cache to <n> entries (default: %u)</source>
<translation>Limiter la taille du cache des signatures à <n> entrées (par défaut : %u)</translation>
</message>
<message>
<source>Listen for JSON-RPC connections on <port> (default: %u or testnet: %u)</source>
<translation>Écouter les connexions JSON-RPC sur <port> (par défaut : %u ou tesnet : %u)</translation>
</message>
<message>
<source>Listen for connections on <port> (default: %u or testnet: %u)</source>
<translation>Écouter les connexions sur <port> (par défaut : %u ou tesnet : %u)</translation>
</message>
<message>
<source>Maintain at most <n> connections to peers (default: %u)</source>
<translation>Garder au plus <n> connexions avec les pairs (par défaut : %u)</translation>
</message>
<message>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: %u)</source>
<translation>Tampon maximal de réception par connexion, <n>*1000 octets (par défaut : %u)</translation>
</message>
<message>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: %u)</source>
<translation>Tampon maximal d'envoi par connexion », <n>*1000 octets (par défaut : %u)</translation>
</message>
<message>
<source>Only accept block chain matching built-in checkpoints (default: %u)</source>
<translation>N'accepter qu'une chaîne de blocs correspondant aux points de vérification intégrés (par défaut : %u)</translation>
</message>
<message>
<source>Prepend debug output with timestamp (default: %u)</source>
<translation>Ajouter l'horodatage au début de la sortie de débogage (par défaut : %u)</translation>
</message>
<message>
<source>Relay and mine data carrier transactions (default: %u)</source>
<translation>Relayer et miner les transactions du porteur de données (par défaut : %u)</translation>
</message>
<message>
<source>Relay non-P2SH multisig (default: %u)</source>
<translation>Relayer les multisignatures non-P2SH (par défaut : %u)</translation>
</message>
<message>
<source>Run a thread to flush wallet periodically (default: %u)</source>
<translation>Exécuter une tâche pour purger le portefeuille périodiquement (par défaut : %u) </translation>
</message>
<message>
<source>Set key pool size to <n> (default: %u)</source>
<translation>Définir la taille de la réserve de clefs à <n> (par défaut : %u)</translation>
</message>
<message>
<source>Set minimum block size in bytes (default: %u)</source>
<translation>Définir la taille de bloc minimale en octets (par défaut : %u)</translation>
</message>
<message>
<source>Sets the DB_PRIVATE flag in the wallet db environment (default: %u)</source>
<translation>Définit le drapeau DB_PRIVATE dans l'environnement de la BD du portefeuille (par défaut : %u)</translation>
</message>
<message>
<source>Specify configuration file (default: %s)</source>
<translation>Spécifier le fichier de configuration (par défaut : %s)</translation>
</message>
<message>
<source>Specify connection timeout in milliseconds (minimum: 1, default: %d)</source>
<translation>Spécifier le délai d'expiration de la connexion en millisecondes (minimum : 1, par défaut : %d)</translation>
</message>
<message>
<source>Specify pid file (default: %s)</source>
<translation>Spécifier le fichier pid (par défaut : %s)</translation>
</message>
<message>
<source>Spend unconfirmed change when sending transactions (default: %u)</source>
<translation>Dépenser la monnaie non confirmée lors de l'envoi de transactions (par défaut : %u)</translation>
</message>
<message>
<source>Stop running after importing blocks from disk (default: %u)</source>
<translation>Cesser l'exécution après l'importation des blocs du disque (par défaut : %u)</translation>
</message>
<message>
<source>Threshold for disconnecting misbehaving peers (default: %u)</source>
<translation>Seuil de déconnexion des pairs présentant un mauvais comportement (par défaut : %u)</translation>
</message>
<message>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Réseau inconnu spécifié sur -onlynet : « %s »</translation>
</message>
<message>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Impossible de résoudre l'adresse -bind : « %s »</translation>
</message>
<message>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Impossible de résoudre l'adresse -externalip : « %s »</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Montant invalide pour -paytxfee=<montant> : « %s »</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Fonds insuffisants</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>Chargement de l’index des blocs…</translation>
</message>
<message>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Ajouter un nœud auquel se connecter et tenter de garder la connexion ouverte</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>Chargement du portefeuille…</translation>
</message>
<message>
<source>Cannot downgrade wallet</source>
<translation>Impossible de revenir à une version inférieure du portefeuille</translation>
</message>
<message>
<source>Cannot write default address</source>
<translation>Impossible d'écrire l'adresse par défaut</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>Nouvelle analyse…</translation>
</message>
<message>
<source>Done loading</source>
<translation>Chargement terminé</translation>
</message>
<message>
<source>Error</source>
<translation>Erreur</translation>
</message>
</context>
</TS><|fim▁end|> | </message> |
<|file_name|>enums.ts<|end_file_name|><|fim▁begin|>export enum MessageType {
Command = 0x41,
CommandReply = 0x42,
Get = 0x43,
GetReply = 0x44,
Set = 0x45,<|fim▁hole|>export enum PowerModes {
On = 0x0001,
Standby = 0x0002,
Suspend = 0x0003,
Off = 0x0004,
}
export const MONITOR_ID_ALL = '*'
export type MonitorId = '*' | string<|fim▁end|> | SetReply = 0x46,
}
|
<|file_name|>ezmenu.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# I found this file inside Super Mario Bros python
# written by HJ https://sourceforge.net/projects/supermariobrosp/
# the complete work is licensed under GPL3 although I can not determine# license of this file
# maybe this is the original author, we can contact him/her http://www.pygame.org/project-EzMeNu-855-.html
import pygame
class EzMenu:
def __init__(self, *options):
self.options = options
self.x = 0
self.y = 0
self.font = pygame.font.Font(None, 32)
self.option = 0
self.width = 1
self.color = [0, 0, 0]
self.hcolor = [255, 0, 0]
self.height = len(self.options)*self.font.get_height()
for o in self.options:
text = o[0]
ren = self.font.render(text, 2, (0, 0, 0))
if ren.get_width() > self.width:
self.width = ren.get_width()
def draw(self, surface):
i=0
for o in self.options:
if i==self.option:<|fim▁hole|> else:
clr = self.color
text = o[0]
ren = self.font.render(text, 2, clr)
if ren.get_width() > self.width:
self.width = ren.get_width()
surface.blit(ren, ((self.x+self.width/2) - ren.get_width()/2, self.y + i*(self.font.get_height()+4)))
i+=1
def update(self, events):
for e in events:
if e.type == pygame.KEYDOWN:
if e.key == pygame.K_DOWN:
self.option += 1
if e.key == pygame.K_UP:
self.option -= 1
if e.key == pygame.K_RETURN:
self.options[self.option][1]()
if self.option > len(self.options)-1:
self.option = 0
if self.option < 0:
self.option = len(self.options)-1
def set_pos(self, x, y):
self.x = x
self.y = y
def set_font(self, font):
self.font = font
def set_highlight_color(self, color):
self.hcolor = color
def set_normal_color(self, color):
self.color = color
def center_at(self, x, y):
self.x = x-(self.width/2)
self.y = y-(self.height/2)<|fim▁end|> | clr = self.hcolor |
<|file_name|>request.go<|end_file_name|><|fim▁begin|>package pusher
import (
"bytes"
"errors"
"fmt"
"io/ioutil"
"net/http"
"strconv"
)
const (
contentTypeHeaderKey = "Content-Type"
contentTypeHeaderValue = "application/json"
)
var headers = map[string]string{
"Content-Type": "application/json",
"X-Pusher-Library": fmt.Sprintf("%s %s", libraryName, libraryVersion),
}
// change timeout to time.Duration
func request(client *http.Client, method, url string, body []byte) ([]byte, error) {
req, err := http.NewRequest(method, url, bytes.NewBuffer(body))
for key, val := range headers {
req.Header.Set(http.CanonicalHeaderKey(key), val)
}
resp, err := client.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
return processResponse(resp)
}
func processResponse(response *http.Response) ([]byte, error) {
responseBody, err := ioutil.ReadAll(response.Body)
if err != nil {<|fim▁hole|> }
message := fmt.Sprintf("Status Code: %s - %s", strconv.Itoa(response.StatusCode), string(responseBody))
err = errors.New(message)
return nil, err
}<|fim▁end|> | return nil, err
}
if response.StatusCode >= 200 && response.StatusCode < 300 {
return responseBody, nil |
<|file_name|>run-in-foreground.js<|end_file_name|><|fim▁begin|>WScript.Echo("** The end of copyright notices **");
function quote(arg) {
arg = arg.replace(/"/g, '""');
return '"' + arg + '"';
}
var app = WScript.Arguments(0);
var args = [];
for (var i = 1; i < WScript.Arguments.Count(); ++i) {
args.push(quote(WScript.Arguments(i)));
}
var shell = new ActiveXObject("WScript.Shell");
var cmd = quote(app) + " " + args.join(" ");
shell.run(cmd);
WScript.Sleep(200);
// attempt to activate the app.
// MSDN about Shell.AppActivate:
//
// "In determining which application to activate, the specified
// title is compared to the title string of each running
// application. If no exact match exists, any application whose
// title string begins with title is activated. If an application
// still cannot be found, any application whose title string ends
// with title is activated. If more than one instance of the
// application named by title exists, one instance is arbitrarily
// activated."
// taking a wild guess about the *title* of the window here:
var exename = app.replace(/^.*[\\\/]([^\\\/]+)$/, "$1").replace(/\.[^.]*$/, "");<|fim▁hole|>shell.AppActivate(exename);<|fim▁end|> | |
<|file_name|>SockshareCom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from module.plugins.internal.DeadHoster import DeadHoster, create_getInfo
class SockshareCom(DeadHoster):<|fim▁hole|>
__pattern__ = r'http://(?:www\.)?sockshare\.com/(mobile/)?(file|embed)/(?P<ID>\w+)'
__description__ = """Sockshare.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("jeix", "[email protected]"),
("stickell", "[email protected]"),
("Walter Purcaro", "[email protected]")]
getInfo = create_getInfo(SockshareCom)<|fim▁end|> | __name__ = "SockshareCom"
__type__ = "hoster"
__version__ = "0.05" |
<|file_name|>npx.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from util import nodeenv_delegate
from setup import setup
if __name__ == "__main__":<|fim▁hole|> nodeenv_delegate("npx")<|fim▁end|> | setup(skip_dependencies=True) |
<|file_name|>Greeting.java<|end_file_name|><|fim▁begin|>package com.company;
import java.util.Scanner;
public class Greeting {
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
<|fim▁hole|> int age = Integer.parseInt(scanner.nextLine());
System.out.printf("Hello, %s %s. You are %d years old.", firstName, lastName, age);
}
}<|fim▁end|> | String firstName = scanner.nextLine();
String lastName = scanner.nextLine(); |
<|file_name|>require.tsx<|end_file_name|><|fim▁begin|>import x from '@shortcut2/export';
<|fim▁hole|>}<|fim▁end|> | export default function(props) {
return <h1>{props.children}</h1> |
<|file_name|>csvReader.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""
a csv trace reader
Author: Jason Yang <[email protected]> 2016/06
"""
import string
from PyMimircache.const import ALLOW_C_MIMIRCACHE, INSTALL_PHASE
from PyMimircache.utils.printing import *
if ALLOW_C_MIMIRCACHE and not INSTALL_PHASE:
import PyMimircache.CMimircache.CacheReader as c_cacheReader
from PyMimircache.cacheReader.abstractReader import AbstractReader
class CsvReader(AbstractReader):
"""
CsvReader class
"""
all = ["read_one_req", "read_complete_req", "lines_dict",
"lines", "read_time_req", "reset", "copy", "get_params"]
def __init__(self, file_loc,
data_type='c',
init_params=None,
block_unit_size=0,
disk_sector_size=0,
open_c_reader=True,
**kwargs):
"""
:param file_loc: location of the file
:param data_type: type of data, can be "l" for int/long, "c" for string
:param init_params: the init_params for opening csv
:param block_unit_size: block size for storage system, 0 when disabled
:param disk_sector_size: size of disk sector
:param open_c_reader: bool for whether open reader in C backend
:param kwargs: not used now
"""
<|fim▁hole|> assert "label" in init_params, "please provide label for csv reader"
self.trace_file = open(file_loc, 'rb')
# self.trace_file = open(file_loc, 'r', encoding='utf-8', errors='ignore')
self.init_params = init_params
self.label_column = init_params['label']
self.time_column = init_params.get("real_time", )
self.size_column = init_params.get("size", )
if self.time_column != -1:
self.support_real_time = True
if self.size_column != -1:
self.support_size = True
if block_unit_size != 0:
assert "size" in init_params, "please provide size_column option to consider request size"
self.header_bool = init_params.get('header', )
self.delimiter = init_params.get('delimiter', ",")
if "delimiter" not in init_params:
INFO("open {} using default delimiter \",\" for CsvReader".format(file_loc))
if self.header_bool:
self.headers = [i.strip(string.whitespace) for i in
self.trace_file.readline().decode().split(self.delimiter)]
# self.trace_file.readline()
if ALLOW_C_MIMIRCACHE and open_c_reader:
self.c_reader = c_cacheReader.setup_reader(file_loc, 'c', data_type=data_type,
block_unit_size=block_unit_size,
disk_sector_size=disk_sector_size,
init_params=init_params)
def read_one_req(self):
"""
read one request, return the lbn/objID
:return:
"""
super().read_one_req()
line = self.trace_file.readline().decode('utf-8', 'ignore')
while line and len(line.strip()) == 0:
line = self.trace_file.readline().decode()
if line:
ret = line.split(self.delimiter)[self.label_column - 1].strip()
if self.data_type == 'l':
ret = int(ret)
if self.block_unit_size != 0 and self.disk_sector_size != 0:
ret = ret * self.disk_sector_size // self.block_unit_size
return ret
else:
return None
def read_complete_req(self):
"""
read the complete line, including request and its all related info
:return: a list of all info of the request
"""
super().read_one_req()
line = self.trace_file.readline().decode()
while line and len(line.strip()) == 0:
line = self.trace_file.readline().decode()
if line:
line_split = line.strip().split(self.delimiter)
if self.block_unit_size != 0 and self.disk_sector_size != 0:
line_split[self.label_column - 1] = line_split[self.label_column - 1] * \
self.disk_sector_size // self.block_unit_size
return line_split
else:
return None
def lines_dict(self):
"""
return a dict with column header->data
note this function does not convert lbn even if disk_sector_size and block_unit_size are set
:return:
"""
line = self.trace_file.readline().decode()
while line and len(line.strip()) == 0:
line = self.trace_file.readline().decode()
while line:
line_split = line.split(self.delimiter)
d = {}
if self.header_bool:
for i in range(len(self.headers)):
d[self.headers[i]] = line_split[i].strip(string.whitespace)
else:
for key, value in enumerate(line_split):
d[key] = value
line = self.trace_file.readline()
yield d
# raise StopIteration
def lines(self):
"""
a generator for reading all the information of current request/line
:return: a tuple of current request
"""
line = self.trace_file.readline().decode()
while line and len(line.strip()) == 0:
line = self.trace_file.readline().decode()
while line:
line_split = tuple(line.split(self.delimiter))
line = self.trace_file.readline()
yield line_split
# raise StopIteration
def read_time_req(self):
"""
return real_time information for the request in the form of (time, request)
:return:
"""
super().read_one_req()
line = self.trace_file.readline().strip().decode()
while line and len(line.strip()) == 0:
line = self.trace_file.readline().decode()
if line:
line = line.split(self.delimiter)
try:
time = float(line[self.time_column - 1].strip())
lbn = line[self.label_column - 1].strip()
if self.data_type == 'l':
lbn = int(lbn)
if self.block_unit_size != 0 and self.disk_sector_size != 0:
lbn = lbn * self.disk_sector_size // self.block_unit_size
return time, lbn
except Exception as e:
print("ERROR csvReader reading data: {}, current line: {}".format(e, line))
else:
return None
def skip_n_req(self, n):
"""
skip N requests from current position
:param n: the number of requests to skip
"""
for i in range(n):
self.read_one_req()
def reset(self):
"""
reset reader to initial state
:return:
"""
super().reset()
if self.header_bool:
self.trace_file.readline()
def copy(self, open_c_reader=False):
"""
reader a deep copy of current reader with everything reset to initial state,
the returned reader should not interfere with current reader
:param open_c_reader: whether open_c_reader_or_not, default not open
:return: a copied reader
"""
return CsvReader(self.file_loc, self.data_type, self.init_params,
self.block_unit_size, self.disk_sector_size, open_c_reader, lock=self.lock)
def get_params(self):
"""
return all the parameters for this reader instance in a dictionary
:return: a dictionary containing all parameters
"""
return {
"file_loc": self.file_loc,
"init_params": self.init_params,
"data_type": self.data_type,
"block_unit_size": self.block_unit_size,
"disk_sector_size": self.disk_sector_size,
"open_c_reader": self.open_c_reader,
"lock": self.lock
}
def __next__(self): # Python 3
super().__next__()
element = self.read_one_req()
if element is not None:
return element
else:
raise StopIteration
def __repr__(self):
return "csvReader for trace {}".format(self.file_loc)<|fim▁end|> | super(CsvReader, self).__init__(file_loc, data_type, block_unit_size, disk_sector_size,
open_c_reader, kwargs.get("lock", None))
assert init_params is not None, "please provide init_param for csvReader" |
<|file_name|>sensor.py<|end_file_name|><|fim▁begin|>"""Support for Soma sensors."""
from datetime import timedelta
import logging
from requests import RequestException
from homeassistant.const import DEVICE_CLASS_BATTERY, PERCENTAGE
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from . import DEVICES, SomaEntity
from .const import API, DOMAIN
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=30)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Soma sensor platform."""
devices = hass.data[DOMAIN][DEVICES]
async_add_entities(
[SomaSensor(sensor, hass.data[DOMAIN][API]) for sensor in devices], True
)
class SomaSensor(SomaEntity, Entity):
"""Representation of a Soma cover device."""
<|fim▁hole|> return DEVICE_CLASS_BATTERY
@property
def name(self):
"""Return the name of the device."""
return self.device["name"] + " battery level"
@property
def state(self):
"""Return the state of the entity."""
return self.battery_state
@property
def unit_of_measurement(self):
"""Return the unit of measurement this sensor expresses itself in."""
return PERCENTAGE
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self):
    """Update the sensor with the latest data."""
    try:
        _LOGGER.debug("Soma Sensor Update")
        # The pysoma API is blocking, so run it in the executor.
        response = await self.hass.async_add_executor_job(
            self.api.get_battery_level, self.device["mac"]
        )
    except RequestException:
        _LOGGER.error("Connection to SOMA Connect failed")
        self.is_available = False
        return
    if response["result"] != "success":
        _LOGGER.error(
            "Unable to reach device %s (%s)", self.device["name"], response["msg"]
        )
        self.is_available = False
        return
    # https://support.somasmarthome.com/hc/en-us/articles/360026064234-HTTP-API
    # The firmware reports battery_level roughly in [360, 410] for 0-100%.
    # 360 is the minimum level needed to move the motor, so anything below
    # it counts as 0% and anything above 410 as 100%.
    raw_percent = round(2 * (response["battery_level"] - 360))
    self.battery_state = min(max(raw_percent, 0), 100)
    self.is_available = True
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES.""" |
<|file_name|>services.ts<|end_file_name|><|fim▁begin|>module app.teacherlists {
'use strict';
/**
 * $resource class exposing a custom `GetStudentList` action that returns
 * the student-list payload for a classroom.
 */
interface ITeacherListResourceClass<T> extends ng.resource.IResourceClass<ng.resource.IResource<T>> {
    GetStudentList(data: T): T;
}
/**
 * $resource class exposing a custom `RemoveStudent` action for removing a
 * student from a classroom.
 */
interface ITeacherRemoveStdResourceClass<T> extends ng.resource.IResourceClass<ng.resource.IResource<T>> {
    RemoveStudent(data: T): T;
}
export class TeacherListService {
<|fim▁hole|> private RemoveStudentsvc: ITeacherRemoveStdResourceClass<any>;
static $inject = ['appConfig', '$resource', 'app.shared.ClientUserProfileService'];
/**
 * Wires up the two $resource endpoints used by this service.
 * @param appConfig      supplies the endpoint URL templates
 * @param $resource      Angular $resource factory (DI)
 * @param userprofileSvc source of the current user's profile id
 */
constructor(appConfig: IAppConfig, private $resource: angular.resource.IResourceService, private userprofileSvc: app.shared.ClientUserProfileService) {
    // '@field' bindings pull URL template parameters from the request object.
    this.GetStudentListsvc = <ITeacherListResourceClass<any>>$resource(appConfig.TeacherListUrl, { 'userId': '@userId', 'classRoomId': '@classRoomId' });
    this.RemoveStudentsvc = <ITeacherRemoveStdResourceClass<any>>$resource(appConfig.TeacherRemoveStdUrl,
        { 'classRoomId': '@classRoomId', 'UserProfileId': '@UserProfileId', 'RemoveUserProfileId': '@RemoveUserProfileId' });
}
/**
 * Fetch the student list of a classroom on behalf of the current user.
 * @param classRoomId id of the classroom to query
 * @returns promise resolving with the query result
 */
public GetStudentList(classRoomId: string): ng.IPromise<any> {
    var userId = this.userprofileSvc.GetClientUserProfile().UserProfileId;
    return this.GetStudentListsvc.query(new GetStudentListRequest(userId, classRoomId)).$promise;
}
/**
 * Remove a student from a classroom on behalf of the current user.
 * @param classRoomId id of the classroom
 * @param removeId    profile id of the student to remove
 * @returns promise resolving when the removal request completes
 */
public RemoveStudent(classRoomId: string, removeId: string): ng.IPromise<any> {
    var userId = this.userprofileSvc.GetClientUserProfile().UserProfileId;
    return this.RemoveStudentsvc.save(new RemoveStudentRequest(classRoomId, userId, removeId)).$promise;
}
}
angular
.module('app.teacherlists')
.service('app.teacherlists.TeacherListService', TeacherListService);
}<|fim▁end|> | private GetStudentListsvc: ITeacherListResourceClass<any>; |
<|file_name|>arduino.rs<|end_file_name|><|fim▁begin|>extern crate serial;
extern crate byteorder;
use std::io::{Read, Write};
use std::thread;
use std::sync::mpsc::{Sender, SyncSender, Receiver};
use std::sync::mpsc;
use std::sync::{Arc, Mutex};
use self::byteorder::{WriteBytesExt, LittleEndian};
use event;
use jack_client;
const SETTINGS: serial::PortSettings = serial::PortSettings {
baud_rate: serial::Baud9600,
char_size: serial::Bits8,
parity: serial::ParityNone,
stop_bits: serial::Stop1,
flow_control: serial::FlowNone
};
/// One framed message of the Arduino serial protocol: a single-character
/// opcode followed by an opcode-specific payload.
struct Message {
    // Opcode identifying the message type ('r', 'x', 't', 's', 'l', ...).
    head: char,
    // Raw payload bytes; layout depends on `head`.
    data: Vec<u8>,
}
pub struct Handler {
msg_tx: SyncSender<Message>,
level_mutex: Arc<Mutex<jack_client::Levels>>,
jack_sample_rate: usize,
record_enabled: Arc<Mutex<bool>>
}
impl Handler {
pub fn new(port_path: &str,
event_queue: event::Queue,
jack_sample_rate: usize) -> (Handler, thread::JoinHandle<()>) {
println!("HAndler");
let mut port = match serial::open(port_path) {
Err(e) => panic!("Could not open serial port: {}", e),
Ok(p) => p
};
let (msg_tx, msg_rx)= mpsc::sync_channel::<Message>(0);
let level_mutex = Arc::new(Mutex::new([0.0; 4]));
let mut conn = Connection::new(port, event_queue, msg_rx, level_mutex.clone());
let thrd = thread:: spawn( move || { conn.event_loop(); } );
(Handler { msg_tx: msg_tx,
level_mutex: level_mutex,
jack_sample_rate: jack_sample_rate,
record_enabled: Arc::new(Mutex::new(false))
}, thrd)
}
pub fn show_recenabled(&self, enabled: bool) {
let mut old = self.record_enabled.lock().expect("Could not get the record enabled lock");
if enabled == *old {
return;
}
let msg = Message { head: 'r', data: vec![enabled as u8] };
self.msg_tx.send(msg);
*old = enabled;
}
pub fn xrun(&self) {
let msg = Message { head: 'x', data: vec![] };
self.msg_tx.send(msg);
}
fn level(&self, sig: jack_client::Levels) {
let mut levels = self.level_mutex.lock().expect("Could not get access to level mutex.");
for (l, s) in levels.iter_mut().zip(&sig) {
*l = l.max(*s);
}
}
fn transport_time(&self, ttime: i64) {
let seconds = (ttime/(self.jack_sample_rate as i64)) as u16;
let b0 = (seconds & 0b11111111) as u8;
let b1 = ((seconds >> 8) & 0b11111111) as u8;
let msg = Message {
head: 't',
data: vec![b0, b1]
};
self.msg_tx.send(msg);
}
fn transport_speed(&self, tspeed: f32) {
let mut msg = Message {
head: 's',
data: vec![]
};
msg.data.write_f32::<LittleEndian>(tspeed).unwrap();
self.msg_tx.send(msg);
}
}
impl event::Handler for Handler {
fn event(&self, ev: &event::Event) {
match *ev {
event::Event::Level(l) => self.level(l),
event::Event::ArdourTime(t) => self.transport_time(t),
event::Event::ArdourSpeed(s) => self.transport_speed(s),
event::Event::RecordEnabled(re) => self.show_recenabled(re),
event::Event::XRun => self.xrun(),
_ => {}
};
}
}
/// Owns the serial link to the Arduino together with the channels used to
/// exchange data with the rest of the application.
struct Connection {
    // Serial device the Arduino is attached to.
    port: serial::posix::TTYPort,
    // Outgoing application events (button presses, long presses, ...).
    event_queue: event::Queue,
    // Messages queued by Handler for transmission to the Arduino.
    msg_rx: Receiver<Message>,
    // Peak signal levels shared with the jack client; cleared after each send.
    level_mutex: Arc<Mutex<jack_client::Levels>>,
    // Button bitmask from the previous 'b' frame, used to diff per-button changes.
    old_button_state: u16
}
impl Connection {
fn new(port: serial::posix::TTYPort,
evt_queue: event::Queue,
msg_rx: Receiver<Message>,
level_mutex: Arc<Mutex<jack_client::Levels>>) -> Connection {
Connection {
port: port,
event_queue: evt_queue,
msg_rx: msg_rx,
level_mutex: level_mutex,
old_button_state: 0
}
}
fn send_level_msg(&mut self) {
let mut msg = Message { head: 'l', data: vec![] };
{
let mut levels = self.level_mutex.lock().expect("Could not lock level mutex.");
for mut l in &*levels {
msg.data.push((l.min(1.0) * 255.0) as u8);
}
*levels = [0.0; 4];
}
self.send_arduino_msg(msg);
}
fn send_arduino_msg(&mut self, msg: Message) {
let mut d = vec![msg.head as u8];
for b in msg.data {
d.push(b);
}
self.port.write(&d);
}
fn event_loop(&mut self) {
println!("Connection event loop started");
let mut buf: [u8; 1] = [0];
loop {
match self.msg_rx.try_recv() {
Err(_) => {},
Ok(msg) => self.send_arduino_msg(msg)
};
self.send_level_msg();
match self.port.read_exact(&mut buf) {
Err(_) => continue,
Ok(_) => {}
};
match buf[0] {
b'?' => self.answer_probe(),
b'b' => self.button_event(),
b'l' => self.long_press_event(),
b => println!("Unknown signal identifier byte: {}", b as u8)
}
}
}
fn answer_probe(&mut self) {
println!("Answering probe");
self.port.write(b"!");
}
/// Read the two payload bytes that follow a 'b' or 'l' frame header and
/// assemble them into a 16-bit button bitmask (big-endian: first byte is
/// the high byte).
/// NOTE(review): a failed read_exact panics via unwrap here, unlike the
/// tolerant error handling in event_loop — confirm this is intended.
fn get_button_state(&mut self) -> u16 {
    let mut buf: [u8; 2] = [0; 2];
    self.port.read_exact(&mut buf).unwrap();
    ((buf[0] as u16)<< 8) + buf[1] as u16
}
/// Handle a 'b' (button state) frame: read the current 16-bit bitmask,
/// diff it against the previous one, and emit a Pressed/Released event
/// for every button whose bit changed.
fn button_event(&mut self) {
    let button_state = self.get_button_state();
    // XOR leaves a 1 exactly where the state differs from last time.
    let changed_button: u16 = button_state ^ self.old_button_state;
    self.old_button_state = button_state;
    for bit in 0..16 {
        let mask: u16 = (1 as u16) << bit;
        if changed_button & mask != 0 {
            // The new state of the changed bit decides press vs release.
            let bs: event::ButtonState = match button_state & mask {
                0 => event::ButtonState::Released,
                _ => event::ButtonState::Pressed
            };
            self.event_queue.pass_event(event::button_event(bit, bs));
        }
    }
}
fn long_press_event(&mut self) {
let long_press_state = self.get_button_state();
for bit in 0..16 {
let mask: u16 = (1 as u16) << bit;
if long_press_state & mask != 0 {
let ev = event::button_event(bit, event::ButtonState::LongPressed);
self.event_queue.pass_event(ev);
}
}
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>guestby1currentsnapshot.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of Karesansui.
#
# Copyright (C) 2009-2012 HDE, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
import web
import simplejson as json
import karesansui
from karesansui.lib.rest import Rest, auth
from karesansui.lib.const import VIRT_COMMAND_APPLY_SNAPSHOT
from karesansui.lib.utils import is_param, is_int
from karesansui.lib.virt.snapshot import KaresansuiVirtSnapshot
from karesansui.db.access.machine import findbyguest1
from karesansui.db.access.snapshot import findbyname_guestby1 as s_findbyname_guestby1
from karesansui.db.access._2pysilhouette import save_job_collaboration
from karesansui.db.access.machine2jobgroup import new as m2j_new
from karesansui.db.model._2pysilhouette import Job, JobGroup
from pysilhouette.command import dict2command
class GuestBy1CurrentSnapshot(Rest):
@auth
def _PUT(self, *param, **params):
(host_id, guest_id) = self.chk_guestby1(param)
if guest_id is None: return web.notfound()
if is_param(self.input, 'id') is False \
or is_int(self.input.id) is False:
return web.badrequest("Request data is invalid.")
snapshot_id = str(self.input.id)
snapshot = s_findbyname_guestby1(self.orm, snapshot_id, guest_id)
if snapshot is None:
pass
# ignore snapshots that is not in database.
#return web.badrequest("Request data is invalid.")
model = findbyguest1(self.orm, guest_id)
kvs = KaresansuiVirtSnapshot(readonly=False)
snapshot_list = []
try:
domname = kvs.kvc.uuid_to_domname(model.uniq_key)
if not domname: return web.notfound()
self.view.is_creatable = kvs.isSupportedDomain(domname)
try:
snapshot_list = kvs.listNames(domname)[domname]
except:
pass
finally:
kvs.finish()
if not snapshot_id in snapshot_list:
self.logger.debug(_("The specified snapshot does not exist in database. - %s") % snapshot_id)
# ignore snapshots that is not in database.
#return web.notfound()
action_cmd = dict2command(
"%s/%s" % (karesansui.config['application.bin.dir'],
VIRT_COMMAND_APPLY_SNAPSHOT),
{"name" : domname, "id" : snapshot_id})
cmdname = 'Apply Snapshot'
_jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])<|fim▁hole|> _machine2jobgroup = m2j_new(machine=model,
jobgroup_id=-1,
uniq_key=karesansui.sheconf['env.uniqkey'],
created_user=self.me,
modified_user=self.me,
)
save_job_collaboration(self.orm,
self.pysilhouette.orm,
_machine2jobgroup,
_jobgroup,
)
self.view.currentsnapshot = snapshot
return web.accepted(url=web.ctx.path)
urls = (
'/host/(\d+)/guest/(\d+)/currentsnapshot/?(\.part)?$', GuestBy1CurrentSnapshot,
)<|fim▁end|> | _job = Job('%s command' % cmdname, 0, action_cmd)
_jobgroup.jobs.append(_job)
|
<|file_name|>plot_tuto1DBedLoadTurb.py<|end_file_name|><|fim▁begin|>import subprocess
import sys
import numpy as np<|fim▁hole|>import matplotlib.pyplot as plt
plt.ion()
############### Plot properties #####################
import matplotlib.ticker as mticker
from matplotlib.ticker import StrMethodFormatter, NullFormatter
from matplotlib import rc
#rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
rc('text', usetex=True)
label_size = 20
legend_size = 12
fontsize=25
linewidth=2
plt.rcParams['xtick.labelsize'] = label_size
plt.rcParams['ytick.labelsize'] = label_size
plt.rcParams['legend.fontsize'] = legend_size
plt.rcParams['lines.linewidth'] = linewidth
plt.rcParams['axes.labelsize'] = fontsize
####################################################
######################
# Load DEM data
######################
zDEM, phiDEM, vxPDEM, vxFDEM, TDEM = np.loadtxt('DATA/BedloadTurbDEM.txt', unpack=True)
######################
#Read SedFoam results
######################
sol = '../1DBedLoadTurb/'
try:
proc = subprocess.Popen(
["foamListTimes", "-latestTime", "-case", sol],
stdout=subprocess.PIPE,)
except:
print("foamListTimes : command not found")
print("Do you have load OpenFoam environement?")
sys.exit(0)
output = proc.stdout.read() #to obtain the output of function foamListTimes from the subprocess
timeStep = output.decode().rstrip().split('\n')[0] #Some management on the output to obtain a number
#Read the data
X, Y, Z = fluidfoam.readmesh(sol)
z = Y
phi = fluidfoam.readscalar(sol, timeStep, 'alpha_a')
vxPart = fluidfoam.readvector(sol, timeStep, 'Ua')[0]
vxFluid = fluidfoam.readvector(sol, timeStep, 'Ub')[0]
T = fluidfoam.readscalar(sol, timeStep, 'Theta')
######################
#Plot results
######################
d = 0.006 #6mm diameter particles
plt.figure(figsize=[10,5])
plt.subplot(141)
plt.plot(phiDEM, zDEM/d, 'k--', label=r'DEM')
plt.plot(phi, z/d, label=r'SedFoam')
plt.xlabel(r'$\phi$', fontsize=25)
plt.ylabel(r'$\frac{z}{d}$', fontsize=30, rotation=True, horizontalalignment='right')
plt.grid()
plt.ylim([-1.525, 32.025])
plt.legend()
plt.subplot(142)
I = np.where(phiDEM>0.001)[0]
plt.plot(vxPDEM[I], zDEM[I]/d, 'r--')
I = np.where(phi>0.001)[0]
plt.plot(vxPart[I], z[I]/d, 'r', label=r'$v_x^p$')
plt.plot(vxFDEM, zDEM/d, 'b--')
plt.plot(vxFluid, z/d, 'b', label=r'$u_x^f$')
plt.xlabel(r'$v_x^p$, $u_x^f$', fontsize=25)
plt.ylim([-1.525, 32.025])
plt.grid()
plt.legend()
ax = plt.gca()
ax.set_yticklabels([])
plt.legend()
plt.subplot(143)
plt.plot(phiDEM*vxPDEM, zDEM/d, 'k--', label=r'DEM')
plt.plot(phi*vxPart, z/d, label=r'SedFoam')
plt.xlabel(r'$q = \phi v_x^p$', fontsize=25)
plt.grid()
plt.ylim([-1.525, 32.025])
ax = plt.gca()
ax.set_yticklabels([])
plt.subplot(144)
I = np.where(phiDEM>0.001)[0]
plt.plot(TDEM[I], zDEM[I]/d, 'k--', label=r'DEM')
I = np.where(phi>0.001)[0]
plt.plot(T[I], z[I]/d, label=r'SedFoam')
plt.xlabel(r'$T$', fontsize=25)
plt.grid()
plt.ylim([-1.525, 32.025])
ax = plt.gca()
ax.set_yticklabels([])
plt.savefig('Figures/res_TutoBedloadTurb.png', bbox_inches='tight')
plt.show(block=True)<|fim▁end|> | import fluidfoam |
<|file_name|>fonts.js<|end_file_name|><|fim▁begin|>'use strict';
var gulp = require( 'gulp' );
var fontmin = require( 'gulp-fontmin' );
var path = require( '../../paths.js' );<|fim▁hole|>
// Minify the font sources and write the results to the destination.
gulp.task( 'fonts', function( )
{
    // Bug fix: the stream previously did
    //   .pipe( fontmin( ).pipe( gulp.dest( ... ) ) )
    // which pipes the sources straight into the dest stream (pipe()
    // returns its destination), bypassing the minifier entirely.
    return gulp.src( path.to.fonts.source )
        .pipe( fontmin( ) )
        .pipe( gulp.dest( path.to.fonts.destination ) );
} );
<|file_name|>elemwise.py<|end_file_name|><|fim▁begin|>"""Elementwise operators"""
from __future__ import absolute_import as _abs
import tvm
from .. import tag
from ..util import get_const_int
@tvm.tag_scope(tag=tag.ELEMWISE)
def relu(x):
    """Take relu of input x.

    Parameters
    ----------
    x : tvm.Tensor
        Input argument.

    Returns
    -------
    y : tvm.Tensor
        The result.
    """
    def _compute(*indices):
        # max(x, 0), with the zero constant matching the input dtype.
        zero = tvm.const(0, x.dtype)
        return tvm.max(x(*indices), zero)
    return tvm.compute(x.shape, _compute)
@tvm.tag_scope(tag=tag.ELEMWISE)
def leaky_relu(x, alpha):
    """Take leaky relu of input x.

    Parameters
    ----------
    x : tvm.Tensor
        Input argument.

    alpha : float
        The slope for the small gradient when x < 0

    Returns
    -------
    y : tvm.Tensor
        The result.
    """
    def _apply(*indices):
        val = x(*indices)
        # Negative inputs are scaled by alpha (cast to the input dtype).
        slope = tvm.const(alpha, val.dtype)
        return tvm.select(val > 0, val, val * slope)
    return tvm.compute(x.shape, _apply)
@tvm.tag_scope(tag=tag.BROADCAST)
def prelu(x, slope, axis=1):
""" PReLU.
It accepts two arguments: an input ``x`` and a weight array ``W``
and computes the output as :math:`PReLU(x) y = x > 0 ? x : W * x`,
where :math:`*` is an elementwise multiplication for each sample in the
batch.
Arguments:
x : tvm.Tensor
Input argument.
slope : tvm.Tensor
Channelised slope tensor for prelu
axis : int
The axis where the channel data needs to be applied
Returns:
y : tvm.Tensor
The result.
Links:
[http://arxiv.org/pdf/1502.01852v1.pdf]
"""
assert len(x.shape) == 4 and len(slope.shape) == 1
assert axis < len(x.shape)
assert get_const_int(slope.shape[0]) == get_const_int(x.shape[axis])
def _compute_channelwise(*indices):<|fim▁hole|><|fim▁end|> | return tvm.select(x(*indices) > 0, x(*indices), x(*indices) * slope(indices[axis]))
return tvm.compute(x.shape, _compute_channelwise) |
<|file_name|>strategy.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package namespace
import (
"fmt"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/validation"
"k8s.io/kubernetes/pkg/fields"
"k8s.io/kubernetes/pkg/labels"
"k8s.io/kubernetes/pkg/registry/generic"
"k8s.io/kubernetes/pkg/runtime"
"k8s.io/kubernetes/pkg/util/validation/field"
)
// namespaceStrategy implements behavior for Namespaces
type namespaceStrategy struct {
runtime.ObjectTyper<|fim▁hole|>// objects via the REST API.
var Strategy = namespaceStrategy{api.Scheme, api.SimpleNameGenerator}
// NamespaceScoped is false for namespaces.
func (namespaceStrategy) NamespaceScoped() bool {
return false
}
// PrepareForCreate clears fields that are not allowed to be set by end users on creation.
func (namespaceStrategy) PrepareForCreate(ctx api.Context, obj runtime.Object) {
// on create, status is active
namespace := obj.(*api.Namespace)
namespace.Status = api.NamespaceStatus{
Phase: api.NamespaceActive,
}
// on create, we require the kubernetes value
// we cannot use this in defaults conversion because we let it get removed over life of object
hasKubeFinalizer := false
for i := range namespace.Spec.Finalizers {
if namespace.Spec.Finalizers[i] == api.FinalizerKubernetes {
hasKubeFinalizer = true
break
}
}
if !hasKubeFinalizer {
if len(namespace.Spec.Finalizers) == 0 {
namespace.Spec.Finalizers = []api.FinalizerName{api.FinalizerKubernetes}
} else {
namespace.Spec.Finalizers = append(namespace.Spec.Finalizers, api.FinalizerKubernetes)
}
}
}
// PrepareForUpdate clears fields that are not allowed to be set by end users on update.
func (namespaceStrategy) PrepareForUpdate(ctx api.Context, obj, old runtime.Object) {
newNamespace := obj.(*api.Namespace)
oldNamespace := old.(*api.Namespace)
newNamespace.Spec.Finalizers = oldNamespace.Spec.Finalizers
newNamespace.Status = oldNamespace.Status
}
// Validate validates a new namespace.
func (namespaceStrategy) Validate(ctx api.Context, obj runtime.Object) field.ErrorList {
namespace := obj.(*api.Namespace)
return validation.ValidateNamespace(namespace)
}
// Canonicalize normalizes the object after validation.
func (namespaceStrategy) Canonicalize(obj runtime.Object) {
}
// AllowCreateOnUpdate is false for namespaces.
func (namespaceStrategy) AllowCreateOnUpdate() bool {
return false
}
// ValidateUpdate is the default update validation for an end user.
func (namespaceStrategy) ValidateUpdate(ctx api.Context, obj, old runtime.Object) field.ErrorList {
errorList := validation.ValidateNamespace(obj.(*api.Namespace))
return append(errorList, validation.ValidateNamespaceUpdate(obj.(*api.Namespace), old.(*api.Namespace))...)
}
func (namespaceStrategy) AllowUnconditionalUpdate() bool {
return true
}
type namespaceStatusStrategy struct {
namespaceStrategy
}
var StatusStrategy = namespaceStatusStrategy{Strategy}
func (namespaceStatusStrategy) PrepareForUpdate(ctx api.Context, obj, old runtime.Object) {
newNamespace := obj.(*api.Namespace)
oldNamespace := old.(*api.Namespace)
newNamespace.Spec = oldNamespace.Spec
}
func (namespaceStatusStrategy) ValidateUpdate(ctx api.Context, obj, old runtime.Object) field.ErrorList {
return validation.ValidateNamespaceStatusUpdate(obj.(*api.Namespace), old.(*api.Namespace))
}
type namespaceFinalizeStrategy struct {
namespaceStrategy
}
var FinalizeStrategy = namespaceFinalizeStrategy{Strategy}
func (namespaceFinalizeStrategy) ValidateUpdate(ctx api.Context, obj, old runtime.Object) field.ErrorList {
return validation.ValidateNamespaceFinalizeUpdate(obj.(*api.Namespace), old.(*api.Namespace))
}
// PrepareForUpdate clears fields that are not allowed to be set by end users on update.
func (namespaceFinalizeStrategy) PrepareForUpdate(ctx api.Context, obj, old runtime.Object) {
newNamespace := obj.(*api.Namespace)
oldNamespace := old.(*api.Namespace)
newNamespace.Status = oldNamespace.Status
}
// MatchNamespace returns a generic matcher for a given label and field selector.
func MatchNamespace(label labels.Selector, field fields.Selector) generic.Matcher {
return generic.MatcherFunc(func(obj runtime.Object) (bool, error) {
namespaceObj, ok := obj.(*api.Namespace)
if !ok {
return false, fmt.Errorf("not a namespace")
}
fields := NamespaceToSelectableFields(namespaceObj)
return label.Matches(labels.Set(namespaceObj.Labels)) && field.Matches(fields), nil
})
}
// NamespaceToSelectableFields returns a label set that represents the object
func NamespaceToSelectableFields(namespace *api.Namespace) labels.Set {
objectMetaFieldsSet := generic.ObjectMetaFieldsSet(namespace.ObjectMeta, false)
specificFieldsSet := fields.Set{
"status.phase": string(namespace.Status.Phase),
// This is a bug, but we need to support it for backward compatibility.
"name": namespace.Name,
}
return labels.Set(generic.MergeFieldsSets(objectMetaFieldsSet, specificFieldsSet))
}<|fim▁end|> | api.NameGenerator
}
// Strategy is the default logic that applies when creating and updating Namespace |
<|file_name|>score.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from __future__ import division
from django.core.management.base import BaseCommand
from query.models import Video, Face, LabelSet, Frame
from scannerpy import ProtobufGenerator, Config
import os
import cv2
import math
import numpy as np
import tensorflow as tf
import align.detect_face
from collections import defaultdict
from array import *
from functools import wraps
import inspect
cfg = Config()
proto = ProtobufGenerator(cfg)
def initializer(func):
    """
    Decorator for ``__init__`` that automatically assigns each constructor
    argument (and any unsupplied defaults) to an attribute of the same
    name on ``self`` before running the wrapped function.

    >>> class process:
    ...     @initializer
    ...     def __init__(self, cmd, reachable=False, user='root'):
    ...         pass
    >>> p = process('halt', True)
    >>> p.cmd, p.reachable, p.user
    ('halt', True, 'root')
    """
    try:
        # inspect.getargspec was deprecated and removed in Python 3.11.
        argspec = inspect.getfullargspec(func)
    except AttributeError:  # Python 2 fallback
        argspec = inspect.getargspec(func)
    names = argspec.args
    # Guard against functions with no defaults (defaults is None, which
    # the original code passed to reversed() and crashed on).
    defaults = argspec.defaults or ()

    @wraps(func)
    def wrapper(self, *args, **kargs):
        # Positional then keyword arguments, keyed by parameter name.
        for name, arg in list(zip(names[1:], args)) + list(kargs.items()):
            setattr(self, name, arg)
        # Fill in defaults for parameters the caller did not supply.
        for name, default in zip(reversed(names), reversed(defaults)):
            if not hasattr(self, name):
                setattr(self, name, default)
        func(self, *args, **kargs)
    return wrapper
class VideoEvalStats(object):
@initializer
def __init__(self, video_id = 0, num_frames=0, tp_frames=0, fp_frames=0, fn_frames=0, mismatched_tp_frames=0, num_detections=0, tp_detections=0, fp_detections=0, fn_detections=0, num_males=0, num_females=0, gender_matches=0, male_mismatches=0, female_mismatches=0):
pass
def compute_precision_recall(self, tp, fp, fn):
    """Return (precision, recall) from true/false positive/negative counts.

    A zero denominator yields 0.0 for the corresponding metric.
    """
    predicted = tp + fp
    actual = tp + fn
    precision = tp / predicted if predicted != 0 else 0.0
    recall = tp / actual if actual != 0 else 0.0
    return (precision, recall)
def compute_frame_acc_stats(self):
return self.compute_precision_recall(self.tp_frames, self.fp_frames, self.fn_frames)
def compute_det_acc_stats(self):
(det_precision, det_recall) = self.compute_precision_recall(self.tp_detections, self.fp_detections, self.fn_detections)
return (det_precision, det_recall)
def compute_gender_acc_stats(self):
    """Return gender precision over all gender-compared detections.

    Defined as gender_matches / (num_males + num_females); 1.0 when there
    were no true-positive detections at all.

    NOTE(review): assumes num_males + num_females > 0 whenever
    tp_detections > 0; otherwise this divides by zero — confirm against
    eval_gender's bookkeeping.
    """
    if self.tp_detections != 0:
        gender_precision = self.gender_matches / (self.num_males + self.num_females)
    else:
        gender_precision = 1.0
    return gender_precision
def __str__(self):
frame_stats = "Video({})[FRAME SELECTION]: num_frames({}), tp({}), fp({}), fn({})".format(self.video_id, self.num_frames, self.tp_frames, self.fp_frames, self.fn_frames)
frame_acc_stats = "Video({})[FRAME SELECTION]: Frame selection precision({}), Frame selection recall({})".format(self.video_id, *self.compute_frame_acc_stats())
det_stats = "Video({})[DETECTION]: num_detections({}), tp({}), fp({}), fn({}), mismatched_frames({})".format(self.video_id, self.num_detections, self.tp_detections, self.fp_detections, self.fn_detections, self.mismatched_tp_frames)
det_acc_stats = "Video({})[DETECTION]: Detection precision({}), Detection recall({})".format(self.video_id, *self.compute_det_acc_stats())
gender_stats = "Video({})[GENDER]: males({}), females({}), gender_matches({}), male_mismatches({}), female_mismatches({})".format(self.video_id, self.num_males, self.num_females, self.gender_matches, self.male_mismatches, self.female_mismatches)
gender_acc_stats = "Video({})[GENDER]: Gender precision({})".format(self.video_id, self.compute_gender_acc_stats())
return frame_stats + "\n" + frame_acc_stats + "\n" + det_stats + "\n" + det_acc_stats + "\n" + gender_stats + "\n" + gender_acc_stats
def __add__(self, other):
num_frames = self.num_frames + other.num_frames<|fim▁hole|> # frame selection
tp_frames = self.tp_frames + other.tp_frames
fp_frames = self.fp_frames + other.fp_frames
fn_frames = self.fn_frames + other.fn_frames
# face detection
num_detections = self.num_detections + other.num_detections
mismatched_tp_frames = self.mismatched_tp_frames + other.mismatched_tp_frames
tp_detections = self.tp_detections + other.tp_detections
fp_detections = self.fp_detections + other.fp_detections
fn_detections = self.fn_detections + other.fn_detections
# gender detection
num_males = self.num_males + other.num_males
num_females = self.num_females + other.num_females
gender_matches = self.gender_matches + other.gender_matches
male_mismatches = self.male_mismatches + other.male_mismatches
female_mismatches = self.female_mismatches + other.female_mismatches
return VideoEvalStats(self.video_id, num_frames, tp_frames, fp_frames, fn_frames, mismatched_tp_frames, num_detections, tp_detections, fp_detections, fn_detections, num_males, num_females, gender_matches, male_mismatches, female_mismatches)
class VideoStats(object):
    """Per-video counters for inference-only runs.

    Instances support ``+`` so per-video stats can be accumulated into a
    grand total, and ``str()`` for one-line reporting.
    """

    @initializer
    def __init__(self, video_id=0, num_frames=0, selected_frames=0, num_detections=0, num_males=0, num_females=0):
        # @initializer copies every argument onto self under the same name.
        pass

    def __str__(self):
        return "Video({}): num_frames({}), selected_frames({}), num_detections({}), num_males({}), num_females({})".format(
            self.video_id, self.num_frames, self.selected_frames,
            self.num_detections, self.num_males, self.num_females)

    def __add__(self, other):
        # The left operand's video_id is kept, matching the original behavior.
        return VideoStats(
            self.video_id,
            self.num_frames + other.num_frames,
            self.selected_frames + other.selected_frames,
            self.num_detections + other.num_detections,
            self.num_males + other.num_males,
            self.num_females + other.num_females,
        )
class Command(BaseCommand):
help = 'Detect faces in videos'
def add_arguments(self, parser):
    """Register the positional CLI argument selecting the subcommand
    ("eval" or "infer"; see handle())."""
    parser.add_argument('command')
def bbox_area(self, bbox, video):
    """Return the area in pixels of a bounding box whose coordinates are
    normalized to [0, 1] and scaled by the video dimensions."""
    width_px = (bbox.x2 - bbox.x1) * video.width
    height_px = (bbox.y2 - bbox.y1) * video.height
    return width_px * height_px
def compute_iou(self, bbox1, bbox2, video):
    """Return the intersection-over-union of two bounding boxes.

    Boxes appear to use normalized [0, 1] coordinates scaled by the video
    dimensions (same convention as bbox_area) — TODO confirm.
    """
    # Corners of the intersection rectangle.
    int_x1=max(bbox1.x1, bbox2.x1)
    int_y1=max(bbox1.y1, bbox2.y1)
    int_x2=min(bbox1.x2, bbox2.x2)
    int_y2=min(bbox1.y2, bbox2.y2)
    int_area = 0.0
    # A non-positive width or height means the boxes do not overlap.
    if(int_x2 > int_x1 and int_y2 > int_y1):
        int_area = ((int_x2 - int_x1)*video.width) * \
            ((int_y2 - int_y1)*video.height)
    # IoU = intersection / union, where union = area1 + area2 - intersection.
    iou = int_area/(self.bbox_area(bbox1, video)+self.bbox_area(bbox2, video)-int_area)
    return iou
def remove_duplicates(self, l):
    """Return a copy of *l* with duplicates removed, keeping first-seen order.

    Replaces the original comprehension that relied on the obscure
    side-effect idiom ``not s.add(x)`` with an explicit O(n) loop.
    """
    seen = set()
    result = []
    for x in l:
        if x not in seen:
            seen.add(x)
            result.append(x)
    return result
def fetch_ground_truth(self, video, label = "Talking Heads"):
    """Collect hand-labeled ("ground truth") faces for frames of *video*
    tagged with *label*.

    Returns (frame_numbers, faces_by_frame): the labeled frame numbers in
    first-seen order, and a map from frame number to its Face rows.
    """
    g_labelset = video.handlabeled_labelset()  # ground truth
    # Bug fix: the frame label was hard-coded to "Talking Heads",
    # silently ignoring the *label* parameter; the default preserves the
    # old behavior.
    g_faces = Face.objects.filter(
        frame__labelset=g_labelset,
        frame__labels__name=label).prefetch_related('frame').all()
    ground_truth_frames = []
    g_faces_dict = defaultdict(list)
    for g_face in g_faces:
        g_faces_dict[g_face.frame.number].append(g_face)
        ground_truth_frames.append(g_face.frame.number)
    ground_truth_frames = self.remove_duplicates(ground_truth_frames)
    return (ground_truth_frames, g_faces_dict)
def fetch_automatic_detections(self, video, label = "Talking Heads"):
    """Collect automatically detected faces for *video* that pass the
    confidence and size thresholds.

    Returns (frame_numbers, faces_by_frame): the frames (first-seen
    order) containing at least one accepted detection, and a map from
    frame number to its accepted Face rows.

    NOTE(review): the *label* parameter is unused here — the query does
    not filter by label (see the commented-out variants).
    """
    d_labelset = video.detected_labelset()  # prediction
    #d_faces = Face.objects.filter(frame__labelset=d_labelset).prefetch_related('frame').all()
    #d_faces = Face.objects.filter(frame__labelset=d_labelset, frame__number__in=ground_truth_frames).prefetch_related('frame').all()
    d_faces = Face.objects.filter(frame__labelset=d_labelset).prefetch_related('frame').all()
    detected_frames = []
    d_faces_dict = defaultdict(list)
    # Acceptance thresholds: detector confidence, and the face's minimum
    # share of the frame area.
    face_size_thres = 0.03
    det_score_thres = 0.95
    for d_face in d_faces:
        if d_face.bbox.score > det_score_thres and self.bbox_area(d_face.bbox, video) > (face_size_thres * video.width * video.height):
            d_faces_dict[d_face.frame.number].append(d_face)
            detected_frames.append(d_face.frame.number)
    detected_frames = self.remove_duplicates(detected_frames)
    return (detected_frames, d_faces_dict)
def eval_detection(self, video, frame_number, d_faces, g_faces, vstats):
    """Match detected faces against ground-truth faces for one frame.

    Greedy IoU matching: a detection overlapping a previously unmatched
    ground-truth box with IoU > 0.5 is a true positive; further
    detections on an already-matched box count as false positives.
    Detection counters on *vstats* are updated in place.

    Returns (vstats, gender_eval_list), where gender_eval_list holds
    (detected_gender, ground_truth_gender) pairs, one per true positive.
    """
    if len(d_faces) == 0 and len(g_faces) == 0:
        # Nothing to score on this frame.  Bug fix: the original returned
        # a 5-tuple (0, 0, 0, 0, 0) here, while every caller unpacks two
        # values, which would have raised ValueError.
        return (vstats, [])
    iou_threshold = 0.5
    tp_detections = 0
    fp_detections = 0
    fn_detections = 0
    d_dict = defaultdict(int)  # ground-truth boxes matched per detection
    g_dict = defaultdict(int)  # detections matched per ground-truth box
    gender_eval_list = []
    for d_face in d_faces:
        for g_face in g_faces:
            iou = self.compute_iou(d_face.bbox, g_face.bbox, video)
            if iou > iou_threshold:
                if g_dict[g_face] != 0:
                    # Ground-truth box already claimed: duplicate detection.
                    fp_detections += 1
                else:
                    tp_detections += 1
                    gender_eval_list.append((d_face.gender, g_face.gender))
                g_dict[g_face] += 1
                d_dict[d_face] += 1
    # Detections that matched nothing are false positives...
    for d_face in d_faces:
        if d_dict[d_face] == 0:
            fp_detections += 1
    # ...and unmatched ground-truth boxes are false negatives.
    for g_face in g_faces:
        if g_dict[g_face] == 0:
            fn_detections += 1
    # update detection stats
    vstats.num_detections += len(d_faces)
    vstats.tp_detections += tp_detections
    vstats.fp_detections += fp_detections
    vstats.fn_detections += fn_detections
    if fp_detections != 0 or fn_detections != 0:
        vstats.mismatched_tp_frames += 1
    return (vstats, gender_eval_list)
def eval_frame_selection(self, g_frame_list, d_frame_list):
    """Split frame numbers into true/false positives and false negatives.

    g_frame_list -- frame numbers with ground-truth labels
    d_frame_list -- frame numbers selected by the automatic detector
    Returns (tp_frames, fp_frames, fn_frames), each preserving input order.
    """
    # Use sets for O(1) membership tests; the original list lookups made
    # this quadratic in the number of frames.
    d_set = set(d_frame_list)
    tp_frames = [x for x in g_frame_list if x in d_set]
    tp_set = set(tp_frames)
    fp_frames = [x for x in d_frame_list if x not in tp_set]
    fn_frames = [x for x in g_frame_list if x not in tp_set]
    return (tp_frames, fp_frames, fn_frames)
def eval_gender(self, gender_eval_list, vstats):
    """Accumulate gender-agreement counters onto *vstats*.

    gender_eval_list holds (detected_gender, ground_truth_gender) pairs,
    one per matched detection; counts are bucketed by detected gender.
    """
    for detected, truth in gender_eval_list:
        if detected == 'M':
            vstats.num_males += 1
            if truth == detected:
                vstats.gender_matches += 1
            else:
                vstats.male_mismatches += 1
        else:
            vstats.num_females += 1
            if truth == detected:
                vstats.gender_matches += 1
            else:
                vstats.female_mismatches += 1
    return vstats
def eval_video(self, video):
    """Score one video's automatic detections against its hand labels.

    Frame selection is scored first; detection and gender accuracy are
    then evaluated only on the true-positive frames (those both labeled
    and selected).  Returns the per-video VideoEvalStats.
    """
    (ground_truth_frames, g_faces_dict) = self.fetch_ground_truth(video)
    (detected_frames, d_faces_dict) = self.fetch_automatic_detections(video)
    (tp_frames, fp_frames, fn_frames) = self.eval_frame_selection(ground_truth_frames, detected_frames)
    vstats = VideoEvalStats(video_id=video.id, num_frames=int(video.num_frames/video.get_stride()), tp_frames = len(tp_frames), fp_frames=len(fp_frames), fn_frames=len(fn_frames))
    #for frame_number in range(0, 1000, video.get_stride()):
    for frame_number in tp_frames:
        # evaluate detection accuracy for this frame
        d_faces = d_faces_dict[frame_number]
        g_faces = g_faces_dict[frame_number]
        (vstats, gender_eval_list) = self.eval_detection(video, frame_number, d_faces, g_faces, vstats)
        # evaluate gender agreement for the matched detections
        vstats = self.eval_gender(gender_eval_list, vstats)
    return vstats
def eval_videos(self, start_video_id, end_video_id):
vtotal_stats = VideoEvalStats(video_id=0)
for video_id in range(start_video_id, end_video_id):
video = Video.objects.filter(id=video_id).get()
vstats = self.eval_video(video)
print(vstats)
vtotal_stats = vtotal_stats + vstats
print(vtotal_stats)
def infer_videos(self, start_video_id, end_video_id):
vtotal_stats = VideoStats(video_id=0)
for video_id in range(start_video_id, end_video_id):
video = Video.objects.filter(id=video_id).get()
(detected_frames, d_faces_dict) = self.fetch_automatic_detections(video)
vstats = VideoStats(video_id=video.id, num_frames=int(video.num_frames/video.get_stride()), selected_frames=len(detected_frames))
#for frame_number in range(0, 1000, video.get_stride()):
for frame_number in detected_frames:
# evaluate detection
d_faces = d_faces_dict[frame_number]
for d_face in d_faces:
vstats.num_detections += 1
if d_face.gender == 'M':
vstats.num_males += 1
else:
vstats.num_females += 1
print(vstats)
vtotal_stats = vtotal_stats + vstats
print(vtotal_stats)
def handle(self, *args, **options):
start_video_id = 1
end_video_id = 61
#with open(options['path']) as f:
# paths = [s.strip() for s in f.readlines()]
command = options['command']
if command == "eval":
self.eval_videos(start_video_id, end_video_id) # compare with labeled data
elif command == "infer":
self.infer_videos(start_video_id, end_video_id) # no labeled data (just infer)
else:
print("Error: eval or run")<|fim▁end|> | |
<|file_name|>gpg_key.js<|end_file_name|><|fim▁begin|>/**<|fim▁hole|> License as published by the Free Software Foundation; either version
2 of the License (GPLv2) or (at your option) any later version.
There is NO WARRANTY for this software, express or implied,
including the implied warranties of MERCHANTABILITY,
NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
have received a copy of GPLv2 along with this software; if not, see
http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
*/
// Register the GPG keys listing with the two-pane panel framework.
// 'create' names the id of the "new key" form pane; extra_create_data
// merges the typed key name into the creation request payload.
KT.panel.list.registerPage('gpg_keys', {
    create : 'new_gpg_key',
    extra_create_data : function(){
        return { 'gpg_key[name]' : $('#gpg_key_name').val() };
    }
});
// Wire up the GPG key upload UI. jQuery's .live() (pre-1.9 API) is
// used so the handlers also apply to panes injected later by the
// panel framework.
$(document).ready(function(){
    // Explicit upload button and the form's own submit both funnel
    // into the same upload routine.
    $('#upload_gpg_key').live('click', function(event){
        KT.gpg_key.upload();
    });
    $('#upload_new_gpg_key').live('submit', function(e){
        e.preventDefault();
        KT.gpg_key.upload();
    });
    // "Upload" on the edit pane replaces an existing key's content.
    $('#update_upload_gpg_key').live('click', function(event){
        KT.gpg_key.upload_update();
    });
    // Pasted/typed key text and file upload are mutually exclusive:
    // filling one input disables the other (and its submit button).
    $('#gpg_key_content').live('input keyup paste', function(){
        if( $(this).val() !== '' ){
            $('#gpg_key_content_upload').attr('disabled', 'disabled');
            $('#upload_gpg_key').attr('disabled', 'disabled');
            $('#clear_gpg_key').removeAttr('disabled');
        } else {
            $('#gpg_key_content_upload').removeAttr('disabled');
            $('#upload_gpg_key').removeAttr('disabled');
            $('#clear_gpg_key').attr('disabled', 'disabled');
        }
    });
    $('#gpg_key_content_upload').live('change', function(){
        if( $(this).val() !== '' ){
            $('#gpg_key_content').attr('disabled', 'disabled');
            $('#save_gpg_key').attr('disabled', 'disabled');
            $('#clear_upload_gpg_key').removeAttr('disabled');
        } else {
            $('#gpg_key_content').removeAttr('disabled');
            $('#save_gpg_key').removeAttr('disabled');
            $('#clear_upload_gpg_key').attr('disabled', 'disabled');
        }
    });
    // Clear buttons reset whichever input was filled and re-enable
    // the alternative path.
    $('#clear_upload_gpg_key').live('click', function(){
        $('#gpg_key_content_upload').val('');
        $('#gpg_key_content').removeAttr('disabled');
        $('#save_gpg_key').removeAttr('disabled');
        $('#clear_upload_gpg_key').attr('disabled', 'disabled');
        $('#clear_gpg_key').attr('disabled', 'disabled');
    });
    $('#clear_gpg_key').live('click', function(){
        $('#gpg_key_content').val('');
        $('#gpg_key_content_upload').removeAttr('disabled');
        $('#upload_gpg_key').removeAttr('disabled');
        $('#clear_upload_gpg_key').attr('disabled', 'disabled');
        $('#clear_gpg_key').attr('disabled', 'disabled');
    });
    // Edit-pane variant of the file input toggles its own buttons.
    $('#gpg_key_content_upload_update').live('change', function(){
        if( $(this).val() !== '' ){
            $('#update_upload_gpg_key').removeAttr('disabled');
            $('#clear_upload_gpg_key').removeAttr('disabled');
        } else {
            $('#update_upload_gpg_key').attr('disabled', 'disabled');
            $('#clear_upload_gpg_key').attr('disabled', 'disabled');
        }
    });
    // NOTE(review): #clear_upload_gpg_key now has a second click
    // handler (this one plus the one above); with .live() both fire on
    // every click regardless of which pane is showing — confirm the
    // combined effect is intended.
    $('#clear_upload_gpg_key').live('click', function(){
        $('#update_upload_gpg_key').attr('disabled', 'disabled');
        $('#clear_upload_gpg_key').attr('disabled', 'disabled');
        $('#gpg_key_content_upload_update').val('');
    });
});
// GPG key module: AJAX upload of new keys and content replacement of
// existing ones via the jQuery Form Plugin (ajaxSubmit with iframe
// transport, needed for file inputs on old browsers).
// NOTE(review): in this non-strict IIFE `this` is the global object,
// so `self.upload = ...` also defines window.upload — presumably an
// accepted quirk of this module pattern; confirm before changing.
KT.gpg_key = (function($){
    var self = this,
        get_buttons = function(){
            return {
                'gpg_key_save' : $('#save_gpg_key'),
                'gpg_key_upload': $('#upload_gpg_key')
            }
        },
        // Disable both submit paths while a request is in flight and
        // re-enable them when it finishes (success or error).
        enable_buttons = function(){
            var buttons = get_buttons();
            buttons.gpg_key_save.removeAttr('disabled');
            buttons.gpg_key_upload.removeAttr('disabled');
        },
        disable_buttons = function(){
            var buttons = get_buttons();
            buttons.gpg_key_save.attr('disabled', 'disabled');
            buttons.gpg_key_upload.attr('disabled', 'disabled');
        };
    // Create a new GPG key from the "new key" form (name + pasted
    // text or uploaded file).
    self.upload = function(){
        var submit_data = { 'gpg_key[name]' : $('#gpg_key_name').val() };
        disable_buttons();
        $('#upload_new_gpg_key').ajaxSubmit({
            url : KT.routes['gpg_keys_path'](),
            type : 'POST',
            data : submit_data,
            iframe : true,
            success : function(data, status, xhr){
                // With the iframe transport, server errors come back
                // as an HTML document whose body is a <pre> block.
                var parsed_data = $(data);
                if( parsed_data.get(0).tagName === 'PRE' ){
                    notices.displayNotice('error', parsed_data.html());
                } else {
                    KT.panel.list.createSuccess(data);
                }
                enable_buttons();
            },
            error : function(){
                enable_buttons();
                notices.checkNotices();
            }
        });
    };
    // Replace the content of an existing key from the edit pane.
    self.upload_update = function(){
        $('#update_upload_gpg_key').attr('disabled', 'disabled');
        $('#clear_upload_gpg_key').attr('disabled', 'disabled');
        $('#upload_gpg_key').ajaxSubmit({
            // NOTE(review): `this` here is the module, not a DOM
            // element, so $(this).data('url') reads the URL off that
            // object — confirm where data('url') is set.
            url : $(this).data('url'),
            type : 'POST',
            iframe : true,
            success : function(data, status, xhr){
                // Responses containing "notices" are status payloads;
                // anything else is the new key text to display.
                if( !data.match(/notices/) ){
                    $('#gpg_key_content').html(data);
                    $('#upload_gpg_key').val('');
                }
                notices.checkNotices();
                $('#update_upload_gpg_key').removeAttr('disabled');
                $('#clear_upload_gpg_key').removeAttr('disabled');
            },
            error : function(){
                $('#update_upload_gpg_key').removeAttr('disabled');
                $('#clear_upload_gpg_key').removeAttr('disabled');
                notices.checkNotices();
            }
        });
    };
    return self;
})(jQuery);
This software is licensed to you under the GNU General Public |
<|file_name|>bibformat_regression_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat module regression tests."""
__revision__ = "$Id$"
import unittest
from invenio.config import CFG_SITE_URL, CFG_SITE_LANG
from invenio.testutils import make_test_suite, \
run_test_suite, \
test_web_page_content
from invenio.bibformat import format_record
class BibFormatAPITest(unittest.TestCase):
    """Check BibFormat API"""

    def test_basic_formatting(self):
        """bibformat - Checking BibFormat API"""
        # Format record 73 directly through the API, then check that
        # the web interface serves the same output.
        result = format_record(recID=73,
                               of='hx',
                               ln=CFG_SITE_LANG,
                               verbose=0,
                               search_pattern=[],
                               xml_record=None,
                               user_info=None,
                               on_the_fly=True)
        pageurl = CFG_SITE_URL + '/record/73?of=hx'
        result = test_web_page_content(pageurl,
                                       expected_text=result)
        # Bug fix: the outcome was computed but never asserted, so this
        # test could never fail. test_web_page_content returns a list
        # of error messages; an empty list means the page matched.
        self.assertEqual([], result)
class BibFormatBibTeXTest(unittest.TestCase):
    """Check output produced by BibFormat for BibTeX output for
    various records"""

    def setUp(self):
        """Prepare some ideal outputs"""
        # Expected 'hx' (BibTeX) rendering of record 74; the web page
        # wraps it in a <pre> block.
        self.record_74_hx = '''<pre>
@article{Wang:74,
      author = "Wang, B and Lin, C Y and Abdalla, E",
      title = "Quasinormal modes of Reissner-Nordstrom Anti-de Sitter
               Black Holes",
      journal = "Phys. Lett., B",
      number = "hep-th/0003295",
      volume = "481",
      pages = "79-88",
      year = "2000",
}
</pre>'''

    def test_bibtex_output(self):
        """bibformat - BibTeX output"""
        # test_web_page_content returns a list of mismatches; empty
        # means the rendered page contained the expected BibTeX.
        pageurl = CFG_SITE_URL + '/record/74?of=hx'
        result = test_web_page_content(pageurl,
                                       expected_text=self.record_74_hx)
        self.assertEqual([], result)
class BibFormatDetailedHTMLTest(unittest.TestCase):
"""Check output produced by BibFormat for detailed HTML ouput for
various records"""
def setUp(self):
"""Prepare some ideal outputs"""
# Record 7 (Article)
self.record_74_hd_header = '''<table border="0" width="100%">
<tr>
<td>Published Article<small> / Particle Physics - Theory</small></td>
<td><small><strong></strong></small></td>
<td align="right"><strong>hep-th/0003295</strong></td>
</tr>
</table>'''
self.record_74_hd_title = '''<center><big><big><strong>Quasinormal modes of Reissner-Nordstrom Anti-de Sitter Black Holes</strong></big></big></center>'''
self.record_74_hd_authors = '''<a href="%(siteurl)s/search?f=author&p=Wang%%2C%%20B&ln=%(lang)s">Wang, B</a><small> (Fudan University)</small> ; <a href="%(siteurl)s/search?f=author&p=Lin%%2C%%20C%%20Y&ln=%(lang)s">Lin, C Y</a> ; <a href="%(siteurl)s/search?f=author&p=Abdalla%%2C%%20E&ln=%(lang)s">Abdalla, E</a><br />'''% \
{'siteurl' : CFG_SITE_URL,
'lang': CFG_SITE_LANG}
self.record_74_hd_abstract = '''<small><strong>Abstract: </strong>Complex frequencies associated with quasinormal modes for large Reissner-Nordstr$\ddot{o}$m Anti-de Sitter black holes have been computed. These frequencies have close relation to the black hole charge and do not linearly scale withthe black hole temperature as in Schwarzschild Anti-de Sitter case. In terms of AdS/CFT correspondence, we found that the bigger the black hole charge is, the quicker for the approach to thermal equilibrium in the CFT. The propertiesof quasinormal modes for $l>0$ have also been studied.</small><br />'''
self.record_74_hd_pubinfo = '''<strong>Published in: </strong><a href="http://weblib.cern.ch/cgi-bin/ejournals?publication=Phys.%20Lett.%2C%20B&volume=481&year=2000&page=79">Phys. Lett., B :481 2000 79-88</a>'''
self.record_74_hd_fulltext = '''0003295.pdf"><img style="border:none"'''
self.record_74_hd_citations = '''<strong>Cited by:</strong> try citation search for <a href="%(siteurl)s/search?f=reference&p=hep-th/0003295&ln=%(lang)s">hep-th/0003295</a>'''% \
{'siteurl' : CFG_SITE_URL,
'lang': CFG_SITE_LANG}
self.record_74_hd_references = '''<li><small>[17]</small> <small>A. Chamblin, R. Emparan, C. V. Johnson and R. C. Myers, Phys. Rev., D60: 104026 (1999) 5070 90 110 130 150 r+ 130 230 330 50 70 90 110 130 150 r+</small> </li>'''
# Record 7 (Picture)
self.record_7_hd_header = '''<table border="0" width="100%">
<tr>
<td>Pictures<small> / Life at CERN</small></td>
<td><small><strong></strong></small></td>
<td align="right"><strong>CERN-GE-9806033</strong></td>
</tr>
</table>'''
self.record_7_hd_title = '''<center><big><big><strong>Tim Berners-Lee</strong></big></big></center>'''
self.record_7_hd_date = '''<center>28 Jun 1998</center>'''
self.record_7_hd_abstract = '''<p><span class="blocknote">
Caption</span><br /> <small>Conference "Internet, Web, What's next?" on 26 June 1998 at CERN : Tim Berners-Lee, inventor of the World-Wide Web and Director of the W3C, explains how the Web came to be and give his views on the future.</small></p><p><span class="blocknote"><|fim▁hole|> self.record_7_hd_resource = '''<img src="%s/record/7/files/9806033.gif?subformat=icon" alt="9806033" style="max-width:250px;_width:250px;" />''' % CFG_SITE_URL
self.record_7_hd_resource_link = '%s/record/7/files/9806033.jpeg' % CFG_SITE_URL
    def test_detailed_html_output(self):
        """bibformat - Detailed HTML output"""
        # Each expected_text entry is an HTML snippet prepared in
        # setUp(); an empty result list means all snippets were found.
        # Test record 74 (Article)
        pageurl = CFG_SITE_URL + '/record/74?of=hd'
        result = test_web_page_content(pageurl,
                                       expected_text=[self.record_74_hd_header,
                                                      self.record_74_hd_title,
                                                      self.record_74_hd_authors,
                                                      self.record_74_hd_abstract,
                                                      self.record_74_hd_pubinfo,
                                                      self.record_74_hd_fulltext,
                                                      #self.record_74_hd_citations,
                                                      #self.record_74_hd_references
                                                      ])
        self.assertEqual([], result)
        # Test record 7 (Picture)
        pageurl = CFG_SITE_URL + '/record/7?of=hd'
        result = test_web_page_content(pageurl,
                                       expected_text=[self.record_7_hd_header,
                                                      self.record_7_hd_title,
                                                      self.record_7_hd_date,
                                                      self.record_7_hd_abstract,
                                                      self.record_7_hd_resource,
                                                      self.record_7_hd_resource_link])
        self.assertEqual([], result)
    def test_detailed_html_edit_record(self):
        """bibformat - Detailed HTML output edit record link presence"""
        # When logged in as admin the detailed page must show the
        # "Edit This Record" link.
        pageurl = CFG_SITE_URL + '/record/74?of=hd'
        result = test_web_page_content(pageurl, username='admin',
                                       expected_text="Edit This Record")
        self.assertEqual([], result)
    def test_detailed_html_no_error_message(self):
        """bibformat - Detailed HTML output without error message"""
        # No error message should be displayed in the web interface, whatever happens
        # Inverted check: test_web_page_content reports a non-empty
        # list when the expected texts are NOT found, so
        # assertNotEqual([], ...) asserts the error strings are absent.
        pageurl = CFG_SITE_URL + '/record/74?of=hd'
        result = test_web_page_content(pageurl, username='admin',
                                       expected_text=["Exception",
                                                      "Could not"])
        self.assertNotEqual([], result)
        pageurl = CFG_SITE_URL + '/record/7?of=hd'
        result = test_web_page_content(pageurl, username='admin',
                                       expected_text=["Exception",
                                                      "Could not"])
        self.assertNotEqual([], result)
class BibFormatNLMTest(unittest.TestCase):
"""Check output produced by BibFormat for NLM output for various
records"""
def setUp(self):
"""Prepare some ideal outputs"""
self.record_70_xn = '''<?xml version="1.0" encoding="UTF-8"?>
<articles>
<article xmlns:xlink="http://www.w3.org/1999/xlink/">
<front>
<journal-meta>
<journal-title>J. High Energy Phys.</journal-title>
<abbrev-journal-title>J. High Energy Phys.</abbrev-journal-title>
<issn>1126-6708</issn>
</journal-meta>
<article-meta>
<title-group>
<article-title>AdS/CFT For Non-Boundary Manifolds</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name>
<surname>McInnes</surname>
<given-names>B</given-names>
</name>
<aff>
<institution>National University of Singapore</institution>
</aff>
</contrib>
</contrib-group>
<pub-date pub-type="pub">
<year>2000</year>
</pub-date>
<volume>05</volume>
<fpage/>
<lpage/>
<self-uri xlink:href="%(siteurl)s/record/70"/>
<self-uri xlink:href="%(siteurl)s/record/70/files/0003291.pdf"/>
<self-uri xlink:href="%(siteurl)s/record/70/files/0003291.ps.gz"/>
</article-meta>
<abstract>In its Euclidean formulation, the AdS/CFT correspondence begins as a study of Yang-Mills conformal field theories on the sphere, S^4. It has been successfully extended, however, to S^1 X S^3 and to the torus T^4. It is natural tohope that it can be made to work for any manifold on which it is possible to define a stable Yang-Mills conformal field theory. We consider a possible classification of such manifolds, and show how to deal with the most obviousobjection : the existence of manifolds which cannot be represented as boundaries. We confirm Witten's suggestion that this can be done with the help of a brane in the bulk.</abstract>
</front>
<article-type>research-article</article-type>
<ref/>
</article>
</articles>''' % {'siteurl': CFG_SITE_URL}
def test_nlm_output(self):
"""bibformat - NLM output"""
pageurl = CFG_SITE_URL + '/record/70?of=xn'
result = test_web_page_content(pageurl,
expected_text=self.record_70_xn)
try:
self.assertEqual([], result)
except AssertionError:
result = test_web_page_content(pageurl,
expected_text=self.record_70_xn.replace('<fpage/>', '<fpage></fpage>').replace('<lpage/>', '<lpage></lpage>'))
self.assertEqual([], result)
class BibFormatBriefHTMLTest(unittest.TestCase):
"""Check output produced by BibFormat for brief HTML ouput for
various records"""
def setUp(self):
"""Prepare some ideal outputs"""
self.record_76_hb = '''<strong>Ιθάκη</strong>
/ <a href="%s/search?f=author&p=%%CE%%9A%%CE%%B1%%CE%%B2%%CE%%AC%%CF%%86%%CE%%B7%%CF%%82%%2C%%20%%CE%%9A%%20%%CE%%A0&ln=%s">Καβάφης, Κ Π</a>
<br /><small>
Σα βγεις στον πηγαιμό για την Ιθάκη, <br />
να εύχεσαι νάναι μακρύς ο δρόμος, <br />
γεμάτος περιπέτειες, γεμάτος γνώσεις [...] </small>''' % (CFG_SITE_URL, CFG_SITE_LANG)
def test_brief_html_output(self):
"""bibformat - Brief HTML output"""
pageurl = CFG_SITE_URL + '/record/76?of=HB'
result = test_web_page_content(pageurl,
expected_text=self.record_76_hb)
self.assertEqual([], result)
class BibFormatMARCXMLTest(unittest.TestCase):
"""Check output produced by BibFormat for MARCXML ouput for various records"""
def setUp(self):
"""Prepare some ideal outputs"""
self.record_9_xm = '''<?xml version="1.0" encoding="UTF-8"?>
<collection xmlns="http://www.loc.gov/MARC21/slim">
<record>
<controlfield tag="001">9</controlfield>
<datafield tag="041" ind1=" " ind2=" ">
<subfield code="a">eng</subfield>
</datafield>
<datafield tag="088" ind1=" " ind2=" ">
<subfield code="a">PRE-25553</subfield>
</datafield>
<datafield tag="088" ind1=" " ind2=" ">
<subfield code="a">RL-82-024</subfield>
</datafield>
<datafield tag="100" ind1=" " ind2=" ">
<subfield code="a">Ellis, J</subfield>
<subfield code="u">University of Oxford</subfield>
</datafield>
<datafield tag="245" ind1=" " ind2=" ">
<subfield code="a">Grand unification with large supersymmetry breaking</subfield>
</datafield>
<datafield tag="260" ind1=" " ind2=" ">
<subfield code="c">Mar 1982</subfield>
</datafield>
<datafield tag="300" ind1=" " ind2=" ">
<subfield code="a">18 p</subfield>
</datafield>
<datafield tag="650" ind1="1" ind2="7">
<subfield code="2">SzGeCERN</subfield>
<subfield code="a">General Theoretical Physics</subfield>
</datafield>
<datafield tag="700" ind1=" " ind2=" ">
<subfield code="a">Ibanez, L E</subfield>
</datafield>
<datafield tag="700" ind1=" " ind2=" ">
<subfield code="a">Ross, G G</subfield>
</datafield>
<datafield tag="909" ind1="C" ind2="0">
<subfield code="y">1982</subfield>
</datafield>
<datafield tag="909" ind1="C" ind2="0">
<subfield code="b">11</subfield>
</datafield>
<datafield tag="909" ind1="C" ind2="1">
<subfield code="u">Oxford Univ.</subfield>
</datafield>
<datafield tag="909" ind1="C" ind2="1">
<subfield code="u">Univ. Auton. Madrid</subfield>
</datafield>
<datafield tag="909" ind1="C" ind2="1">
<subfield code="u">Rutherford Lab.</subfield>
</datafield>
<datafield tag="909" ind1="C" ind2="1">
<subfield code="c">1990-01-28</subfield>
<subfield code="l">50</subfield>
<subfield code="m">2002-01-04</subfield>
<subfield code="o">BATCH</subfield>
</datafield>
<datafield tag="909" ind1="C" ind2="S">
<subfield code="s">h</subfield>
<subfield code="w">1982n</subfield>
</datafield>
<datafield tag="980" ind1=" " ind2=" ">
<subfield code="a">PREPRINT</subfield>
</datafield>
</record>
</collection>'''
def test_marcxml_output(self):
"""bibformat - MARCXML output"""
pageurl = CFG_SITE_URL + '/record/9?of=xm'
result = test_web_page_content(pageurl,
expected_text=self.record_9_xm)
self.assertEqual([], result)
class BibFormatMARCTest(unittest.TestCase):
    """Check output produced by BibFormat for MARC output for various
    records"""

    def setUp(self):
        """Prepare some ideal outputs"""
        # Expected 'hm' (textual MARC) rendering of record 29: one
        # "recid tag indicators $$subfields" line per MARC field.
        self.record_29_hm = '''000000029 001__ 29
000000029 020__ $$a0720421039
000000029 041__ $$aeng
000000029 080__ $$a517.11
000000029 100__ $$aKleene, Stephen Cole$$uUniversity of Wisconsin
000000029 245__ $$aIntroduction to metamathematics
000000029 260__ $$aAmsterdam$$bNorth-Holland$$c1952 (repr.1964.)
000000029 300__ $$a560 p
000000029 490__ $$aBibl. Matematica$$v1
000000029 909C0 $$y1952
000000029 909C0 $$b21
000000029 909C1 $$c1990-01-27$$l00$$m2002-04-12$$oBATCH
000000029 909CS $$sm$$w198606
000000029 980__ $$aBOOK'''

    def test_marc_output(self):
        """bibformat - MARC output"""
        # Empty result list means the page contained the expected MARC.
        pageurl = CFG_SITE_URL + '/record/29?of=hm'
        result = test_web_page_content(pageurl,
                                       expected_text=self.record_29_hm)
        self.assertEqual([], result)
class BibFormatTitleFormattingTest(unittest.TestCase):
    """Check title formatting produced by BibFormat.

    Each test searches for a known record and asserts that the title
    is rendered with its subtitle / edition / part joined correctly
    (empty result list from test_web_page_content means success).
    """

    def test_subtitle_in_html_brief(self):
        """bibformat - title subtitle in HTML brief formats"""
        self.assertEqual([],
                         test_web_page_content(CFG_SITE_URL + '/search?p=statistics+computer',
                                               expected_text="Statistics: a computer approach"))

    def test_subtitle_in_html_detailed(self):
        """bibformat - title subtitle in HTML detailed formats"""
        self.assertEqual([],
                         test_web_page_content(CFG_SITE_URL + '/search?p=statistics+computer&of=HD',
                                               expected_text="Statistics: a computer approach"))

    def test_title_edition_in_html_brief(self):
        """bibformat - title edition in HTML brief formats"""
        self.assertEqual([],
                         test_web_page_content(CFG_SITE_URL + '/search?p=2nd',
                                               expected_text="Introductory statistics: a decision map; 2nd ed"))

    def test_title_edition_in_html_detailed(self):
        """bibformat - title edition in HTML detailed formats"""
        self.assertEqual([],
                         test_web_page_content(CFG_SITE_URL + '/search?p=2nd&of=HD',
                                               expected_text="Introductory statistics: a decision map; 2nd ed"))

    def test_title_part_in_html_brief(self):
        """bibformat - title part in HTML brief formats"""
        self.assertEqual([],
                         test_web_page_content(CFG_SITE_URL + '/search?p=analyse+informatique',
                                               expected_text="Analyse informatique, t.2"))

    def test_title_part_in_html_detailed(self):
        """bibformat - title part in HTML detailed formats"""
        self.assertEqual([],
                         test_web_page_content(CFG_SITE_URL + '/search?p=analyse+informatique&of=HD',
                                               expected_text="Analyse informatique, t.2: L'accomplissement"))
class BibFormatISBNFormattingTest(unittest.TestCase):
    """Check ISBN formatting produced by BibFormat."""

    def test_isbn_in_html_detailed(self):
        """bibformat - ISBN in HTML detailed formats"""
        # The detailed record page must render the ISBN field.
        self.assertEqual([],
                         test_web_page_content(CFG_SITE_URL + '/search?p=analyse+informatique&of=HD',
                                               expected_text="ISBN: 2225350574"))
class BibFormatPublInfoFormattingTest(unittest.TestCase):
    """Check publication reference info formatting produced by BibFormat."""

    def test_publinfo_in_html_brief(self):
        """bibformat - publication reference info in HTML brief formats"""
        # Journal, volume, year and page range must be joined into a
        # single citation line in the brief search results.
        self.assertEqual([],
                         test_web_page_content(CFG_SITE_URL + '/search?p=recid%3A84',
                                               expected_text="Nucl. Phys. B: 656 (2003) pp. 23-36"))

    def test_publinfo_in_html_detailed(self):
        """bibformat - publication reference info in HTML detailed formats"""
        self.assertEqual([],
                         test_web_page_content(CFG_SITE_URL + '/record/84',
                                               expected_text="Nucl. Phys. B: 656 (2003) pp. 23-36"))
# Aggregate all regression test cases. warn_user=True makes the runner
# warn before executing, since these tests hit the live web interface.
TEST_SUITE = make_test_suite(BibFormatBibTeXTest,
                             BibFormatDetailedHTMLTest,
                             BibFormatBriefHTMLTest,
                             BibFormatNLMTest,
                             BibFormatMARCTest,
                             BibFormatMARCXMLTest,
                             BibFormatAPITest,
                             BibFormatTitleFormattingTest,
                             BibFormatISBNFormattingTest,
                             BibFormatPublInfoFormattingTest)

if __name__ == "__main__":
    run_test_suite(TEST_SUITE, warn_user=True)
<|file_name|>CCPrimitive.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
Copyright (c) 2013-2017 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#include "renderer/CCPrimitive.h"
#include "renderer/CCVertexIndexBuffer.h"
NS_CC_BEGIN
// Factory: builds an autoreleased Primitive, or returns nullptr when
// allocation or init() fails (init() rejects a null vertex stream).
Primitive* Primitive::create(VertexData* verts, IndexBuffer* indices, int type)
{
    auto result = new (std::nothrow) Primitive();
    if( result && result->init(verts, indices, type))
    {
        result->autorelease();
        return result;
    }
    CC_SAFE_DELETE(result);
    return nullptr;
}
// Read-only access to the vertex stream bound by init(); nullptr
// until init() has succeeded.
const VertexData* Primitive::getVertexData() const
{
    return _verts;
}
// Read-only access to the index buffer; may be nullptr for
// non-indexed primitives (draw() then uses glDrawArrays).
const IndexBuffer* Primitive::getIndexData() const
{
    return _indices;
}
// Default-construct with no buffers; real setup happens in init().
// The type defaults to GL_POINTS and the draw range is empty.
Primitive::Primitive()
: _verts(nullptr)
, _indices(nullptr)
, _type(GL_POINTS)
, _start(0)
, _count(0)
{
}
<|fim▁hole|> CC_SAFE_RELEASE_NULL(_verts);
CC_SAFE_RELEASE_NULL(_indices);
}
// Bind the vertex stream, optional index buffer and GL primitive
// type. Returns false (and changes nothing) when verts is null;
// indices may be null for non-indexed rendering.
bool Primitive::init(VertexData* verts, IndexBuffer* indices, int type)
{
    if( nullptr == verts ) return false;
    // Self-assignment guards avoid dropping the last reference to a
    // buffer that is being re-bound to itself.
    if(verts != _verts)
    {
        CC_SAFE_RELEASE(_verts);
        CC_SAFE_RETAIN(verts);
        _verts = verts;
    }
    if(indices != _indices)
    {
        // Retain-before-release ordering here; with the inequality
        // guard either order is safe, this is just the defensive form.
        CC_SAFE_RETAIN(indices);
        CC_SAFE_RELEASE(_indices);
        _indices = indices;
    }
    _type = type;
    return true;
}
// Issue the GL draw call for this primitive: indexed (glDrawElements)
// when an index buffer is bound, otherwise glDrawArrays. No-op until
// init() has succeeded.
void Primitive::draw()
{
    if(_verts)
    {
        _verts->use();
        if(_indices!= nullptr)
        {
            // Map the stored index width onto the matching GL type.
            GLenum type = (_indices->getType() == IndexBuffer::IndexType::INDEX_TYPE_SHORT_16) ? GL_UNSIGNED_SHORT : GL_UNSIGNED_INT;
            glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices->getVBO());
            // With an element array buffer bound, the "indices"
            // argument is a byte offset into that buffer.
            size_t offset = _start * _indices->getSizePerIndex();
            glDrawElements((GLenum)_type, _count, type, (GLvoid*)offset);
        }
        else
        {
            glDrawArrays((GLenum)_type, _start, _count);
        }
        // Unbind so later immediate-mode / client-array draws are not
        // affected by this primitive's buffers.
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
        glBindBuffer(GL_ARRAY_BUFFER, 0);
    }
}
// Set the first vertex (or index, when indexed) used by draw().
void Primitive::setStart(int start)
{
    _start = start;
}
// Set how many vertices (or indices, when indexed) draw() submits.
void Primitive::setCount(int count)
{
    _count = count;
}
NS_CC_END<|fim▁end|> | Primitive::~Primitive()
{ |
<|file_name|>PathVariable.py<|end_file_name|><|fim▁begin|>"""SCons.Variables.PathVariable
This file defines an option type for SCons implementing path settings.
To be used whenever a a user-specified path override should be allowed.
Arguments to PathVariable are:
option-name = name of this option on the command line (e.g. "prefix")
option-help = help string for option
option-dflt = default value for this option
validator = [optional] validator for option value. Predefined
validators are:
PathAccept -- accepts any path setting; no validation
PathIsDir -- path must be an existing directory
PathIsDirCreate -- path must be a dir; will create
PathIsFile -- path must be a file
PathExists -- path must exist (any type) [default]
The validator is a function that is called and which
should return True or False to indicate if the path
is valid. The arguments to the validator function
are: (key, val, env). The key is the name of the
option, the val is the path specified for the option,
and the env is the env to which the Otions have been
added.
Usage example:
Examples:
prefix=/usr/local
opts = Variables()
opts = Variables()
opts.Add(PathVariable('qtdir',
'where the root of Qt is installed',
qtdir, PathIsDir))
opts.Add(PathVariable('qt_includes',
'where the Qt includes are installed',
'$qtdir/includes', PathIsDirCreate))
opts.Add(PathVariable('qt_libraries',
'where the Qt library is installed',
'$qtdir/lib'))
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included<|fim▁hole|># KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/PathVariable.py rel_2.3.5:3347:d31d5a4e74b6 2015/07/31 14:36:10 bdbaddog"
__all__ = ['PathVariable',]
import os
import os.path
import SCons.Errors
class _PathVariableClass(object):
    """Factory and validators for path-valued build variables.

    A singleton instance is exported as ``PathVariable``; calling it
    returns the (key, help, default, validator, converter) tuple that
    ``Variables.Add`` expects.
    """

    def PathAccept(self, key, val, env):
        """Accepts any path, no checking done."""
        pass

    def PathIsDir(self, key, val, env):
        """Validator to check if Path is a directory."""
        if not os.path.isdir(val):
            if os.path.isfile(val):
                m = 'Directory path for option %s is a file: %s'
            else:
                m = 'Directory path for option %s does not exist: %s'
            raise SCons.Errors.UserError(m % (key, val))

    def PathIsDirCreate(self, key, val, env):
        """Validator to check if Path is a directory,
        creating it if it does not exist."""
        if os.path.isfile(val):
            m = 'Path for option %s is a file, not a directory: %s'
            raise SCons.Errors.UserError(m % (key, val))
        # NOTE(review): isdir/makedirs has a small race if another
        # process creates the directory in between; makedirs would
        # then raise OSError — confirm that is acceptable here.
        if not os.path.isdir(val):
            os.makedirs(val)

    def PathIsFile(self, key, val, env):
        """validator to check if Path is a file"""
        if not os.path.isfile(val):
            if os.path.isdir(val):
                m = 'File path for option %s is a directory: %s'
            else:
                m = 'File path for option %s does not exist: %s'
            raise SCons.Errors.UserError(m % (key, val))

    def PathExists(self, key, val, env):
        """validator to check if Path exists"""
        if not os.path.exists(val):
            m = 'Path for option %s does not exist: %s'
            raise SCons.Errors.UserError(m % (key, val))

    def __call__(self, key, help, default, validator=None):
        # NB: searchfunc is currently undocumented and unsupported
        """
        The input parameters describe a 'path list' option, thus they
        are returned with the correct converter and validator appended. The
        result is usable for input to opts.Add() .

        The 'default' option specifies the default path to use if the
        user does not specify an override with this option.

        validator is a validator, see this file for examples
        """
        if validator is None:
            validator = self.PathExists
        # NOTE(review): SCons.Util is referenced but not imported in
        # this module; this relies on SCons.Util having been imported
        # elsewhere before __call__ runs — confirm.
        if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key):
            return (key, '%s ( /path/to/%s )' % (help, key[0]), default,
                    validator, None)
        else:
            return (key, '%s ( /path/to/%s )' % (help, key), default,
                    validator, None)
# Module-level singleton: PathVariable is the callable factory used by
# SConstruct files (see _PathVariableClass.__call__ for the signature).
PathVariable = _PathVariableClass()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|> | # in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY |
<|file_name|>DomTextSearchTest.ts<|end_file_name|><|fim▁begin|>import { Assert, UnitTest } from '@ephox/bedrock-client';
import { Fun, Unicode } from '@ephox/katamari';
import { KAssert } from '@ephox/katamari-assertions';
import { Spot } from '@ephox/phoenix';
import { Pattern } from '@ephox/polaris';
import { Compare, Html, Insert, InsertAll, SugarElement } from '@ephox/sugar';
import * as DomTextSearch from 'ephox/robin/api/dom/DomTextSearch';
import { TextSeekerOutcome, TextSeekerPhaseConstructor } from 'ephox/robin/textdata/TextSeeker';
UnitTest.test('DomTextSearchTest', () => {
const wordbreaker = () => {
return new RegExp(Pattern.wordbreak(), 'i');
};
const wordfinder = () => {
return new RegExp(Pattern.wordchar(), 'i');
};
const stopAtGap = <E>(phase: TextSeekerPhaseConstructor, element: E, text: string, index: number) => {
return phase.finish(Spot.point(element, index));
};
const checkInfo = (result: TextSeekerOutcome<SugarElement>, expectedElement: SugarElement, expectedOffset: number) => {
result.fold(() => {
Assert.fail('Unexpected abort');
}, () => {
Assert.fail('Unexpected edge');
}, (info) => {
const isSame = Compare.eq(info.element, expectedElement);
Assert.eq('eq', true, isSame);
Assert.eq('eq', info.offset, expectedOffset);
});
};
const checkEdge = (result: TextSeekerOutcome<SugarElement>, expectedElement: SugarElement) => {
result.fold(() => {
Assert.fail('Unexpected abort');
}, (edge) => {
const isSame = Compare.eq(edge, expectedElement);
Assert.eq('eq', true, isSame);
}, () => {
Assert.fail('Unexpected info');
});
};
const checkAbort = (result: TextSeekerOutcome<SugarElement>) => {
result.fold(Fun.noop, () => {
Assert.fail('Unexpected edge');
}, () => {
Assert.fail('Unexpected info found');
});
};
// const outcome = Adt.generate([
// { aborted: [] },
// { edge: [ 'element' ] },
// { success: [ 'info' ] }
// ]);
let element = SugarElement.fromTag('div');
const text = SugarElement.fromText('@maurizio@ ');
Insert.append(element, text);
Assert.eq('eq', 1, element.dom.childNodes.length); // Range offsets [0, 1)
Assert.eq('eq', 11, text.dom.length); // Range offsets [0, 11)
const elemResult = DomTextSearch.expandRight(element, 0, { regex: wordbreaker, attempt: stopAtGap });
checkAbort(elemResult);
const elemResultB = DomTextSearch.expandRight(element, 8, { regex: wordbreaker, attempt: stopAtGap });
checkAbort(elemResultB);
const textResult1 = DomTextSearch.expandRight(text, 0, { regex: wordbreaker, attempt: stopAtGap });
checkInfo(textResult1, text, 0);
const textResult2 = DomTextSearch.expandRight(text, 1, { regex: wordbreaker, attempt: stopAtGap });
checkInfo(textResult2, text, 9);
const textResult = DomTextSearch.expandRight(text, 8, { regex: wordbreaker, attempt: stopAtGap });
checkInfo(textResult, text, 9);
const textResult3 = DomTextSearch.expandRight(text, 9, { regex: wordbreaker, attempt: stopAtGap });
checkInfo(textResult3, text, 9);
const textB = SugarElement.fromText('@one ');
const textBResult = DomTextSearch.expandRight(textB, 0, { regex: wordbreaker, attempt: stopAtGap });
checkInfo(textBResult, textB, 0);
const textBResult1 = DomTextSearch.expandRight(textB, 1, { regex: wordbreaker, attempt: stopAtGap });
checkInfo(textBResult1, textB, 4);
checkAbort(DomTextSearch.expandLeft(element, 0, { regex: wordbreaker, attempt: stopAtGap }));<|fim▁hole|> checkInfo(DomTextSearch.expandLeft(text, 1, { regex: wordbreaker, attempt: stopAtGap }), text, 0); // before the first '@'
checkInfo(DomTextSearch.expandLeft(text, 3, { regex: wordbreaker, attempt: stopAtGap }), text, 0); // before the first '@'
checkInfo(DomTextSearch.expandLeft(text, '@maurizio@'.length, { regex: wordbreaker, attempt: stopAtGap }), text,
'@maurizio@'.length - 1); // before the last '@'
checkInfo(DomTextSearch.expandLeft(text, '@maurizio@ '.length, { regex: wordbreaker, attempt: stopAtGap }), text,
'@maurizio@ '.length - 1); // before the ' '
//
// tests left and right looking for words or spaces
//
const textR = SugarElement.fromText(' words');
// Pos: 0 23 8
Assert.eq('eq', 8, textR.dom.length);
checkInfo(DomTextSearch.expandRight(textR, 0, { regex: wordfinder, attempt: stopAtGap }),
textR, 3); // 3 is the location after the last space, starting from the left
checkInfo(DomTextSearch.expandLeft(textR, 8, { regex: wordbreaker, attempt: stopAtGap }),
textR, 2); // 2 is the location after the last character, starting from the right
const textL = SugarElement.fromText('words ');
// Pos: 0 45 8
Assert.eq('eq', 8, textL.dom.length);
checkInfo(DomTextSearch.expandRight(textL, 0, { regex: wordbreaker, attempt: stopAtGap }),
textL, 5); // 5 is the location after the last character, starting from the left
checkInfo(DomTextSearch.expandLeft(textL, 8, { regex: wordfinder, attempt: stopAtGap }),
textL, 4); // 4 is the location after the last space, starting from the right
//
// tests moving right and left by words
//
element = SugarElement.fromTag('div');
const span2 = SugarElement.fromTag('span');
// Pos: 0123456789
const w1 = SugarElement.fromText(' wordy ');
const w2 = SugarElement.fromText(' words ');
const w3 = SugarElement.fromText(' wordd ');
Insert.append(span2, w2);
InsertAll.append(element, [ w1, span2, w3 ]);
Assert.eq('eq', 3, element.dom.childNodes.length); // Range offsets [0, 3)
Assert.eq('eq', 1, span2.dom.childNodes.length); // Range offsets [0, 1)
Assert.eq('eq', 9, w1.dom.length); // Range offsets [0, 7)
Assert.eq('eq', '<div> wordy <span> words </span> wordd </div>', Html.getOuter(element));
const r0 = DomTextSearch.expandRight(w1, 0, { regex: wordfinder, attempt: stopAtGap });
const r1 = DomTextSearch.expandRight(w1, 3, { regex: wordbreaker, attempt: stopAtGap });
checkInfo(r0, w1, 2);
checkInfo(r1, w1, 7);
const r2 = DomTextSearch.expandRight(w1, 7, { regex: wordfinder, attempt: stopAtGap });
const r3 = DomTextSearch.expandRight(w2, 2, { regex: wordbreaker, attempt: stopAtGap });
checkInfo(r2, w2, 2);
checkInfo(r3, w2, 7);
const r4 = DomTextSearch.expandRight(w2, 7, { regex: wordfinder, attempt: stopAtGap });
const r5 = DomTextSearch.expandRight(w3, 2, { regex: wordbreaker, attempt: stopAtGap });
checkInfo(r4, w3, 2);
checkInfo(r5, w3, 7);
const r6 = DomTextSearch.expandRight(w3, 7, { regex: wordfinder, attempt: stopAtGap });
const r7 = DomTextSearch.expandRight(w3, 9, { regex: wordbreaker, attempt: stopAtGap });
checkEdge(r6, w3); // hit the edge without finding a word so return 'edge' element not 'success' point
checkEdge(r7, w3);
// expandLeft, starting from the RHS:
// '<div> wordy <span> words </span> wordd </div>'
// ^
const l1 = DomTextSearch.expandLeft(w3, 9, { regex: wordfinder, attempt: stopAtGap });
// '<div> wordy <span> words </span> wordd </div>'
// ^
checkInfo(l1, w3, 6);
const l2 = DomTextSearch.expandLeft(w3, 6, { regex: wordbreaker, attempt: stopAtGap });
// '<div> wordy <span> words </span> wordd </div>'
// ^
checkInfo(l2, w3, 1);
const l3 = DomTextSearch.expandLeft(w3, 1, { regex: wordfinder, attempt: stopAtGap });
// '<div> wordy <span> words </span> wordd </div>'
// ^
checkInfo(l3, w2, 6);
const l4 = DomTextSearch.expandLeft(w2, 6, { regex: wordbreaker, attempt: stopAtGap });
// '<div> wordy <span> words </span> wordd </div>'
// ^
checkInfo(l4, w2, 1);
const l5 = DomTextSearch.expandLeft(w2, 1, { regex: wordfinder, attempt: stopAtGap });
// '<div> wordy <span> words </span> wordd </div>'
// ^
checkInfo(l5, w1, 6);
const l6 = DomTextSearch.expandLeft(w1, 6, { regex: wordbreaker, attempt: stopAtGap });
// '<div> wordy <span> words </span> wordd </div>'
// ^
checkInfo(l6, w1, 1);
const l7 = DomTextSearch.expandLeft(w1, 1, { regex: wordfinder, attempt: stopAtGap });
// '<div> wordy <span> words </span> wordd </div>'
// ^
checkEdge(l7, w1); // hit the edge looking for a word
//
// scanRight returns Optional({element, offset})
//
(() => {
const container = SugarElement.fromTag('div');
const alphaText = SugarElement.fromText('alpha');
const betaSpan = SugarElement.fromTag('span');
const betaText1 = SugarElement.fromText('be');
const betaText2 = SugarElement.fromText('ta');
const gammaText = SugarElement.fromText('');
const deltaText = SugarElement.fromText(Unicode.zeroWidth);
const epsilonText = SugarElement.fromText('epsilon');
InsertAll.append(container, [ alphaText, betaSpan, gammaText, deltaText, epsilonText ]);
InsertAll.append(betaSpan, [ betaText1, betaText2 ]);
const checkNoneScan = (label: string, start: SugarElement, offset: number) => {
KAssert.eqNone('There should be no scanning (' + label + ')', DomTextSearch.scanRight(start, offset));
};
const checkScan = (label: string, expected: { element: SugarElement; offset: number }, start: SugarElement, offset: number) => {
const actual = DomTextSearch.scanRight(start, offset).getOrDie('Could not find scan result for: ' + label);
Assert.eq('eq', expected.offset, actual.offset);
Assert.eq('Element did not match scan: (' + label + ')', true, Compare.eq(expected.element, actual.element));
};
checkNoneScan('Alpha:exceed', alphaText, 'alphabeta\uFEFFepisilon!'.length);
checkScan('Alpha:eof', { element: epsilonText, offset: 'epsilon'.length }, alphaText, 'alphabeta\uFEFFepsilon'.length);
checkScan('Alpha:2', { element: alphaText, offset: 2 }, alphaText, 2);
checkScan('Alpha:into beta:2', { element: betaText1, offset: 'be'.length }, alphaText, 'alphabe'.length);
checkScan('Alpha:into beta:3', { element: betaText2, offset: 't'.length }, alphaText, 'alphabet'.length);
checkScan('Beta:0', { element: betaText1, offset: 0 }, betaText1, ''.length);
})();
});<|fim▁end|> | checkAbort(DomTextSearch.expandLeft(element, 1, { regex: wordbreaker, attempt: stopAtGap }));
checkAbort(DomTextSearch.expandLeft(element, 2, { regex: wordbreaker, attempt: stopAtGap }));
checkEdge(DomTextSearch.expandLeft(text, 0, { regex: wordbreaker, attempt: stopAtGap }), text); |
<|file_name|>hook_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package hook
import (
"encoding/json"
"net/http/httptest"
"testing"
"time"
"k8s.io/test-infra/prow/config"
"k8s.io/test-infra/prow/github"
"k8s.io/test-infra/prow/phony"
"k8s.io/test-infra/prow/plugins"
"k8s.io/test-infra/prow/repoowners"
)
var ice = github.IssueCommentEvent{
Action: "reopened",
Repo: github.Repo{
Owner: github.User{
Login: "foo",
},
Name: "bar",
FullName: "foo/bar",
},<|fim▁hole|>
var repoLevelSecret = `
'*':
- value: key1
created_at: 2019-10-02T15:00:00Z
- value: key2
created_at: 2020-10-02T15:00:00Z
foo/bar:
- value: 123abc
created_at: 2019-10-02T15:00:00Z
- value: key6
created_at: 2020-10-02T15:00:00Z
`
var orgLevelSecret = `
'*':
- value: key1
created_at: 2019-10-02T15:00:00Z
- value: key2
created_at: 2020-10-02T15:00:00Z
foo:
- value: 123abc
created_at: 2019-10-02T15:00:00Z
- value: key4
created_at: 2020-10-02T15:00:00Z
`
var globalSecret = `
'*':
- value: 123abc
created_at: 2019-10-02T15:00:00Z
- value: key2
created_at: 2020-10-02T15:00:00Z
`
var missingMatchingSecret = `
somerandom:
- value: 123abc
created_at: 2019-10-02T15:00:00Z
- value: key2
created_at: 2020-10-02T15:00:00Z
`
var secretInOldFormat = `123abc`
// TestHook sets up a hook.Server and then sends a fake webhook at it. It then
// ensures that a fake plugin is called.
func TestHook(t *testing.T) {
called := make(chan bool, 1)
payload, err := json.Marshal(&ice)
if err != nil {
t.Fatalf("Marshalling ICE: %v", err)
}
plugins.RegisterIssueHandler(
"baz",
func(pc plugins.Agent, ie github.IssueEvent) error {
called <- true
return nil
},
nil,
)
pa := &plugins.ConfigAgent{}
pa.Set(&plugins.Configuration{Plugins: map[string][]string{"foo/bar": {"baz"}}})
ca := &config.Agent{}
clientAgent := &plugins.ClientAgent{
GitHubClient: github.NewFakeClient(),
OwnersClient: repoowners.NewClient(nil, nil, func(org, repo string) bool { return false }, func(org, repo string) bool { return false }, func() config.OwnersDirBlacklist { return config.OwnersDirBlacklist{} }),
}
metrics := NewMetrics()
var testcases = []struct {
name string
secret []byte
tokenGenerator func() []byte
shouldSucceed bool
}{
{
name: "Token present at repository level.",
secret: []byte("123abc"),
tokenGenerator: func() []byte {
return []byte(repoLevelSecret)
},
shouldSucceed: true,
},
{
name: "Token present at org level.",
secret: []byte("123abc"),
tokenGenerator: func() []byte {
return []byte(orgLevelSecret)
},
shouldSucceed: true,
},
{
name: "Token present at global level.",
secret: []byte("123abc"),
tokenGenerator: func() []byte {
return []byte(globalSecret)
},
shouldSucceed: true,
},
{
name: "Token not matching anywhere (wildcard token missing).",
secret: []byte("123abc"),
tokenGenerator: func() []byte {
return []byte(missingMatchingSecret)
},
shouldSucceed: false,
},
{
name: "Secret in old format.",
secret: []byte("123abc"),
tokenGenerator: func() []byte {
return []byte(secretInOldFormat)
},
shouldSucceed: true,
},
}
for _, tc := range testcases {
t.Logf("Running scenario %q", tc.name)
s := httptest.NewServer(&Server{
ClientAgent: clientAgent,
Plugins: pa,
ConfigAgent: ca,
Metrics: metrics,
TokenGenerator: tc.tokenGenerator,
})
defer s.Close()
if err := phony.SendHook(s.URL, "issues", payload, tc.secret); (err != nil) == tc.shouldSucceed {
t.Fatalf("Error sending hook: %v", err)
}
}
select {
case <-called: // All good.
case <-time.After(time.Second):
t.Error("Plugin not called after one second.")
}
}<|fim▁end|> | } |
<|file_name|>EmbedMultipleMetaTagsProblem.java<|end_file_name|><|fim▁begin|>/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.flex.compiler.problems;
import org.apache.flex.compiler.common.ISourceLocation;
/**
* This problem gets created when a variable has two Embed meta data tags
* associated with it.
*/
public final class EmbedMultipleMetaTagsProblem extends CompilerProblem
{
public static final String DESCRIPTION =
"A variable can only only have one [${EMBED}] metadata tag";
public static final int errorCode = 1344;<|fim▁hole|>
public EmbedMultipleMetaTagsProblem(ISourceLocation site)
{
super(site);
}
// Prevent these from being localized.
public final String EMBED = "Embed";
}<|fim▁end|> | |
<|file_name|>hashset.rs<|end_file_name|><|fim▁begin|>use containers::array::Array;
use containers::list::List;
use containers::reference::Ref;
use containers::vector::Vector;
use memory::page::Page;
use memory::region::Region;
/// A region-allocated hash set populated once from a vector.
///
/// Collisions are handled by separate chaining: each slot holds a list of
/// entries whose hash maps to that slot.
#[derive(Copy, Clone)]
pub struct HashSet<T: Hash<T> + Copy> {
    // One chained bucket per slot; bucket count is a hash prime >= input length.
    slots: Vector<Ref<List<Slot<T>>>>,
}
impl<T: Hash<T> + Copy> HashSet<T> {
    /// Builds a set containing the distinct elements of `vector`.
    ///
    /// `_pr` is the parent region used for scratch allocations; `_rp` is the
    /// page that owns the returned set. The bucket count is the smallest
    /// "good" hash prime >= the vector length (see `HashPrimeHelper`).
    pub fn from_vector(_pr: &Region, _rp: *mut Page, vector: Ref<Vector<T>>) -> Ref<HashSet<T>> {
        let _r = Region::create(_pr);
        let hash_size = HashPrimeHelper::get_prime(vector.length);
        // Allocate one empty chain per bucket.
        let mut array: Ref<Array<Ref<List<Slot<T>>>>> = Ref::new(_r.page, Array::new());
        for _ in 0..hash_size {
            array.add(Ref::new(_r.page, List::new()));
        }
        let mut hash_set = Ref::new(_rp, HashSet { slots: Vector::from_array(_rp, array) });
        hash_set.initialize_from_vector(vector);
        hash_set
    }

    /// Inserts every element of `vector`; duplicates are ignored by `add`.
    fn initialize_from_vector(&mut self, vector: Ref<Vector<T>>) {
        for value in vector.iter() {
            self.add(value);
        }
    }

    /// Inserts `value` into its bucket unless an equal element is present.
    fn add(&mut self, value: &T) {
        let hash_code = value.hash();
        let slot_number = hash_code % self.slots.length;
        // NOTE(review): relies on Ref being a Copy handle that aliases the
        // shared bucket list, so mutating the copy persists — confirm in
        // containers::reference.
        let mut slot_list = self.slots[slot_number];
        for slot in slot_list.get_iterator() {
            if value.equals(&slot.value) {
                return;
            }
        }
        slot_list.add(Slot {
            hash_code: hash_code,
            value: *value,
        });
    }

    /// Returns true when an element equal to `value` is in the set.
    pub fn contains(&self, value: T) -> bool {
        for slot in self.slots[value.hash() % self.slots.length].get_iterator() {
            if value.equals(&slot.value) {
                return true;
            }
        }
        false
    }
}
/// A single chained hash-bucket entry.
#[derive(Copy, Clone)]
struct Slot<T: Copy> {
    // The stored element.
    value: T,
    // Hash of `value`, captured at insertion time.
    hash_code: usize,
}
/// Supplies "good" prime table sizes for the hash set above.
pub struct HashPrimeHelper {}

// https://planetmath.org/goodhashtableprimes
static HASH_PRIMES: &'static [usize] = &[
    3, 5, 11, 23, 53, 97, 193, 389, 769, 1543, 3079, 6151, 12289, 24593, 49157, 98317, 196613,
    393241, 786433, 1572869, 3145739, 6291469, 12582917, 25165843, 50331653, 100663319, 201326611,
    402653189, 805306457, 1610612741,
];

impl HashPrimeHelper {
    /// Returns the smallest known-good table size >= `size`: the first
    /// suitable entry of HASH_PRIMES or, past the end of the table, the next
    /// odd prime found by search (falling back to `size` itself only if the
    /// search somehow exhausts the usize range).
    pub fn get_prime(size: usize) -> usize {
        for i in HASH_PRIMES {
            if *i >= size {
                return *i;
            }
        }
        let mut i = size | 1;
        while i < usize::MAX {
            if HashPrimeHelper::is_prime(i) {
                return i;
            }
            i += 2;
        }
        size
    }

    /// Deterministic trial-division primality test.
    ///
    /// BUG FIX: the previous version incremented `divisor` *before* testing
    /// it, so divisibility by 3 was never checked and composites such as 9,
    /// 15 and 21 were reported prime. It also classified 1 as prime.
    fn is_prime(candidate: usize) -> bool {
        if candidate < 2 {
            return false;
        }
        if candidate == 2 {
            return true;
        }
        if candidate & 1 == 0 {
            return false;
        }
        let limit = (candidate as f64).sqrt() as usize;
        let mut divisor: usize = 3;
        while divisor <= limit {
            if candidate % divisor == 0 {
                return false;
            }
            divisor += 2;
        }
        true
    }
}
/// Value equality used for set membership tests.
pub trait Equal<T: ?Sized = Self> {
    fn equals(&self, other: &T) -> bool;
}

/// Hashable values; `add`/`contains` above require that values comparing
/// equal via `equals` also produce the same `hash`.
pub trait Hash<T: ?Sized = Self>: Equal<T> {
    fn hash(&self) -> usize;
}
/// Computes the 64-bit FNV-1a digest of the `length` bytes at `data`.
pub fn hash(data: *const u8, length: usize) -> usize {
    const FNV_OFFSET_BASIS: u64 = 0xcbf29ce484222325;
    const FNV_PRIME: u64 = 0x100000001b3;
    let bytes = unsafe { std::slice::from_raw_parts(data, length) };
    bytes
        .iter()
        .fold(FNV_OFFSET_BASIS, |acc, &byte| {
            (acc ^ byte as u64).wrapping_mul(FNV_PRIME)
        }) as usize
}
#[test]<|fim▁hole|> use memory::Page;
use memory::Region;
use memory::StackBucket;
let mut heap = Heap::create();
let root_stack_bucket = StackBucket::create(&mut heap);
let root_page = Page::get(root_stack_bucket as usize);
let _r = Region::create_from_page(root_page);
let keywords = HashSet::from_vector(
&_r,
_r.page,
Ref::new(
_r.page,
Vector::from_raw_array(
_r.page,
&[
String::from_string_slice(_r.page, "using"),
String::from_string_slice(_r.page, "namespace"),
String::from_string_slice(_r.page, "typedef"),
],
),
),
);
assert_eq!(
(*keywords).contains(String::from_string_slice(_r.page, "using")),
true
);
assert_eq!(
(*keywords).contains(String::from_string_slice(_r.page, "namespace")),
true
);
assert_eq!(
(*keywords).contains(String::from_string_slice(_r.page, "typedef")),
true
);
assert_eq!(
(*keywords).contains(String::from_string_slice(_r.page, "nix")),
false
);
}<|fim▁end|> | fn test_hash_set() {
use containers::String;
use memory::Heap; |
<|file_name|>languages.js<|end_file_name|><|fim▁begin|>/* Global variables */
var _languagesHashOld = new Array();
var _languagesHash = new Array();
var _languagesRegexHash = new Array();
/* Initialize the languages arrays */
/* Order alphabetically by _languagesHash key */
/* Based in ISO 639-3 codes */
/* source: http://www-01.sil.org/iso639-3/iso-639-3.tab */
/* alt source: https://github.com/wikimedia/mediawiki/blob/7458dc32d99e6dd569b1629762443d074b6a3c52/languages/Names.php */
_languagesHash = {
'aar': 'Afaraf',
'arz': 'اللغة المصرية العامية',
'abk': 'аҧсуа бызшәа, аҧсшәа',
'ace': 'بهسا اچيه',
'aeb': 'زَوُن',
'afr': 'Afrikaans',
'aka': 'Akan',
'amh': 'አማርኛ',
'ang': 'Old English',
'ara': 'العربية',
'arg': 'Aragonés',
'asm': 'অসমীয়া',
'ava': 'авар мацӀ, магӀарул мацӀ',
'ave': 'avesta',
'aym': 'aymar aru',
'ary': 'الدارجة',
'arc': 'Imperial Aramaic',
'ast': 'Asturianu',
'aze': 'azərbaycanca',
'azb': 'South Azerbaijani',
'bak': 'Bašqort',
'bar': 'Boarisch',
'bam': 'Bamanankan',
'bpy': 'বিষ্ণুপ্রিয়া মণিপুরী',
'bcl': 'Bikol Sentral',
'bel': 'Беларуская',
'ben': 'বাংলা',
'bho': 'भोजपुरी',
'bis': 'Bislama',
'bod': 'བོད་ཡིག',
'bos': 'bosanski jezik',
'bre': 'Brezhoneg',
'bul': 'Български',
'bus': 'Bisã',
'bxr': 'буряад хэлэн',
'bcc': 'balojî Balójí',
'bel-tasrask': 'тарашкевіца, клясычны правапіс',
'bjn': 'Bahasa Banjar',
'cat': 'Català',
'cdo': '平話',
'ces': 'Česky',
'ceb': 'Sinugboanon',
'cha': 'Chamoru',
'che': 'нохчийн мотт',
'chy': 'Tsėhésenėstsestȯtse',
'chu': 'ѩзыкъ словѣньскъ',
'chv': 'Чӑвашла',
'cbk-zam': 'Chavacano',
'cor': 'Kernewek',
'cos': 'corsu, lingua corsa',
'cre': 'ᓀᐦᐃᔭᐍᐏᐣ',
'cym': 'Cymraeg',
'chr': 'ᏣᎳᎩ ᎦᏬᏂᎯᏍᏗ',
'crh': 'Къырымтатарджа',
'ckb': 'کوردیی ناوەندی',
'csb': 'Kaszëbsczi jãzëk',
'dan': 'Dansk',
'dsb': 'Dolnoserbski',
'deu': 'Deutsch',
'div': 'ދިވެހި',
'dzo': ' རྫོང་ཁ',
'ell': 'Ελληνικά',
'eng': 'English',
'epo': 'Esperanto',
'est': 'Eesti',
'eus': 'euskara',
'ewe': 'Eʋegbe',
'ext': 'estremeñu',
'eml': 'emiliân-rumagnōl',
'ebn': 'বাংলা',
'fao': 'Føroyskt',
'fas': 'فارسی',
'fij': 'vosa Vakaviti',
'fin': 'Suomi',
'fra': 'Français',
'fry': 'Frysk',
'ful': 'Fulfulde',
'fur': 'Furlan',
'frp': 'Provençau',
'frr': 'Nordfriisk',
'gag': 'Gagauz dili',
'gan': '贛語',
'gla': 'Gàidhlig',
'gle': 'Gaeilge',
'glg': 'Galego',
'glv': 'Gaelg',
'glk': 'گیلکی',
'got': 'Gothic',
'grn': 'Avañe\'ẽ',
'guj': 'ગુજરાતી',
'gsw': 'Schwyzerdütsch',
'hak': '客家語/Hak-kâ-ngî',
'haw': 'ʻŌlelo Hawaiʻi',
'hat': 'Kreyol ayisyen',
'hau': 'Hausa',
'heb': 'עברית',
'her': 'Otjiherero',
'hin': 'हिन्दी',
'hmo': 'Hiri Motu',
'hrv': 'Олык Марий',
'hun': 'Magyar',
'hye': 'Հայերեն',
'hif-latn': 'Fiji Baat',
'hif': 'फिजी बात',
'hrx': 'Riograndenser Hunsrückisch',
'hsb': 'Hornjoserbsce',
'ibo': 'Asụsụ Igbo',
'ido': 'Ido',
'iii': 'ꆈꌠ꒿ Nuosuhxop',
'iku': 'ᐃᓄᒃᑎᑐᑦ',
'ile': 'Interlingue',
'ina': 'Interlingua',
'ind': 'Bahasa Indonesia',
'ipk': 'Iñupiaq',
'isl': 'Íslenska',
'ita': 'Italiano',
'ilo': 'Ilokano',
'jav': 'Basa Jawa',
'jpn': '日本語',
'jbo': 'la .lojban.',
'kaa': 'Қарақалпақ тили',
'kal': 'Kalaallisut',
'kan': 'ಕನ್ನಡ',
'kas': 'कश्मीरी',
'kat': 'ქართული',
'kau': 'Kanuri',
'kaz': 'Қазақша',
'khm': 'ភាសាខ្មែរ',
'kik': 'Gĩkũyũ',
'kin': 'Ikinyarwanda',
'kir': 'قىرعىز تىلى',
'kom': 'коми кыв',
'kon': 'Kikongo',
'kor': '한국어',
'kor-kp': '조선어',
'kua': 'Kuanyama',
'kur': 'kurdî',
'ksh': 'Ripoarisch',
'kab': 'Taqbaylit',
'kbd': 'Адыгэбзэ',
'koi': 'Перем коми кыв',
'krc': 'Къарачай-Малкъар тил',
'lad': 'Judeo-Español',
'lbe': 'лакку маз',
'lez': 'Лезги чӏал Lezgi č’al',
'lij': 'Lìgure, Zenéize',
'lki': 'لوری',
'lmo': 'Lumbaart',
'ltg': 'latgalīšu volūda',
'lzh': '古文',
'mai': 'मैथिली, মৈথিলী',
'mdf': 'Мокшень кяль / mokšenj kälj',
'mhr': 'Meadow Mari',
'min': 'باسو مينڠكاباو',
'mrj': 'Мары йӹлмӹ',
'mwl': 'Mirandés',
'myv': 'Morafa',
'mzn': 'مازندرانی',
'nah': 'Asteca',
'nan': '閩南語 / 闽南语',
'nap': 'Napulitano',
'nds': 'Plattdüütsch',
'nds-nl': 'Nederlaands Leegsaksies',
'nov': 'Novial',
'nrm': 'Narom',
'nso': 'Pedi',
'pag': 'Pangasinense',
'pam': 'Amánung Sísuan',
'pap': 'Papiamentu',
'pfl': 'Pfälzisch',
'pih': 'Pitkern-Norfolk',
'lao': 'ພາສາລາວ',
'lat': 'Lingua Latīna',
'lav': 'Latviešu',
'lim': 'Limburgs',
'lin': 'Lingála',
'lit': 'Lietuvių',
'ltz': 'Lëtzebuergesch',
'lub': 'Tshiluba',
'lug': 'Luganda',
'mah': 'Kajin M̧ajeļ',
'mal': 'മലയാളം',
'mar': 'मराठी',
'map-bms': 'Basa Banyumasan',
'mkd': 'Македонски',
'mlg': 'Malagasy',
'mlt': 'Malti',
'mon': 'Монгол хэл',
'mri': 'Te reo Māori',
'msa': 'Bahasa Melayu',
'mul': 'multilingual',
'mya': 'မြန်မာဘာသာ',
'nau': 'Ekakairũ Naoero',
'nav': 'Diné bizaad',
'nbl': 'isiNdebele',
'nde': 'isiNdebele',
'ndo': 'Owambo',
'nep': 'नेपाली',
'nld': 'Nederlands',
'nno': 'Norsk (nynorsk)',
'nob': 'Norsk (bokmål)',
'nor': 'Norsk (bokmål)',
'nya': 'ChiCheŵa',
'new': 'नेपाल भाषा',
'oci': 'Occitan',
'oji': 'ᐊᓂᔑᓈᐯᒧᐎᓐ',
'ori': 'ଓଡ଼ିଆ',
'orm': 'Afaan Oromoo',
'oss': 'ирон æвзаг',
'pan': 'ਪੰਜਾਬੀ',
'pcd': 'Picard',
'pli': 'पाऴि',
'pol': 'Język polski',
'por': 'Português',
'por-pt': 'Português do Brasil',
'pus': 'پښتو',
'pdc': 'Pennsilfaanisch Deitsch',
'pms': 'Piemontèis',
'que': 'Runa Simi',
'roh': 'Rumantsch',
'ron': 'Română',
'run': 'Ikirundi',
'rus': 'Русский',
'rue': 'Русиньскый',
'pnb': 'شاہ مکھی پنجابی',
'pnt': 'ποντιακά',
'rmy': 'Vlax Romani',
'roa-tara': 'Tarandíne',
'rup': 'armãneashce, armãneashti, rrãmãneshti',
'sah': 'Саха тыла',
'scn': 'Sicilianu',
'sco': '(Braid) Scots, Lallans',
'sgs': 'Žemaičių tarmė',
'srn': 'Sranan Tongo',
'stq': 'Seeltersk',
'szl': 'ślōnskŏ gŏdka',
'tet': 'Lia-Tetun',
'tpi': 'Tok Pisin',
'tum': 'chiTumbuka',
'tyv': 'тыва дыл tyva dyl',
'udm': 'удмурт кыл udmurt kyl',
'vep': 'vepsän kel’',
'vls': 'West-Vlaams',
'vro': 'võro kiil',
'wuu': '吳語/吴语',
'xal': 'ᡆᡕᡅᠷᠠᡑ ᡘᡄᠯᡄᠨ',
'xmf': 'მარგალური ნინა',
'yue': '廣州話 / 广州话',
'zea': 'Zeêuws',
'zh-min-nan': '閩南語 / 闽南语',
'sag': 'yângâ tî sängö',
'san': 'संस्कृतम्',
'sin': 'සිංහල',
'skr': 'सराइकी',
'slk': 'Slovenčina',
'slv': 'Slovenščina',
'sme': 'Davvisámegiella',
'smo': 'gagana fa\'a Samoa',
'sna': 'chiShona',
'snd': 'सिन्धी',
'som': 'Soomaaliga',
'sot': 'Sesotho',
'spa': 'Español',
'sqi': 'Mirësevini',
'tha': 'ภาษาไทย',
'srd': 'sardu',
'srp': 'Српски',
'srp-ec': 'Српски (ћирилица)',
'srp-el': 'Srpski (latinica)',
'ssw': 'SiSwati',
'sun': 'Basa Sunda',
'swa': 'Kiswahili',
'swe': 'Svenska',
'ses': 'Koyraboro Senni',
'sh': 'Srpskohrvatski / Српскохрватски',
'tah': 'Reo Tahiti',
'tam': 'தமிழ்',
'tat': 'татар теле',
'tel': 'తెలుగు',
'tgk': 'тоҷикӣ',
'tgl': 'Wikang Tagalog',
'tir': 'ትግርኛ',
'ton': 'faka Tonga',
'tsn': 'Setswana',
'tso': 'Xitsonga',
'tuk': 'Türkmen dili',
'tur': 'Türkçe',
'twi': 'Twi',
'tcy': 'ತುಳು',
'tly': 'толышә зывон',
'tt-cyrl': 'Татарча',
'uig': 'ئۇيغۇرچە',
'ug-arab': 'ئۇيغۇرچە',
'ukr': 'Українська',
'urd': 'اردو',
'uzb': 'Oʻzbekcha',
'ven': 'Tshivenḓa',
'vec': 'Vèneto',
'vie': 'Tiếng Việt',
'vol': 'Volapük',
'wln': 'walon',
'wol': 'Wolof',
'xho': 'isiXhosa',
'yid': 'ייִדיש',
'yor': 'Yorùbá',
'zha': 'Saɯ cueŋƅ',
'zho': '中文',
'zho-hans': '中文(简体)',
'zho-hant': '中文(繁體)',
'zho-hk': '中文(香港)',
'zul': 'isiZulu',
'zza': 'Zāzākī'
};
/* Based in ISO 639-1 and 639-2 codes */
_languagesHashOld = {
'ay': _languagesHash.aym,
'aa': _languagesHash.aar,
'ab': _languagesHash.abk,
'af': _languagesHash.afr,
'ak': _languagesHash.aka,
'am': _languagesHash.amh,
'an': _languagesHash.arg,
'ar': _languagesHash.ara,
'as': _languagesHash.asm,
'az': _languagesHash.aze,
'ba': _languagesHash.bak,
'bh': _languagesHash.bho,
'bm': _languagesHash.bam,
'be': _languagesHash.bel,
'bi': _languagesHash.bis,
'bo': _languagesHash.bod,
'bn': _languagesHash.ben,
'br': _languagesHash.bre,
'bs': _languagesHash.bos,
'bg': _languagesHash.bul,
'be-tarask': _languagesHash['bel-tasrask'],
'ca': _languagesHash.cat,
'ce': _languagesHash.che,
'co': _languagesHash.cos,
'cs': _languagesHash.ces,
'cv': _languagesHash.chv,
'cy': _languagesHash.cym,
'cu': _languagesHash.chu,
'cr': _languagesHash.cre,
'crh-latn': _languagesHash.crh,
'ch': _languagesHash.cha,
'da': _languagesHash.dan,
'de': _languagesHash.deu,
'dv': _languagesHash.div,
'dz': _languagesHash.dzo,
'ee': _languagesHash.ewe,
'el': _languagesHash.ell,
'en': _languagesHash.eng,
'eo': _languagesHash.epo,
'eu': _languagesHash.eus,
'et': _languagesHash.est,
'fo': _languagesHash.fao,
'fa': _languagesHash.fas,
'fi': _languagesHash.fin,
'fy': _languagesHash.fry,
'fj': _languagesHash.fij,
'fr': _languagesHash.fra,
'ff': _languagesHash.ful,
'gl': _languagesHash.glg,
'gd': _languagesHash.gla,
'ga': _languagesHash.gle,
'gn': _languagesHash.grn,
'gu': _languagesHash.guj,
'gv': _languagesHash.glv,
'ht': _languagesHash.hat,
'ha': _languagesHash.hau,
'he': _languagesHash.heb,
'hi': _languagesHash.hin,<|fim▁hole|> 'hy': _languagesHash.hye,
'ie': _languagesHash.ile,
'ig': _languagesHash.ibo,
'ia': _languagesHash.ina,
'id': _languagesHash.ind,
'it': _languagesHash.ita,
'ik': _languagesHash.ipk,
'io': _languagesHash.ido,
'iu': _languagesHash.iku,
'is': _languagesHash.isl,
'jv': _languagesHash.jav,
'ja': _languagesHash.jpn,
'kn': _languagesHash.kan,
'ki': _languagesHash.kik,
'kv': _languagesHash.kom,
'kw': _languagesHash.cor,
'lg': _languagesHash.lug,
'ln': _languagesHash.lin,
'lo': _languagesHash.lao,
'mi': _languagesHash.mri,
'na': _languagesHash.nau,
'nv': _languagesHash.nav,
'ny': _languagesHash.nya,
'om': _languagesHash.orm,
'os': _languagesHash.oss,
'pa': _languagesHash.pan,
'pi': _languagesHash.pli,
'ka': _languagesHash.kat,
'kk': _languagesHash.kaz,
'km': _languagesHash.khm,
'ky': _languagesHash.kir,
'ko-kp': _languagesHash['kor-kp'],
'ku': _languagesHash.kur,
'kg': _languagesHash.kon,
'kl': _languagesHash.kal,
'ks': _languagesHash.kas,
'la': _languagesHash.lat,
'lv': _languagesHash.lav,
'li': _languagesHash.lim,
'lt': _languagesHash.lit,
'lb': _languagesHash.ltz,
'ml': _languagesHash.mal,
'mr': _languagesHash.mar,
'mk': _languagesHash.mkd,
'mg': _languagesHash.mlg,
'mt': _languagesHash.mlt,
'mn': _languagesHash.mon,
'ms': _languagesHash.msa,
'my': _languagesHash.mya,
'ne': _languagesHash.nep,
'nl': _languagesHash.nld,
'nn': _languagesHash.nno,
'nb': _languagesHash.nob,
'no': _languagesHash.nor,
'oc': _languagesHash.oci,
'or': _languagesHash.ori,
'pl': _languagesHash.pol,
'pt': _languagesHash.por,
'pt-br': _languagesHash['por-pt'],
'ps': _languagesHash.pus,
'qu': _languagesHash.que,
'rm': _languagesHash.roh,
'ro': _languagesHash.ron,
'ru': _languagesHash.rus,
'rn': _languagesHash.run,
'rw': _languagesHash.kin,
'sc': _languagesHash.srd,
'sd': _languagesHash.snd,
'se': _languagesHash.sme,
'sg': _languagesHash.sag,
'sm': _languagesHash.smo,
'sn': _languagesHash.sna,
'so': _languagesHash.som,
'ss': _languagesHash.ssw,
'st': _languagesHash.sot,
'tg': _languagesHash.tgk,
'ti': _languagesHash.tir,
'tn': _languagesHash.tsn,
'to': _languagesHash.ton,
'ts': _languagesHash.tso,
'tt': _languagesHash.tat,
'tw': _languagesHash.twi,
'ty': _languagesHash.tah,
'ug': _languagesHash.uig,
've': _languagesHash.ven,
'wa': _languagesHash.wln,
'wo': _languagesHash.wol,
'xh': _languagesHash.xho,
'za': _languagesHash.zha,
'zu': _languagesHash.zul,
'sa': _languagesHash.san,
'si': _languagesHash.sin,
'sk': _languagesHash.slk,
'sl': _languagesHash.slv,
'es': _languagesHash.spa,
'sq': _languagesHash.sqi,
'sr': _languagesHash.srp,
'sr-ec': _languagesHash['srp-ec'],
'sr-el': _languagesHash['srp-el'],
'su': _languagesHash.sun,
'sw': _languagesHash.swa,
'sv': _languagesHash.swe,
'ta': _languagesHash.tam,
'te': _languagesHash.tel,
'tl': _languagesHash.tgl,
'th': _languagesHash.tha,
'tk': _languagesHash.tuk,
'tr': _languagesHash.tur,
'uk': _languagesHash.ukr,
'ur': _languagesHash.urd,
'uz': _languagesHash.uzb,
'vi': _languagesHash.vie,
'yo': _languagesHash.wol,
'yi': _languagesHash.yid,
'zh': _languagesHash.zho,
'zh-hans': _languagesHash['zho-hans'],
'zh-hant': _languagesHash['zho-hant'],
'zh-hk': _languagesHash['zho-hk'],
'diq': _languagesHash.zza
};
/* Resolve an ISO 639 code (639-3 or old 639-1/639-2 style) to its native
 * language name. Returns '' for unknown codes, logging a diagnostic when
 * the runtime supports it. */
function getLanguageNameFromISO(code) {
    var name = _languagesHash[code] || _languagesHashOld[code];
    if (name) {
        return name;
    }
    if (code) {
        /* dump() is not available in every JS runtime engine; swallow the
         * error where it is undefined. */
        try {
            dump('"' + code + '" is not available in languages.js.\n');
        } catch (error) {}
    }
    return '';
}
/* Resolve a comma-separated list of ISO codes to a ', '-separated list of
 * language names (unknown codes contribute an empty name, as before). */
function getLanguageNameFromISOCodes(codes) {
    /* map/join replaces the previous for-in loop, which enumerated array
     * indexes as strings and compared them against a number (and would
     * also pick up any enumerable properties added to Array.prototype). */
    return codes.split(',').map(getLanguageNameFromISO).join(', ');
}
/* Be careful, this function returns false, also if undefined - that
 * means nothing because the table _languagesHashOld is not complete */
function isOldLanguageCode(code) {
    /* BUG FIX: previously looked up the undefined identifier `iso`,
     * which made every call throw a ReferenceError. */
    return _languagesHashOld[code] ? true : false;
}
/* Populate the reverse-lookup table _languagesRegexHash, mapping a native
 * language name to a regex source matching its ISO code(s). */
function buildLanguagesRegexHash() {
    var code;
    /* First pass: map each name to its ISO 639-3 code. */
    for (code in _languagesHash) {
        _languagesRegexHash[_languagesHash[code]] = code;
    }
    /* Second pass: fold in old-style codes, producing an anchored
     * alternation such as '^(eng|en)$' when a name has both codes. */
    for (code in _languagesHashOld) {
        var regex = _languagesRegexHash[_languagesHashOld[code]];
        _languagesRegexHash[_languagesHashOld[code]] = '^(' + (regex ? regex + '|' : '') + code + ')$';
    }
}
/* Look up the regex source for a native language name ('' if unknown). */
function getLanguageRegex(language) {
    var regex = _languagesRegexHash[language];
    return regex ? regex : '';
}
'hu': _languagesHash.hun, |
<|file_name|>sugar_integration_tests.cc<|end_file_name|><|fim▁begin|>#define BOOST_TEST_MODULE sugar_integration_tests<|fim▁hole|><|fim▁end|> | #include <boost/test/included/unit_test.hpp> |
<|file_name|>peturb.py<|end_file_name|><|fim▁begin|>import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d
# returns a random d dimensional vector, a direction to peturb in
def direction(d,t):
# if type == uniform
if(t == 'u'):
return np.random.uniform(-1/np.sqrt(2), 1/np.sqrt(2), d)
elif(t == 'n'):
return np.random.normal(0, 1/np.sqrt(d), d)
elif(t == 's'):
# a point on the N-Sphere<|fim▁hole|> x = np.zeros(d)
x[0] = np.cos(angles[0])
for i in range(1,d-1):
temp = 1
for j in range(i):
temp = temp * np.sin(angles[j])
x[i] = temp*np.cos(angles[i])
x[d-1] = x[d-2]*np.tan(angles[d-2])
return x
fig = plt.figure()
ax = plt.axes(projection='3d')
for i in range(1000):
R = np.random.uniform(0,1,1)[0]
R2 = np.random.uniform(0,1,1)[0]
xs = np.sin(np.arccos(1-2*R))*np.cos(2*np.pi*R2)
ys = np.sin(np.arccos(1-2*R))*np.sin(2*np.pi*R2)
zs = 1- 2*R
ax.scatter3D(xs, ys, zs, cmap='Greens')
ax.set_xlabel('X Label')
ax.set_ylabel('Y Label')
ax.set_zlabel('Z Label')
plt.show()<|fim▁end|> | angles = np.random.uniform(0, np.pi, d-2) |
<|file_name|>Acao.js<|end_file_name|><|fim▁begin|>// Representa um ação que pode ser desfeita e refeita
// A ação é associada a uma aba aberta
// Nas funções redo e undo, this se refere à aba associada à ação<|fim▁hole|> var aba = Interface.abaFoco
this.id = String(Math.random())
this.nome = nome
this.redo = redo
this.undo = undo
if (aba.posHistorico < aba.historico.length)
aba.historico = aba.historico.slice(0, aba.posHistorico)
aba.historico.push(this)
aba.posHistorico = aba.historico.length
redo.call(aba)
aba.livro.modificado = true
InterfaceEdicao.atualizarDesfazer()
}
// Retorna o nome da ação a ser desfeita na aba ativa (null caso nada possa ser desfeito)
Acao.getDesfazer = function () {
var aba, acao
aba = Interface.abaFoco
acao = aba.historico[aba.posHistorico-1]
if (aba.posHistorico > 0 && acao.undo)
return acao.nome
return null
}
// Retorna o nome da ação a ser refeita na aba ativa (null caso nada possa ser refeito)
Acao.getRefazer = function () {
var aba, acao
aba = Interface.abaFoco
acao = aba.historico[aba.posHistorico]
if (aba.posHistorico < aba.historico.length && acao.redo)
return acao.nome
return null
}
// Desfaz a última ação na aba
Acao.desfazer = function (aba) {
var acao
if (aba.posHistorico > 0) {
acao = aba.historico[aba.posHistorico-1]
if (acao.undo) {
acao.undo.call(aba)
aba.posHistorico--
if ((aba.posHistorico && aba.historico[aba.posHistorico-1].id!=aba.idAcaoSalvo)
|| (!aba.posHistorico && aba.idAcaoSalvo!=""))
aba.livro.modificado = true
else
aba.livro.modificado = false
}
}
}
// Refaz a ação na aba
Acao.refazer = function (aba) {
var acao
if (aba.posHistorico < aba.historico.length) {
acao = aba.historico[aba.posHistorico]
if (acao.redo) {
acao.redo.call(aba)
aba.posHistorico++
if ((aba.posHistorico && aba.historico[aba.posHistorico-1].id!=aba.idAcaoSalvo)
|| (!aba.posHistorico && aba.idAcaoSalvo!=""))
aba.livro.modificado = true
else
aba.livro.modificado = false
}
}
}<|fim▁end|> | function Acao(nome, redo, undo) { |
<|file_name|>screensaver.cpp<|end_file_name|><|fim▁begin|>/* This file is part of Clementine.
Copyright 2010, David Sansome <[email protected]>
Clementine is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Clementine is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Clementine. If not, see <http://www.gnu.org/licenses/>.
*/
#include "screensaver.h"
#include <QtGlobal>
#include "config.h"
#ifdef HAVE_DBUS
#include <QDBusConnection>
#include <QDBusConnectionInterface>
#include "dbusscreensaver.h"
#endif
#ifdef Q_OS_DARWIN
#include "macscreensaver.h"
#endif
#ifdef Q_OS_WIN32
#include "windowsscreensaver.h"
#endif
#include <QtDebug>
const char* Screensaver::kGnomeService = "org.gnome.ScreenSaver";<|fim▁hole|>const char* Screensaver::kKdeInterface = "org.freedesktop.ScreenSaver";
Screensaver* Screensaver::screensaver_ = 0;
Screensaver* Screensaver::GetScreensaver() {
if (!screensaver_) {
#if defined(HAVE_DBUS)
if (QDBusConnection::sessionBus().interface()->isServiceRegistered(
kGnomeService)) {
screensaver_ =
new DBusScreensaver(kGnomeService, kGnomePath, kGnomeInterface);
} else if (QDBusConnection::sessionBus().interface()->isServiceRegistered(
kKdeService)) {
screensaver_ = new DBusScreensaver(kKdeService, kKdePath, kKdeInterface);
}
#elif defined(Q_OS_DARWIN)
screensaver_ = new MacScreensaver();
#elif defined(Q_OS_WIN32)
screensaver_ = new WindowsScreensaver();
#endif
}
return screensaver_;
}<|fim▁end|> | const char* Screensaver::kGnomePath = "/";
const char* Screensaver::kGnomeInterface = "org.gnome.ScreenSaver";
const char* Screensaver::kKdeService = "org.kde.ScreenSaver";
const char* Screensaver::kKdePath = "/ScreenSaver/"; |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from socketio import sdjango
from web.api import EventResource, SummaryFeedResource, SummaryFeedByCountryCodeResource
from tastypie.api import Api
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin<|fim▁hole|># admin.autodiscover()
sdjango.autodiscover()
v1_api = Api(api_name='v1')
#api_test = EventResource()
v1_api.register(SummaryFeedResource())
#v1_api.register(SummaryFeedByCountryCodeResource())
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'malwarez.views.home', name='home'),
# url(r'^malwarez/', include('malwarez.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'web.views.index'),
url(r'^test/$', 'web.views.test'),
url(r'^socket\.io', include(sdjango.urls)),
url(r'^api/', include(v1_api.urls)),
url(r'^summary-country/(?P<countryCode>\w+)/$', 'web.views.getSummaryByCountry'),
# TODO: be more specific country parameter should be only 3 char long
url(r'^top/(?P<type>\w+)/(?P<country>\w+)/$', 'web.views.getTopFive'),
url(r'^top/(?P<type>\w+)/$', 'web.views.getTopFive'),
#url(r'^detail/(?P<type>\w+)/(?P<country>\w+)/(?P<data>\w+)/$', 'web.views.getDetail'),
url(r'^detail/(?P<type>\w+)/(?P<data>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/$', 'web.views.getDetail'),
url(r'^detail/(?P<type>\w+)/(?P<data>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/(?P<country>\w+)/$', 'web.views.getDetail'),
url(r'filter/malware', 'web.views.getDiversityMalware'),
url(r'filter/ip', 'web.views.getDiversityIP')
)
urlpatterns += staticfiles_urlpatterns()<|fim▁end|> | |
<|file_name|>glyphMargin.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import 'vs/css!./glyphMargin';
import { DynamicViewOverlay } from 'vs/editor/browser/view/dynamicViewOverlay';
import { ViewContext } from 'vs/editor/common/view/viewContext';
import { RenderingContext } from 'vs/editor/common/view/renderingContext';
import * as viewEvents from 'vs/editor/common/view/viewEvents';
import { editorBackground } from 'vs/platform/theme/common/colorRegistry';
import { registerThemingParticipant } from 'vs/platform/theme/common/themeService';
export class DecorationToRender {
_decorationToRenderBrand: void;
public startLineNumber: number;
public endLineNumber: number;
public className: string;
constructor(startLineNumber: number, endLineNumber: number, className: string) {
this.startLineNumber = +startLineNumber;
this.endLineNumber = +endLineNumber;
this.className = String(className);
}
}
export abstract class DedupOverlay extends DynamicViewOverlay {
protected _render(visibleStartLineNumber: number, visibleEndLineNumber: number, decorations: DecorationToRender[]): string[][] {
let output: string[][] = [];
for (let lineNumber = visibleStartLineNumber; lineNumber <= visibleEndLineNumber; lineNumber++) {
let lineIndex = lineNumber - visibleStartLineNumber;
output[lineIndex] = [];
}
if (decorations.length === 0) {
return output;
}
decorations.sort((a, b) => {
if (a.className === b.className) {
if (a.startLineNumber === b.startLineNumber) {
return a.endLineNumber - b.endLineNumber;
}
return a.startLineNumber - b.startLineNumber;
}
return (a.className < b.className ? -1 : 1);
});
let prevClassName: string = null;
let prevEndLineIndex = 0;
for (let i = 0, len = decorations.length; i < len; i++) {
let d = decorations[i];
let className = d.className;
let startLineIndex = Math.max(d.startLineNumber, visibleStartLineNumber) - visibleStartLineNumber;
let endLineIndex = Math.min(d.endLineNumber, visibleEndLineNumber) - visibleStartLineNumber;
if (prevClassName === className) {
startLineIndex = Math.max(prevEndLineIndex + 1, startLineIndex);
prevEndLineIndex = Math.max(prevEndLineIndex, endLineIndex);
} else {
prevClassName = className;
prevEndLineIndex = endLineIndex;
}
for (let i = startLineIndex; i <= prevEndLineIndex; i++) {
output[i].push(prevClassName);
}
}
return output;
}
}
export class GlyphMarginOverlay extends DedupOverlay {
private _context: ViewContext;
private _lineHeight: number;
private _glyphMargin: boolean;
private _glyphMarginLeft: number;
private _glyphMarginWidth: number;
private _renderResult: string[];
constructor(context: ViewContext) {
super();
this._context = context;
this._lineHeight = this._context.configuration.editor.lineHeight;
this._glyphMargin = this._context.configuration.editor.viewInfo.glyphMargin;
this._glyphMarginLeft = this._context.configuration.editor.layoutInfo.glyphMarginLeft;
this._glyphMarginWidth = this._context.configuration.editor.layoutInfo.glyphMarginWidth;
this._renderResult = null;
this._context.addEventHandler(this);
}
public dispose(): void {
this._context.removeEventHandler(this);
this._context = null;
this._renderResult = null;
}
// --- begin event handlers
public onConfigurationChanged(e: viewEvents.ViewConfigurationChangedEvent): boolean {
if (e.lineHeight) {
this._lineHeight = this._context.configuration.editor.lineHeight;
}
if (e.viewInfo.glyphMargin) {
this._glyphMargin = this._context.configuration.editor.viewInfo.glyphMargin;
}
if (e.layoutInfo) {
this._glyphMarginLeft = this._context.configuration.editor.layoutInfo.glyphMarginLeft;
this._glyphMarginWidth = this._context.configuration.editor.layoutInfo.glyphMarginWidth;
}
return true;
}
public onCursorPositionChanged(e: viewEvents.ViewCursorPositionChangedEvent): boolean {
return false;
}
public onCursorSelectionChanged(e: viewEvents.ViewCursorSelectionChangedEvent): boolean {
return false;
}
public onDecorationsChanged(e: viewEvents.ViewDecorationsChangedEvent): boolean {
return true;
}
public onFlushed(e: viewEvents.ViewFlushedEvent): boolean {
return true;
}
public onLinesChanged(e: viewEvents.ViewLinesChangedEvent): boolean {
return true;
}
public onLinesDeleted(e: viewEvents.ViewLinesDeletedEvent): boolean {
return true;
}
public onLinesInserted(e: viewEvents.ViewLinesInsertedEvent): boolean {
return true;
}
public onRevealRangeRequest(e: viewEvents.ViewRevealRangeRequestEvent): boolean {
return false;
}
public onScrollChanged(e: viewEvents.ViewScrollChangedEvent): boolean {
return e.scrollTopChanged;
}
public onZonesChanged(e: viewEvents.ViewZonesChangedEvent): boolean {
return true;
}
// --- end event handlers
protected _getDecorations(ctx: RenderingContext): DecorationToRender[] {
let decorations = ctx.getDecorationsInViewport();
let r: DecorationToRender[] = [];
for (let i = 0, len = decorations.length; i < len; i++) {
let d = decorations[i];
let glyphMarginClassName = d.source.options.glyphMarginClassName;
if (glyphMarginClassName) {
r.push(new DecorationToRender(d.range.startLineNumber, d.range.endLineNumber, glyphMarginClassName));
}
}
return r;
}
public prepareRender(ctx: RenderingContext): void {
if (!this._glyphMargin) {
this._renderResult = null;
return;
}
let visibleStartLineNumber = ctx.visibleRange.startLineNumber;
let visibleEndLineNumber = ctx.visibleRange.endLineNumber;
let toRender = this._render(visibleStartLineNumber, visibleEndLineNumber, this._getDecorations(ctx));
let lineHeight = this._lineHeight.toString();
let left = this._glyphMarginLeft.toString();
let width = this._glyphMarginWidth.toString();
let common = '" style="left:' + left + 'px;width:' + width + 'px' + ';height:' + lineHeight + 'px;"></div>';
let output: string[] = [];
for (let lineNumber = visibleStartLineNumber; lineNumber <= visibleEndLineNumber; lineNumber++) {
let lineIndex = lineNumber - visibleStartLineNumber;
let classNames = toRender[lineIndex];
if (classNames.length === 0) {
output[lineIndex] = '';
} else {
output[lineIndex] = (
'<div class="cgmr '
+ classNames.join(' ')
+ common
);
}<|fim▁hole|>
this._renderResult = output;
}
public render(startLineNumber: number, lineNumber: number): string {
if (!this._renderResult) {
return '';
}
let lineIndex = lineNumber - startLineNumber;
if (lineIndex < 0 || lineIndex >= this._renderResult.length) {
throw new Error('Unexpected render request');
}
return this._renderResult[lineIndex];
}
}
registerThemingParticipant((theme, collector) => {
let editorBackgroundColor = theme.getColor(editorBackground);
if (editorBackgroundColor) {
collector.addRule(`.monaco-editor.${theme.selector} .glyph-margin { background-color: ${editorBackgroundColor}; }`);
}
});<|fim▁end|> | } |
<|file_name|>fixture-dialog.controller.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('footierepoApp').controller('FixtureDialogController',
['$scope', '$stateParams', '$uibModalInstance', 'entity', 'Fixture',
function($scope, $stateParams, $uibModalInstance, entity, Fixture) {
entity.kickOff = Date.parse(entity.kickOff);
$scope.fixture = entity;
$scope.load = function(id) {
Fixture.get({id : id}, function(result) {
result.kickOff = Date.parse(result.kickOff);<|fim▁hole|> $scope.fixture = result;
});
};
$scope.$watch('fixture.kickOff', function(newval, oldval) {
if (! (newval instanceof Date)) {
$scope.fixture.kickOff = new Date(Date.parse(newval));
}
})
var onSaveSuccess = function (result) {
$scope.$emit('footierepoApp:fixtureUpdate', result);
$uibModalInstance.close(result);
$scope.isSaving = false;
};
var onSaveError = function (result) {
$scope.isSaving = false;
};
$scope.save = function () {
$scope.isSaving = true;
$scope.fixture.kickOff = $scope.fixture.kickOff.getTime()/1000;
if ($scope.fixture.id != null) {
Fixture.update($scope.fixture, onSaveSuccess, onSaveError);
} else {
Fixture.save($scope.fixture, onSaveSuccess, onSaveError);
}
};
$scope.clear = function() {
$uibModalInstance.dismiss('cancel');
};
$scope.datePickerForKickOff = {};
$scope.datePickerForKickOff.status = {
opened: false
};
$scope.datePickerForKickOffOpen = function($event) {
$scope.datePickerForKickOff.status.opened = true;
};
}]);<|fim▁end|> | |
<|file_name|>mb.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Searches for albums in the MusicBrainz database.
"""
from __future__ import division, absolute_import, print_function
import musicbrainzngs
import re
import traceback
from six.moves.urllib.parse import urljoin
from beets import logging
import beets.autotag.hooks
import beets
from beets import util
from beets import config
import six
VARIOUS_ARTISTS_ID = '89ad4ac3-39f7-470e-963a-56509c546377'
if util.SNI_SUPPORTED:
BASE_URL = 'https://musicbrainz.org/'
else:
BASE_URL = 'http://musicbrainz.org/'
SKIPPED_TRACKS = ['[data track]']
musicbrainzngs.set_useragent('beets', beets.__version__,
'https://beets.io/')
class MusicBrainzAPIError(util.HumanReadableException):
"""An error while talking to MusicBrainz. The `query` field is the
parameter to the action and may have any type.
"""
def __init__(self, reason, verb, query, tb=None):
self.query = query
if isinstance(reason, musicbrainzngs.WebServiceError):
reason = u'MusicBrainz not reachable'
super(MusicBrainzAPIError, self).__init__(reason, verb, tb)
def get_message(self):
return u'{0} in {1} with query {2}'.format(
self._reasonstr(), self.verb, repr(self.query)
)
log = logging.getLogger('beets')
RELEASE_INCLUDES = ['artists', 'media', 'recordings', 'release-groups',
'labels', 'artist-credits', 'aliases',
'recording-level-rels', 'work-rels',
'work-level-rels', 'artist-rels']
TRACK_INCLUDES = ['artists', 'aliases']
if 'work-level-rels' in musicbrainzngs.VALID_INCLUDES['recording']:
TRACK_INCLUDES += ['work-level-rels', 'artist-rels']
def track_url(trackid):
return urljoin(BASE_URL, 'recording/' + trackid)
def album_url(albumid):
return urljoin(BASE_URL, 'release/' + albumid)
def configure():
"""Set up the python-musicbrainz-ngs module according to settings
from the beets configuration. This should be called at startup.
"""
hostname = config['musicbrainz']['host'].as_str()
musicbrainzngs.set_hostname(hostname)
musicbrainzngs.set_rate_limit(
config['musicbrainz']['ratelimit_interval'].as_number(),
config['musicbrainz']['ratelimit'].get(int),
)
def _preferred_alias(aliases):
"""Given an list of alias structures for an artist credit, select
and return the user's preferred alias alias or None if no matching
alias is found.
"""
if not aliases:
return
# Only consider aliases that have locales set.
aliases = [a for a in aliases if 'locale' in a]
# Search configured locales in order.
for locale in config['import']['languages'].as_str_seq():
# Find matching primary aliases for this locale.
matches = [a for a in aliases
if a['locale'] == locale and 'primary' in a]
# Skip to the next locale if we have no matches
if not matches:
continue
return matches[0]
def _preferred_release_event(release):
"""Given a release, select and return the user's preferred release
event as a tuple of (country, release_date). Fall back to the
default release event if a preferred event is not found.
"""
countries = config['match']['preferred']['countries'].as_str_seq()
for country in countries:
for event in release.get('release-event-list', {}):
try:
if country in event['area']['iso-3166-1-code-list']:
return country, event['date']
except KeyError:
pass
return release.get('country'), release.get('date')
def _flatten_artist_credit(credit):
"""Given a list representing an ``artist-credit`` block, flatten the
data into a triple of joined artist name strings: canonical, sort, and
credit.
"""
artist_parts = []
artist_sort_parts = []
artist_credit_parts = []
for el in credit:
if isinstance(el, six.string_types):
# Join phrase.
artist_parts.append(el)
artist_credit_parts.append(el)
artist_sort_parts.append(el)
else:
alias = _preferred_alias(el['artist'].get('alias-list', ()))
# An artist.
if alias:
cur_artist_name = alias['alias']
else:
cur_artist_name = el['artist']['name']
artist_parts.append(cur_artist_name)
# Artist sort name.
if alias:
artist_sort_parts.append(alias['sort-name'])
elif 'sort-name' in el['artist']:
artist_sort_parts.append(el['artist']['sort-name'])
else:
artist_sort_parts.append(cur_artist_name)
# Artist credit.
if 'name' in el:
artist_credit_parts.append(el['name'])
else:
artist_credit_parts.append(cur_artist_name)
return (
''.join(artist_parts),
''.join(artist_sort_parts),
''.join(artist_credit_parts),
)
def track_info(recording, index=None, medium=None, medium_index=None,
medium_total=None):
"""Translates a MusicBrainz recording result dictionary into a beets
``TrackInfo`` object. Three parameters are optional and are used
only for tracks that appear on releases (non-singletons): ``index``,
the overall track number; ``medium``, the disc number;
``medium_index``, the track's index on its medium; ``medium_total``,
the number of tracks on the medium. Each number is a 1-based index.
"""
info = beets.autotag.hooks.TrackInfo(
recording['title'],
recording['id'],
index=index,
medium=medium,
medium_index=medium_index,
medium_total=medium_total,
data_source=u'MusicBrainz',
data_url=track_url(recording['id']),
)
if recording.get('artist-credit'):
# Get the artist names.
info.artist, info.artist_sort, info.artist_credit = \
_flatten_artist_credit(recording['artist-credit'])
# Get the ID and sort name of the first artist.
artist = recording['artist-credit'][0]['artist']
info.artist_id = artist['id']
if recording.get('length'):
info.length = int(recording['length']) / (1000.0)
lyricist = []
composer = []
composer_sort = []
for work_relation in recording.get('work-relation-list', ()):
if work_relation['type'] != 'performance':
continue
info.work = work_relation['work']['title']
info.mb_workid = work_relation['work']['id']
if 'disambiguation' in work_relation['work']:
info.work_disambig = work_relation['work']['disambiguation']
for artist_relation in work_relation['work'].get(
'artist-relation-list', ()):
if 'type' in artist_relation:
type = artist_relation['type']
if type == 'lyricist':
lyricist.append(artist_relation['artist']['name'])
elif type == 'composer':
composer.append(artist_relation['artist']['name'])
composer_sort.append(
artist_relation['artist']['sort-name'])
if lyricist:
info.lyricist = u', '.join(lyricist)
if composer:
info.composer = u', '.join(composer)
info.composer_sort = u', '.join(composer_sort)
arranger = []
for artist_relation in recording.get('artist-relation-list', ()):
if 'type' in artist_relation:
type = artist_relation['type']
if type == 'arranger':
arranger.append(artist_relation['artist']['name'])
if arranger:
info.arranger = u', '.join(arranger)
info.decode()
return info
def _set_date_str(info, date_str, original=False):
"""Given a (possibly partial) YYYY-MM-DD string and an AlbumInfo
object, set the object's release date fields appropriately. If
`original`, then set the original_year, etc., fields.
"""
if date_str:
date_parts = date_str.split('-')
for key in ('year', 'month', 'day'):
if date_parts:
date_part = date_parts.pop(0)
try:
date_num = int(date_part)
except ValueError:
continue
if original:
key = 'original_' + key
setattr(info, key, date_num)
def album_info(release):
"""Takes a MusicBrainz release result dictionary and returns a beets
AlbumInfo object containing the interesting data about that release.
"""
# Get artist name using join phrases.
artist_name, artist_sort_name, artist_credit_name = \
_flatten_artist_credit(release['artist-credit'])
# Basic info.
track_infos = []
index = 0
for medium in release['medium-list']:
disctitle = medium.get('title')
format = medium.get('format')
if format in config['match']['ignored_media'].as_str_seq():
continue
all_tracks = medium['track-list']
if ('data-track-list' in medium
and not config['match']['ignore_data_tracks']):
all_tracks += medium['data-track-list']
track_count = len(all_tracks)
if 'pregap' in medium:
all_tracks.insert(0, medium['pregap'])
for track in all_tracks:
if ('title' in track['recording'] and
track['recording']['title'] in SKIPPED_TRACKS):
continue
if ('video' in track['recording'] and
track['recording']['video'] == 'true' and
config['match']['ignore_video_tracks']):
continue
# Basic information from the recording.
index += 1
ti = track_info(
track['recording'],
index,
int(medium['position']),
int(track['position']),
track_count,
)
ti.release_track_id = track['id']
ti.disctitle = disctitle
ti.media = format
ti.track_alt = track['number']
# Prefer track data, where present, over recording data.
if track.get('title'):
ti.title = track['title']
if track.get('artist-credit'):
# Get the artist names.
ti.artist, ti.artist_sort, ti.artist_credit = \
_flatten_artist_credit(track['artist-credit'])
ti.artist_id = track['artist-credit'][0]['artist']['id']
if track.get('length'):
ti.length = int(track['length']) / (1000.0)
track_infos.append(ti)
info = beets.autotag.hooks.AlbumInfo(
release['title'],
release['id'],
artist_name,
release['artist-credit'][0]['artist']['id'],
track_infos,
mediums=len(release['medium-list']),
artist_sort=artist_sort_name,
artist_credit=artist_credit_name,
data_source=u'MusicBrainz',
data_url=album_url(release['id']),
)
info.va = info.artist_id == VARIOUS_ARTISTS_ID
if info.va:
info.artist = config['va_name'].as_str()
info.asin = release.get('asin')
info.releasegroup_id = release['release-group']['id']
info.albumstatus = release.get('status')
# Get the disambiguation strings at the release and release group level.
if release['release-group'].get('disambiguation'):
info.releasegroupdisambig = \
release['release-group'].get('disambiguation')
if release.get('disambiguation'):
info.albumdisambig = release.get('disambiguation')
# Get the "classic" Release type. This data comes from a legacy API
# feature before MusicBrainz supported multiple release types.
if 'type' in release['release-group']:
reltype = release['release-group']['type']
if reltype:
info.albumtype = reltype.lower()
# Log the new-style "primary" and "secondary" release types.
# Eventually, we'd like to actually store this data, but we just log<|fim▁hole|> if 'primary-type' in release['release-group']:
rel_primarytype = release['release-group']['primary-type']
if rel_primarytype:
log.debug('primary MB release type: ' + rel_primarytype.lower())
if 'secondary-type-list' in release['release-group']:
if release['release-group']['secondary-type-list']:
log.debug('secondary MB release type(s): ' + ', '.join(
[secondarytype.lower() for secondarytype in
release['release-group']['secondary-type-list']]))
# Release events.
info.country, release_date = _preferred_release_event(release)
release_group_date = release['release-group'].get('first-release-date')
if not release_date:
# Fall back if release-specific date is not available.
release_date = release_group_date
_set_date_str(info, release_date, False)
_set_date_str(info, release_group_date, True)
# Label name.
if release.get('label-info-list'):
label_info = release['label-info-list'][0]
if label_info.get('label'):
label = label_info['label']['name']
if label != '[no label]':
info.label = label
info.catalognum = label_info.get('catalog-number')
# Text representation data.
if release.get('text-representation'):
rep = release['text-representation']
info.script = rep.get('script')
info.language = rep.get('language')
# Media (format).
if release['medium-list']:
first_medium = release['medium-list'][0]
info.media = first_medium.get('format')
info.decode()
return info
def match_album(artist, album, tracks=None):
"""Searches for a single album ("release" in MusicBrainz parlance)
and returns an iterator over AlbumInfo objects. May raise a
MusicBrainzAPIError.
The query consists of an artist name, an album name, and,
optionally, a number of tracks on the album.
"""
# Build search criteria.
criteria = {'release': album.lower().strip()}
if artist is not None:
criteria['artist'] = artist.lower().strip()
else:
# Various Artists search.
criteria['arid'] = VARIOUS_ARTISTS_ID
if tracks is not None:
criteria['tracks'] = six.text_type(tracks)
# Abort if we have no search terms.
if not any(criteria.values()):
return
try:
log.debug(u'Searching for MusicBrainz releases with: {!r}', criteria)
res = musicbrainzngs.search_releases(
limit=config['musicbrainz']['searchlimit'].get(int), **criteria)
except musicbrainzngs.MusicBrainzError as exc:
raise MusicBrainzAPIError(exc, 'release search', criteria,
traceback.format_exc())
for release in res['release-list']:
# The search result is missing some data (namely, the tracks),
# so we just use the ID and fetch the rest of the information.
albuminfo = album_for_id(release['id'])
if albuminfo is not None:
yield albuminfo
def match_track(artist, title):
"""Searches for a single track and returns an iterable of TrackInfo
objects. May raise a MusicBrainzAPIError.
"""
criteria = {
'artist': artist.lower().strip(),
'recording': title.lower().strip(),
}
if not any(criteria.values()):
return
try:
res = musicbrainzngs.search_recordings(
limit=config['musicbrainz']['searchlimit'].get(int), **criteria)
except musicbrainzngs.MusicBrainzError as exc:
raise MusicBrainzAPIError(exc, 'recording search', criteria,
traceback.format_exc())
for recording in res['recording-list']:
yield track_info(recording)
def _parse_id(s):
"""Search for a MusicBrainz ID in the given string and return it. If
no ID can be found, return None.
"""
# Find the first thing that looks like a UUID/MBID.
match = re.search(u'[a-f0-9]{8}(-[a-f0-9]{4}){3}-[a-f0-9]{12}', s)
if match:
return match.group()
def album_for_id(releaseid):
"""Fetches an album by its MusicBrainz ID and returns an AlbumInfo
object or None if the album is not found. May raise a
MusicBrainzAPIError.
"""
log.debug(u'Requesting MusicBrainz release {}', releaseid)
albumid = _parse_id(releaseid)
if not albumid:
log.debug(u'Invalid MBID ({0}).', releaseid)
return
try:
res = musicbrainzngs.get_release_by_id(albumid,
RELEASE_INCLUDES)
except musicbrainzngs.ResponseError:
log.debug(u'Album ID match failed.')
return None
except musicbrainzngs.MusicBrainzError as exc:
raise MusicBrainzAPIError(exc, u'get release by ID', albumid,
traceback.format_exc())
return album_info(res['release'])
def track_for_id(releaseid):
"""Fetches a track by its MusicBrainz ID. Returns a TrackInfo object
or None if no track is found. May raise a MusicBrainzAPIError.
"""
trackid = _parse_id(releaseid)
if not trackid:
log.debug(u'Invalid MBID ({0}).', releaseid)
return
try:
res = musicbrainzngs.get_recording_by_id(trackid, TRACK_INCLUDES)
except musicbrainzngs.ResponseError:
log.debug(u'Track ID match failed.')
return None
except musicbrainzngs.MusicBrainzError as exc:
raise MusicBrainzAPIError(exc, u'get recording by ID', trackid,
traceback.format_exc())
return track_info(res['recording'])<|fim▁end|> | # it for now to help understand the differences. |
<|file_name|>xrinputsourceevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::EventBinding::EventBinding::EventMethods;
use crate::dom::bindings::codegen::Bindings::XRInputSourceEventBinding::{
self, XRInputSourceEventMethods,
};
use crate::dom::bindings::error::Fallible;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::str::DOMString;
use crate::dom::event::Event;
use crate::dom::globalscope::GlobalScope;
use crate::dom::window::Window;
use crate::dom::xrframe::XRFrame;
use crate::dom::xrinputsource::XRInputSource;
use dom_struct::dom_struct;
use servo_atoms::Atom;
#[dom_struct]
pub struct XRInputSourceEvent {
event: Event,
frame: Dom<XRFrame>,
source: Dom<XRInputSource>,
}
impl XRInputSourceEvent {
#[allow(unrooted_must_root)]
fn new_inherited(frame: &XRFrame, source: &XRInputSource) -> XRInputSourceEvent {
XRInputSourceEvent {
event: Event::new_inherited(),
frame: Dom::from_ref(frame),
source: Dom::from_ref(source),
}
}
    /// Create the event, reflect it into the given global scope, and
    /// initialise the underlying `Event` with its type string and the
    /// bubbles/cancelable flags.
    pub fn new(
        global: &GlobalScope,
        type_: Atom,
        bubbles: bool,
        cancelable: bool,
        frame: &XRFrame,
        source: &XRInputSource,
    ) -> DomRoot<XRInputSourceEvent> {
        let trackevent = reflect_dom_object(
            Box::new(XRInputSourceEvent::new_inherited(frame, source)),
            global,
        );
        {
            // Initialise the base Event state (type/bubbles/cancelable).
            let event = trackevent.upcast::<Event>();
            event.init_event(type_, bubbles, cancelable);
        }
        trackevent
    }
#[allow(non_snake_case)]
pub fn Constructor(
window: &Window,
type_: DOMString,
init: &XRInputSourceEventBinding::XRInputSourceEventInit,
) -> Fallible<DomRoot<XRInputSourceEvent>> {
Ok(XRInputSourceEvent::new(
&window.global(),
Atom::from(type_),
init.parent.bubbles,
init.parent.cancelable,<|fim▁hole|> &init.inputSource,
))
}
}
impl XRInputSourceEventMethods for XRInputSourceEvent {
    // https://immersive-web.github.io/webxr/#dom-xrinputsourceeventinit-frame
    /// Returns the `XRFrame` associated with this event.
    fn Frame(&self) -> DomRoot<XRFrame> {
        DomRoot::from_ref(&*self.frame)
    }
    // https://immersive-web.github.io/webxr/#dom-xrinputsourceeventinit-inputsource
    /// Returns the `XRInputSource` that produced this event.
    fn InputSource(&self) -> DomRoot<XRInputSource> {
        DomRoot::from_ref(&*self.source)
    }
    // https://dom.spec.whatwg.org/#dom-event-istrusted
    /// Forwarded to the underlying `Event`.
    fn IsTrusted(&self) -> bool {
        self.event.IsTrusted()
    }
}<|fim▁end|> | &init.frame, |
<|file_name|>web_page_block.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and contributors
# For license information, please see license.txt<|fim▁hole|>
# import frappe
from frappe.model.document import Document
class WebPageBlock(Document):
	# Controller for the "Web Page Block" DocType. All behavior is
	# inherited from frappe's Document base class; no custom logic yet.
	pass
from __future__ import unicode_literals |
<|file_name|>TableChunker.js<|end_file_name|><|fim▁begin|>/**
* Produces optimized XTemplates for chunks of tables to be
* used in grids, trees and other table based widgets.
*/
Ext.define('Ext.view.TableChunker', {
singleton: true,
requires: ['Ext.XTemplate'],
metaTableTpl: [
'{%if (this.openTableWrap)out.push(this.openTableWrap())%}',
'<table class="' + Ext.baseCSSPrefix + 'grid-table ' + Ext.baseCSSPrefix + 'grid-table-resizer" border="0" cellspacing="0" cellpadding="0" {[this.embedFullWidth(values)]}>',
'<tbody>',
'<tr class="' + Ext.baseCSSPrefix + 'grid-header-row">',
'<tpl for="columns">',
'<th class="' + Ext.baseCSSPrefix + 'grid-col-resizer-{id}" style="width: {width}px; height: 0px;"></th>',
'</tpl>',
'</tr>',
'{[this.openRows()]}',
'{row}',
'<tpl for="features">',
'{[this.embedFeature(values, parent, xindex, xcount)]}',
'</tpl>',
'{[this.closeRows()]}',
'</tbody>',
'</table>',
'{%if (this.closeTableWrap)out.push(this.closeTableWrap())%}'
],
    constructor: function() {
        // Patch a `recurse` helper onto every XTemplate: apply the template
        // to `values` itself, or to `values[reference]` when a sub-object
        // key is supplied.
        Ext.XTemplate.prototype.recurse = function(values, reference) {
            return this.apply(reference ? values[reference] : values);
        };
    },
embedFeature: function(values, parent, x, xcount) {
if (!values.disabled) {
return values.getFeatureTpl(values, parent, x, xcount);
}
return '';
},
    // Build the inline style attribute pinning the table to the computed
    // full width ({fullWidth} is substituted later by the XTemplate).
    embedFullWidth: function(values) {
        var result = 'style="width:{fullWidth}px;';
        // If there are no records, we need to give the table a height so
        // that it is displayed and causes a scrollbar if the width exceeds
        // the View's width.
        if (!values.rowCount) {
            result += 'height:1px;';
        }
        return result + '"';
    },
    // Opening XTemplate fragment that iterates over the row records.
    openRows: function() {
        return '<tpl for="rows">';
    },
    // Closing fragment matching openRows().
    closeRows: function() {
        return '</tpl>';
    },
metaRowTpl: [
'<tr class="' + Ext.baseCSSPrefix + 'grid-row {[this.embedRowCls()]}" {[this.embedRowAttr()]}>',
'<tpl for="columns">',
'<td class="{cls} ' + Ext.baseCSSPrefix + 'grid-cell ' + Ext.baseCSSPrefix + 'grid-cell-{columnId} {{id}-modified} {{id}-tdCls} {[this.firstOrLastCls(xindex, xcount)]}" {{id}-tdAttr}>',
'<div {unselectableAttr} class="' + Ext.baseCSSPrefix + 'grid-cell-inner {unselectableCls}" style="text-align: {align}; {{id}-style};">{{id}}</div>',
'</td>',
'</tpl>',
'</tr>'
],
firstOrLastCls: function(xindex, xcount) {
var result = '';
if (xindex === 1) {
result = Ext.view.Table.prototype.firstCls;
}
if (xindex === xcount) {
result += ' ' + Ext.view.Table.prototype.lastCls;
}
return result;
},
    // Placeholder for the per-row CSS class, substituted per record.
    embedRowCls: function() {
        return '{rowCls}';
    },
    // Placeholder for extra per-row attributes, substituted per record.
    embedRowAttr: function() {
        return '{rowAttr}';
    },
openTableWrap: undefined,
closeTableWrap: undefined,
getTableTpl: function(cfg, textOnly) {
var me = this,
tpl,
tableTplMemberFns = {
openRows: me.openRows,
closeRows: me.closeRows,
embedFeature: me.embedFeature,
embedFullWidth: me.embedFullWidth,
openTableWrap: me.openTableWrap,
closeTableWrap: me.closeTableWrap
},
tplMemberFns = {},
features = cfg.features,
featureCount = features ? features.length : 0,
i = 0,
memberFns = {
embedRowCls: me.embedRowCls,
embedRowAttr: me.embedRowAttr,
firstOrLastCls: me.firstOrLastCls,
unselectableAttr: cfg.enableTextSelection ? '' : 'unselectable="on"',<|fim▁hole|> metaRowTpl = featureCount ? Array.prototype.slice.call(me.metaRowTpl, 0) : me.metaRowTpl;
for (; i < featureCount; i++) {
if (!features[i].disabled) {
features[i].mutateMetaRowTpl(metaRowTpl);
Ext.apply(memberFns, features[i].getMetaRowTplFragments());
Ext.apply(tplMemberFns, features[i].getFragmentTpl());
Ext.apply(tableTplMemberFns, features[i].getTableFragments());
}
}
cfg.row = new Ext.XTemplate(metaRowTpl.join(''), memberFns).applyTemplate(cfg);
tpl = new Ext.XTemplate(me.metaTableTpl.join(''), tableTplMemberFns).applyTemplate(cfg);
// TODO: Investigate eliminating.
if (!textOnly) {
tpl = new Ext.XTemplate(tpl, tplMemberFns);
}
return tpl;
}
});<|fim▁end|> | unselectableCls: cfg.enableTextSelection ? '' : Ext.baseCSSPrefix + 'unselectable'
},
// copy the template spec array if there are Features which might mutate it |
<|file_name|>vmops.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010 Cloud.com, Inc
# Copyright 2012 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Management class for basic VM operations.
"""
import functools
import os
from oslo.config import cfg
from nova.api.metadata import base as instance_metadata
from nova import exception
from nova.openstack.common import excutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova.openstack.common import processutils
from nova.openstack.common import units
from nova import utils
from nova.virt import configdrive
from nova.virt.hyperv import constants
from nova.virt.hyperv import imagecache
from nova.virt.hyperv import utilsfactory
from nova.virt.hyperv import vhdutilsv2
from nova.virt.hyperv import vmutils
from nova.virt.hyperv import volumeops
LOG = logging.getLogger(__name__)
hyperv_opts = [
cfg.BoolOpt('limit_cpu_features',
default=False,
help='Required for live migration among '
'hosts with different CPU features'),
cfg.BoolOpt('config_drive_inject_password',
default=False,
help='Sets the admin password in the config drive image'),
cfg.StrOpt('qemu_img_cmd',
default="qemu-img.exe",
help='Path of qemu-img command which is used to convert '
'between different image types'),
cfg.BoolOpt('config_drive_cdrom',
default=False,
help='Attaches the Config Drive image as a cdrom drive '
'instead of a disk drive'),
cfg.BoolOpt('enable_instance_metrics_collection',
default=False,
help='Enables metrics collections for an instance by using '
'Hyper-V\'s metric APIs. Collected data can by retrieved '
'by other apps and services, e.g.: Ceilometer. '
'Requires Hyper-V / Windows Server 2012 and above'),
cfg.FloatOpt('dynamic_memory_ratio',
default=1.0,
help='Enables dynamic memory allocation (ballooning) when '
'set to a value greater than 1. The value expresses '
'the ratio between the total RAM assigned to an '
'instance and its startup RAM amount. For example a '
'ratio of 2.0 for an instance with 1024MB of RAM '
'implies 512MB of RAM allocated at startup')
]
CONF = cfg.CONF
CONF.register_opts(hyperv_opts, 'hyperv')
CONF.import_opt('use_cow_images', 'nova.virt.driver')
CONF.import_opt('network_api_class', 'nova.network')
def check_admin_permissions(function):
    """Decorator: verify the Windows account's admin rights before running
    the wrapped VMOps method.

    The actual check is delegated to ``self._vmutils.check_admin_permissions()``,
    which is presumably expected to raise on insufficient privileges.
    """
    @functools.wraps(function)
    def inner(self, *args, **kwargs):
        # Fail fast before touching any Hyper-V state.
        self._vmutils.check_admin_permissions()
        return function(self, *args, **kwargs)
    return inner
class VMOps(object):
_vif_driver_class_map = {
'nova.network.neutronv2.api.API':
'nova.virt.hyperv.vif.HyperVNeutronVIFDriver',
'nova.network.api.API':
'nova.virt.hyperv.vif.HyperVNovaNetworkVIFDriver',
}
    def __init__(self):
        # Hyper-V helper objects are resolved through utilsfactory rather
        # than instantiated directly.
        self._vmutils = utilsfactory.get_vmutils()
        self._vhdutils = utilsfactory.get_vhdutils()
        self._pathutils = utilsfactory.get_pathutils()
        self._volumeops = volumeops.VolumeOps()
        self._imagecache = imagecache.ImageCache()
        # The VIF driver is chosen from CONF.network_api_class; see
        # _load_vif_driver_class().
        self._vif_driver = None
        self._load_vif_driver_class()
def _load_vif_driver_class(self):
try:
class_name = self._vif_driver_class_map[CONF.network_api_class]
self._vif_driver = importutils.import_object(class_name)
except KeyError:
raise TypeError(_("VIF driver not found for "
"network_api_class: %s") %
CONF.network_api_class)
    def list_instances(self):
        """Return the names of the VMs known to the Hyper-V utils layer."""
        return self._vmutils.list_instances()
    def get_info(self, instance):
        """Return state/resource information about the VM.

        :param instance: nova instance dict; only 'name' and 'uuid' are read.
        :raises exception.InstanceNotFound: if no Hyper-V VM with the
            instance's name exists on this host.
        """
        LOG.debug(_("get_info called for instance"), instance=instance)
        instance_name = instance['name']
        if not self._vmutils.vm_exists(instance_name):
            raise exception.InstanceNotFound(instance_id=instance['uuid'])
        info = self._vmutils.get_vm_summary_info(instance_name)
        # Translate the Hyper-V EnabledState code into a nova power state.
        state = constants.HYPERV_POWER_STATE[info['EnabledState']]
        # NOTE(review): max_mem mirrors current MemoryUsage; the summary
        # info used here exposes no separate maximum -- confirm.
        return {'state': state,
                'max_mem': info['MemoryUsage'],
                'mem': info['MemoryUsage'],
                'num_cpu': info['NumberOfProcessors'],
                'cpu_time': info['UpTime']}
def _create_root_vhd(self, context, instance):
base_vhd_path = self._imagecache.get_cached_image(context, instance)
format_ext = base_vhd_path.split('.')[-1]
root_vhd_path = self._pathutils.get_root_vhd_path(instance['name'],
format_ext)
try:
if CONF.use_cow_images:
LOG.debug(_("Creating differencing VHD. Parent: "
"%(base_vhd_path)s, Target: %(root_vhd_path)s"),
{'base_vhd_path': base_vhd_path,
'root_vhd_path': root_vhd_path})
self._vhdutils.create_differencing_vhd(root_vhd_path,
base_vhd_path)<|fim▁hole|> else:
LOG.debug(_("Copying VHD image %(base_vhd_path)s to target: "
"%(root_vhd_path)s"),
{'base_vhd_path': base_vhd_path,
'root_vhd_path': root_vhd_path})
self._pathutils.copyfile(base_vhd_path, root_vhd_path)
base_vhd_info = self._vhdutils.get_vhd_info(base_vhd_path)
base_vhd_size = base_vhd_info['MaxInternalSize']
root_vhd_size = instance['root_gb'] * units.Gi
# NOTE(lpetrut): Checking the namespace is needed as the
# following method is not yet implemented in vhdutilsv2.
if not isinstance(self._vhdutils, vhdutilsv2.VHDUtilsV2):
root_vhd_internal_size = (
self._vhdutils.get_internal_vhd_size_by_file_size(
root_vhd_path, root_vhd_size))
else:
root_vhd_internal_size = root_vhd_size
if root_vhd_internal_size < base_vhd_size:
error_msg = _("Cannot resize a VHD to a smaller size, the"
" original size is %(base_vhd_size)s, the"
" newer size is %(root_vhd_size)s"
) % {'base_vhd_size': base_vhd_size,
'root_vhd_size': root_vhd_internal_size}
raise vmutils.HyperVException(error_msg)
elif root_vhd_internal_size > base_vhd_size:
LOG.debug(_("Resizing VHD %(root_vhd_path)s to new "
"size %(root_vhd_size)s"),
{'root_vhd_size': root_vhd_internal_size,
'root_vhd_path': root_vhd_path})
self._vhdutils.resize_vhd(root_vhd_path, root_vhd_size)
except Exception:
with excutils.save_and_reraise_exception():
if self._pathutils.exists(root_vhd_path):
self._pathutils.remove(root_vhd_path)
return root_vhd_path
    def create_ephemeral_vhd(self, instance):
        """Create a dynamic VHD for the instance's ephemeral disk.

        Returns the path of the new VHD, or None (implicitly) when the
        flavor requests no ephemeral storage (ephemeral_gb is 0/absent).
        """
        eph_vhd_size = instance.get('ephemeral_gb', 0) * units.Gi
        if eph_vhd_size:
            vhd_format = self._vhdutils.get_best_supported_vhd_format()
            eph_vhd_path = self._pathutils.get_ephemeral_vhd_path(
                instance['name'], vhd_format)
            self._vhdutils.create_dynamic_vhd(eph_vhd_path, eph_vhd_size,
                                              vhd_format)
            return eph_vhd_path
    @check_admin_permissions
    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, network_info, block_device_info=None):
        """Create a new VM and start it.

        Builds the root disk from the image cache (unless booting from a
        volume), creates an optional ephemeral disk, defines the VM,
        optionally attaches a config drive, and powers the VM on. On any
        failure the partially-created VM is destroyed before re-raising.
        """
        LOG.info(_("Spawning new instance"), instance=instance)
        instance_name = instance['name']
        if self._vmutils.vm_exists(instance_name):
            raise exception.InstanceExists(name=instance_name)
        # Make sure we're starting with a clean slate.
        self._delete_disk_files(instance_name)
        # Boot from volume: no local root disk is created.
        if self._volumeops.ebs_root_in_block_devices(block_device_info):
            root_vhd_path = None
        else:
            root_vhd_path = self._create_root_vhd(context, instance)
        eph_vhd_path = self.create_ephemeral_vhd(instance)
        try:
            self.create_instance(instance, network_info, block_device_info,
                                 root_vhd_path, eph_vhd_path)
            if configdrive.required_by(instance):
                self._create_config_drive(instance, injected_files,
                                          admin_password)
            self.power_on(instance)
        except Exception as ex:
            LOG.exception(ex)
            # Clean up whatever was created before re-raising.
            self.destroy(instance)
            raise vmutils.HyperVException(_('Spawn instance failed'))
    def create_instance(self, instance, network_info, block_device_info,
                        root_vhd_path, eph_vhd_path):
        """Define the Hyper-V VM and wire up disks, volumes and NICs.

        The VM is created but not powered on; spawn() starts it afterwards.

        :param root_vhd_path: path of the root disk, or None when booting
            from a volume.
        :param eph_vhd_path: path of the ephemeral disk, or None when the
            flavor has no ephemeral storage.
        """
        instance_name = instance['name']
        self._vmutils.create_vm(instance_name,
                                instance['memory_mb'],
                                instance['vcpus'],
                                CONF.hyperv.limit_cpu_features,
                                CONF.hyperv.dynamic_memory_ratio)
        # Root and ephemeral disks share IDE controller 0, in that order.
        ctrl_disk_addr = 0
        if root_vhd_path:
            self._vmutils.attach_ide_drive(instance_name,
                                           root_vhd_path,
                                           0,
                                           ctrl_disk_addr,
                                           constants.IDE_DISK)
            ctrl_disk_addr += 1
        if eph_vhd_path:
            self._vmutils.attach_ide_drive(instance_name,
                                           eph_vhd_path,
                                           0,
                                           ctrl_disk_addr,
                                           constants.IDE_DISK)
        self._vmutils.create_scsi_controller(instance_name)
        # The last argument flags boot-from-volume (no local root disk).
        self._volumeops.attach_volumes(block_device_info,
                                       instance_name,
                                       root_vhd_path is None)
        for vif in network_info:
            LOG.debug(_('Creating nic for instance: %s'), instance_name)
            self._vmutils.create_nic(instance_name,
                                     vif['id'],
                                     vif['address'])
            self._vif_driver.plug(instance, vif)
        if CONF.hyperv.enable_instance_metrics_collection:
            self._vmutils.enable_vm_metrics_collection(instance_name)
def _create_config_drive(self, instance, injected_files, admin_password):
if CONF.config_drive_format != 'iso9660':
vmutils.HyperVException(_('Invalid config_drive_format "%s"') %
CONF.config_drive_format)
LOG.info(_('Using config drive for instance: %s'), instance=instance)
extra_md = {}
if admin_password and CONF.hyperv.config_drive_inject_password:
extra_md['admin_pass'] = admin_password
inst_md = instance_metadata.InstanceMetadata(instance,
content=injected_files,
extra_md=extra_md)
instance_path = self._pathutils.get_instance_dir(
instance['name'])
configdrive_path_iso = os.path.join(instance_path, 'configdrive.iso')
LOG.info(_('Creating config drive at %(path)s'),
{'path': configdrive_path_iso}, instance=instance)
with configdrive.ConfigDriveBuilder(instance_md=inst_md) as cdb:
try:
cdb.make_drive(configdrive_path_iso)
except processutils.ProcessExecutionError as e:
with excutils.save_and_reraise_exception():
LOG.error(_('Creating config drive failed with error: %s'),
e, instance=instance)
if not CONF.hyperv.config_drive_cdrom:
drive_type = constants.IDE_DISK
configdrive_path = os.path.join(instance_path,
'configdrive.vhd')
utils.execute(CONF.hyperv.qemu_img_cmd,
'convert',
'-f',
'raw',
'-O',
'vpc',
configdrive_path_iso,
configdrive_path,
attempts=1)
self._pathutils.remove(configdrive_path_iso)
else:
drive_type = constants.IDE_DVD
configdrive_path = configdrive_path_iso
self._vmutils.attach_ide_drive(instance['name'], configdrive_path,
1, 0, drive_type)
def _disconnect_volumes(self, volume_drives):
for volume_drive in volume_drives:
self._volumeops.disconnect_volume(volume_drive)
    def _delete_disk_files(self, instance_name):
        # get_instance_dir with remove_dir=True deletes the instance's
        # whole directory (and thus all its disk files) as a side effect.
        self._pathutils.get_instance_dir(instance_name,
                                         create_dir=False,
                                         remove_dir=True)
    def destroy(self, instance, network_info=None, block_device_info=None,
                destroy_disks=True):
        """Power off and delete the VM, detaching its volumes and (when
        destroy_disks is True) removing its local disk files.

        :raises vmutils.HyperVException: wrapping any underlying failure.
        """
        instance_name = instance['name']
        LOG.info(_("Got request to destroy instance: %s"), instance_name)
        try:
            if self._vmutils.vm_exists(instance_name):
                # Stop the VM first.
                self.power_off(instance)
                # Capture disk/volume paths before the VM definition is gone.
                storage = self._vmutils.get_vm_storage_paths(instance_name)
                (disk_files, volume_drives) = storage
                self._vmutils.destroy_vm(instance_name)
                self._disconnect_volumes(volume_drives)
            else:
                LOG.debug(_("Instance not found: %s"), instance_name)
            if destroy_disks:
                self._delete_disk_files(instance_name)
        except Exception as ex:
            LOG.exception(ex)
            raise vmutils.HyperVException(_('Failed to destroy instance: %s') %
                                          instance_name)
    # The following thin wrappers all delegate to _set_vm_state() with the
    # appropriate constants.HYPERV_VM_STATE_* target state.
    def reboot(self, instance, network_info, reboot_type):
        """Reboot the specified instance.

        network_info and reboot_type are accepted for driver-interface
        compatibility; they are not used here.
        """
        LOG.debug(_("reboot instance"), instance=instance)
        self._set_vm_state(instance['name'],
                           constants.HYPERV_VM_STATE_REBOOT)
    def pause(self, instance):
        """Pause VM instance."""
        LOG.debug(_("Pause instance"), instance=instance)
        self._set_vm_state(instance["name"],
                           constants.HYPERV_VM_STATE_PAUSED)
    def unpause(self, instance):
        """Unpause paused VM instance."""
        LOG.debug(_("Unpause instance"), instance=instance)
        self._set_vm_state(instance["name"],
                           constants.HYPERV_VM_STATE_ENABLED)
    def suspend(self, instance):
        """Suspend the specified instance."""
        LOG.debug(_("Suspend instance"), instance=instance)
        self._set_vm_state(instance["name"],
                           constants.HYPERV_VM_STATE_SUSPENDED)
    def resume(self, instance):
        """Resume the suspended VM instance."""
        LOG.debug(_("Resume instance"), instance=instance)
        self._set_vm_state(instance["name"],
                           constants.HYPERV_VM_STATE_ENABLED)
    def power_off(self, instance):
        """Power off the specified instance."""
        LOG.debug(_("Power off instance"), instance=instance)
        self._set_vm_state(instance["name"],
                           constants.HYPERV_VM_STATE_DISABLED)
    def power_on(self, instance):
        """Power on the specified instance."""
        LOG.debug(_("Power on instance"), instance=instance)
        self._set_vm_state(instance["name"],
                           constants.HYPERV_VM_STATE_ENABLED)
def _set_vm_state(self, vm_name, req_state):
try:
self._vmutils.set_vm_state(vm_name, req_state)
LOG.debug(_("Successfully changed state of VM %(vm_name)s"
" to: %(req_state)s"),
{'vm_name': vm_name, 'req_state': req_state})
except Exception as ex:
LOG.exception(ex)
msg = (_("Failed to change vm state of %(vm_name)s"
" to %(req_state)s") %
{'vm_name': vm_name, 'req_state': req_state})
raise vmutils.HyperVException(msg)<|fim▁end|> | |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>global_var = 2.11
print('main():')
print('global_var = ', ++global_var)
def func_use_global():
print('func_use_global():')
print('global_var = ', global_var)
func_use_global()
def func_modify_global():
global global_var # Must declare when to reassign global variables !!!!<|fim▁hole|>func_modify_global()
print('main():')
print('global_var = ', ++global_var)
global_dict = {0: 22, 1: 44}


#
# If the global value is mutable, you can modify it without declaring it!!
# (Flake: Think mutable as pointer. In this case, pointer doesn't been modified!!)
#
def func_modify_global_dict_1():
    # In-place mutation: the name 'global_dict' is only read here, so no
    # 'global' declaration is required.
    global_dict[1] = 55
    global_dict[2] = 66


func_modify_global_dict_1()
print('global_dict = ', global_dict)


#
# Reassign a dictionary = modify pointer = need declare "global"
#
def func_modify_global_dict_2():
    # Rebinding the name requires an explicit 'global' declaration.
    global global_dict
    global_dict = {0: 88, 1: 99}


func_modify_global_dict_2()
print('global_dict = ', global_dict)
print('func_modify_global():')
print('global_var = ', ++global_var)
|
<|file_name|>expand.rs<|end_file_name|><|fim▁begin|>// This file is part of the uutils coreutils package.
//
// (c) Virgile Andreani <[email protected]>
// (c) kwantam <[email protected]>
// * 2015-04-28 ~ updated to work with both UTF-8 and non-UTF-8 encodings
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore (ToDO) ctype cwidth iflag nbytes nspaces nums tspaces uflag
#[macro_use]
extern crate uucore;
use clap::{crate_version, App, AppSettings, Arg, ArgMatches};
use std::fs::File;
use std::io::{stdin, stdout, BufRead, BufReader, BufWriter, Read, Write};
use std::str::from_utf8;
use unicode_width::UnicodeWidthChar;
use uucore::display::Quotable;
use uucore::error::{FromIo, UResult};
use uucore::format_usage;
static ABOUT: &str = "Convert tabs in each FILE to spaces, writing to standard output.
With no FILE, or when FILE is -, read standard input.";
const USAGE: &str = "{} [OPTION]... [FILE]...";
pub mod options {
pub static TABS: &str = "tabs";
pub static INITIAL: &str = "initial";
pub static NO_UTF8: &str = "no-utf8";
pub static FILES: &str = "FILES";
}
static LONG_HELP: &str = "";
static DEFAULT_TABSTOP: usize = 8;
/// The mode to use when replacing tabs beyond the last one specified in
/// the `--tabs` argument.
enum RemainingMode {
None,
Slash,
Plus,
}
/// Decide whether the character is either a space or a comma.
///
/// # Examples
///
/// ```rust,ignore
/// assert!(is_space_or_comma(' '))
/// assert!(is_space_or_comma(','))
/// assert!(!is_space_or_comma('a'))
/// ```
fn is_space_or_comma(c: char) -> bool {
    matches!(c, ' ' | ',')
}
/// Parse a list of tabstops from a `--tabs` argument.
///
/// This function returns both the vector of numbers appearing in the
/// comma- or space-separated list, and also an optional mode, specified
/// by either a "/" or a "+" character appearing before the final number
/// in the list. This mode defines the strategy to use for computing the
/// number of spaces to use for columns beyond the end of the tab stop
/// list specified here.
///
/// Exits the process (via `crash!`) when a tab size is zero or the
/// sizes are not strictly ascending.
fn tabstops_parse(s: &str) -> (RemainingMode, Vec<usize>) {
    // Leading commas and spaces are ignored.
    let s = s.trim_start_matches(is_space_or_comma);
    // If there were only commas and spaces in the string, just use the
    // default tabstops.
    if s.is_empty() {
        return (RemainingMode::None, vec![DEFAULT_TABSTOP]);
    }
    let mut nums = vec![];
    let mut remaining_mode = RemainingMode::None;
    for word in s.split(is_space_or_comma) {
        let bytes = word.as_bytes();
        for i in 0..bytes.len() {
            match bytes[i] {
                b'+' => {
                    remaining_mode = RemainingMode::Plus;
                }
                b'/' => {
                    remaining_mode = RemainingMode::Slash;
                }
                _ => {
                    // Parse a number from the byte sequence.
                    let num = from_utf8(&bytes[i..]).unwrap().parse::<usize>().unwrap();
                    // Tab size must be positive.
                    if num == 0 {
                        crash!(1, "{}\n", "tab size cannot be 0");
                    }
                    // Tab sizes must be ascending.
                    if let Some(last_stop) = nums.last() {
                        if *last_stop >= num {
                            crash!(1, "tab sizes must be ascending");
                        }
                    }
                    // Append this tab stop to the list of all tabstops.
                    nums.push(num);
                    break;
                }
            }
        }
    }
    // If no numbers could be parsed (for example, if `s` were "+,+,+"),
    // then just use the default tabstops.
    if nums.is_empty() {
        nums = vec![DEFAULT_TABSTOP];
    }
    (remaining_mode, nums)
}
struct Options {
    /// Input files; "-" means standard input.
    files: Vec<String>,
    /// Ascending tab stop positions (or a single repeating width).
    tabstops: Vec<usize>,
    /// Pre-built run of spaces long enough for the largest tab gap.
    tspaces: String,
    /// --initial: only expand tabs before the first non-blank character.
    iflag: bool,
    /// UTF-8 handling enabled (the negation of --no-utf8).
    uflag: bool,
    /// Strategy for expanding tabs for columns beyond those specified
    /// in `tabstops`.
    remaining_mode: RemainingMode,
}
impl Options {
    /// Build runtime options from parsed CLI matches.
    fn new(matches: &ArgMatches) -> Self {
        let (remaining_mode, tabstops) = match matches.value_of(options::TABS) {
            Some(s) => tabstops_parse(s),
            None => (RemainingMode::None, vec![DEFAULT_TABSTOP]),
        };
        let iflag = matches.is_present(options::INITIAL);
        let uflag = !matches.is_present(options::NO_UTF8);
        // avoid allocations when dumping out long sequences of spaces
        // by precomputing the longest string of spaces we will ever need
        let nspaces = tabstops
            .iter()
            .scan(0, |pr, &it| {
                let ret = Some(it - *pr);
                *pr = it;
                ret
            })
            .max()
            .unwrap(); // length of tabstops is guaranteed >= 1
        let tspaces = " ".repeat(nspaces);
        // No positional arguments means "read stdin".
        let files: Vec<String> = match matches.values_of(options::FILES) {
            Some(s) => s.map(|v| v.to_string()).collect(),
            None => vec!["-".to_owned()],
        };
        Self {
            files,
            tabstops,
            tspaces,
            iflag,
            uflag,
            remaining_mode,
        }
    }
}
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
    let matches = uu_app().get_matches_from(args);
    // Any I/O error surfaces with a single "failed to write output" context.
    expand(&Options::new(&matches)).map_err_context(|| "failed to write output".to_string())
}
/// Build the clap argument parser for `expand`.
pub fn uu_app<'a>() -> App<'a> {
    App::new(uucore::util_name())
        .version(crate_version!())
        .about(ABOUT)
        .after_help(LONG_HELP)
        .override_usage(format_usage(USAGE))
        .setting(AppSettings::InferLongArgs)
        .arg(
            Arg::new(options::INITIAL)
                .long(options::INITIAL)
                .short('i')
                .help("do not convert tabs after non blanks"),
        )
        .arg(
            Arg::new(options::TABS)
                .long(options::TABS)
                .short('t')
                .value_name("N, LIST")
                .takes_value(true)
                .help("have tabs N characters apart, not 8 or use comma separated list of explicit tab positions"),
        )
        .arg(
            Arg::new(options::NO_UTF8)
                .long(options::NO_UTF8)
                .short('U')
                .help("interpret input file as 8-bit ASCII rather than UTF-8"),
        ).arg(
            Arg::new(options::FILES)
                .multiple_occurrences(true)
                .hide(true)
                .takes_value(true)
        )
}
fn open(path: &str) -> BufReader<Box<dyn Read + 'static>> {
let file_buf;
if path == "-" {
BufReader::new(Box::new(stdin()) as Box<dyn Read>)
} else {
file_buf = match File::open(path) {<|fim▁hole|> BufReader::new(Box::new(file_buf) as Box<dyn Read>)
}
}
/// Compute the number of spaces to the next tabstop.
///
/// `tabstops` is the sequence of tabstop locations.
///
/// `col` is the index of the current cursor in the line being written.
///
/// If `remaining_mode` is [`RemainingMode::Plus`], then the last entry
/// in the `tabstops` slice is interpreted as a relative number of
/// spaces, which this function will return for every input value of
/// `col` beyond the end of the second-to-last element of `tabstops`.
///
/// If `remaining_mode` is [`RemainingMode::Slash`], then the last entry
/// in the `tabstops` slice is interpreted as a repeating tab width, and
/// the distance to the next multiple of it is returned for every input
/// value of `col` beyond the end of the second-to-last element of
/// `tabstops`.
fn next_tabstop(tabstops: &[usize], col: usize, remaining_mode: &RemainingMode) -> usize {
    let num_tabstops = tabstops.len();
    match remaining_mode {
        RemainingMode::Plus => match tabstops[0..num_tabstops - 1].iter().find(|&&t| t > col) {
            Some(t) => t - col,
            None => tabstops[num_tabstops - 1] - 1,
        },
        RemainingMode::Slash => match tabstops[0..num_tabstops - 1].iter().find(|&&t| t > col) {
            Some(t) => t - col,
            None => tabstops[num_tabstops - 1] - col % tabstops[num_tabstops - 1],
        },
        RemainingMode::None => {
            if num_tabstops == 1 {
                // Single tab size: tabstops repeat every `tabstops[0]` columns.
                tabstops[0] - col % tabstops[0]
            } else {
                // Explicit list: advance to the next listed stop, or a
                // single space once all listed stops have been passed.
                match tabstops.iter().find(|&&t| t > col) {
                    Some(t) => t - col,
                    None => 1,
                }
            }
        }
    }
}
/// Classification of an input byte/char while expanding a line.
#[derive(PartialEq, Eq, Debug)]
enum CharType {
    /// 0x08 — moves the column counter back by one.
    Backspace,
    /// 0x09 — expanded to spaces (or passed through after a non-blank
    /// when `-i` is given).
    Tab,
    /// Anything else; advances the column by its display width.
    Other,
}
fn expand(options: &Options) -> std::io::Result<()> {
use self::CharType::*;
let mut output = BufWriter::new(stdout());
let ts = options.tabstops.as_ref();
let mut buf = Vec::new();
for file in &options.files {
let mut fh = open(file);
while match fh.read_until(b'\n', &mut buf) {
Ok(s) => s > 0,
Err(_) => buf.is_empty(),
} {
let mut col = 0;
let mut byte = 0;
let mut init = true;
while byte < buf.len() {
let (ctype, cwidth, nbytes) = if options.uflag {
let nbytes = char::from(buf[byte]).len_utf8();
if byte + nbytes > buf.len() {
// don't overrun buffer because of invalid UTF-8
(Other, 1, 1)
} else if let Ok(t) = from_utf8(&buf[byte..byte + nbytes]) {
match t.chars().next() {
Some('\t') => (Tab, 0, nbytes),
Some('\x08') => (Backspace, 0, nbytes),
Some(c) => (Other, UnicodeWidthChar::width(c).unwrap_or(0), nbytes),
None => {
// no valid char at start of t, so take 1 byte
(Other, 1, 1)
}
}
} else {
(Other, 1, 1) // implicit assumption: non-UTF-8 char is 1 col wide
}
} else {
(
match buf[byte] {
// always take exactly 1 byte in strict ASCII mode
0x09 => Tab,
0x08 => Backspace,
_ => Other,
},
1,
1,
)
};
// figure out how many columns this char takes up
match ctype {
Tab => {
// figure out how many spaces to the next tabstop
let nts = next_tabstop(ts, col, &options.remaining_mode);
col += nts;
// now dump out either spaces if we're expanding, or a literal tab if we're not
if init || !options.iflag {
if nts <= options.tspaces.len() {
output.write_all(options.tspaces[..nts].as_bytes())?;
} else {
output.write_all(" ".repeat(nts).as_bytes())?;
};
} else {
output.write_all(&buf[byte..byte + nbytes])?;
}
}
_ => {
col = if ctype == Other {
col + cwidth
} else if col > 0 {
col - 1
} else {
0
};
// if we're writing anything other than a space, then we're
// done with the line's leading spaces
if buf[byte] != 0x20 {
init = false;
}
output.write_all(&buf[byte..byte + nbytes])?;
}
}
byte += nbytes; // advance the pointer
}
output.flush()?;
buf.truncate(0); // clear the buffer
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::next_tabstop;
use super::RemainingMode;
#[test]
fn test_next_tabstop_remaining_mode_none() {
assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::None), 1);
assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::None), 2);
assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::None), 1);
}
#[test]
fn test_next_tabstop_remaining_mode_plus() {
assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::Plus), 1);
assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::Plus), 4);
assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::Plus), 4);
}
#[test]
fn test_next_tabstop_remaining_mode_slash() {
assert_eq!(next_tabstop(&[1, 5], 0, &RemainingMode::Slash), 1);
assert_eq!(next_tabstop(&[1, 5], 3, &RemainingMode::Slash), 2);
assert_eq!(next_tabstop(&[1, 5], 6, &RemainingMode::Slash), 4);
}
}<|fim▁end|> | Ok(a) => a,
Err(e) => crash!(1, "{}: {}\n", path.maybe_quote(), e),
}; |
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include "hermes2d.h"
#include "solver_umfpack.h"
#include "function.h"
// This example shows how to combine the Newton's method with
// automatic adaptivity.
//
// PDE: stationary heat transfer equation with nonlinear thermal
// conductivity, - div[lambda(u)grad u] = 0
//
// Domain: unit square (-10,10)^2
//
// BC: Dirichlet, see function dir_lift() below.
//
// The following parameters can be changed:
const int P_INIT = 1; // Initial polynomial degree
const int PROJ_TYPE = 1; // For the projection of the initial condition
// on the initial mesh: 1 = H1 projection, 0 = L2 projection
const int INIT_GLOB_REF_NUM = 1; // Number of initial uniform mesh refinements
const int INIT_BDY_REF_NUM = 0; // Number of initial refinements towards boundary
const double THRESHOLD = 0.2; // This is a quantitative parameter of the adapt(...) function and
// it has different meanings for various adaptive strategies (see below).
const int STRATEGY = 1; // Adaptive strategy:
// STRATEGY = 0 ... refine elements until sqrt(THRESHOLD) times total
// error is processed. If more elements have similar errors, refine
// all to keep the mesh symmetric.
// STRATEGY = 1 ... refine all elements whose error is larger
// than THRESHOLD times maximum element error.
// STRATEGY = 2 ... refine all elements whose error is larger
// than THRESHOLD.
// More adaptive strategies can be created in adapt_ortho_h1.cpp.
const int ADAPT_TYPE = 0; // Type of automatic adaptivity:
// ADAPT_TYPE = 0 ... adaptive hp-FEM (default),
// ADAPT_TYPE = 1 ... adaptive h-FEM,
// ADAPT_TYPE = 2 ... adaptive p-FEM.
const bool ISO_ONLY = false; // Isotropic refinement flag (concerns quadrilateral elements only).
// ISO_ONLY = false ... anisotropic refinement of quad elements
// is allowed (default),
// ISO_ONLY = true ... only isotropic refinements of quad elements
// are allowed.
const int MESH_REGULARITY = -1; // Maximum allowed level of hanging nodes:
// MESH_REGULARITY = -1 ... arbitrary level hangning nodes (default),
// MESH_REGULARITY = 1 ... at most one-level hanging nodes,
// MESH_REGULARITY = 2 ... at most two-level hanging nodes, etc.
// Note that regular meshes are not supported, this is due to
// their notoriously bad performance.
const double ERR_STOP = 0.001; // Stopping criterion for adaptivity (rel. error tolerance between the
// fine mesh and coarse mesh solution in percent).
const int NDOF_STOP = 60000; // Adaptivity process stops when the number of degrees of freedom grows
// over this limit. This is to prevent h-adaptivity to go on forever.
const double NEWTON_TOL = 1e-6; // Stopping criterion for the Newton's method on coarse mesh
const double NEWTON_TOL_REF = 1e-6; // Stopping criterion for the Newton's method on fine mesh
// Thermal conductivity (temperature-dependent): lam(u) = 1 + u^4.
// Note: for any u, this function has to be positive (it multiplies the
// diffusion term in jac()/res() below).
template<typename Real>
Real lam(Real u)
{
  return 1 + pow(u, 4);
}
// Derivative of the thermal conductivity with respect to 'u':
// d/du (1 + u^4) = 4*u^3. Needed for the Newton linearization in jac().
template<typename Real>
Real dlam_du(Real u) {
  return 4*pow(u, 3);
}
// Dirichlet lift u_D(x, y) = (x+10)(y+10)/100 used to define the Dirichlet
// boundary conditions; its partial derivatives are returned through the
// dx, dy output parameters.
// BUG FIX: the derivatives were scaled by /10. instead of /100., i.e. a
// factor of 10 larger than the true partials of the returned value; this
// corrupted the H1 projection of init_cond() which forwards dx, dy.
double dir_lift(double x, double y, double& dx, double& dy) {
  dx = (y+10)/100.;   // d/dx [(x+10)(y+10)/100]
  dy = (x+10)/100.;   // d/dy [(x+10)(y+10)/100]
  return (x+10)*(y+10)/100.;
}
// This function will be projected on the initial mesh and
// used as initial guess for the Newton's method.
scalar init_cond(double x, double y, double& dx, double& dy)
{
  // using the Dirichlet lift elevated by two; dx, dy are filled by
  // dir_lift() with the lift's partial derivatives.
  double val = dir_lift(x, y, dx, dy) + 2;
  return val;
}
// Boundary condition type (essential = Dirichlet).
// Every boundary marker gets a Dirichlet condition; the values come from
// bc_values().
int bc_types(int marker)
{
  return BC_ESSENTIAL;
}
// Dirichlet boundary condition values: the Dirichlet lift itself.
scalar bc_values(int marker, double x, double y)
{
  // dir_lift() insists on returning its derivatives; they are not needed here.
  double unused_dx = 0.0, unused_dy = 0.0;
  return dir_lift(x, y, unused_dx, unused_dy);
}
// Heat sources (can be a general function of 'x' and 'y').
// Currently a uniform unit source over the whole domain.
template<typename Real>
Real heat_src(Real x, Real y)
{
  return 1.0;
}
// Jacobian matrix (weak form of the Newton linearization): contribution of
// basis function u tested against v, evaluated around the previous Newton
// iterate u_prev (passed as the first external function ext->fn[0]).
template<typename Real, typename Scalar>
Scalar jac(int n, double *wt, Func<Real> *u, Func<Real> *v, Geom<Real> *e, ExtData<Scalar> *ext)
{
  Scalar result = 0;
  Func<Scalar>* u_prev = ext->fn[0];
  // Quadrature sum over the n integration points; wt[i] are the weights.
  // First term: derivative of lam w.r.t. u; second: standard diffusion term.
  for (int i = 0; i < n; i++)
    result += wt[i] * (dlam_du(u_prev->val[i]) * u->val[i] * (u_prev->dx[i] * v->dx[i] + u_prev->dy[i] * v->dy[i])
                       + lam(u_prev->val[i]) * (u->dx[i] * v->dx[i] + u->dy[i] * v->dy[i]));
  return result;
}
// Residual vector (weak form) evaluated at the previous Newton iterate
// u_prev: solution-dependent diffusion term minus the heat-source term.
template<typename Real, typename Scalar>
Scalar res(int n, double *wt, Func<Real> *v, Geom<Real> *e, ExtData<Scalar> *ext)
{
  Scalar result = 0;
  Func<Scalar>* u_prev = ext->fn[0];
  // Quadrature sum over the n integration points; wt[i] are the weights.
  for (int i = 0; i < n; i++)
    result += wt[i] * (lam(u_prev->val[i]) * (u_prev->dx[i] * v->dx[i] + u_prev->dy[i] * v->dy[i])
                       - heat_src(e->x[i], e->y[i]) * v->val[i]);
  return result;
}
int main(int argc, char* argv[])
{
// load the mesh file
Mesh mesh;
H2DReader mloader;
mloader.load("square.mesh", &mesh);
// initial mesh refinements
for(int i = 0; i < INIT_GLOB_REF_NUM; i++) mesh.refine_all_elements();
mesh.refine_towards_boundary(1,INIT_BDY_REF_NUM);
// initialize the shapeset and the cache
H1Shapeset shapeset;
PrecalcShapeset pss(&shapeset);
// create an H1 space
H1Space space(&mesh, &shapeset);
space.set_bc_types(bc_types);
space.set_bc_values(bc_values);
space.set_uniform_order(P_INIT);
space.assign_dofs();
// previous solution for the Newton's iteration
Solution u_prev;
// initialize the weak formulation
WeakForm wf(1);
wf.add_biform(0, 0, callback(jac), UNSYM, ANY, 1, &u_prev);
wf.add_liform(0, callback(res), ANY, 1, &u_prev);
// initialize the nonlinear system and solver
UmfpackSolver umfpack;
NonlinSystem nls(&wf, &umfpack);
nls.set_spaces(1, &space);
nls.set_pss(1, &pss);
// DOF and CPU convergence graphs
SimpleGraph graph_dof, graph_cpu;
// project the function init_cond() on the mesh
// to obtain initial guess u_prev for the Newton's method
nls.set_ic(init_cond, &mesh, &u_prev, PROJ_TYPE);
// visualise the initial condition
ScalarView view("Initial condition", 0, 0, 700, 600);
view.show(&u_prev);
OrderView oview("Initial mesh", 720, 0, 700, 600);
oview.show(&space);
//printf("Click into the image window and press any key to proceed.\n");
//view.wait_for_keypress();
// adaptivity loop
double cpu = 0.0, err_est;
int a_step = 1;
bool done = false;
do {
a_step++;
// Newton's loop on the coarse mesh
int it = 1;
double res_l2_norm;
Solution sln_coarse;
do
{
info("\n---- Adapt step %d, Newton iter %d (coarse mesh) ---------------------------------\n", a_step, it++);
printf("ndof = %d\n", space.get_num_dofs());
// time measurement
begin_time();
// assemble the Jacobian matrix and residual vector,
// solve the system
nls.assemble();
nls.solve(1, &sln_coarse);
// calculate the l2-norm of residual vector
res_l2_norm = nls.get_residuum_l2_norm();
info("Residuum L2 norm: %g\n", res_l2_norm);
// time measurement
cpu += end_time();
// visualise the solution
char title[100];
sprintf(title, "Temperature (coarse mesh), Newton iteration %d", it-1);
view.set_title(title);
view.show(&sln_coarse);
sprintf(title, "Coarse mesh, Newton iteration %d", it-1);
oview.set_title(title);
oview.show(&space);
//printf("Click into the image window and press any key to proceed.\n");
//view.wait_for_keypress();
// save the new solution as "previous" for the
// next Newton's iteration
u_prev.copy(&sln_coarse);
}
while (res_l2_norm > NEWTON_TOL);
// Setting initial guess for the Newton's method on the fine mesh
Solution sln_fine, u_prev_fine;
RefNonlinSystem rs(&nls);
rs.prepare();
rs.set_ic(&u_prev, &u_prev);
// Newton's loop on the fine mesh
it = 1;
do {
info("\n---- Adapt step %d, Newton iter %d (fine mesh) ---------------------------------\n", a_step, it++);
// time measurement
begin_time();
// assemble the Jacobian matrix and residual vector,
// solve the system
rs.assemble();
rs.solve(1, &sln_fine);
// calculate the l2-norm of residual vector
res_l2_norm = rs.get_residuum_l2_norm();
info("Residuum L2 norm: %g\n", res_l2_norm);
// time measurement
cpu += end_time();
// visualise the solution
char title[100];
sprintf(title, "Temperature (fine mesh), Newton iteration %d", it-1);
view.set_title(title);
view.show(&sln_fine);
sprintf(title, "Fine mesh, Newton iteration %d", it-1);
oview.set_title(title);
oview.show(rs.get_ref_space(0));
//printf("Click into the image window and press any key to proceed.\n");
//view.wait_for_keypress();
u_prev.copy(&sln_fine);
} while (res_l2_norm > NEWTON_TOL_REF);
// time measurement<|fim▁hole|> err_est = hp.calc_error(&sln_coarse, &sln_fine) * 100;
info("Error estimate: %g%%", err_est);
// add entry to DOF convergence graph
graph_dof.add_values(space.get_num_dofs(), err_est);
graph_dof.save("conv_dof.dat");
// add entry to CPU convergence graph
graph_cpu.add_values(cpu, err_est);
graph_cpu.save("conv_cpu.dat");
// if err_est too large, adapt the mesh
if (err_est < ERR_STOP) done = true;
else {
hp.adapt(THRESHOLD, STRATEGY, ADAPT_TYPE, ISO_ONLY, MESH_REGULARITY);
int ndof = space.assign_dofs();
if (ndof >= NDOF_STOP) done = true;
}
// time measurement
cpu += end_time();
}
while (!done);
verbose("Total running time: %g sec", cpu);
// wait for keyboard or mouse input
View::wait();
return 0;
}<|fim▁end|> | begin_time();
// calculate element errors and total error estimate
H1OrthoHP hp(1, &space); |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>import uuid
import re
import datetime
import decimal
import itertools
import functools
import random
import string
import six
from six import iteritems
from ..exceptions import (
StopValidation, ValidationError, ConversionError, MockCreationError
)
try:
from string import ascii_letters # PY3
except ImportError:
from string import letters as ascii_letters #PY2
try:
basestring #PY2
except NameError:
basestring = str #PY3
try:
unicode #PY2
except:
import codecs
unicode = str #PY3
def utf8_decode(s):
    """Return *s* as a unicode string (UTF-8 decoding it on Python 2)."""
    if six.PY3:
        # On Python 3 text is already str.
        # TODO(review): str(bytes) would yield "b'...'" — confirm inputs are str.
        return str(s)
    return unicode(s, 'utf-8')
def fill_template(template, min_length, max_length):
    """Interpolate a random string into *template* (one ``%s`` placeholder),
    keeping the overall result length between *min_length* and *max_length*.
    """
    # The '%s' placeholder is two characters that vanish on interpolation,
    # hence padding=len(template) - 2.
    fill_length = get_value_in(
        min_length,
        max_length,
        padding=len(template) - 2,
        required_length=1)
    return template % random_string(fill_length)
def force_unicode(obj, encoding='utf-8'):
    """Coerce *obj* to a unicode string; ``None`` passes through unchanged.

    NOTE(review): *encoding* is accepted for API compatibility but the
    actual decoding is delegated to utf8_decode, which assumes UTF-8.
    """
    if obj is None:
        return None
    if isinstance(obj, basestring) and isinstance(obj, unicode):
        # Already unicode: nothing to do.
        return obj
    return utf8_decode(obj)
def get_range_endpoints(min_length, max_length, padding=0, required_length=0):
    """Normalize ``(min_length, max_length)`` bounds for generating mock data.

    Missing bounds receive defaults, *padding* shrinks the window (e.g. for
    fixed template characters) and *required_length* enforces a hard floor.

    :raises MockCreationError: when the window cannot hold *required_length*.
    """
    if min_length is None and max_length is None:
        min_length, max_length = 0, 16
    elif min_length is None:
        min_length = 0
    elif max_length is None:
        # Leave a generous window above the requested minimum.
        max_length = max(min_length * 2, 16)

    if padding:
        max_length = max_length - padding
        min_length = max(min_length - padding, 0)

    if max_length < required_length:
        raise MockCreationError(
            'This field is too short to hold the mock data')

    return max(min_length, required_length), max_length
def get_value_in(min_length, max_length, padding=0, required_length=0):
    """Pick a random length inside the normalized (inclusive) range."""
    lo, hi = get_range_endpoints(min_length, max_length, padding, required_length)
    return random.randint(lo, hi)
def random_string(length, chars=ascii_letters + string.digits):
    """Return a random string of *length* characters drawn from *chars*."""
    picked = []
    for _ in range(length):
        picked.append(random.choice(chars))
    return ''.join(picked)
# Monotonic counter used to record the declaration order of fields: each
# BaseType instance grabs the next value in __init__ (_position_hint).
# _last_position_hint is not read anywhere in this module's visible code;
# presumably retained for backward compatibility — TODO confirm.
_last_position_hint = -1
_next_position_hint = itertools.count()
class TypeMeta(type):

    """
    Meta class for BaseType. Merges `MESSAGES` dict and accumulates
    validator methods (any attribute whose name starts with ``validate_``).
    """

    def __new__(mcs, name, bases, attrs):
        # Walk the bases from most-generic to most-specific so subclasses
        # override inherited messages and append their validators last.
        merged_messages = {}
        collected_validators = []
        for base in reversed(bases):
            merged_messages.update(getattr(base, 'MESSAGES', {}))
            collected_validators.extend(getattr(base, '_validators', []))
        merged_messages.update(attrs.get('MESSAGES', {}))
        attrs['MESSAGES'] = merged_messages

        for attr_name, attr in iteritems(attrs):
            if attr_name.startswith("validate_"):
                collected_validators.append(attr)
        attrs["_validators"] = collected_validators

        return type.__new__(mcs, name, bases, attrs)
class BaseType(TypeMeta('BaseTypeBase', (object, ), {})):

    """A base class for Types in a Schematics model. Instances of this
    class may be added to subclasses of ``Model`` to define a model schema.

    Validators that need to access variables on the instance
    can be defined by implementing methods whose names start with ``validate_``
    and accept one parameter (in addition to ``self``); they are collected
    automatically by the TypeMeta metaclass.

    :param required:
        Invalidate field when value is None or is not supplied. Default:
        False.
    :param default:
        When no data is provided default to this value. May be a callable.
        Default: None.
    :param serialized_name:
        The name of this field defaults to the class attribute used in the
        model. However if the field has another name in foreign data set this
        argument. Serialized data will use this value for the key name too.
    :param deserialize_from:
        A name or list of named fields for which foreign data sets are
        searched to provide a value for the given field. This only affects
        inbound data.
    :param choices:
        A list of valid choices. This is the last step of the validator
        chain.
    :param validators:
        A list of callables. Each callable receives the value after it has been
        converted into a rich python type. Default: []
    :param serialize_when_none:
        Dictates if the field should appear in the serialized data even if the
        value is None. Default: True
    :param messages:
        Override the error messages with a dict. You can also do this by
        subclassing the Type and defining a `MESSAGES` dict attribute on the
        class. A metaclass will merge all the `MESSAGES` and override the
        resulting dict with instance level `messages` and assign to
        `self.messages`.
    """

    MESSAGES = {
        'required': u"This field is required.",
        'choices': u"Value must be one of {0}.",
    }

    def __init__(self, required=False, default=None, serialized_name=None,
                 choices=None, validators=None, deserialize_from=None,
                 serialize_when_none=None, messages=None):
        super(BaseType, self).__init__()

        self.required = required
        self._default = default
        self.serialized_name = serialized_name
        if choices and not isinstance(choices, (list, tuple)):
            raise TypeError('"choices" must be a list or tuple')
        self.choices = choices
        self.deserialize_from = deserialize_from

        # Bind the class-level validators (collected by TypeMeta) to this
        # instance, then append any per-instance validators.
        self.validators = [functools.partial(v, self) for v in self._validators]
        if validators:
            self.validators += validators

        self.serialize_when_none = serialize_when_none
        self.messages = dict(self.MESSAGES, **(messages or {}))
        self._position_hint = next(_next_position_hint)  # For ordering of fields

    def __call__(self, value):
        # Calling a field instance converts a raw value to its native form.
        return self.to_native(value)

    def _mock(self, context=None):
        # Subclasses override this to generate random example data.
        return None

    def _setup(self, field_name, owner_model):
        """Perform late-stage setup tasks that are run after the containing model
        has been created.
        """
        self.name = field_name
        self.owner_model = owner_model

    @property
    def default(self):
        # A callable default is (re-)evaluated on every access.
        default = self._default
        if callable(self._default):
            default = self._default()
        return default

    def to_primitive(self, value, context=None):
        """Convert internal data to a value safe to serialize.
        """
        return value

    def to_native(self, value, context=None):
        """
        Convert untrusted data to a richer Python construct.
        """
        return value

    def allow_none(self):
        # The owner model (attached via _setup) decides whether None
        # serializes; otherwise fall back to this field's own setting.
        if hasattr(self, 'owner_model'):
            return self.owner_model.allow_none(self)
        else:
            return self.serialize_when_none

    def validate(self, value):
        """
        Validate the field and return a clean value or raise a
        ``ValidationError`` with a list of errors raised by the validation
        chain. Stop the validation process from continuing through the
        validators by raising ``StopValidation`` instead of ``ValidationError``.
        """
        errors = []

        for validator in self.validators:
            try:
                validator(value)
            except ValidationError as exc:
                errors.extend(exc.messages)

                # StopValidation subclasses ValidationError: record its
                # messages but do not run the remaining validators.
                if isinstance(exc, StopValidation):
                    break

        if errors:
            raise ValidationError(errors)

    def validate_required(self, value):
        # Picked up automatically by TypeMeta (validate_ prefix).
        if self.required and value is None:
            raise ValidationError(self.messages['required'])

    def validate_choices(self, value):
        if self.choices is not None:
            if value not in self.choices:
                raise ValidationError(self.messages['choices']
                                      .format(unicode(self.choices)))

    def mock(self, context=None):
        # Optional fields sometimes mock as their default value.
        if not self.required and not random.choice([True, False]):
            return self.default

        if self.choices is not None:
            return random.choice(self.choices)

        return self._mock(context)
class UUIDType(BaseType):

    """A field that stores a valid UUID value.
    """

    MESSAGES = {
        'convert': u"Couldn't interpret '{0}' value as UUID.",
    }

    def _mock(self, context=None):
        # A random (version 4) UUID.
        return uuid.uuid4()

    def to_native(self, value, context=None):
        """Coerce strings to :class:`uuid.UUID`; UUID instances pass through."""
        if isinstance(value, uuid.UUID):
            return value
        try:
            return uuid.UUID(value)
        except (AttributeError, TypeError, ValueError):
            raise ConversionError(self.messages['convert'].format(value))

    def to_primitive(self, value, context=None):
        """Serialize as the canonical hyphenated string form."""
        return str(value)
class IPv4Type(BaseType):

    """ A field that stores a valid IPv4 address """

    def _mock(self, context=None):
        # Four random octets, e.g. '10.0.3.255'.
        octets = [str(random.randrange(256)) for _ in range(4)]
        return '.'.join(octets)

    @classmethod
    def valid_ip(cls, addr):
        """Return True when *addr* is a dotted-quad IPv4 address string."""
        try:
            parts = addr.strip().split(".")
        except AttributeError:
            # Not a string-like object.
            return False
        if len(parts) != 4:
            return False
        try:
            return all(0 <= int(part) < 256 for part in parts)
        except ValueError:
            return False

    def validate(self, value):
        """
        Make sure the value is a IPv4 address:
        http://stackoverflow.com/questions/9948833/validate-ip-address-from-list

        NOTE(review): this overrides BaseType.validate entirely, so the
        required/choices validators never run for this type — confirm intended.
        """
        if not IPv4Type.valid_ip(value):
            error_msg = 'Invalid IPv4 address'
            raise ValidationError(error_msg)
        return True
class StringType(BaseType):

    """A unicode string field. Default minimum length is one. If you want to
    accept empty strings, init with ``min_length`` 0.
    """

    allow_casts = (int, str)

    MESSAGES = {
        'convert': u"Couldn't interpret '{0}' as string.",
        'max_length': u"String value is too long.",
        'min_length': u"String value is too short.",
        'regex': u"String value did not match validation regex.",
    }

    def __init__(self, regex=None, max_length=None, min_length=None, **kwargs):
        self.regex = regex
        self.max_length = max_length
        self.min_length = min_length
        super(StringType, self).__init__(**kwargs)

    def _mock(self, context=None):
        return random_string(get_value_in(self.min_length, self.max_length))

    def to_native(self, value, context=None):
        """Coerce to unicode; ints and byte strings are cast, None passes."""
        if value is None:
            return None
        if isinstance(value, unicode):
            return value
        if not isinstance(value, self.allow_casts):
            raise ConversionError(self.messages['convert'].format(value))
        if not isinstance(value, str):
            value = str(value)
        return utf8_decode(value)

    def validate_length(self, value):
        # Treat falsy values (None, '') as zero-length.
        current_length = len(value) if value else 0

        if self.max_length is not None and current_length > self.max_length:
            raise ValidationError(self.messages['max_length'])

        if self.min_length is not None and current_length < self.min_length:
            raise ValidationError(self.messages['min_length'])

    def validate_regex(self, value):
        if self.regex is not None and re.match(self.regex, value) is None:
            raise ValidationError(self.messages['regex'])
class URLType(StringType):

    """A field that validates input as an URL.

    If verify_exists=True is passed the validate function will make sure
    the URL makes a valid connection.
    """

    MESSAGES = {
        'invalid_url': u"Not a well formed URL.",
        'not_found': u"URL does not exist.",
    }

    URL_REGEX = re.compile(
        r'^https?://'
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,2000}[A-Z0-9])?\.)+[A-Z]{2,63}\.?|'
        r'localhost|'
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
        r'(?::\d+)?'
        r'(?:/?|[/?]\S+)$', re.IGNORECASE
    )

    def __init__(self, verify_exists=False, **kwargs):
        self.verify_exists = verify_exists
        super(URLType, self).__init__(**kwargs)

    def _mock(self, context=None):
        return fill_template('http://a%s.ZZ', self.min_length,
                             self.max_length)

    def validate_url(self, value):
        """Check well-formedness and, optionally, that the URL responds."""
        if not URLType.URL_REGEX.match(value):
            raise StopValidation(self.messages['invalid_url'])
        if self.verify_exists:
            from six.moves import urllib
            try:
                # BUG FIX: Request/urlopen live in the urllib.request
                # submodule of six.moves.urllib; the previous
                # urllib.Request / urllib.urlopen raised AttributeError,
                # which the broad except turned into a spurious 'not_found'
                # for every URL.
                request = urllib.request.Request(value)
                urllib.request.urlopen(request)
            except Exception:
                raise StopValidation(self.messages['not_found'])
class EmailType(StringType):

    """A field that validates input as an E-Mail-Address.
    """

    MESSAGES = {
        'email': u"Not a well formed email address."
    }

    EMAIL_REGEX = re.compile(
        # dot-atom
        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"
        # quoted-string -- BUG FIX: '\001-011' was missing the backslash on
        # the octal escape \011, making the class parse as the range
        # \001-'0' plus two literal '1' characters instead of \001-\011.
        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016'
        r'-\177])*"'
        # domain
        r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,2000}[A-Z0-9])?\.)+[A-Z]{2,63}\.?$',
        re.IGNORECASE
    )

    def _mock(self, context=None):
        # BUG FIX: the template must contain a '%s' placeholder for
        # fill_template's interpolation; '%[email protected]' (a redaction
        # artifact) made 'template % random_string(...)' raise.
        return fill_template('%[email protected]', self.min_length,
                             self.max_length)

    def validate_email(self, value):
        if not EmailType.EMAIL_REGEX.match(value):
            raise StopValidation(self.messages['email'])
class NumberType(BaseType):

    """A number field.

    ``number_class`` is the Python type used for coercion (int, float, ...);
    ``number_type`` is the human-readable name used in error messages.
    """

    MESSAGES = {
        'number_coerce': u"Value '{0}' is not {1}.",
        'number_min': u"{0} value should be greater than {1}.",
        'number_max': u"{0} value should be less than {1}.",
    }

    def __init__(self, number_class, number_type,
                 min_value=None, max_value=None, **kwargs):
        self.number_class = number_class
        self.number_type = number_type
        self.min_value = min_value
        self.max_value = max_value
        super(NumberType, self).__init__(**kwargs)

    def _mock(self, context=None):
        return get_value_in(self.min_value, self.max_value)

    def _coerce(self, value):
        # Shared coercion used by both conversion and validation.
        try:
            return self.number_class(value)
        except (TypeError, ValueError):
            raise ConversionError(self.messages['number_coerce']
                                  .format(value, self.number_type.lower()))

    def to_native(self, value, context=None):
        return self._coerce(value)

    def validate_is_a_number(self, value):
        # Only the coercion side effect matters here; result is discarded.
        self._coerce(value)

    def validate_range(self, value):
        if self.min_value is not None and value < self.min_value:
            raise ValidationError(self.messages['number_min']
                                  .format(self.number_type, self.min_value))

        if self.max_value is not None and value > self.max_value:
            raise ValidationError(self.messages['number_max']
                                  .format(self.number_type, self.max_value))

        return value
class IntType(NumberType):

    """A field that validates input as an Integer
    """

    def __init__(self, *args, **kwargs):
        # Fix the coercion class and error-message label; everything else
        # (min_value, max_value, required, ...) is forwarded to NumberType.
        super(IntType, self).__init__(number_class=int,
                                      number_type='Int',
                                      *args, **kwargs)
class LongType(NumberType):

    """A field that validates input as a Long
    """

    def __init__(self, *args, **kwargs):
        try:
            number_class = long  # PY2: dedicated arbitrary-precision long type
        except NameError:
            number_class = int  # PY3: int is unbounded, no separate long
        super(LongType, self).__init__(number_class=number_class,
                                       number_type='Long',
                                       *args, **kwargs)
class FloatType(NumberType):

    """A field that validates input as a Float
    """

    def __init__(self, *args, **kwargs):
        # Fix the coercion class and error-message label; remaining options
        # are forwarded to NumberType.
        super(FloatType, self).__init__(number_class=float,
                                        number_type='Float',
                                        *args, **kwargs)
class DecimalType(BaseType):

    """A fixed-point decimal number field.
    """

    MESSAGES = {
        'number_coerce': u"Number '{0}' failed to convert to a decimal.",
        'number_min': u"Value should be greater than {0}.",
        'number_max': u"Value should be less than {0}.",
    }

    def __init__(self, min_value=None, max_value=None, **kwargs):
        self.min_value = min_value
        self.max_value = max_value
        super(DecimalType, self).__init__(**kwargs)

    def _mock(self, context=None):
        return get_value_in(self.min_value, self.max_value)

    def to_primitive(self, value, context=None):
        # Serialize as a string so no precision is lost in transit.
        return unicode(value)

    def to_native(self, value, context=None):
        """Coerce to :class:`decimal.Decimal`, going through text so floats
        keep their printed representation."""
        if isinstance(value, decimal.Decimal):
            return value
        if not isinstance(value, basestring):
            value = unicode(value)
        try:
            return decimal.Decimal(value)
        except (TypeError, decimal.InvalidOperation):
            raise ConversionError(self.messages['number_coerce'].format(value))

    def validate_range(self, value):
        if self.min_value is not None and value < self.min_value:
            raise ValidationError(self.messages['number_min'].format(self.min_value))
        if self.max_value is not None and value > self.max_value:
            raise ValidationError(self.messages['number_max'].format(self.max_value))
        return value
class HashType(BaseType):

    """Base class for fixed-length hexadecimal digest fields; subclasses set
    ``LENGTH`` to the expected number of hex characters."""

    MESSAGES = {
        'hash_length': u"Hash value is wrong length.",
        'hash_hex': u"Hash value is not hexadecimal.",
    }

    def _mock(self, context=None):
        return random_string(self.LENGTH, string.hexdigits)

    def to_native(self, value, context=None):
        if len(value) != self.LENGTH:
            raise ValidationError(self.messages['hash_length'])
        try:
            # Parsing as base 16 is the cheapest complete hex check.
            int(value, 16)
        except ValueError:
            raise ConversionError(self.messages['hash_hex'])
        return value
class MD5Type(HashType):

    """A field that validates input as resembling an MD5 hash.
    """

    # An MD5 digest is 128 bits = 32 hex characters.
    LENGTH = 32
class SHA1Type(HashType):

    """A field that validates input as resembling an SHA1 hash.
    """

    # A SHA-1 digest is 160 bits = 40 hex characters.
    LENGTH = 40
class BooleanType(BaseType):

    """A boolean field type. In addition to ``True`` and ``False``, coerces these
    values:

    + For ``True``: "True", "true", "1"
    + For ``False``: "False", "false", "0"
    """

    TRUE_VALUES = ('True', 'true', '1')
    FALSE_VALUES = ('False', 'false', '0')

    def _mock(self, context=None):
        return random.choice([True, False])

    def to_native(self, value, context=None):
        """Coerce recognized strings and 0/1 integers to bool."""
        result = value
        if isinstance(result, basestring):
            if result in self.TRUE_VALUES:
                result = True
            elif result in self.FALSE_VALUES:
                result = False

        # 0/1 (including bool itself, a subclass of int) map to False/True.
        if isinstance(result, int) and result in (0, 1):
            result = bool(result)

        if isinstance(result, bool):
            return result
        raise ConversionError(u"Must be either true or false.")
class DateType(BaseType):

    """Defaults to converting to and from ISO8601 date values.
    """

    SERIALIZED_FORMAT = '%Y-%m-%d'

    MESSAGES = {
        'parse': u"Could not parse {0}. Should be ISO8601 (YYYY-MM-DD).",
    }

    def __init__(self, **kwargs):
        self.serialized_format = self.SERIALIZED_FORMAT
        super(DateType, self).__init__(**kwargs)

    def _mock(self, context=None):
        # Day capped at 28 so the date is valid in every month.
        year = random.randrange(600) + 1900
        month = random.randrange(12) + 1
        day = random.randrange(28) + 1
        return datetime.datetime(year=year, month=month, day=day)

    def to_native(self, value, context=None):
        """Accept date/datetime objects as-is; parse strings as ISO dates."""
        if isinstance(value, datetime.date):
            return value
        try:
            return datetime.datetime.strptime(value, self.serialized_format).date()
        except (ValueError, TypeError):
            raise ConversionError(self.messages['parse'].format(value))

    def to_primitive(self, value, context=None):
        return value.strftime(self.serialized_format)
class DateTimeType(BaseType):

    """Defaults to converting to and from ISO8601 datetime values.

    :param formats:
        A value or list of values suitable for ``datetime.datetime.strptime``
        parsing. Default: `('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S',
        '%Y-%m-%dT%H:%M:%S.%fZ', '%Y-%m-%dT%H:%M:%SZ')`

    :param serialized_format:
        The output format suitable for Python ``strftime``. Default: ``'%Y-%m-%dT%H:%M:%S.%f'``
    """

    DEFAULT_FORMATS = (
        '%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S',
        '%Y-%m-%dT%H:%M:%S.%fZ', '%Y-%m-%dT%H:%M:%SZ',
    )
    SERIALIZED_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'

    MESSAGES = {
        'parse_formats': u'Could not parse {0}. Valid formats: {1}',
        'parse': u"Could not parse {0}. Should be ISO8601.",
    }

    def __init__(self, formats=None, serialized_format=None, **kwargs):
        """Accept a single format string or a list of them; fall back to the
        class-level defaults when not given.
        """
        if isinstance(formats, basestring):
            formats = [formats]
        if formats is None:
            formats = self.DEFAULT_FORMATS
        if serialized_format is None:
            serialized_format = self.SERIALIZED_FORMAT
        self.formats = formats
        self.serialized_format = serialized_format
        super(DateTimeType, self).__init__(**kwargs)

    def _mock(self, context=None):
        # Day capped at 28 so the result is valid for every month.
        return datetime.datetime(
            year=random.randrange(600) + 1900,
            month=random.randrange(12) + 1,
            day=random.randrange(28) + 1,
            hour=random.randrange(24),
            minute=random.randrange(60),
            second=random.randrange(60),
            microsecond=random.randrange(1000000),
        )

    def to_native(self, value, context=None):
        # datetime objects pass through; strings are tried against each
        # configured format in order.
        if isinstance(value, datetime.datetime):
            return value
        for fmt in self.formats:
            try:
                return datetime.datetime.strptime(value, fmt)
            except (ValueError, TypeError):
                continue

        # Tailor the error message to whether custom formats are in use.
        if self.formats == self.DEFAULT_FORMATS:
            message = self.messages['parse'].format(value)
        else:
            message = self.messages['parse_formats'].format(
                value, ", ".join(self.formats)
            )
        raise ConversionError(message)

    def to_primitive(self, value, context=None):
        # A callable serialized_format allows fully custom rendering.
        if callable(self.serialized_format):
            return self.serialized_format(value)
        return value.strftime(self.serialized_format)
class GeoPointType(BaseType):

    """A list storing a latitude and longitude.
    """

    def _mock(self, context=None):
        return (random.randrange(-90, 90), random.randrange(-180, 180))

    def to_native(self, value, context=None):
        """Make sure that a geo-value is of type (x, y)."""
        if len(value) != 2:
            raise ValueError('Value must be a two-dimensional point')

        if isinstance(value, dict):
            coordinates = list(value.values())
        elif isinstance(value, (list, tuple)):
            coordinates = [value[0], value[1]]
        else:
            raise ValueError('GeoPointType can only accept tuples, lists, or dicts')

        for coordinate in coordinates:
            if not isinstance(coordinate, (float, int)):
                raise ValueError('Both values in point must be float or int')

        return value
class MultilingualStringType(BaseType):
"""
A multilanguage string field, stored as a dict with {'locale': 'localized_value'}.
Minimum and maximum lengths apply to each of the localized values.
At least one of ``default_locale`` or ``context['locale']`` must be defined
when calling ``.to_primitive``.
"""
allow_casts = (int, str)
MESSAGES = {
'convert': u"Couldn't interpret value as string.",
'max_length': u"String value in locale {0} is too long.",
'min_length': u"String value in locale {0} is too short.",
'locale_not_found': u"No requested locale was available.",
'no_locale': u"No default or explicit locales were given.",
'regex_locale': u"Name of locale {0} did not match validation regex.",
'regex_localized': u"String value in locale {0} did not match validation regex.",
}
LOCALE_REGEX = r'^[a-z]{2}(:?_[A-Z]{2})?$'
def __init__(self, regex=None, max_length=None, min_length=None,
default_locale=None, locale_regex=LOCALE_REGEX, **kwargs):
self.regex = re.compile(regex) if regex else None
self.max_length = max_length
self.min_length = min_length
self.default_locale = default_locale
self.locale_regex = re.compile(locale_regex) if locale_regex else None
super(MultilingualStringType, self).__init__(**kwargs)
def _mock(self, context=None):
return random_string(get_value_in(self.min_length, self.max_length))
    def to_native(self, value, context=None):
        """Make sure a MultilingualStringType value is a dict or None."""
        # Localized values are stored as {'locale': u'text'}; per-locale
        # checks happen in the validators and in to_primitive.
        if not (value is None or isinstance(value, dict)):
            raise ValueError('Value must be a dict or None')

        return value
def to_primitive(self, value, context=None):
"""
Use a combination of ``default_locale`` and ``context['locale']`` to return
the best localized string.
"""
if value is None:
return None
context_locale = None
if context is not None and 'locale' in context:
context_locale = context['locale']
# Build a list of all possible locales to try
possible_locales = []
for locale in (context_locale, self.default_locale):
if not locale:
continue
if isinstance(locale, basestring):
possible_locales.append(locale)
else:
possible_locales.extend(locale)
if not possible_locales:
raise ConversionError(self.messages['no_locale'])
for locale in possible_locales:
if locale in value:
localized = value[locale]
break
else:
raise ConversionError(self.messages['locale_not_found'])
if not isinstance(localized, unicode):
if isinstance(localized, self.allow_casts):
if not isinstance(localized, str):<|fim▁hole|> #localized = unicode(localized, 'utf-8')
localized = utf8_decode(localized)
else:
raise ConversionError(self.messages['convert'])
return localized
    def validate_length(self, value):
        # Length limits apply to each localized string, not the dict itself.
        for locale, localized in value.items():
            len_of_value = len(localized) if localized else 0

            if self.max_length is not None and len_of_value > self.max_length:
                raise ValidationError(self.messages['max_length'].format(locale))

            if self.min_length is not None and len_of_value < self.min_length:
                raise ValidationError(self.messages['min_length'].format(locale))
    def validate_regex(self, value):
        # Two independent patterns: self.regex checks each localized string,
        # self.locale_regex checks the locale names (keys) themselves.
        if self.regex is None and self.locale_regex is None:
            return

        for locale, localized in value.items():
            if self.regex is not None and self.regex.match(localized) is None:
                raise ValidationError(
                    self.messages['regex_localized'].format(locale))

            if self.locale_regex is not None and self.locale_regex.match(locale) is None:
                raise ValidationError(
                    self.messages['regex_locale'].format(locale))
<|file_name|>test_california_housing.py<|end_file_name|><|fim▁begin|>"""Test the california_housing loader, if the data is available,
or if specifically requested via environment variable
(e.g. for travis cron job)."""
import pytest
from sklearn.datasets.tests.test_common import check_return_X_y
from functools import partial
def test_fetch(fetch_california_housing_fxt):
    """Smoke-test the loader: expected shapes plus the return_X_y contract."""
    data = fetch_california_housing_fxt()
    assert data.data.shape == (20640, 8)
    assert data.target.shape == (20640,)

    # test return_X_y option
    fetch_func = partial(fetch_california_housing_fxt)
    check_return_X_y(data, fetch_func)
def test_fetch_asframe(fetch_california_housing_fxt):
    """as_frame=True must expose a 9-column frame and pandas data/target."""
    pd = pytest.importorskip('pandas')
    bunch = fetch_california_housing_fxt(as_frame=True)
    assert hasattr(bunch, 'frame')
    frame = bunch.frame
    assert frame.shape == (20640, 9)
    assert isinstance(bunch.data, pd.DataFrame)
    assert isinstance(bunch.target, pd.Series)
def test_pandas_dependency_message(fetch_california_housing_fxt,
hide_available_pandas):
# Check that pandas is imported lazily and that an informative error
# message is raised when pandas is missing:
expected_msg = ('fetch_california_housing with as_frame=True'<|fim▁hole|><|fim▁end|> | ' requires pandas')
with pytest.raises(ImportError, match=expected_msg):
fetch_california_housing_fxt(as_frame=True) |
<|file_name|>uint.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Lenient uint json deserialization for test json files.
use std::fmt;
use std::str::FromStr;
use serde::{Deserialize, Deserializer};
use serde::de::{Error, Visitor};
use util::U256;
/// Lenient uint json deserialization for test json files.
///
/// Newtype wrapper around `U256` whose `Deserialize` impl accepts hex
/// strings ("0x..."), decimal strings, the empty string, and raw numbers.
#[derive(Default, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct Uint(pub U256);
/// Unwrap the inner 256-bit value.
impl Into<U256> for Uint {
	fn into(self) -> U256 {
		self.0
	}
}
impl Into<u64> for Uint {
fn into(self) -> u64 {
u64::from(self.0)
}
}<|fim▁hole|> fn into(self) -> usize {
// TODO: clean it after util conversions refactored.
u64::from(self.0) as usize
}
}
impl Into<u8> for Uint {
	fn into(self) -> u8 {
		// Truncating cast: only the low 8 bits survive.
		u64::from(self.0) as u8
	}
}
impl Deserialize for Uint {
	fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
	where D: Deserializer {
		// Delegate to the visitor, which accepts either a JSON number or a
		// (hex/decimal) string.
		deserializer.deserialize(UintVisitor)
	}
}
struct UintVisitor;
impl Visitor for UintVisitor {
	type Value = Uint;
	fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
		write!(formatter, "a hex encoded or decimal uint")
	}
	// Plain JSON numbers are accepted directly.
	fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E> where E: Error {
		Ok(Uint(U256::from(value)))
	}
	fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: Error {
		// Lenient parsing for test json files: "" and a bare "0x" both mean
		// zero, "0x..." is parsed as hex, anything else as decimal.
		let value = match value.len() {
			0 => U256::from(0),
			2 if value.starts_with("0x") => U256::from(0),
			_ if value.starts_with("0x") => U256::from_str(&value[2..]).map_err(|e| {
				Error::custom(format!("Invalid hex value {}: {}", value, e).as_str())
			})?,
			_ => U256::from_dec_str(value).map_err(|e| {
				Error::custom(format!("Invalid decimal value {}: {:?}", value, e).as_str())
			})?
		};
		Ok(Uint(value))
	}
	// Owned strings are delegated to the &str handler.
	fn visit_string<E>(self, value: String) -> Result<Self::Value, E> where E: Error {
		self.visit_str(value.as_ref())
	}
}
#[cfg(test)]
mod test {
	use serde_json;
	use util::U256;
	use uint::Uint;
	#[test]
	fn uint_deserialization() {
		// Hex, decimal, empty and bare-"0x" strings plus raw numbers must
		// all deserialize; the lenient forms all map to zero.
		let s = r#"["0xa", "10", "", "0x", 0]"#;
		let deserialized: Vec<Uint> = serde_json::from_str(s).unwrap();
		assert_eq!(deserialized, vec![
			Uint(U256::from(10)),
			Uint(U256::from(10)),
			Uint(U256::from(0)),
			Uint(U256::from(0)),
			Uint(U256::from(0))
		]);
	}
	#[test]
	fn uint_into() {
		assert_eq!(U256::from(10), Uint(U256::from(10)).into());
	}
}<|fim▁end|> |
impl Into<usize> for Uint { |
<|file_name|>_output.py<|end_file_name|><|fim▁begin|>"""
Implementation of hooks and APIs for outputting log messages.
"""
import sys
import traceback
import inspect
import json as pyjson
from threading import Lock
from functools import wraps<|fim▁hole|>
from pyrsistent import PClass, field
from . import _bytesjson as bytesjson
from zope.interface import Interface, implementer
from ._traceback import write_traceback, TRACEBACK_MESSAGE
from ._message import EXCEPTION_FIELD, MESSAGE_TYPE_FIELD, REASON_FIELD
from ._util import saferepr, safeunicode
from .json import EliotJSONEncoder
from ._validation import ValidationError
class _DestinationsSendError(Exception):
"""
An error occured sending to one or more destinations.
@ivar errors: A list of tuples output from C{sys.exc_info()}.
"""
def __init__(self, errors):
self.errors = errors
Exception.__init__(self, errors)
class BufferingDestination(object):
    """
    A destination that keeps the most recent messages in memory.

    At most 1000 messages are retained; older ones are discarded.
    """

    def __init__(self):
        self.messages = []

    def __call__(self, message):
        self.messages.append(message)
        # Trim from the front so only the newest 1000 messages remain; the
        # slice is empty (a no-op) while we are under the limit.
        del self.messages[:-1000]
class Destinations(object):
    """
    Manage a list of destinations for message dictionaries.
    The global instance of this class is where L{Logger} instances will
    send written messages.
    """
    def __init__(self):
        # Until real destinations are added, buffer messages in memory so
        # early log output is not lost; add() re-delivers them.
        self._destinations = [BufferingDestination()]
        self._any_added = False
        self._globalFields = {}
    def addGlobalFields(self, **fields):
        """
        Add fields that will be included in all messages sent through this
        destination.
        @param fields: Keyword arguments mapping field names to values.
        """
        self._globalFields.update(fields)
    def send(self, message):
        """
        Deliver a message to all destinations.
        The passed in message might be mutated.
        Delivery is attempted to every destination even if some fail; all
        failures are then reported together.
        @param message: A message dictionary that can be serialized to JSON.
        @type message: L{dict}
        @raises _DestinationsSendError: If any destination raised.
        """
        message.update(self._globalFields)
        errors = []
        for dest in self._destinations:
            try:
                dest(message)
            except:
                # Record the failure but keep delivering to the remaining
                # destinations.
                errors.append(sys.exc_info())
        if errors:
            raise _DestinationsSendError(errors)
    def add(self, *destinations):
        """
        Adds new destinations.
        A destination should never ever throw an exception. Seriously.
        A destination should not mutate the dictionary it is given.
        @param destinations: A list of callables that takes message
            dictionaries.
        """
        buffered_messages = None
        if not self._any_added:
            # These are first set of messages added, so we need to clear
            # BufferingDestination:
            self._any_added = True
            buffered_messages = self._destinations[0].messages
            self._destinations = []
        self._destinations.extend(destinations)
        if buffered_messages:
            # Re-deliver buffered messages:
            for message in buffered_messages:
                self.send(message)
    def remove(self, destination):
        """
        Remove an existing destination.
        @param destination: A destination previously added with C{self.add}.
        @raises ValueError: If the destination is unknown.
        """
        self._destinations.remove(destination)
class ILogger(Interface):
    """
    Write out message dictionaries to some destination.
    (zope.interface convention: method signatures omit C{self}.)
    """
    def write(dictionary, serializer=None):
        """
        Write a dictionary to the appropriate destination.
        @note: This method is thread-safe.
        @param serializer: Either C{None}, or a
            L{eliot._validation._MessageSerializer} which can be used to
            validate this message.
        @param dictionary: The message to write out. The given dictionary
            will not be mutated.
        @type dictionary: C{dict}
        """
@implementer(ILogger)
class Logger(object):
    """
    Write out messages to the globally configured destination(s).
    You will typically want to create one of these for every chunk of code
    whose messages you want to unit test in isolation, e.g. a class. The tests
    can then replace a specific L{Logger} with a L{MemoryLogger}.
    """
    _destinations = Destinations()
    # When False, destination failures are silently dropped instead of being
    # logged; used internally below to prevent infinite recursion.
    _log_tracebacks = True
    def _safeUnicodeDictionary(self, dictionary):
        """
        Serialize a dictionary to a unicode string no matter what it contains.
        The resulting dictionary will loosely follow Python syntax but it is
        not expected to actually be a lossless encoding in all cases.
        @param dictionary: A L{dict} to serialize.
        @return: A L{unicode} string representing the input dictionary as
            faithfully as can be done without putting in too much effort.
        """
        try:
            return str(
                dict(
                    (saferepr(key), saferepr(value))
                    for (key, value) in dictionary.items()
                )
            )
        except:
            # Even iterating the items failed; fall back to a best-effort
            # repr of the whole object.
            return saferepr(dictionary)
    def write(self, dictionary, serializer=None):
        """
        Serialize the dictionary, and write it to C{self._destinations}.
        Serialization and delivery failures are themselves logged rather
        than raised, so logging can never break the calling code.
        """
        # Copy so the caller's dictionary is never mutated:
        dictionary = dictionary.copy()
        try:
            if serializer is not None:
                serializer.serialize(dictionary)
        except:
            # Serialization failed; log a traceback plus a best-effort
            # rendering of the offending message, then drop it.
            write_traceback(self)
            from ._action import log_message
            log_message(
                "eliot:serialization_failure",
                message=self._safeUnicodeDictionary(dictionary),
                __eliot_logger__=self,
            )
            return
        try:
            self._destinations.send(dictionary)
        except _DestinationsSendError as e:
            from ._action import log_message
            if self._log_tracebacks:
                for (exc_type, exception, exc_traceback) in e.errors:
                    # Can't use same Logger as serialization errors because
                    # if destination continues to error out we will get
                    # infinite recursion. So instead we have to manually
                    # construct a Logger that won't retry.
                    logger = Logger()
                    logger._log_tracebacks = False
                    logger._destinations = self._destinations
                    msg = {
                        MESSAGE_TYPE_FIELD: "eliot:destination_failure",
                        REASON_FIELD: safeunicode(exception),
                        EXCEPTION_FIELD: exc_type.__module__ + "." + exc_type.__name__,
                        "message": self._safeUnicodeDictionary(dictionary),
                        "__eliot_logger__": logger,
                    }
                    log_message(**msg)
            else:
                # Nothing we can do here, raising exception to caller will
                # break business logic, better to have that continue to
                # work even if logging isn't.
                pass
def exclusively(f):
    """
    Decorate a method so that invocations are serialized through the
    instance's ``self._lock``, making it safe to call from multiple threads.
    """
    @wraps(f)
    def _locked(self, *args, **kwargs):
        with self._lock:
            return f(self, *args, **kwargs)
    return _locked
@implementer(ILogger)
class MemoryLogger(object):
    """
    Store written messages in memory.
    When unit testing you don't want to create this directly but rather use
    the L{eliot.testing.validateLogging} decorator on a test method, which
    will provide additional testing integration.
    @ivar messages: A C{list} of the dictionaries passed to
        L{MemoryLogger.write}. Do not mutate this list.
    @ivar serializers: A C{list} of the serializers passed to
        L{MemoryLogger.write}, each corresponding to a message
        L{MemoryLogger.messages}. Do not mutate this list.
    @ivar tracebackMessages: A C{list} of messages written to this logger for
        tracebacks using L{eliot.write_traceback} or L{eliot.writeFailure}. Do
        not mutate this list.
    """
    def __init__(self, encoder=EliotJSONEncoder):
        """
        @param encoder: A JSONEncoder subclass to use when encoding JSON.
        """
        # The lock is used by the @exclusively decorator on the methods below.
        self._lock = Lock()
        self._encoder = encoder
        self.reset()
    @exclusively
    def flushTracebacks(self, exceptionType):
        """
        Flush all logged tracebacks whose exception is of the given type.
        This means they are expected tracebacks and should not cause the test
        to fail.
        @param exceptionType: A subclass of L{Exception}.
        @return: C{list} of flushed messages.
        """
        result = []
        remaining = []
        for message in self.tracebackMessages:
            if isinstance(message[REASON_FIELD], exceptionType):
                result.append(message)
            else:
                remaining.append(message)
        self.tracebackMessages = remaining
        return result
    # PEP 8 variant:
    flush_tracebacks = flushTracebacks
    @exclusively
    def write(self, dictionary, serializer=None):
        """
        Add the dictionary to list of messages.
        Validation failures are recorded (with the caller's stack) and
        reported later by L{MemoryLogger.validate}, not raised here.
        """
        # Validate copy of the dictionary, to ensure what we store isn't
        # mutated.
        try:
            self._validate_message(dictionary.copy(), serializer)
        except Exception as e:
            # Skip irrelevant frames that don't help pinpoint the problem:
            from . import _output, _message, _action
            skip_filenames = [_output.__file__, _message.__file__, _action.__file__]
            for frame in inspect.stack():
                if frame[1] not in skip_filenames:
                    break
            self._failed_validations.append(
                "{}: {}".format(e, "".join(traceback.format_stack(frame[0])))
            )
        self.messages.append(dictionary)
        self.serializers.append(serializer)
        if serializer is TRACEBACK_MESSAGE._serializer:
            self.tracebackMessages.append(dictionary)
    def _validate_message(self, dictionary, serializer):
        """Validate an individual message.
        As a side-effect, the message is replaced with its serialized contents.
        @param dictionary: A message C{dict} to be validated.  Might be mutated
            by the serializer!
        @param serializer: C{None} or a serializer.
        @raises TypeError: If a field name is not unicode, or the dictionary
            fails to serialize to JSON.
        @raises eliot.ValidationError: If serializer was given and validation
            failed.
        """
        if serializer is not None:
            serializer.validate(dictionary)
        for key in dictionary:
            if not isinstance(key, str):
                if isinstance(key, bytes):
                    # Decoding is attempted only to prove the bytes are valid
                    # UTF-8; the decoded result is deliberately discarded.
                    key.decode("utf-8")
                else:
                    raise TypeError(dictionary, "%r is not unicode" % (key,))
        if serializer is not None:
            serializer.serialize(dictionary)
        try:
            pyjson.dumps(dictionary, cls=self._encoder)
        except Exception as e:
            raise TypeError("Message %s doesn't encode to JSON: %s" % (dictionary, e))
    @exclusively
    def validate(self):
        """
        Validate all written messages.
        Does minimal validation of types, and for messages with corresponding
        serializers use those to do additional validation.
        As a side-effect, the messages are replaced with their serialized
        contents.
        @raises TypeError: If a field name is not unicode, or the dictionary
            fails to serialize to JSON.
        @raises eliot.ValidationError: If serializer was given and validation
            failed.
        """
        for dictionary, serializer in zip(self.messages, self.serializers):
            try:
                self._validate_message(dictionary, serializer)
            except (TypeError, ValidationError) as e:
                # We already figured out which messages failed validation
                # earlier. This just lets us figure out which exception type to
                # raise.
                raise e.__class__("\n\n".join(self._failed_validations))
    @exclusively
    def serialize(self):
        """
        Serialize all written messages.
        This is the Field-based serialization, not JSON.
        @return: A C{list} of C{dict}, the serialized messages.
        """
        result = []
        for dictionary, serializer in zip(self.messages, self.serializers):
            dictionary = dictionary.copy()
            serializer.serialize(dictionary)
            result.append(dictionary)
        return result
    @exclusively
    def reset(self):
        """
        Clear all logged messages.
        Any logged tracebacks will also be cleared, and will therefore not
        cause a test failure.
        This is useful to ensure a logger is in a known state before testing
        logging of a specific code path.
        """
        self.messages = []
        self.serializers = []
        self.tracebackMessages = []
        self._failed_validations = []
class FileDestination(PClass):
    """
    Callable that writes JSON messages to a file, one message per line.

    On Python 3 the file may support either C{bytes} or C{unicode}. On
    Python 2 only C{bytes} are supported since that is what all files expect
    in practice.

    @ivar file: The file to which messages will be written.
    @ivar encoder: The C{JSONEncoder} subclass used when encoding messages.
    @ivar _dumps: Function that serializes an object to JSON.
    @ivar _linebreak: C{"\n"} as either bytes or unicode.
    """

    file = field(mandatory=True)
    encoder = field(mandatory=True)
    _dumps = field(mandatory=True)
    _linebreak = field(mandatory=True)

    def __new__(cls, file, encoder=EliotJSONEncoder):
        if isinstance(file, IOBase) and not file.writable():
            # Bug fix: the "{}" placeholder was previously never filled in,
            # producing a useless error message.
            raise RuntimeError("Given file {} is not writable.".format(file))
        # Probe whether the file expects bytes or text by attempting a no-op
        # bytes write, then pick the matching serializer and newline type.
        unicodeFile = False
        try:
            file.write(b"")
        except TypeError:
            unicodeFile = True
        if unicodeFile:
            # On Python 3 native json module outputs unicode:
            _dumps = pyjson.dumps
            _linebreak = "\n"
        else:
            _dumps = bytesjson.dumps
            _linebreak = b"\n"
        return PClass.__new__(
            cls, file=file, _dumps=_dumps, _linebreak=_linebreak, encoder=encoder
        )

    def __call__(self, message):
        """
        Serialize C{message} to JSON and write it out as a single line.

        @param message: A message dictionary.
        """
        self.file.write(self._dumps(message, cls=self.encoder) + self._linebreak)
        # Flush so messages aren't lost if the process dies unexpectedly:
        self.file.flush()
def to_file(output_file, encoder=EliotJSONEncoder):
    """
    Add a destination that writes a JSON message per line to the given file.

    @param output_file: A file-like object.
    @param encoder: A JSONEncoder subclass to use when encoding JSON.
    """
    destination = FileDestination(file=output_file, encoder=encoder)
    Logger._destinations.add(destination)
# The default Logger, used when none is specified:
_DEFAULT_LOGGER = Logger()<|fim▁end|> | from io import IOBase |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate hyper;
mod credentials;<|fim▁hole|>use std::process;
use std::thread;
use std::time::Duration;
use credentials::Credentials;
use http::Client;
use error::Error;
/// Poll the GitHub notifications endpoint forever, marking everything as
/// read every five seconds. Failures are printed and the loop keeps going.
fn main() {
    let credentials = credentials();
    let client = Client::new(credentials);
    let interval = Duration::from_secs(5);
    loop {
        // `if let Err` replaces the is_err()/unwrap_err() double-check and
        // cannot panic.
        if let Err(e) = run(&client) {
            println!("{}", e);
        }
        thread::sleep(interval);
    }
}
/// Read credentials from the environment; on failure the error is printed
/// and the process exits with status 1.
fn credentials() -> Credentials {
    match credentials::from_env() {
        Ok(val) => val,
        Err(e) => {
            println!("{}", e);
            process::exit(1);
        }
    }
}
/// Issue one PUT against the GitHub notifications endpoint; a non-success
/// HTTP status is turned into an `Error::Request`.
fn run(client: &Client) -> Result<(), Error> {
    let response = try!(client.put("https://api.github.com/notifications", "{}"));
    if response.status.is_success() {
        Ok(())
    } else {
        Err(Error::Request { status: response.status })
    }
}<|fim▁end|> | mod error;
mod http;
|
<|file_name|>graphql.js<|end_file_name|><|fim▁begin|>Cypress.Commands.add('fetchQuery', (query, variables) => {
cy.request({
url: '/graphql',
method: 'POST',
body: JSON.stringify({ query, variables }),<|fim▁hole|> 'Content-Type': 'application/json'
}
});
});<|fim▁end|> | headers: { |
<|file_name|>StartedSearches.py<|end_file_name|><|fim▁begin|>from measures.generic.GenericMeasure import GenericMeasure
import measures.generic.Units as Units
class StartedSearches(GenericMeasure):
"""Total number of started searches"""
<|fim▁hole|>
def parseLine(self, line):
self.parseInc(line)<|fim▁end|> | def __init__(self, period, simulationTime):
GenericMeasure.__init__(self, r'DEBUG .*? - Peer [0-9]+ started search for parameters .*? ([0-9]+\,[0-9]+).*?', period, simulationTime, Units.MESSAGES) |
<|file_name|>random_forest.py<|end_file_name|><|fim▁begin|># imports
import h2o
import numpy as np
import pandas as pd
from h2o.estimators.gbm import H2OGradientBoostingEstimator
from h2o.estimators.random_forest import H2ORandomForestEstimator
from h2o.grid.grid_search import H2OGridSearch
import sys
from operator import add
from pyspark import SparkContext
from pyspark.sql import SparkSession
from pyspark.sql import SQLContext
from pyspark.sql import functions as F #https://stackoverflow.com/questions/39504950/python-pyspark-get-sum-of-a-pyspark-dataframe-column-values
from logging_lib.LoggingController import LoggingController
import h2o
h2o.show_progress() # turn on progress bars<|fim▁hole|>from h2o.estimators.deeplearning import H2ODeepLearningEstimator
from h2o.estimators.gbm import H2OGradientBoostingEstimator
from h2o.estimators.random_forest import H2ORandomForestEstimator
from h2o.grid.grid_search import H2OGridSearch # grid search
from h2o.estimators.xgboost import H2OXGBoostEstimator
from h2o.estimators.stackedensemble import H2OStackedEnsembleEstimator
import xgboost as xgb
import matplotlib
matplotlib.use('Agg') #Need this if running matplot on a server w/o display
from pysparkling import *
#Define your s3 bucket to load and store data
S3_BUCKET = 'rza-ml-1'
#Create a custom logger to log statistics and plots
logger = LoggingController()
logger.s3_bucket = S3_BUCKET
#.config('spark.executor.cores','6') \
spark = SparkSession.builder \
.appName("App") \
.getOrCreate()
# .master("local[*]") \
# .config('spark.cores.max','16')
#.master("local") \
# .config("spark.some.config.option", "some-value") \
spark.sparkContext.setLogLevel('WARN') #Get rid of all the junk in output
Y = 'y'
ID_VAR = 'ID'
DROPS = [ID_VAR]
#From an XGBoost model
# location of "dirty" file
# decision trees handle dirty data elegantly
#path = ## Read File
# NOTE the top 6 are categorical, might want to look into this.
MOST_IMPORTANT_VARS_ORDERD = ['X5','X0','X8','X3','X1','X2','X314','X47','X118',\
'X315','X29','X127','X236','X115','X383','X152','X151','X351','X327','X77','X104',\
'X267','X95','X142']
#Load data from s3
train = spark.read.format('com.databricks.spark.csv').options(header='true', inferschema='true').load('s3n://'+S3_BUCKET+'/train.csv')
test = spark.read.format('com.databricks.spark.csv').options(header='true', inferschema='true').load('s3n://'+S3_BUCKET+'/test.csv')
#this needs to be done for h2o glm.predict() bug (which needs same number of columns)
test = test.withColumn(Y,test[ID_VAR])
#Work around for splitting wide data, you need to split on only an ID varaibles
#Then join back with a train varaible (bug in spark as of 2.1 with randomSplit())
train_temp , valid_temp = train.select(ID_VAR).randomSplit([0.7,0.3], seed=123)
valid = valid_temp.join(train,ID_VAR,'inner')
train = train_temp.join(train,ID_VAR,'inner')
# split into 40% training, 30% validation, and 30% test
#train, valid, test = frame.split_frame([0.4, 0.3])
conf = H2OConf(spark=spark)
conf.nthreads = -1
hc = H2OContext.getOrCreate(spark,conf)
print('Making h2o frames...')
train_h20_frame = hc.as_h2o_frame(train, "trainTable")
valid_h20_frame = hc.as_h2o_frame(valid, "validTable")
test_h2o_frame = hc.as_h2o_frame(test, "testTable")
print('Done making h2o frames.')
logger.log_string("Train Summary:")
logger.log_string("Rows:{}".format(train_h20_frame.nrow))
logger.log_string("Cols:{}".format(train_h20_frame.ncol))
X = [name for name in train.columns if name not in ['id', '_WARN_', Y]]
# assign target and inputs
#y = 'bad_loan'
#X = [name for name in frame.columns if name not in ['id', '_WARN_', y]]
#print(y)
#print(X)
# random forest
# initialize rf model
rf_model = H2ORandomForestEstimator(
ntrees=500, # Up to 500 decision trees in the forest
max_depth=30, # trees can grow to depth of 30
stopping_rounds=5, # stop after validation error does not decrease for 5 iterations/new trees
score_each_iteration=True, # score validation error on every iteration/new tree
model_id='rf_model') # for easy lookup in flow
# train rf model
rf_model.train(
x=X,
y=Y,
training_frame=train_h20_frame,
validation_frame=valid_h20_frame)
# print model information
sub = test_h2o_frame[ID_VAR].cbind(rf_model.predict(test_h2o_frame))
print(sub.head())
# create time stamp
import re
import time
time_stamp = re.sub('[: ]', '_', time.asctime())
# save file for submission
sub.columns = [ID_VAR, Y]
sub_fname = 'Submission_'+str(time_stamp) + '.csv'
# h2o.download_csv(sub, 's3n://'+S3_BUCKET+'/kaggle_submissions/Mercedes/' +sub_fname)
spark_sub_frame = hc.as_spark_frame(sub)
spark_sub_frame.select(ID_VAR,Y).coalesce(1).write.option("header","true").csv('s3n://'+S3_BUCKET+'/Kaggle_Submissions/Mercedes/' +sub_fname)<|fim▁end|> | from h2o.estimators.glm import H2OGeneralizedLinearEstimator # import GLM models |
<|file_name|>test_schemeinfo.py<|end_file_name|><|fim▁begin|>from ...scheme import Scheme
from ..schemeinfo import SchemeInfoDialog
from ...gui import test
class TestSchemeInfo(test.QAppTestCase):
def test_scheme_info(self):
scheme = Scheme(title="A Scheme", description="A String\n")
dialog = SchemeInfoDialog()
dialog.setScheme(scheme)<|fim▁hole|>
status = dialog.exec_()
if status == dialog.Accepted:
self.assertEqual(scheme.title.strip(),
str(dialog.editor.name_edit.text()).strip())
self.assertEqual(scheme.description,
str(dialog.editor.desc_edit \
.toPlainText()).strip())<|fim▁end|> | |
<|file_name|>test_ftd_configuration.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleFailJson, AnsibleExitJson
from ansible.module_utils import basic
from ansible.module_utils.network.ftd.common import FtdConfigurationError, FtdServerError, FtdUnexpectedResponse
from ansible.module_utils.network.ftd.configuration import FtdInvalidOperationNameError, CheckModeException
from ansible.module_utils.network.ftd.fdm_swagger_client import ValidationError
from ansible.modules.network.ftd import ftd_configuration
class TestFtdConfiguration(object):
module = ftd_configuration
@pytest.fixture(autouse=True)
def module_mock(self, mocker):
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
@pytest.fixture(autouse=True)
def connection_mock(self, mocker):
connection_class_mock = mocker.patch('ansible.modules.network.ftd.ftd_configuration.Connection')
return connection_class_mock.return_value
@pytest.fixture
def resource_mock(self, mocker):
resource_class_mock = mocker.patch('ansible.modules.network.ftd.ftd_configuration.BaseConfigurationResource')
resource_instance = resource_class_mock.return_value
return resource_instance.execute_operation
def test_module_should_fail_when_ftd_invalid_operation_name_error(self, resource_mock):
operation_name = 'test name'
resource_mock.side_effect = FtdInvalidOperationNameError(operation_name)
result = self._run_module_with_fail_json({'operation': operation_name})
assert result['failed']
assert 'Invalid operation name provided: %s' % operation_name == result['msg']
def test_module_should_fail_when_ftd_configuration_error(self, resource_mock):
operation_name = 'test name'
msg = 'Foo error.'
resource_mock.side_effect = FtdConfigurationError(msg)
result = self._run_module_with_fail_json({'operation': operation_name})
assert result['failed']
assert 'Failed to execute %s operation because of the configuration error: %s' % (operation_name, msg) == \
result['msg']
def test_module_should_fail_when_ftd_server_error(self, resource_mock):
operation_name = 'test name'
code = 500
response = {'error': 'foo'}
resource_mock.side_effect = FtdServerError(response, code)
result = self._run_module_with_fail_json({'operation': operation_name})
assert result['failed']
assert 'Server returned an error trying to execute %s operation. Status code: %s. ' \
'Server response: %s' % (operation_name, code, response) == \
result['msg']
def test_module_should_fail_when_validation_error(self, resource_mock):
operation_name = 'test name'
msg = 'Foo error.'
resource_mock.side_effect = ValidationError(msg)
result = self._run_module_with_fail_json({'operation': operation_name})
assert result['failed']
assert msg == result['msg']
def test_module_should_fail_when_unexpected_server_response(self, resource_mock):
operation_name = 'test name'
msg = 'Foo error.'
resource_mock.side_effect = FtdUnexpectedResponse(msg)
result = self._run_module_with_fail_json({'operation': operation_name})
assert result['failed']
assert msg == result['msg']
def test_module_should_fail_when_check_mode_exception(self, resource_mock):
operation_name = 'test name'
msg = 'Foo error.'
resource_mock.side_effect = CheckModeException(msg)
result = self._run_module({'operation': operation_name})
assert not result['changed']
def test_module_should_run_successful(self, resource_mock):<|fim▁hole|> resource_mock.return_value = 'ok'
result = self._run_module({'operation': operation_name})
assert result['response'] == 'ok'
def _run_module(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleExitJson) as ex:
self.module.main()
return ex.value.args[0]
def _run_module_with_fail_json(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleFailJson) as exc:
self.module.main()
result = exc.value.args[0]
return result<|fim▁end|> | operation_name = 'test name' |
<|file_name|>svn_fetch.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#<|fim▁hole|># For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import pytest
from llnl.util.filesystem import touch, working_dir
import spack.repo
import spack.config
from spack.spec import Spec
from spack.version import ver
from spack.util.executable import which
pytestmark = pytest.mark.skipif(
not which('svn'), reason='requires subversion to be installed')
@pytest.mark.parametrize("type_of_test", ['default', 'rev0'])
@pytest.mark.parametrize("secure", [True, False])
def test_fetch(
type_of_test,
secure,
mock_svn_repository,
config,
mutable_mock_packages
):
"""Tries to:
1. Fetch the repo using a fetch strategy constructed with
supplied args (they depend on type_of_test).
2. Check if the test_file is in the checked out repository.
3. Assert that the repository is at the revision supplied.
4. Add and remove some files, then reset the repo, and
ensure it's all there again.
"""
# Retrieve the right test parameters
t = mock_svn_repository.checks[type_of_test]
h = mock_svn_repository.hash
# Construct the package under test
spec = Spec('svn-test')
spec.concretize()
pkg = spack.repo.get(spec)
pkg.versions[ver('svn')] = t.args
# Enter the stage directory and check some properties
with pkg.stage:
with spack.config.override('config:verify_ssl', secure):
pkg.do_stage()
with working_dir(pkg.stage.source_path):
assert h() == t.revision
file_path = os.path.join(pkg.stage.source_path, t.file)
assert os.path.isdir(pkg.stage.source_path)
assert os.path.isfile(file_path)
os.unlink(file_path)
assert not os.path.isfile(file_path)
untracked_file = 'foobarbaz'
touch(untracked_file)
assert os.path.isfile(untracked_file)
pkg.do_restage()
assert not os.path.isfile(untracked_file)
assert os.path.isdir(pkg.stage.source_path)
assert os.path.isfile(file_path)
assert h() == t.revision<|fim▁end|> | # This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
# |
<|file_name|>date.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2012, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from datetime import date, datetime
import six
from wtforms import DateField
from wtforms.validators import optional
from ..field_base import WebDepositField
__all__ = ['Date']
class Date(WebDepositField, DateField):
def __init__(self, **kwargs):
defaults = dict(
icon='calendar',
validators=[optional()],
widget_classes="form-control"
)
defaults.update(kwargs)
super(Date, self).__init__(**defaults)
def process_data(self, value):
"""
Called when loading data from Python (incoming objects can be either
datetime objects or strings, depending on if they are loaded from
an JSON or Python objects).<|fim▁hole|> self.object_data = datetime.strptime(value, self.format).date()
elif isinstance(value, datetime):
self.object_data = value.date()
elif isinstance(value, date):
self.object_data = value
# Be sure to set both self.object_data and self.data due to internals
# of Field.process() and draft_form_process_and_validate().
self.data = self.object_data
@property
def json_data(self):
"""
Serialize data into JSON serializalbe object
"""
# Just use _value() to format the date into a string.
if self.data:
return self.data.strftime(self.format) # pylint: disable-msg=
return None<|fim▁end|> | """
if isinstance(value, six.string_types): |
<|file_name|>LuceneChildApplicationContextFactory.java<|end_file_name|><|fim▁begin|>/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%<|fim▁hole|> * the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.management.subsystems;
import java.io.IOException;
/**
 * {@link ChildApplicationContextFactory} specialisation for the Lucene
 * subsystem.
 *
 * @author Andy
 */
public class LuceneChildApplicationContextFactory extends ChildApplicationContextFactory
{
    /* (non-Javadoc)
     * @see org.alfresco.repo.management.subsystems.ChildApplicationContextFactory#createInitialState()
     */
    @Override
    protected PropertyBackedBeanState createInitialState() throws IOException
    {
        return new ApplicationContextState(true);
    }

    /*
     * After destroying the child context, doInit() is invoked again —
     * presumably so the subsystem is immediately re-initialised rather than
     * left permanently down; confirm against ChildApplicationContextFactory.
     */
    protected void destroy(boolean isPermanent)
    {
        super.destroy(isPermanent);
        doInit();
    }
}<|fim▁end|> | * This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of |
<|file_name|>universal-cache.ts<|end_file_name|><|fim▁begin|>import { Injectable, isDevMode } from '@angular/core';
@Injectable()
export class CacheService {
static KEY = 'CacheService';
_cache = new Map();
/**
* check if there is a value in our store
*/
has(key: string | number): boolean {
let _key = this.normalizeKey(key);
return this._cache.has(_key);
}
/**
* store our state<|fim▁hole|> let _key = this.normalizeKey(key);
this._cache.set(_key, value);
}
/**
* get our cached value
*/
get(key: string | number): any {
let _key = this.normalizeKey(key);
return this._cache.get(_key);
}
/**
* remove specific cache item
*/
remove(key: string | number): boolean {
let _key = this.normalizeKey(key);
if (_key && this._cache.has(_key)) {
this._cache.delete(_key);
return true;
}
return false;
}
/**
* release memory refs
*/
clear(): void {
this._cache.clear();
}
/**
* convert to json for the client
*/
dehydrate(): any {
let json = {};
this._cache.forEach((value: any, key: string) => json[key] = value);
return json;
}
/**
* convert server json into out initial state
*/
rehydrate(json: any): void {
Object.keys(json).forEach((key: string) => {
let _key = this.normalizeKey(key);
let value = json[_key];
this._cache.set(_key, value);
});
}
/**
* allow JSON.stringify to work
*/
toJSON(): any {
return this.dehydrate();
}
/**
* convert numbers into strings
*/
normalizeKey(key: string | number): string {
if (isDevMode() && this._isInvalidValue(key)) {
throw new Error('Please provide a valid key to save in the CacheService');
}
return key + '';
}
_isInvalidValue(key): boolean {
return key === undefined ||
key === undefined ||
key === 0 ||
key === '' ||
typeof key === 'boolean' ||
Number.isNaN(<number>key);
}
}<|fim▁end|> | */
set(key: string | number, value: any): void { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from serial_settings import SerialSettings
class AbstractStream(object):
def __init__(self, config, name):
"""
:type name: str
"""
self.config = config
self.name = name
def open(self):
raise NotImplementedError
<|fim▁hole|> def close(self):
raise NotImplementedError
def read(self, num_bytes=1):
raise NotImplementedError
def write(self, data):
raise NotImplementedError
def reconfigure(self, config):
raise NotImplementedError<|fim▁end|> | |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>angular.module('aac.controllers.main', [])
/**
* Main layout controller
* @param $scope
*/
.controller('MainCtrl', function($scope, $rootScope, $location, Data, Utils) {
$scope.go = function(v) {
$location.path(v);
}
<|fim▁hole|> return view == $rootScope.currentView ? 'active' : '';
};
$scope.signOut = function() {
window.document.location = "./logout";
};
Data.getProfile().then(function(data) {
data.fullname = data.name + ' ' + data.surname;
$rootScope.user = data;
}).catch(function(err) {
Utils.showError(err);
});
})
.controller('HomeController', function($scope, $rootScope, $location) {
})
.controller('AccountsController', function($scope, $rootScope, $location, Data, Utils) {
Data.getAccounts().then(function(data) {
Data.getProviders().then(function(providers) {
providers.sort(function(a,b) {
if (data.accounts[a] && !data.accounts[b]) return -1;
if (data.accounts[b] && !data.accounts[a]) return 1;
return a.localeCompare(b);
});
$scope.providers = providers;
var accounts = {};
for (var p in data.accounts) {
var amap = {};
for (var k in data.accounts[p]) {
if (k === 'it.smartcommunitylab.aac.surname') amap['surname'] = data.accounts[p][k];
else if (k === 'it.smartcommunitylab.aac.givenname') amap['givenname'] = data.accounts[p][k];
else if (k === 'it.smartcommunitylab.aac.username') amap['username'] = data.accounts[p][k];
else amap[k] = data.accounts[p][k];
}
accounts[p] = amap;
}
$scope.accounts = accounts;
}).catch(function(err) {
Utils.showError(err);
});
}).catch(function(err) {
Utils.showError(err);
});
$scope.confirmDeleteAccount = function() {
$('#deleteConfirm').modal({keyboard: false});
}
$scope.deleteAccount = function() {
$('#deleteConfirm').modal('hide');
Data.deleteAccount().then(function() {
window.location.href = './logout';
}).catch(function(err) {
Utils.showError(err);
});
}
})
.controller('ConnectionsController', function($scope, $rootScope, $location, Data, Utils) {
  // Load the list of client apps the user has authorized.
  Data.getConnections().then(function(connections) {
    $scope.connections = connections;
  }).catch(function(err) {
    Utils.showError(err);
  });
  // Open the confirmation modal before revoking an app's access.
  $scope.confirmDeleteApp = function(app) {
    // Remember which client to revoke; deleteApp() reads it back.
    $scope.clientId = app.clientId;
    $('#deleteConfirm').modal({keyboard: false});
  }
  // Revoke the previously selected client and refresh the list.
  $scope.deleteApp = function() {
    $('#deleteConfirm').modal('hide');
    Data.removeConnection($scope.clientId).then(function(connections) {
      $scope.connections = connections;
      Utils.showSuccess();
    }).catch(function(err) {
      Utils.showError(err);
    });
  }
})
.controller('ProfileController', function($scope, $rootScope, $location, Data, Utils) {
$scope.profile = Object.assign($rootScope.user);
Data.getAccounts().then(function(data) {
if (!data.accounts.internal) {
$scope.password_required = true;
}
}).catch(function(err) {
Utils.showError(err);
});
$scope.cancel = function() {
window.history.back();
}
$scope.save = function() {
if (!$scope.profile.name ||
!$scope.profile.surname ||
!$scope.profile.username ||
$scope.profile.password && $scope.profile.password != $scope.profile.password2)
{
return;
}
Data.saveAccount($scope.profile).then(function(data) {
data.fullname = data.name + ' ' + data.surname;
$rootScope.user = data;
$scope.profile = Object.assign($rootScope.user);
$scope.password_required = false;
Utils.showSuccess();
}).catch(function(err) {
Utils.showError(err);
});
}
Utils.initUI();
})
;<|fim▁end|> |
$scope.activeView = function(view) { |
<|file_name|>CacheManagerBuilder.java<|end_file_name|><|fim▁begin|>/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.config.builders;
import org.ehcache.CacheManager;
import org.ehcache.PersistentCacheManager;
import org.ehcache.config.Builder;
import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.Configuration;
import org.ehcache.config.units.MemoryUnit;
import org.ehcache.core.EhcacheManager;
import org.ehcache.core.spi.store.heap.SizeOfEngine;
import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration;
import org.ehcache.impl.config.event.CacheEventDispatcherFactoryConfiguration;
import org.ehcache.impl.config.loaderwriter.writebehind.WriteBehindProviderConfiguration;
import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration;
import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration;
import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration;
import org.ehcache.impl.config.store.disk.OffHeapDiskStoreProviderConfiguration;
import org.ehcache.spi.copy.Copier;
import org.ehcache.spi.serialization.Serializer;
import org.ehcache.spi.service.Service;
import org.ehcache.spi.service.ServiceCreationConfiguration;
import java.io.File;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import static java.util.Collections.emptySet;
import static java.util.Collections.unmodifiableSet;
import static org.ehcache.config.builders.ConfigurationBuilder.newConfigurationBuilder;
import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE;
import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE;
import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT;
/**
* The {@code CacheManagerBuilder} enables building cache managers using a fluent style.
* <p>
* As with all Ehcache builders, all instances are immutable and calling any method on the builder will return a new
* instance without modifying the one on which the method was called.
* This enables the sharing of builder instances without any risk of seeing them modified by code elsewhere.
*/
public class CacheManagerBuilder<T extends CacheManager> implements Builder<T> {
private final ConfigurationBuilder configBuilder;
private final Set<Service> services;
/**
* Builds a {@link CacheManager} or a subtype of it and initializes it if requested.
*
* @param init whether the returned {@code CacheManager} is to be initialized or not
* @return a {@code CacheManager} or a subtype of it
*/
public T build(final boolean init) {
final T cacheManager = newCacheManager(services, configBuilder.build());
if(init) {
cacheManager.init();
}
return cacheManager;
}
/**
* Builds a {@link CacheManager} or a subtype of it uninitialized.
*
* @return a {@code CacheManager} or a subtype of it uninitialized
*/
@Override
public T build() {
return build(false);
}
private CacheManagerBuilder() {
this.configBuilder = newConfigurationBuilder();
this.services = emptySet();
}
private CacheManagerBuilder(CacheManagerBuilder<T> builder, Set<Service> services) {
this.configBuilder = builder.configBuilder;
this.services = unmodifiableSet(services);
}
private CacheManagerBuilder(CacheManagerBuilder<T> builder, ConfigurationBuilder configBuilder) {
this.configBuilder = configBuilder;
this.services = builder.services;
}
/**
* Creates a new {@link CacheManager} based on the provided configuration.
* The returned {@code CacheManager} is uninitialized.
*
* @param configuration the configuration to use
* @return a {@code CacheManager}
*/
public static CacheManager newCacheManager(final Configuration configuration) {
return new EhcacheManager(configuration);
}
T newCacheManager(Collection<Service> services, final Configuration configuration) {
final EhcacheManager ehcacheManager = new EhcacheManager(configuration, services);
return cast(ehcacheManager);
}
@SuppressWarnings("unchecked")
T cast(EhcacheManager ehcacheManager) {
return (T) ehcacheManager;
}
/**
* Adds a {@link CacheConfiguration} linked to the specified alias to the returned builder.
*
* @param alias the cache alias
* @param configuration the {@code CacheConfiguration}
* @param <K> the cache key type
* @param <V> the cache value type
* @return a new builder with the added cache configuration
*
* @see CacheConfigurationBuilder
*/
public <K, V> CacheManagerBuilder<T> withCache(String alias, CacheConfiguration<K, V> configuration) {
return new CacheManagerBuilder<>(this, configBuilder.addCache(alias, configuration));
}
/**
* Convenience method to add a {@link CacheConfiguration} linked to the specified alias to the returned builder by
* building it from the provided {@link Builder}.
*
* @param alias the cache alias
* @param configurationBuilder the {@code Builder} to get {@code CacheConfiguration} from
* @param <K> the cache key type
* @param <V> the cache value type
* @return a new builder with the added cache configuration
*
* @see CacheConfigurationBuilder
*/
public <K, V> CacheManagerBuilder<T> withCache(String alias, Builder<? extends CacheConfiguration<K, V>> configurationBuilder) {
return withCache(alias, configurationBuilder.build());
}
/**
* Specializes the returned {@link CacheManager} subtype through a specific {@link CacheManagerConfiguration} which
* will optionally add configurations to the returned builder.
*
* @param cfg the {@code CacheManagerConfiguration} to use
* @param <N> the subtype of {@code CacheManager}
* @return a new builder ready to build a more specific subtype of cache manager
*
* @see #persistence(String)
* @see PersistentCacheManager
* @see CacheManagerPersistenceConfiguration
*/
public <N extends T> CacheManagerBuilder<N> with(CacheManagerConfiguration<N> cfg) {
return cfg.builder(this);
}
/**
* Convenience method to specialize the returned {@link CacheManager} subtype through a {@link CacheManagerConfiguration}
* built using the provided {@link Builder}.
*
* @param cfgBuilder the {@code Builder} to get the {@code CacheManagerConfiguration} from
* @return a new builder ready to build a more specific subtype of cache manager
*
* @see CacheConfigurationBuilder
*/
public <N extends T> CacheManagerBuilder<N> with(Builder<? extends CacheManagerConfiguration<N>> cfgBuilder) {
return with(cfgBuilder.build());
}
/**
* Adds a {@link Service} instance to the returned builder.
* <p>
* The service instance will be used by the constructed {@link CacheManager}.
*
* @param service the {@code Service} to add
* @return a new builder with the added service
*/
public CacheManagerBuilder<T> using(Service service) {
Set<Service> newServices = new HashSet<>(services);
newServices.add(service);
return new CacheManagerBuilder<>(this, newServices);
}
/**
* Adds a default {@link Copier} for the specified type to the returned builder.
*
* @param clazz the {@code Class} for which the copier is
* @param copier the {@code Copier} instance
* @param <C> the type which can be copied
* @return a new builder with the added default copier
*/
public <C> CacheManagerBuilder<T> withCopier(Class<C> clazz, Class<? extends Copier<C>> copier) {
DefaultCopyProviderConfiguration service = configBuilder.findServiceByClass(DefaultCopyProviderConfiguration.class);
if (service == null) {
service = new DefaultCopyProviderConfiguration();
service.addCopierFor(clazz, copier);
return new CacheManagerBuilder<>(this, configBuilder.addService(service));
} else {
DefaultCopyProviderConfiguration newConfig = new DefaultCopyProviderConfiguration(service);
newConfig.addCopierFor(clazz, copier, true);<|fim▁hole|> return new CacheManagerBuilder<>(this, configBuilder.removeService(service).addService(newConfig));
}
}
/**
* Adds a default {@link Serializer} for the specified type to the returned builder.
*
* @param clazz the {@code Class} for which the serializer is
* @param serializer the {@code Serializer} instance
* @param <C> the type which can be serialized
* @return a new builder with the added default serializer
*/
public <C> CacheManagerBuilder<T> withSerializer(Class<C> clazz, Class<? extends Serializer<C>> serializer) {
DefaultSerializationProviderConfiguration service = configBuilder.findServiceByClass(DefaultSerializationProviderConfiguration.class);
if (service == null) {
service = new DefaultSerializationProviderConfiguration();
service.addSerializerFor(clazz, serializer);
return new CacheManagerBuilder<>(this, configBuilder.addService(service));
} else {
DefaultSerializationProviderConfiguration newConfig = new DefaultSerializationProviderConfiguration(service);
newConfig.addSerializerFor(clazz, serializer, true);
return new CacheManagerBuilder<>(this, configBuilder.removeService(service).addService(newConfig));
}
}
/**
* Adds a default {@link SizeOfEngine} configuration, that limits the max object graph to
* size, to the returned builder.
*
* @param size the max object graph size
* @return a new builder with the added configuration
*/
public CacheManagerBuilder<T> withDefaultSizeOfMaxObjectGraph(long size) {
DefaultSizeOfEngineProviderConfiguration configuration = configBuilder.findServiceByClass(DefaultSizeOfEngineProviderConfiguration.class);
if (configuration == null) {
return new CacheManagerBuilder<>(this, configBuilder.addService(new DefaultSizeOfEngineProviderConfiguration(DEFAULT_MAX_OBJECT_SIZE, DEFAULT_UNIT, size)));
} else {
ConfigurationBuilder builder = configBuilder.removeService(configuration);
return new CacheManagerBuilder<>(this, builder.addService(new DefaultSizeOfEngineProviderConfiguration(configuration
.getMaxObjectSize(), configuration.getUnit(), size)));
}
}
/**
* Adds a default {@link SizeOfEngine} configuration, that limits the max object size, to
* the returned builder.
*
* @param size the max object size
* @param unit the max object size unit
* @return a new builder with the added configuration
*/
public CacheManagerBuilder<T> withDefaultSizeOfMaxObjectSize(long size, MemoryUnit unit) {
DefaultSizeOfEngineProviderConfiguration configuration = configBuilder.findServiceByClass(DefaultSizeOfEngineProviderConfiguration.class);
if (configuration == null) {
return new CacheManagerBuilder<>(this, configBuilder.addService(new DefaultSizeOfEngineProviderConfiguration(size, unit, DEFAULT_OBJECT_GRAPH_SIZE)));
} else {
ConfigurationBuilder builder = configBuilder.removeService(configuration);
return new CacheManagerBuilder<>(this, builder.addService(new DefaultSizeOfEngineProviderConfiguration(size, unit, configuration
.getMaxObjectGraphSize())));
}
}
/**
* Adds a {@link WriteBehindProviderConfiguration}, that specifies the thread pool to use, to the returned builder.
*
* @param threadPoolAlias the thread pool alias
* @return a new builder with the added configuration
*
* @see PooledExecutionServiceConfigurationBuilder
*/
public CacheManagerBuilder<T> withDefaultWriteBehindThreadPool(String threadPoolAlias) {
WriteBehindProviderConfiguration config = configBuilder.findServiceByClass(WriteBehindProviderConfiguration.class);
if (config == null) {
return new CacheManagerBuilder<>(this, configBuilder.addService(new WriteBehindProviderConfiguration(threadPoolAlias)));
} else {
ConfigurationBuilder builder = configBuilder.removeService(config);
return new CacheManagerBuilder<>(this, builder.addService(new WriteBehindProviderConfiguration(threadPoolAlias)));
}
}
/**
* Adds a {@link OffHeapDiskStoreProviderConfiguration}, that specifies the thread pool to use, to the returned
* builder.
*
* @param threadPoolAlias the thread pool alias
* @return a new builder with the added configuration
*
* @see PooledExecutionServiceConfigurationBuilder
*/
public CacheManagerBuilder<T> withDefaultDiskStoreThreadPool(String threadPoolAlias) {
OffHeapDiskStoreProviderConfiguration config = configBuilder.findServiceByClass(OffHeapDiskStoreProviderConfiguration.class);
if (config == null) {
return new CacheManagerBuilder<>(this, configBuilder.addService(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias)));
} else {
ConfigurationBuilder builder = configBuilder.removeService(config);
return new CacheManagerBuilder<>(this, builder.addService(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias)));
}
}
/**
* Adds a {@link CacheEventDispatcherFactoryConfiguration}, that specifies the thread pool to use, to the returned
* builder.
*
* @param threadPoolAlias the thread pool alias
* @return a new builder with the added configuration
*
* @see PooledExecutionServiceConfigurationBuilder
*/
public CacheManagerBuilder<T> withDefaultEventListenersThreadPool(String threadPoolAlias) {
CacheEventDispatcherFactoryConfiguration config = configBuilder.findServiceByClass(CacheEventDispatcherFactoryConfiguration.class);
if (config == null) {
return new CacheManagerBuilder<>(this, configBuilder.addService(new CacheEventDispatcherFactoryConfiguration(threadPoolAlias)));
} else {
ConfigurationBuilder builder = configBuilder.removeService(config);
return new CacheManagerBuilder<>(this, builder.addService(new CacheEventDispatcherFactoryConfiguration(threadPoolAlias)));
}
}
/**
* Adds a {@link ServiceCreationConfiguration} to the returned builder.
* <p>
* These configurations are used to load services and configure them at creation time.
*
* @param serviceConfiguration the {@code ServiceCreationConfiguration} to use
* @return a new builder with the added configuration
*/
public CacheManagerBuilder<T> using(ServiceCreationConfiguration<?> serviceConfiguration) {
return new CacheManagerBuilder<>(this, configBuilder.addService(serviceConfiguration));
}
/**
* Replaces an existing {@link ServiceCreationConfiguration} of the same type on the returned builder.
* <p>
* Duplicate service creation configuration will cause a cache manager to fail to initialize.
*
* @param overwriteServiceConfiguration the new {@code ServiceCreationConfiguration} to use
* @return a new builder with the replaced configuration
*/
public CacheManagerBuilder<T> replacing(ServiceCreationConfiguration<?> overwriteServiceConfiguration) {
ServiceCreationConfiguration<?> existingConfiguration = configBuilder.findServiceByClass(overwriteServiceConfiguration.getClass());
return new CacheManagerBuilder<>(this, configBuilder.removeService(existingConfiguration)
.addService(overwriteServiceConfiguration));
}
/**
* Adds a {@link ClassLoader}, to use for non Ehcache types, to the returned builder
*
* @param classLoader the class loader to use
* @return a new builder with the added class loader
*/
public CacheManagerBuilder<T> withClassLoader(ClassLoader classLoader) {
return new CacheManagerBuilder<>(this, configBuilder.withClassLoader(classLoader));
}
/**
* Creates a new {@code CacheManagerBuilder}
*
* @return the cache manager builder
*/
public static CacheManagerBuilder<CacheManager> newCacheManagerBuilder() {
return new CacheManagerBuilder<>();
}
/**
* Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual
* level of persistence is configured on the disk resource pool per cache.
*
* @param rootDirectory the root directory to use for disk storage
* @return a {@code CacheManagerConfiguration}
*
* @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean)
* @see #with(CacheManagerConfiguration)
* @see PersistentCacheManager
*/
public static CacheManagerConfiguration<PersistentCacheManager> persistence(String rootDirectory) {
return persistence(new File(rootDirectory));
}
/**
* Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual
* level of persistence is configured on the disk resource pool per cache.
*
* @param rootDirectory the root directory to use for disk storage
* @return a {@code CacheManagerConfiguration}
*
* @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean)
* @see #with(CacheManagerConfiguration)
* @see PersistentCacheManager
*/
public static CacheManagerConfiguration<PersistentCacheManager> persistence(File rootDirectory) {
return new CacheManagerPersistenceConfiguration(rootDirectory);
}
}<|fim▁end|> | |
<|file_name|>ToolbarPersistTest.ts<|end_file_name|><|fim▁begin|>import { UiFinder, Waiter } from '@ephox/agar';
import { describe, it } from '@ephox/bedrock-client';
import { TinyHooks, TinyUiActions } from '@ephox/mcagar';
import { Focus, Insert, Remove, SugarBody, SugarElement } from '@ephox/sugar';
import Editor from 'tinymce/core/api/Editor';
import Theme from 'tinymce/themes/silver/Theme';
describe('browser.tinymce.themes.silver.editor.ToolbarPersistTest', () => {
const hook = TinyHooks.bddSetup<Editor>({
inline: true,
base_url: '/project/tinymce/js/tinymce',
toolbar_persist: true
}, [ Theme ]);
const unfocusEditor = () => {
const div = SugarElement.fromTag('input');
Insert.append(SugarBody.body(), div);
Focus.focus(div);
Remove.remove(div);
};
it('TINY-4847: With toolbar_persist focus & unfocus should not affect toolbar visibility', async () => {
const editor = hook.editor();
await TinyUiActions.pWaitForPopup(editor, '.tox-tinymce-inline');
unfocusEditor();
await Waiter.pWait(200); // Need to wait since nothing should happen.
await TinyUiActions.pWaitForPopup(editor, '.tox-tinymce-inline');
editor.ui.hide();
await UiFinder.pWaitForHidden('Wait for editor to be hidden', SugarBody.body(), '.tox-tinymce-inline');
editor.focus();
editor.nodeChanged();
await Waiter.pWait(200); // Need to wait since nothing should happen.
await UiFinder.pWaitForHidden('Wait for editor to be hidden', SugarBody.body(), '.tox-tinymce-inline');
editor.ui.show();
await TinyUiActions.pWaitForPopup(editor, '.tox-tinymce-inline');<|fim▁hole|><|fim▁end|> | });
}); |
<|file_name|>texteditor.py<|end_file_name|><|fim▁begin|>import gtk
import pango
import math
from core.world import TheWorld
class TextEditor(object):
def __init__(self, text):
self.__text = text
self.cursorindex = 0
self.padding = 10.0
self.width = 0.0
self.height = 0.0
self.pixel_width = 0.0
self.pixel_height = 0.0
# create text layout
self.layout = pango.Layout(TheWorld.pango_context)
fontDescription = pango.FontDescription("Monospace 8")
self.layout.set_font_description(fontDescription)
#layout.set_markup(self.text)
self.layout.set_text(text)
# calc text metrics
self.recalc_text_size()
# -- properties
def __get_text(self):
return self.__text
def __set_text(self, text):
self.__text = text
self.layout.set_text(self.__text)
self.recalc_text_size() # recalc text size
text = property(__get_text, __set_text)
def recalc_text_size(self):
(self.pixel_width, self.pixel_height) = self.layout.get_pixel_size() # bogus when called from init() !?
self.width = self.pixel_width / float(TheWorld.width) #+ self.padding * 2
self.height = self.pixel_height / float(TheWorld.height) #+ self.padding
def draw(self, context, x, y):
# figure out scale factor
# TODO - Text should be independant of scale factor
scale_x = 1.0 / self.pixel_width
scale_y = 1.0 / self.pixel_height
# render the text
context.save()
#context.set_source_rgba(0.0, 0.0, 0.0, 1.0)
context.move_to(0.0, 0.0)
context.scale(scale_x, scale_y)
# draw a background for the text
self.draw_background(context, 0.0, 0.0, self.pixel_width, self.pixel_height, 10) # ve vant square rounded corners :-)
context.set_source_rgba(0.0, 0.0, 0.0, 1.0)
context.show_layout(self.layout)
context.restore()
# render cursor
self.draw_cursor(context)
def draw_cursor(self, context):
(strong, weak) = self.layout.get_cursor_pos(self.cursorindex)
(startx, starty, curx, cury) = strong
startx /= pango.SCALE * float(TheWorld.width)
starty /= pango.SCALE * float(TheWorld.height)
curx /= pango.SCALE * float(TheWorld.width)
cury /= pango.SCALE * float(TheWorld.height)
context.set_line_width(0.02)
context.set_source_rgba(0.0, 0.0, 0.0, 1.0)
context.move_to(0.001 + (startx / self.width), starty / self.height)
context.line_to(0.001 + (startx / self.width), (starty + cury) / self.height)
context.stroke()
def draw_background(self, context, x, y, w, h, r):
x -= self.padding
y -= self.padding
w += self.padding * 2.0
h += self.padding * 2.0
# rounded box
context.move_to(x + r, y) # top left
context.line_to(x + w - r, y) # top right
context.arc(x + w - r, y + r, r, math.pi + math.pi / 2.0, 0.0)
context.line_to(x + w, y + h - r) # bottom right
context.arc(x + w - r, y + h - r, r, 0, math.pi / 2.0)
context.line_to(x + r, y + h) # bottom left
context.arc(x + r, y + h - r, r, math.pi / 2.0, math.pi)
context.line_to(x, y + r) # top left
context.arc(x + r, y + r, r, math.pi, math.pi + math.pi / 2.0)
context.set_source_rgba(0.8, 0.8, 1.0, 0.5)
context.fill_preserve()
context.set_line_width(4.0)
context.set_source_rgba(0.2, 0.2, 1.0, 0.9)
context.stroke()
# thought bubble
context.arc(x + w / 1.5, y + h * 1.1, self.pixel_height / 10.0, 0.0, math.pi * 2.0)
context.set_source_rgba(0.8, 0.8, 1.0, 0.5)
context.fill_preserve()
context.set_source_rgba(0.2, 0.2, 1.0, 0.9)
context.stroke()
context.arc(x + w / 1.7, y + h * 1.2, self.pixel_height / 20.0, 0.0, math.pi * 2.0)
context.set_source_rgba(0.8, 0.8, 1.0, 0.5)
context.fill_preserve()
context.set_source_rgba(0.2, 0.2, 1.0, 0.9)
context.stroke()
context.arc(x + w / 1.9, y + h * 1.3, self.pixel_height / 30.0, 0.0, math.pi * 2.0)
context.set_source_rgba(0.8, 0.8, 1.0, 0.5)
context.fill_preserve()
context.set_source_rgba(0.2, 0.2, 1.0, 0.9)
context.stroke()
# -- key handling ---------------------------------------------------------
    def do_key_press_event(self, event):
        """Handle a GTK key-press: dispatch editing keys, then insert text.

        Special keys (backspace, delete, home/end, arrows) are dispatched via
        a keyval->handler table; printable input is spliced in at the cursor.
        """
        try :
            # Dispatch table keyed on the GTK keyval.  An unhandled key raises
            # KeyError, which the bare except deliberately swallows.
            # NOTE(review): the bare except also hides errors raised *inside*
            # the handlers -- consider narrowing it to KeyError.
            { gtk.keysyms.BackSpace : self.do_key_press_backspace,
              gtk.keysyms.Delete : self.do_key_press_delete,
              gtk.keysyms.Home : self.do_key_press_home,
              gtk.keysyms.End : self.do_key_press_end,
              gtk.keysyms.Left : self.do_key_press_left,
              gtk.keysyms.Right : self.do_key_press_right,
              gtk.keysyms.Up : self.do_key_press_up,
              gtk.keysyms.Down : self.do_key_press_down } [event.keyval]()
        except:
            pass
        if event.string:
            # Printable character (or Return): insert at the cursor position.
            left = self.text[ : self.cursorindex]
            right = self.text[self.cursorindex : ]
            if event.string == "\r":
                # Carriage return from GTK becomes a newline in the buffer.
                self.text = left + "\n" + right
            else:
                self.text = left + event.string + right
            self.cursorindex += 1
def do_key_press_backspace(self):
left = self.text[ : self.cursorindex - 1]
right = self.text[self.cursorindex : ]
self.text = left + right
if self.cursorindex > 0:
self.cursorindex -= 1<|fim▁hole|> left = self.text[ : self.cursorindex]
right = self.text[self.cursorindex + 1 : ]
self.text = left + right
def do_key_press_home(self):
lines = self.text.splitlines ()
loc = 0
line = 0
for i in lines:
loc += len(i) + 1
if loc > self.cursorindex:
self.cursorindex = loc - len(i) - 1
return
line += 1
def do_key_press_end(self):
lines = self.text.splitlines()
loc = 0
line = 0
for i in lines:
loc += len(i) + 1
if loc > self.cursorindex:
self.cursorindex = loc - 1
return
line += 1
def do_key_press_left(self):
if self.cursorindex > 0:
self.cursorindex -= 1
def do_key_press_right(self):
if self.cursorindex < len(self.text):
self.cursorindex += 1
def do_key_press_up(self):
lines = self.text.splitlines()
if len(lines) == 1:
return
loc = 0
line = 0
for i in lines:
loc += len(i) + 1
if loc > self.cursorindex:
loc -= len(i) + 1
line -= 1
break
line += 1
if line == -1:
return
elif line >= len(lines):
self.cursorindex -= len(lines[-1]) + 1
return
dist = self.cursorindex - loc -1
self.cursorindex = loc
if dist < len(lines[line]):
self.cursorindex -= (len(lines[line]) - dist)
else:
self.cursorindex -= 1
def do_key_press_down(self):
lines = self.text.splitlines()
if len(lines) == 1:
return
loc = 0
line = 0
for i in lines:
loc += len(i) + 1
if loc > self.cursorindex:
break
line += 1
if line >= len(lines) - 1:
return
dist = self.cursorindex - (loc - len(lines[line])) + 1
self.cursorindex = loc
if dist > len (lines[line + 1]):
self.cursorindex += len(lines[line + 1])
else:
self.cursorindex += dist<|fim▁end|> |
def do_key_press_delete(self): |
<|file_name|>test_hashing.py<|end_file_name|><|fim▁begin|>import numpy as np
import pandas as pd<|fim▁hole|>from dask.dataframe.hashing import hash_pandas_object
from dask.dataframe.utils import assert_eq
# Exercise hash_pandas_object across a spread of pandas containers and dtypes:
# int/float/str/bool Series, Index objects, DataFrames (including ones with
# missing/mixed/time data), and time-based Series/Index.
@pytest.mark.parametrize('obj', [
    pd.Series([1, 2, 3]),
    pd.Series([1.0, 1.5, 3.2]),
    pd.Series([1.0, 1.5, 3.2], index=[1.5, 1.1, 3.3]),
    pd.Series(['a', 'b', 'c']),
    pd.Series([True, False, True]),
    pd.Index([1, 2, 3]),
    pd.Index([True, False, True]),
    pd.DataFrame({'x': ['a', 'b', 'c'], 'y': [1, 2, 3]}),
    pd.util.testing.makeMissingDataframe(),
    pd.util.testing.makeMixedDataFrame(),
    pd.util.testing.makeTimeDataFrame(),
    pd.util.testing.makeTimeSeries(),
    pd.util.testing.makeTimedeltaIndex()])
def test_hash_pandas_object(obj):
    """Hashing the same object twice must be deterministic (equal results)."""
    a = hash_pandas_object(obj)
    b = hash_pandas_object(obj)
    # Index inputs hash to a bare ndarray; everything else round-trips as a
    # pandas object and is compared with dask's assert_eq.
    if isinstance(a, np.ndarray):
        np.testing.assert_equal(a, b)
    else:
        assert_eq(a, b)
import pytest
|
<|file_name|>listsinceblock.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2017 The DigiByte Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import AuroracoinTestFramework
from test_framework.util import assert_equal
class ListSinceBlockTest (AuroracoinTestFramework):
    def __init__(self):
        # Start from a clean chain (no cached blocks) so the balance
        # assertions in run_test() are deterministic; four nodes are needed
        # to split the network into two halves of two.
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 4
    def run_test (self):
        '''
        `listsinceblock` did not behave correctly when handed a block that was
        no longer in the main chain:

                 ab0
               /     \
            aa1 [tx0]  bb1
             |          |
            aa2        bb2
             |          |
            aa3        bb3
                        |
                       bb4

        Consider a client that has only seen block `aa3` above. It asks the node
        to `listsinceblock aa3`. But at some point prior the main chain switched
        to the bb chain.

        Previously: listsinceblock would find height=4 for block aa3 and compare
        this to height=5 for the tip of the chain (bb4). It would then return
        results restricted to bb3-bb4.

        Now: listsinceblock finds the fork at ab0 and returns results in the
        range bb1-bb4.

        This test only checks that [tx0] is present.
        '''
        assert_equal(self.is_network_split, False)
        self.nodes[2].generate(101)
        self.sync_all()
        assert_equal(self.nodes[0].getbalance(), 0)
        assert_equal(self.nodes[1].getbalance(), 0)
        assert_equal(self.nodes[2].getbalance(), 50)
        assert_equal(self.nodes[3].getbalance(), 0)
        # Split network into two halves: (0,1) and (2,3).
        self.split_network()
        assert_equal(self.is_network_split, True)
        # send to nodes[0] from nodes[2] -- this is [tx0] in the diagram above
        senttx = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        # generate on both sides; node 1's side is shorter (6 < 7), so it will
        # be reorged away once the halves rejoin
        lastblockhash = self.nodes[1].generate(6)[5]
        self.nodes[2].generate(7)
        print('lastblockhash=%s' % (lastblockhash))
        self.sync_all()
        self.join_network()
        # listsinceblock(lastblockhash) should now include tx, as seen from nodes[0]
        lsbres = self.nodes[0].listsinceblock(lastblockhash)
        found = False
        for tx in lsbres['transactions']:
            if tx['txid'] == senttx:
                found = True
                break
        assert_equal(found, True)
ListSinceBlockTest().main()<|fim▁end|> |
ab0
/ \
aa1 [tx0] bb1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.