file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–39k) | suffix (large_string, lengths 0–36.1k) | middle (large_string, lengths 0–29.4k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
transaction.py | import threading
from collections import defaultdict
from funcy import once, decorator
from django.db import DEFAULT_DB_ALIAS, DatabaseError
from django.db.backends.utils import CursorWrapper
from django.db.transaction import Atomic, get_connection, on_commit
from .utils import monkey_mix
__all__ = ('queue_when_in_transaction', 'install_cacheops_transaction_support',
'transaction_states')
class TransactionState(list):
|
class TransactionStates(threading.local):
def __init__(self):
super(TransactionStates, self).__init__()
self._states = defaultdict(TransactionState)
def __getitem__(self, key):
return self._states[key or DEFAULT_DB_ALIAS]
def is_dirty(self, dbs):
return any(self[db].is_dirty() for db in dbs)
transaction_states = TransactionStates()
@decorator
def queue_when_in_transaction(call):
if transaction_states[call.using]:
transaction_states[call.using].push((call, (), {}))
else:
return call()
class AtomicMixIn(object):
def __enter__(self):
entering = not transaction_states[self.using]
transaction_states[self.using].begin()
self._no_monkey.__enter__(self)
if entering:
on_commit(transaction_states[self.using].commit, self.using)
def __exit__(self, exc_type, exc_value, traceback):
connection = get_connection(self.using)
try:
self._no_monkey.__exit__(self, exc_type, exc_value, traceback)
except DatabaseError:
transaction_states[self.using].rollback()
else:
if not connection.closed_in_transaction and exc_type is None and \
not connection.needs_rollback:
if transaction_states[self.using]:
transaction_states[self.using].commit()
else:
transaction_states[self.using].rollback()
class CursorWrapperMixin(object):
def callproc(self, procname, params=None):
result = self._no_monkey.callproc(self, procname, params)
if transaction_states[self.db.alias]:
transaction_states[self.db.alias].mark_dirty()
return result
def execute(self, sql, params=None):
result = self._no_monkey.execute(self, sql, params)
if transaction_states[self.db.alias] and is_sql_dirty(sql):
transaction_states[self.db.alias].mark_dirty()
return result
def executemany(self, sql, param_list):
result = self._no_monkey.executemany(self, sql, param_list)
if transaction_states[self.db.alias] and is_sql_dirty(sql):
transaction_states[self.db.alias].mark_dirty()
return result
CHARS = set('abcdefghijklmnoprqstuvwxyz_')
def is_sql_dirty(sql):
# This should not happen as using bytes in Python 3 is against db protocol,
# but some people will pass it anyway
if isinstance(sql, bytes):
sql = sql.decode()
# NOTE: not using regex here for speed
sql = sql.lower()
for action in ('update', 'insert', 'delete'):
p = sql.find(action)
if p == -1:
continue
start, end = p - 1, p + len(action)
if (start < 0 or sql[start] not in CHARS) and (end >= len(sql) or sql[end] not in CHARS):
return True
else:
return False
@once
def install_cacheops_transaction_support():
monkey_mix(Atomic, AtomicMixIn)
monkey_mix(CursorWrapper, CursorWrapperMixin)
| def begin(self):
self.append({'cbs': [], 'dirty': False})
def commit(self):
context = self.pop()
if self:
# savepoint
self[-1]['cbs'].extend(context['cbs'])
self[-1]['dirty'] = self[-1]['dirty'] or context['dirty']
else:
# transaction
for func, args, kwargs in context['cbs']:
func(*args, **kwargs)
def rollback(self):
self.pop()
def push(self, item):
self[-1]['cbs'].append(item)
def mark_dirty(self):
self[-1]['dirty'] = True
def is_dirty(self):
return any(context['dirty'] for context in self) | identifier_body |
transaction.py | import threading
from collections import defaultdict
from funcy import once, decorator
from django.db import DEFAULT_DB_ALIAS, DatabaseError
from django.db.backends.utils import CursorWrapper
from django.db.transaction import Atomic, get_connection, on_commit
from .utils import monkey_mix
__all__ = ('queue_when_in_transaction', 'install_cacheops_transaction_support',
'transaction_states')
class TransactionState(list):
def begin(self):
self.append({'cbs': [], 'dirty': False})
def commit(self):
context = self.pop()
if self:
# savepoint
self[-1]['cbs'].extend(context['cbs'])
self[-1]['dirty'] = self[-1]['dirty'] or context['dirty']
else:
# transaction
for func, args, kwargs in context['cbs']:
func(*args, **kwargs)
def rollback(self):
self.pop()
def push(self, item):
self[-1]['cbs'].append(item)
def mark_dirty(self):
self[-1]['dirty'] = True
def is_dirty(self):
return any(context['dirty'] for context in self)
class TransactionStates(threading.local):
def __init__(self):
super(TransactionStates, self).__init__()
self._states = defaultdict(TransactionState)
def __getitem__(self, key):
return self._states[key or DEFAULT_DB_ALIAS]
def is_dirty(self, dbs):
return any(self[db].is_dirty() for db in dbs)
transaction_states = TransactionStates()
@decorator
def queue_when_in_transaction(call):
if transaction_states[call.using]:
transaction_states[call.using].push((call, (), {}))
else:
return call()
class AtomicMixIn(object):
def __enter__(self):
entering = not transaction_states[self.using]
transaction_states[self.using].begin()
self._no_monkey.__enter__(self)
if entering:
on_commit(transaction_states[self.using].commit, self.using)
def __exit__(self, exc_type, exc_value, traceback):
connection = get_connection(self.using)
try:
self._no_monkey.__exit__(self, exc_type, exc_value, traceback)
except DatabaseError:
transaction_states[self.using].rollback()
else:
if not connection.closed_in_transaction and exc_type is None and \
not connection.needs_rollback:
if transaction_states[self.using]:
transaction_states[self.using].commit()
else:
transaction_states[self.using].rollback()
class | (object):
def callproc(self, procname, params=None):
result = self._no_monkey.callproc(self, procname, params)
if transaction_states[self.db.alias]:
transaction_states[self.db.alias].mark_dirty()
return result
def execute(self, sql, params=None):
result = self._no_monkey.execute(self, sql, params)
if transaction_states[self.db.alias] and is_sql_dirty(sql):
transaction_states[self.db.alias].mark_dirty()
return result
def executemany(self, sql, param_list):
result = self._no_monkey.executemany(self, sql, param_list)
if transaction_states[self.db.alias] and is_sql_dirty(sql):
transaction_states[self.db.alias].mark_dirty()
return result
CHARS = set('abcdefghijklmnoprqstuvwxyz_')
def is_sql_dirty(sql):
# This should not happen as using bytes in Python 3 is against db protocol,
# but some people will pass it anyway
if isinstance(sql, bytes):
sql = sql.decode()
# NOTE: not using regex here for speed
sql = sql.lower()
for action in ('update', 'insert', 'delete'):
p = sql.find(action)
if p == -1:
continue
start, end = p - 1, p + len(action)
if (start < 0 or sql[start] not in CHARS) and (end >= len(sql) or sql[end] not in CHARS):
return True
else:
return False
@once
def install_cacheops_transaction_support():
monkey_mix(Atomic, AtomicMixIn)
monkey_mix(CursorWrapper, CursorWrapperMixin)
| CursorWrapperMixin | identifier_name |
transaction.py | import threading
from collections import defaultdict
from funcy import once, decorator
from django.db import DEFAULT_DB_ALIAS, DatabaseError
from django.db.backends.utils import CursorWrapper
from django.db.transaction import Atomic, get_connection, on_commit
from .utils import monkey_mix
__all__ = ('queue_when_in_transaction', 'install_cacheops_transaction_support',
'transaction_states')
class TransactionState(list):
def begin(self):
self.append({'cbs': [], 'dirty': False})
def commit(self):
context = self.pop()
if self:
# savepoint
self[-1]['cbs'].extend(context['cbs'])
self[-1]['dirty'] = self[-1]['dirty'] or context['dirty']
else:
# transaction
for func, args, kwargs in context['cbs']:
func(*args, **kwargs)
def rollback(self):
self.pop()
def push(self, item):
self[-1]['cbs'].append(item)
def mark_dirty(self):
self[-1]['dirty'] = True
def is_dirty(self):
return any(context['dirty'] for context in self)
class TransactionStates(threading.local):
def __init__(self):
super(TransactionStates, self).__init__()
self._states = defaultdict(TransactionState)
def __getitem__(self, key):
return self._states[key or DEFAULT_DB_ALIAS]
def is_dirty(self, dbs):
return any(self[db].is_dirty() for db in dbs)
transaction_states = TransactionStates()
@decorator
def queue_when_in_transaction(call):
if transaction_states[call.using]:
transaction_states[call.using].push((call, (), {}))
else:
return call()
class AtomicMixIn(object):
def __enter__(self):
entering = not transaction_states[self.using]
transaction_states[self.using].begin()
self._no_monkey.__enter__(self)
if entering:
on_commit(transaction_states[self.using].commit, self.using)
def __exit__(self, exc_type, exc_value, traceback):
connection = get_connection(self.using)
try:
self._no_monkey.__exit__(self, exc_type, exc_value, traceback)
except DatabaseError:
transaction_states[self.using].rollback()
else:
|
class CursorWrapperMixin(object):
def callproc(self, procname, params=None):
result = self._no_monkey.callproc(self, procname, params)
if transaction_states[self.db.alias]:
transaction_states[self.db.alias].mark_dirty()
return result
def execute(self, sql, params=None):
result = self._no_monkey.execute(self, sql, params)
if transaction_states[self.db.alias] and is_sql_dirty(sql):
transaction_states[self.db.alias].mark_dirty()
return result
def executemany(self, sql, param_list):
result = self._no_monkey.executemany(self, sql, param_list)
if transaction_states[self.db.alias] and is_sql_dirty(sql):
transaction_states[self.db.alias].mark_dirty()
return result
CHARS = set('abcdefghijklmnoprqstuvwxyz_')
def is_sql_dirty(sql):
# This should not happen as using bytes in Python 3 is against db protocol,
# but some people will pass it anyway
if isinstance(sql, bytes):
sql = sql.decode()
# NOTE: not using regex here for speed
sql = sql.lower()
for action in ('update', 'insert', 'delete'):
p = sql.find(action)
if p == -1:
continue
start, end = p - 1, p + len(action)
if (start < 0 or sql[start] not in CHARS) and (end >= len(sql) or sql[end] not in CHARS):
return True
else:
return False
@once
def install_cacheops_transaction_support():
monkey_mix(Atomic, AtomicMixIn)
monkey_mix(CursorWrapper, CursorWrapperMixin)
| if not connection.closed_in_transaction and exc_type is None and \
not connection.needs_rollback:
if transaction_states[self.using]:
transaction_states[self.using].commit()
else:
transaction_states[self.using].rollback() | conditional_block |
transaction.py | import threading
from collections import defaultdict
from funcy import once, decorator
from django.db import DEFAULT_DB_ALIAS, DatabaseError
from django.db.backends.utils import CursorWrapper
from django.db.transaction import Atomic, get_connection, on_commit
from .utils import monkey_mix
__all__ = ('queue_when_in_transaction', 'install_cacheops_transaction_support',
'transaction_states')
class TransactionState(list):
def begin(self):
self.append({'cbs': [], 'dirty': False})
def commit(self):
context = self.pop()
if self:
# savepoint
self[-1]['cbs'].extend(context['cbs'])
self[-1]['dirty'] = self[-1]['dirty'] or context['dirty']
else:
# transaction
for func, args, kwargs in context['cbs']:
func(*args, **kwargs)
def rollback(self):
self.pop()
def push(self, item):
self[-1]['cbs'].append(item)
def mark_dirty(self):
self[-1]['dirty'] = True
def is_dirty(self):
return any(context['dirty'] for context in self)
class TransactionStates(threading.local):
def __init__(self):
super(TransactionStates, self).__init__()
self._states = defaultdict(TransactionState)
def __getitem__(self, key):
return self._states[key or DEFAULT_DB_ALIAS]
def is_dirty(self, dbs):
return any(self[db].is_dirty() for db in dbs)
transaction_states = TransactionStates()
@decorator
def queue_when_in_transaction(call):
if transaction_states[call.using]:
transaction_states[call.using].push((call, (), {}))
else:
return call()
class AtomicMixIn(object):
def __enter__(self):
entering = not transaction_states[self.using]
transaction_states[self.using].begin()
self._no_monkey.__enter__(self)
if entering:
on_commit(transaction_states[self.using].commit, self.using)
def __exit__(self, exc_type, exc_value, traceback):
connection = get_connection(self.using)
try:
self._no_monkey.__exit__(self, exc_type, exc_value, traceback)
except DatabaseError:
transaction_states[self.using].rollback()
else:
if not connection.closed_in_transaction and exc_type is None and \
not connection.needs_rollback:
if transaction_states[self.using]:
transaction_states[self.using].commit()
else:
transaction_states[self.using].rollback()
class CursorWrapperMixin(object):
def callproc(self, procname, params=None):
result = self._no_monkey.callproc(self, procname, params)
if transaction_states[self.db.alias]:
transaction_states[self.db.alias].mark_dirty()
return result
def execute(self, sql, params=None):
result = self._no_monkey.execute(self, sql, params)
if transaction_states[self.db.alias] and is_sql_dirty(sql):
transaction_states[self.db.alias].mark_dirty()
return result
def executemany(self, sql, param_list):
result = self._no_monkey.executemany(self, sql, param_list)
if transaction_states[self.db.alias] and is_sql_dirty(sql):
transaction_states[self.db.alias].mark_dirty()
return result
CHARS = set('abcdefghijklmnoprqstuvwxyz_')
def is_sql_dirty(sql):
# This should not happen as using bytes in Python 3 is against db protocol,
# but some people will pass it anyway
if isinstance(sql, bytes):
sql = sql.decode()
# NOTE: not using regex here for speed
sql = sql.lower()
for action in ('update', 'insert', 'delete'):
p = sql.find(action)
if p == -1:
continue
start, end = p - 1, p + len(action)
if (start < 0 or sql[start] not in CHARS) and (end >= len(sql) or sql[end] not in CHARS):
return True
else:
return False
@once
def install_cacheops_transaction_support():
monkey_mix(Atomic, AtomicMixIn) | monkey_mix(CursorWrapper, CursorWrapperMixin) | random_line_split |
|
tweet.py | import cherrystrap
import orielpy
from cherrystrap import logger, formatter
from orielpy import common
# parse_qsl moved to urlparse module in v2.6
try:
from urllib.parse import parse_qsl #@UnusedImport
except:
from cgi import parse_qsl #@Reimport
import oauth2 as oauth
import twitter
class TwitterNotifier:
consumer_key = "ZUJt6TLfdoDx5MBZLCOFKQ"
consumer_secret = "9gS5c4AAdhk6YSkL5F4E67Xclyao6GRDnXQKWCAw"
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
def notify_health(self, output):
self._notifyTwitter('OrielPy: '+common.notifyStrings[common.NOTIFY_PREPEND]+output)
def test_notify(self):
return self._notifyTwitter("This is a test notification from OrielPy / " + formatter.now(), force=True)
def _get_authorization(self):
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
oauth_client = oauth.Client(oauth_consumer)
logger.info('Requesting temp token from Twitter')
resp, content = oauth_client.request(self.REQUEST_TOKEN_URL, 'GET')
if resp['status'] != '200':
logger.info('Invalid response from Twitter requesting temp token: %s' % resp['status'])
else:
request_token = dict(parse_qsl(content.decode('UTF-8')))
orielpy.TWITTER_TOKEN = request_token.get('oauth_token')
orielpy.TWITTER_SECRET = request_token.get('oauth_token_secret')
return self.AUTHORIZATION_URL+"?oauth_token="+ request_token.get('oauth_token')
def _get_credentials(self, key):
request_token = {}
request_token['oauth_token'] = orielpy.TWITTER_TOKEN
request_token['oauth_token_secret'] = orielpy.TWITTER_SECRET
request_token['oauth_callback_confirmed'] = 'true'
token = oauth.Token(request_token.get('oauth_token'), request_token.get('oauth_token_secret'))
token.set_verifier(key)
logger.info('Generating and signing request for an access token using key '+key)
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
logger.info('oauth_consumer: '+str(oauth_consumer))
oauth_client = oauth.Client(oauth_consumer, token)
logger.info('oauth_client: '+str(oauth_client))
resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key)
logger.info('resp, content: '+str(resp)+','+str(content))
access_token = dict(parse_qsl(content.decode('UTF-8')))
logger.info('access_token: '+str(access_token))
logger.info('resp[status] = '+str(resp['status']))
if resp['status'] != '200':
logger.error('The request for a token did not succeed: '+str(resp['status']))
return False
else:
logger.info('Your Twitter Access Token key: %s' % access_token.get('oauth_token'))
logger.info('Access Token secret: %s' % access_token.get('oauth_token_secret'))
orielpy.TWITTER_TOKEN = access_token.get('oauth_token')
orielpy.TWITTER_SECRET = access_token.get('oauth_token_secret')
return True
def _send_tweet(self, message=None):
username=self.consumer_key
password=self.consumer_secret
access_token_key=orielpy.TWITTER_TOKEN
access_token_secret=orielpy.TWITTER_SECRET
logger.info(u"Sending tweet: "+message)
api = twitter.Api(username, password, access_token_key, access_token_secret)
try:
api.PostUpdate(message)
except Exception as e:
logger.error(u"Error Sending Tweet: %s" %e)
return False
return True
def _notifyTwitter(self, message='', force=False):
prefix = orielpy.TWITTER_PREFIX
if not orielpy.TWITTER_ENABLED and not force:
|
return self._send_tweet(prefix+": "+message)
notifier = TwitterNotifier
| return False | conditional_block |
tweet.py | import cherrystrap
import orielpy
from cherrystrap import logger, formatter
from orielpy import common
# parse_qsl moved to urlparse module in v2.6
try:
from urllib.parse import parse_qsl #@UnusedImport
except:
from cgi import parse_qsl #@Reimport
import oauth2 as oauth
import twitter
class TwitterNotifier:
|
notifier = TwitterNotifier
| consumer_key = "ZUJt6TLfdoDx5MBZLCOFKQ"
consumer_secret = "9gS5c4AAdhk6YSkL5F4E67Xclyao6GRDnXQKWCAw"
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
def notify_health(self, output):
self._notifyTwitter('OrielPy: '+common.notifyStrings[common.NOTIFY_PREPEND]+output)
def test_notify(self):
return self._notifyTwitter("This is a test notification from OrielPy / " + formatter.now(), force=True)
def _get_authorization(self):
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
oauth_client = oauth.Client(oauth_consumer)
logger.info('Requesting temp token from Twitter')
resp, content = oauth_client.request(self.REQUEST_TOKEN_URL, 'GET')
if resp['status'] != '200':
logger.info('Invalid response from Twitter requesting temp token: %s' % resp['status'])
else:
request_token = dict(parse_qsl(content.decode('UTF-8')))
orielpy.TWITTER_TOKEN = request_token.get('oauth_token')
orielpy.TWITTER_SECRET = request_token.get('oauth_token_secret')
return self.AUTHORIZATION_URL+"?oauth_token="+ request_token.get('oauth_token')
def _get_credentials(self, key):
request_token = {}
request_token['oauth_token'] = orielpy.TWITTER_TOKEN
request_token['oauth_token_secret'] = orielpy.TWITTER_SECRET
request_token['oauth_callback_confirmed'] = 'true'
token = oauth.Token(request_token.get('oauth_token'), request_token.get('oauth_token_secret'))
token.set_verifier(key)
logger.info('Generating and signing request for an access token using key '+key)
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
logger.info('oauth_consumer: '+str(oauth_consumer))
oauth_client = oauth.Client(oauth_consumer, token)
logger.info('oauth_client: '+str(oauth_client))
resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key)
logger.info('resp, content: '+str(resp)+','+str(content))
access_token = dict(parse_qsl(content.decode('UTF-8')))
logger.info('access_token: '+str(access_token))
logger.info('resp[status] = '+str(resp['status']))
if resp['status'] != '200':
logger.error('The request for a token did not succeed: '+str(resp['status']))
return False
else:
logger.info('Your Twitter Access Token key: %s' % access_token.get('oauth_token'))
logger.info('Access Token secret: %s' % access_token.get('oauth_token_secret'))
orielpy.TWITTER_TOKEN = access_token.get('oauth_token')
orielpy.TWITTER_SECRET = access_token.get('oauth_token_secret')
return True
def _send_tweet(self, message=None):
username=self.consumer_key
password=self.consumer_secret
access_token_key=orielpy.TWITTER_TOKEN
access_token_secret=orielpy.TWITTER_SECRET
logger.info(u"Sending tweet: "+message)
api = twitter.Api(username, password, access_token_key, access_token_secret)
try:
api.PostUpdate(message)
except Exception as e:
logger.error(u"Error Sending Tweet: %s" %e)
return False
return True
def _notifyTwitter(self, message='', force=False):
prefix = orielpy.TWITTER_PREFIX
if not orielpy.TWITTER_ENABLED and not force:
return False
return self._send_tweet(prefix+": "+message) | identifier_body |
tweet.py | import cherrystrap
import orielpy
from cherrystrap import logger, formatter
from orielpy import common
# parse_qsl moved to urlparse module in v2.6
try:
from urllib.parse import parse_qsl #@UnusedImport
except:
from cgi import parse_qsl #@Reimport
import oauth2 as oauth
import twitter
class TwitterNotifier:
consumer_key = "ZUJt6TLfdoDx5MBZLCOFKQ"
consumer_secret = "9gS5c4AAdhk6YSkL5F4E67Xclyao6GRDnXQKWCAw"
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
|
def notify_health(self, output):
self._notifyTwitter('OrielPy: '+common.notifyStrings[common.NOTIFY_PREPEND]+output)
def test_notify(self):
return self._notifyTwitter("This is a test notification from OrielPy / " + formatter.now(), force=True)
def _get_authorization(self):
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
oauth_client = oauth.Client(oauth_consumer)
logger.info('Requesting temp token from Twitter')
resp, content = oauth_client.request(self.REQUEST_TOKEN_URL, 'GET')
if resp['status'] != '200':
logger.info('Invalid response from Twitter requesting temp token: %s' % resp['status'])
else:
request_token = dict(parse_qsl(content.decode('UTF-8')))
orielpy.TWITTER_TOKEN = request_token.get('oauth_token')
orielpy.TWITTER_SECRET = request_token.get('oauth_token_secret')
return self.AUTHORIZATION_URL+"?oauth_token="+ request_token.get('oauth_token')
def _get_credentials(self, key):
request_token = {}
request_token['oauth_token'] = orielpy.TWITTER_TOKEN
request_token['oauth_token_secret'] = orielpy.TWITTER_SECRET
request_token['oauth_callback_confirmed'] = 'true'
token = oauth.Token(request_token.get('oauth_token'), request_token.get('oauth_token_secret'))
token.set_verifier(key)
logger.info('Generating and signing request for an access token using key '+key)
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
logger.info('oauth_consumer: '+str(oauth_consumer))
oauth_client = oauth.Client(oauth_consumer, token)
logger.info('oauth_client: '+str(oauth_client))
resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key)
logger.info('resp, content: '+str(resp)+','+str(content))
access_token = dict(parse_qsl(content.decode('UTF-8')))
logger.info('access_token: '+str(access_token))
logger.info('resp[status] = '+str(resp['status']))
if resp['status'] != '200':
logger.error('The request for a token did not succeed: '+str(resp['status']))
return False
else:
logger.info('Your Twitter Access Token key: %s' % access_token.get('oauth_token'))
logger.info('Access Token secret: %s' % access_token.get('oauth_token_secret'))
orielpy.TWITTER_TOKEN = access_token.get('oauth_token')
orielpy.TWITTER_SECRET = access_token.get('oauth_token_secret')
return True
def _send_tweet(self, message=None):
username=self.consumer_key
password=self.consumer_secret
access_token_key=orielpy.TWITTER_TOKEN
access_token_secret=orielpy.TWITTER_SECRET
logger.info(u"Sending tweet: "+message)
api = twitter.Api(username, password, access_token_key, access_token_secret)
try:
api.PostUpdate(message)
except Exception as e:
logger.error(u"Error Sending Tweet: %s" %e)
return False
return True
def _notifyTwitter(self, message='', force=False):
prefix = orielpy.TWITTER_PREFIX
if not orielpy.TWITTER_ENABLED and not force:
return False
return self._send_tweet(prefix+": "+message)
notifier = TwitterNotifier | AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
| random_line_split |
tweet.py | import cherrystrap
import orielpy
from cherrystrap import logger, formatter
from orielpy import common
# parse_qsl moved to urlparse module in v2.6
try:
from urllib.parse import parse_qsl #@UnusedImport
except:
from cgi import parse_qsl #@Reimport
import oauth2 as oauth
import twitter
class TwitterNotifier:
consumer_key = "ZUJt6TLfdoDx5MBZLCOFKQ"
consumer_secret = "9gS5c4AAdhk6YSkL5F4E67Xclyao6GRDnXQKWCAw"
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
def notify_health(self, output):
self._notifyTwitter('OrielPy: '+common.notifyStrings[common.NOTIFY_PREPEND]+output)
def test_notify(self):
return self._notifyTwitter("This is a test notification from OrielPy / " + formatter.now(), force=True)
def _get_authorization(self):
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
oauth_client = oauth.Client(oauth_consumer)
logger.info('Requesting temp token from Twitter')
resp, content = oauth_client.request(self.REQUEST_TOKEN_URL, 'GET')
if resp['status'] != '200':
logger.info('Invalid response from Twitter requesting temp token: %s' % resp['status'])
else:
request_token = dict(parse_qsl(content.decode('UTF-8')))
orielpy.TWITTER_TOKEN = request_token.get('oauth_token')
orielpy.TWITTER_SECRET = request_token.get('oauth_token_secret')
return self.AUTHORIZATION_URL+"?oauth_token="+ request_token.get('oauth_token')
def _get_credentials(self, key):
request_token = {}
request_token['oauth_token'] = orielpy.TWITTER_TOKEN
request_token['oauth_token_secret'] = orielpy.TWITTER_SECRET
request_token['oauth_callback_confirmed'] = 'true'
token = oauth.Token(request_token.get('oauth_token'), request_token.get('oauth_token_secret'))
token.set_verifier(key)
logger.info('Generating and signing request for an access token using key '+key)
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
logger.info('oauth_consumer: '+str(oauth_consumer))
oauth_client = oauth.Client(oauth_consumer, token)
logger.info('oauth_client: '+str(oauth_client))
resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key)
logger.info('resp, content: '+str(resp)+','+str(content))
access_token = dict(parse_qsl(content.decode('UTF-8')))
logger.info('access_token: '+str(access_token))
logger.info('resp[status] = '+str(resp['status']))
if resp['status'] != '200':
logger.error('The request for a token did not succeed: '+str(resp['status']))
return False
else:
logger.info('Your Twitter Access Token key: %s' % access_token.get('oauth_token'))
logger.info('Access Token secret: %s' % access_token.get('oauth_token_secret'))
orielpy.TWITTER_TOKEN = access_token.get('oauth_token')
orielpy.TWITTER_SECRET = access_token.get('oauth_token_secret')
return True
def | (self, message=None):
username=self.consumer_key
password=self.consumer_secret
access_token_key=orielpy.TWITTER_TOKEN
access_token_secret=orielpy.TWITTER_SECRET
logger.info(u"Sending tweet: "+message)
api = twitter.Api(username, password, access_token_key, access_token_secret)
try:
api.PostUpdate(message)
except Exception as e:
logger.error(u"Error Sending Tweet: %s" %e)
return False
return True
def _notifyTwitter(self, message='', force=False):
prefix = orielpy.TWITTER_PREFIX
if not orielpy.TWITTER_ENABLED and not force:
return False
return self._send_tweet(prefix+": "+message)
notifier = TwitterNotifier
| _send_tweet | identifier_name |
clone.spec.js | describe("clone", function() {
"use strict";
var link;
beforeEach(function() {
jasmine.sandbox.set("<a id='link'><input id='input'></a>");
link = DOM.find("#link");
});
it("allows to clone all clildren", function() {
var clone = link.clone(true),
child = clone.child(0);
|
expect(child).not.toBe(link.child(0));
expect(child).toHaveTag("input");
expect(child).toHaveId("input");
});
it("should allow to do a shallow copy", function() {
var clone = link.clone(false);
expect(clone).not.toBe(link);
expect(clone).toHaveTag("a");
expect(clone).toHaveId("link");
expect(clone.children().length).toBe(0);
});
it("should work on empty elements", function() {
var emptyEl = DOM.find("xxx");
expect(emptyEl.clone(false)).toBeTruthy();
});
it("should throw error if argument is invalud", function() {
expect(function() { link.clone() }).toThrow();
expect(function() { link.clone(1) }).toThrow();
expect(function() { link.clone({}) }).toThrow();
expect(function() { link.clone(function() {}) }).toThrow();
expect(function() { link.clone(null) }).toThrow();
expect(function() { link.clone("abc") }).toThrow();
});
}); | jasmine.sandbox.set(clone);
expect(clone).not.toBe(link);
expect(clone).toHaveTag("a");
expect(clone).toHaveId("link"); | random_line_split |
parser.py | # Copyright (C) 2010, 2011 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
from webkit import model
def combine_condition(conditions):
if conditions:
if len(conditions) == 1:
return conditions[0]
else:
return bracket_if_needed(' && '.join(map(bracket_if_needed, conditions)))
else:
return None
def bracket_if_needed(condition):
if re.match(r'.*(&&|\|\|).*', condition):
return '(%s)' % condition
else:
return condition
def | (file):
receiver_attributes = None
destination = None
messages = []
conditions = []
master_condition = None
superclass = []
for line in file:
match = re.search(r'messages -> (?P<destination>[A-Za-z_0-9]+) \s*(?::\s*(?P<superclass>.*?) \s*)?(?:(?P<attributes>.*?)\s+)?{', line)
if match:
receiver_attributes = parse_attributes_string(match.group('attributes'))
if match.group('superclass'):
superclass = match.group('superclass')
if conditions:
master_condition = conditions
conditions = []
destination = match.group('destination')
continue
if line.startswith('#'):
trimmed = line.rstrip()
if line.startswith('#if '):
conditions.append(trimmed[4:])
elif line.startswith('#endif') and conditions:
conditions.pop()
elif line.startswith('#else') or line.startswith('#elif'):
raise Exception("ERROR: '%s' is not supported in the *.in files" % trimmed)
continue
match = re.search(r'([A-Za-z_0-9]+)\((.*?)\)(?:(?:\s+->\s+)\((.*?)\))?(?:\s+(.*))?', line)
if match:
name, parameters_string, reply_parameters_string, attributes_string = match.groups()
if parameters_string:
parameters = parse_parameters_string(parameters_string)
for parameter in parameters:
parameter.condition = combine_condition(conditions)
else:
parameters = []
attributes = parse_attributes_string(attributes_string)
if reply_parameters_string:
reply_parameters = parse_parameters_string(reply_parameters_string)
for reply_parameter in reply_parameters:
reply_parameter.condition = combine_condition(conditions)
elif reply_parameters_string == '':
reply_parameters = []
else:
reply_parameters = None
messages.append(model.Message(name, parameters, reply_parameters, attributes, combine_condition(conditions)))
return model.MessageReceiver(destination, superclass, receiver_attributes, messages, combine_condition(master_condition))
def parse_attributes_string(attributes_string):
if not attributes_string:
return None
return attributes_string.split()
def split_parameters_string(parameters_string):
parameters = []
current_parameter_string = ''
nest_level = 0
for character in parameters_string:
if character == ',' and nest_level == 0:
parameters.append(current_parameter_string)
current_parameter_string = ''
continue
if character == '<':
nest_level += 1
elif character == '>':
nest_level -= 1
current_parameter_string += character
parameters.append(current_parameter_string)
return parameters
def parse_parameters_string(parameters_string):
parameters = []
for parameter_string in split_parameters_string(parameters_string):
match = re.search(r'\s*(?:\[(?P<attributes>.*?)\]\s+)?(?P<type_and_name>.*)', parameter_string)
attributes_string, type_and_name_string = match.group('attributes', 'type_and_name')
split = type_and_name_string.rsplit(' ', 1)
parameter_kind = 'class'
if split[0].startswith('struct '):
parameter_kind = 'struct'
split[0] = split[0][7:]
parameter_type = split[0]
parameter_name = split[1]
parameters.append(model.Parameter(kind=parameter_kind, type=parameter_type, name=parameter_name, attributes=parse_attributes_string(attributes_string)))
return parameters
| parse | identifier_name |
parser.py | # Copyright (C) 2010, 2011 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
from webkit import model
def combine_condition(conditions):
if conditions:
if len(conditions) == 1:
return conditions[0]
else:
return bracket_if_needed(' && '.join(map(bracket_if_needed, conditions)))
else:
return None
def bracket_if_needed(condition):
if re.match(r'.*(&&|\|\|).*', condition):
return '(%s)' % condition
else:
return condition
def parse(file):
receiver_attributes = None
destination = None
messages = []
conditions = []
master_condition = None
superclass = []
for line in file:
match = re.search(r'messages -> (?P<destination>[A-Za-z_0-9]+) \s*(?::\s*(?P<superclass>.*?) \s*)?(?:(?P<attributes>.*?)\s+)?{', line)
if match:
receiver_attributes = parse_attributes_string(match.group('attributes'))
if match.group('superclass'):
superclass = match.group('superclass')
if conditions:
master_condition = conditions
conditions = []
destination = match.group('destination')
continue
if line.startswith('#'):
trimmed = line.rstrip()
if line.startswith('#if '):
|
elif line.startswith('#endif') and conditions:
conditions.pop()
elif line.startswith('#else') or line.startswith('#elif'):
raise Exception("ERROR: '%s' is not supported in the *.in files" % trimmed)
continue
match = re.search(r'([A-Za-z_0-9]+)\((.*?)\)(?:(?:\s+->\s+)\((.*?)\))?(?:\s+(.*))?', line)
if match:
name, parameters_string, reply_parameters_string, attributes_string = match.groups()
if parameters_string:
parameters = parse_parameters_string(parameters_string)
for parameter in parameters:
parameter.condition = combine_condition(conditions)
else:
parameters = []
attributes = parse_attributes_string(attributes_string)
if reply_parameters_string:
reply_parameters = parse_parameters_string(reply_parameters_string)
for reply_parameter in reply_parameters:
reply_parameter.condition = combine_condition(conditions)
elif reply_parameters_string == '':
reply_parameters = []
else:
reply_parameters = None
messages.append(model.Message(name, parameters, reply_parameters, attributes, combine_condition(conditions)))
return model.MessageReceiver(destination, superclass, receiver_attributes, messages, combine_condition(master_condition))
def parse_attributes_string(attributes_string):
if not attributes_string:
return None
return attributes_string.split()
def split_parameters_string(parameters_string):
parameters = []
current_parameter_string = ''
nest_level = 0
for character in parameters_string:
if character == ',' and nest_level == 0:
parameters.append(current_parameter_string)
current_parameter_string = ''
continue
if character == '<':
nest_level += 1
elif character == '>':
nest_level -= 1
current_parameter_string += character
parameters.append(current_parameter_string)
return parameters
def parse_parameters_string(parameters_string):
parameters = []
for parameter_string in split_parameters_string(parameters_string):
match = re.search(r'\s*(?:\[(?P<attributes>.*?)\]\s+)?(?P<type_and_name>.*)', parameter_string)
attributes_string, type_and_name_string = match.group('attributes', 'type_and_name')
split = type_and_name_string.rsplit(' ', 1)
parameter_kind = 'class'
if split[0].startswith('struct '):
parameter_kind = 'struct'
split[0] = split[0][7:]
parameter_type = split[0]
parameter_name = split[1]
parameters.append(model.Parameter(kind=parameter_kind, type=parameter_type, name=parameter_name, attributes=parse_attributes_string(attributes_string)))
return parameters
| conditions.append(trimmed[4:]) | conditional_block |
parser.py | # Copyright (C) 2010, 2011 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
from webkit import model
def combine_condition(conditions):
if conditions:
if len(conditions) == 1:
return conditions[0]
else:
return bracket_if_needed(' && '.join(map(bracket_if_needed, conditions)))
else:
return None
def bracket_if_needed(condition):
if re.match(r'.*(&&|\|\|).*', condition):
return '(%s)' % condition
else:
return condition
def parse(file):
|
def parse_attributes_string(attributes_string):
if not attributes_string:
return None
return attributes_string.split()
def split_parameters_string(parameters_string):
parameters = []
current_parameter_string = ''
nest_level = 0
for character in parameters_string:
if character == ',' and nest_level == 0:
parameters.append(current_parameter_string)
current_parameter_string = ''
continue
if character == '<':
nest_level += 1
elif character == '>':
nest_level -= 1
current_parameter_string += character
parameters.append(current_parameter_string)
return parameters
def parse_parameters_string(parameters_string):
parameters = []
for parameter_string in split_parameters_string(parameters_string):
match = re.search(r'\s*(?:\[(?P<attributes>.*?)\]\s+)?(?P<type_and_name>.*)', parameter_string)
attributes_string, type_and_name_string = match.group('attributes', 'type_and_name')
split = type_and_name_string.rsplit(' ', 1)
parameter_kind = 'class'
if split[0].startswith('struct '):
parameter_kind = 'struct'
split[0] = split[0][7:]
parameter_type = split[0]
parameter_name = split[1]
parameters.append(model.Parameter(kind=parameter_kind, type=parameter_type, name=parameter_name, attributes=parse_attributes_string(attributes_string)))
return parameters
| receiver_attributes = None
destination = None
messages = []
conditions = []
master_condition = None
superclass = []
for line in file:
match = re.search(r'messages -> (?P<destination>[A-Za-z_0-9]+) \s*(?::\s*(?P<superclass>.*?) \s*)?(?:(?P<attributes>.*?)\s+)?{', line)
if match:
receiver_attributes = parse_attributes_string(match.group('attributes'))
if match.group('superclass'):
superclass = match.group('superclass')
if conditions:
master_condition = conditions
conditions = []
destination = match.group('destination')
continue
if line.startswith('#'):
trimmed = line.rstrip()
if line.startswith('#if '):
conditions.append(trimmed[4:])
elif line.startswith('#endif') and conditions:
conditions.pop()
elif line.startswith('#else') or line.startswith('#elif'):
raise Exception("ERROR: '%s' is not supported in the *.in files" % trimmed)
continue
match = re.search(r'([A-Za-z_0-9]+)\((.*?)\)(?:(?:\s+->\s+)\((.*?)\))?(?:\s+(.*))?', line)
if match:
name, parameters_string, reply_parameters_string, attributes_string = match.groups()
if parameters_string:
parameters = parse_parameters_string(parameters_string)
for parameter in parameters:
parameter.condition = combine_condition(conditions)
else:
parameters = []
attributes = parse_attributes_string(attributes_string)
if reply_parameters_string:
reply_parameters = parse_parameters_string(reply_parameters_string)
for reply_parameter in reply_parameters:
reply_parameter.condition = combine_condition(conditions)
elif reply_parameters_string == '':
reply_parameters = []
else:
reply_parameters = None
messages.append(model.Message(name, parameters, reply_parameters, attributes, combine_condition(conditions)))
return model.MessageReceiver(destination, superclass, receiver_attributes, messages, combine_condition(master_condition)) | identifier_body |
parser.py | # Copyright (C) 2010, 2011 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
from webkit import model
def combine_condition(conditions):
if conditions:
if len(conditions) == 1:
return conditions[0]
else:
return bracket_if_needed(' && '.join(map(bracket_if_needed, conditions)))
else:
return None
| else:
return condition
def parse(file):
receiver_attributes = None
destination = None
messages = []
conditions = []
master_condition = None
superclass = []
for line in file:
match = re.search(r'messages -> (?P<destination>[A-Za-z_0-9]+) \s*(?::\s*(?P<superclass>.*?) \s*)?(?:(?P<attributes>.*?)\s+)?{', line)
if match:
receiver_attributes = parse_attributes_string(match.group('attributes'))
if match.group('superclass'):
superclass = match.group('superclass')
if conditions:
master_condition = conditions
conditions = []
destination = match.group('destination')
continue
if line.startswith('#'):
trimmed = line.rstrip()
if line.startswith('#if '):
conditions.append(trimmed[4:])
elif line.startswith('#endif') and conditions:
conditions.pop()
elif line.startswith('#else') or line.startswith('#elif'):
raise Exception("ERROR: '%s' is not supported in the *.in files" % trimmed)
continue
match = re.search(r'([A-Za-z_0-9]+)\((.*?)\)(?:(?:\s+->\s+)\((.*?)\))?(?:\s+(.*))?', line)
if match:
name, parameters_string, reply_parameters_string, attributes_string = match.groups()
if parameters_string:
parameters = parse_parameters_string(parameters_string)
for parameter in parameters:
parameter.condition = combine_condition(conditions)
else:
parameters = []
attributes = parse_attributes_string(attributes_string)
if reply_parameters_string:
reply_parameters = parse_parameters_string(reply_parameters_string)
for reply_parameter in reply_parameters:
reply_parameter.condition = combine_condition(conditions)
elif reply_parameters_string == '':
reply_parameters = []
else:
reply_parameters = None
messages.append(model.Message(name, parameters, reply_parameters, attributes, combine_condition(conditions)))
return model.MessageReceiver(destination, superclass, receiver_attributes, messages, combine_condition(master_condition))
def parse_attributes_string(attributes_string):
if not attributes_string:
return None
return attributes_string.split()
def split_parameters_string(parameters_string):
parameters = []
current_parameter_string = ''
nest_level = 0
for character in parameters_string:
if character == ',' and nest_level == 0:
parameters.append(current_parameter_string)
current_parameter_string = ''
continue
if character == '<':
nest_level += 1
elif character == '>':
nest_level -= 1
current_parameter_string += character
parameters.append(current_parameter_string)
return parameters
def parse_parameters_string(parameters_string):
parameters = []
for parameter_string in split_parameters_string(parameters_string):
match = re.search(r'\s*(?:\[(?P<attributes>.*?)\]\s+)?(?P<type_and_name>.*)', parameter_string)
attributes_string, type_and_name_string = match.group('attributes', 'type_and_name')
split = type_and_name_string.rsplit(' ', 1)
parameter_kind = 'class'
if split[0].startswith('struct '):
parameter_kind = 'struct'
split[0] = split[0][7:]
parameter_type = split[0]
parameter_name = split[1]
parameters.append(model.Parameter(kind=parameter_kind, type=parameter_type, name=parameter_name, attributes=parse_attributes_string(attributes_string)))
return parameters | def bracket_if_needed(condition):
if re.match(r'.*(&&|\|\|).*', condition):
return '(%s)' % condition | random_line_split |
borrowck-lend-flow-match.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-pretty -- comments are unfaithfully preserved
#[allow(unused_variable)];
#[allow(dead_assignment)];
fn cond() -> bool { fail!() }
fn link<'a>(v: &'a uint, w: &mut &'a uint) -> bool { *w = v; true }
fn separate_arms() {
// Here both arms perform assignments, but only one is illegal.
let mut x = None;
match x {
None => {
// It is ok to reassign x here, because there is in
// fact no outstanding loan of x!
x = Some(0);
}
Some(ref _i) => {
x = Some(1); //~ ERROR cannot assign
}
}
copy x; // just to prevent liveness warnings
}
fn guard() |
fn main() {}
| {
// Here the guard performs a borrow. This borrow "infects" all
// subsequent arms (but not the prior ones).
let mut a = ~3;
let mut b = ~4;
let mut w = &*a;
match 22 {
_ if cond() => {
b = ~5;
}
_ if link(&*b, &mut w) => {
b = ~6; //~ ERROR cannot assign
}
_ => {
b = ~7; //~ ERROR cannot assign
}
}
b = ~8; //~ ERROR cannot assign
} | identifier_body |
borrowck-lend-flow-match.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-pretty -- comments are unfaithfully preserved
#[allow(unused_variable)];
#[allow(dead_assignment)];
fn cond() -> bool { fail!() }
fn link<'a>(v: &'a uint, w: &mut &'a uint) -> bool { *w = v; true }
fn separate_arms() {
// Here both arms perform assignments, but only one is illegal.
let mut x = None;
match x {
None => {
// It is ok to reassign x here, because there is in
// fact no outstanding loan of x!
x = Some(0);
}
Some(ref _i) => {
x = Some(1); //~ ERROR cannot assign
}
}
copy x; // just to prevent liveness warnings
}
fn | () {
// Here the guard performs a borrow. This borrow "infects" all
// subsequent arms (but not the prior ones).
let mut a = ~3;
let mut b = ~4;
let mut w = &*a;
match 22 {
_ if cond() => {
b = ~5;
}
_ if link(&*b, &mut w) => {
b = ~6; //~ ERROR cannot assign
}
_ => {
b = ~7; //~ ERROR cannot assign
}
}
b = ~8; //~ ERROR cannot assign
}
fn main() {}
| guard | identifier_name |
borrowck-lend-flow-match.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-pretty -- comments are unfaithfully preserved
#[allow(unused_variable)];
#[allow(dead_assignment)];
fn cond() -> bool { fail!() }
fn link<'a>(v: &'a uint, w: &mut &'a uint) -> bool { *w = v; true }
fn separate_arms() {
// Here both arms perform assignments, but only one is illegal.
let mut x = None;
match x {
None => {
// It is ok to reassign x here, because there is in
// fact no outstanding loan of x!
x = Some(0);
}
Some(ref _i) => {
x = Some(1); //~ ERROR cannot assign
}
}
copy x; // just to prevent liveness warnings
}
fn guard() {
// Here the guard performs a borrow. This borrow "infects" all
// subsequent arms (but not the prior ones).
let mut a = ~3;
let mut b = ~4;
let mut w = &*a;
match 22 {
_ if cond() => {
b = ~5;
}
_ if link(&*b, &mut w) => { |
_ => {
b = ~7; //~ ERROR cannot assign
}
}
b = ~8; //~ ERROR cannot assign
}
fn main() {} | b = ~6; //~ ERROR cannot assign
} | random_line_split |
mod.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! A drop-in replacement for string_cache, but backed by Gecko `nsAtom`s.
use gecko_bindings::bindings::Gecko_AddRefAtom;
use gecko_bindings::bindings::Gecko_Atomize;
use gecko_bindings::bindings::Gecko_Atomize16;
use gecko_bindings::bindings::Gecko_ReleaseAtom;
use gecko_bindings::structs::{nsAtom, nsAtom_AtomKind, nsDynamicAtom, nsStaticAtom};
use nsstring::{nsAString, nsStr};
use precomputed_hash::PrecomputedHash;
use std::{mem, slice, str};
use std::borrow::{Borrow, Cow};
use std::char::{self, DecodeUtf16};
use std::fmt::{self, Write};
use std::hash::{Hash, Hasher};
use std::iter::Cloned;
use std::ops::Deref;
use style_traits::SpecifiedValueInfo;
#[macro_use]
#[allow(improper_ctypes, non_camel_case_types, missing_docs)]
pub mod atom_macro {
include!(concat!(env!("OUT_DIR"), "/gecko/atom_macro.rs"));
}
#[macro_use]
pub mod namespace;
pub use self::namespace::{Namespace, WeakNamespace};
macro_rules! local_name {
($s:tt) => {
atom!($s)
};
}
/// A strong reference to a Gecko atom.
#[derive(Eq, PartialEq)]
pub struct Atom(*mut WeakAtom);
/// An atom *without* a strong reference.
///
/// Only usable as `&'a WeakAtom`,
/// where `'a` is the lifetime of something that holds a strong reference to that atom.
pub struct WeakAtom(nsAtom);
/// A BorrowedAtom for Gecko is just a weak reference to a `nsAtom`, that
/// hasn't been bumped.
pub type BorrowedAtom<'a> = &'a WeakAtom;
impl Deref for Atom {
type Target = WeakAtom;
#[inline]
fn deref(&self) -> &WeakAtom {
unsafe { &*self.0 }
}
}
impl PrecomputedHash for Atom {
#[inline]
fn precomputed_hash(&self) -> u32 {
self.get_hash()
}
}
impl Borrow<WeakAtom> for Atom {
#[inline]
fn borrow(&self) -> &WeakAtom {
self
}
}
impl Eq for WeakAtom {}
impl PartialEq for WeakAtom {
#[inline]
fn eq(&self, other: &Self) -> bool {
let weak: *const WeakAtom = self;
let other: *const WeakAtom = other;
weak == other
}
}
unsafe impl Send for Atom {}
unsafe impl Sync for Atom {}
unsafe impl Sync for WeakAtom {}
impl WeakAtom {
/// Construct a `WeakAtom` from a raw `nsAtom`.
#[inline]
pub unsafe fn new<'a>(atom: *const nsAtom) -> &'a mut Self {
&mut *(atom as *mut WeakAtom)
}
/// Clone this atom, bumping the refcount if the atom is not static.
#[inline]
pub fn clone(&self) -> Atom {
unsafe { Atom::from_raw(self.as_ptr()) }
}
/// Get the atom hash.
#[inline]
pub fn get_hash(&self) -> u32 {
self.0.mHash
}
/// Get the atom as a slice of utf-16 chars.
#[inline]
pub fn as_slice(&self) -> &[u16] {
let string = if self.is_static() {
let atom_ptr = self.as_ptr() as *const nsStaticAtom;
let string_offset = unsafe { (*atom_ptr).mStringOffset };
let string_offset = -(string_offset as isize);
let u8_ptr = atom_ptr as *const u8;
// It is safe to use offset() here because both addresses are within
// the same struct, e.g. mozilla::detail::gGkAtoms.
unsafe { u8_ptr.offset(string_offset) as *const u16 }
} else {
let atom_ptr = self.as_ptr() as *const nsDynamicAtom;
unsafe { (*(atom_ptr)).mString }
};
unsafe { slice::from_raw_parts(string, self.len() as usize) }
}
// NOTE: don't expose this, since it's slow and easy to misuse.
fn chars(&self) -> DecodeUtf16<Cloned<slice::Iter<u16>>> {
char::decode_utf16(self.as_slice().iter().cloned())
}
/// Execute `cb` with the string that this atom represents.
///
/// Find alternatives to this function when possible, please, since it's
/// pretty slow.
pub fn with_str<F, Output>(&self, cb: F) -> Output
where
F: FnOnce(&str) -> Output,
{
let mut buffer: [u8; 64] = unsafe { mem::uninitialized() };
// The total string length in utf16 is going to be less than or equal to
// the slice length (each utf16 character is going to take at least one
// and at most 2 items in the utf16 slice).
//
// Each of those characters will take at most four bytes in the utf8
// one. Thus if the slice is less than 64 / 4 (16) we can guarantee that
// we'll decode it in place.
let owned_string;
let len = self.len();
let utf8_slice = if len <= 16 {
let mut total_len = 0;
for c in self.chars() {
let c = c.unwrap_or(char::REPLACEMENT_CHARACTER);
let utf8_len = c.encode_utf8(&mut buffer[total_len..]).len();
total_len += utf8_len;
}
let slice = unsafe { str::from_utf8_unchecked(&buffer[..total_len]) };
debug_assert_eq!(slice, String::from_utf16_lossy(self.as_slice()));
slice
} else {
owned_string = String::from_utf16_lossy(self.as_slice());
&*owned_string
};
cb(utf8_slice)
}
/// Returns whether this atom is static.
#[inline]
pub fn is_static(&self) -> bool {
unsafe { (*self.as_ptr()).mKind() == nsAtom_AtomKind::Static as u32 }
}
/// Returns the length of the atom string.
#[inline]
pub fn len(&self) -> u32 {
unsafe { (*self.as_ptr()).mLength() }
}
/// Returns whether this atom is the empty string.
#[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns the atom as a mutable pointer.
#[inline]
pub fn as_ptr(&self) -> *mut nsAtom {
let const_ptr: *const nsAtom = &self.0;
const_ptr as *mut nsAtom
}
/// Convert this atom to ASCII lower-case
pub fn to_ascii_lowercase(&self) -> Atom {
let slice = self.as_slice();
match slice
.iter()
.position(|&char16| (b'A' as u16) <= char16 && char16 <= (b'Z' as u16))
{
None => self.clone(),
Some(i) => {
let mut buffer: [u16; 64] = unsafe { mem::uninitialized() };
let mut vec;
let mutable_slice = if let Some(buffer_prefix) = buffer.get_mut(..slice.len()) {
buffer_prefix.copy_from_slice(slice);
buffer_prefix
} else {
vec = slice.to_vec();
&mut vec
};
for char16 in &mut mutable_slice[i..] {
if *char16 <= 0x7F {
*char16 = (*char16 as u8).to_ascii_lowercase() as u16
}
}
Atom::from(&*mutable_slice)
},
}
}
/// Return whether two atoms are ASCII-case-insensitive matches
pub fn eq_ignore_ascii_case(&self, other: &Self) -> bool {
if self == other {
return true;
}
let a = self.as_slice();
let b = other.as_slice();
a.len() == b.len() && a.iter().zip(b).all(|(&a16, &b16)| {
if a16 <= 0x7F && b16 <= 0x7F {
(a16 as u8).eq_ignore_ascii_case(&(b16 as u8))
} else {
a16 == b16
}
})
}
/// Return whether this atom is an ASCII-case-insensitive match for the given string
pub fn eq_str_ignore_ascii_case(&self, other: &str) -> bool {
self.chars()
.map(|r| r.map(|c: char| c.to_ascii_lowercase()))
.eq(other.chars().map(|c: char| Ok(c.to_ascii_lowercase())))
}
}
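// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): the stack-buffer trick
// used by `with_str` above, rewritten against std only so it can be compiled
// and tested without the Gecko bindings. The helper name `with_utf16_str` is
// hypothetical. Up to 16 UTF-16 units are decoded in place into a 64-byte
// buffer (at most 4 UTF-8 bytes per decoded char); anything longer falls back
// to an owned `String`.
// ---------------------------------------------------------------------------
#[allow(dead_code)]
fn with_utf16_str<F, R>(units: &[u16], cb: F) -> R
where
    F: FnOnce(&str) -> R,
{
    let mut buffer = [0u8; 64];
    let owned;
    let s = if units.len() <= 16 {
        let mut total = 0;
        for c in std::char::decode_utf16(units.iter().cloned()) {
            let c = c.unwrap_or(std::char::REPLACEMENT_CHARACTER);
            total += c.encode_utf8(&mut buffer[total..]).len();
        }
        // Only the bytes we just encoded are read back, so this is valid UTF-8.
        std::str::from_utf8(&buffer[..total]).unwrap()
    } else {
        owned = String::from_utf16_lossy(units);
        owned.as_str()
    };
    cb(s)
}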
impl fmt::Debug for WeakAtom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
write!(w, "Gecko WeakAtom({:p}, {})", self, self)
}
}
impl fmt::Display for WeakAtom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
for c in self.chars() {
w.write_char(c.unwrap_or(char::REPLACEMENT_CHARACTER))?
}
Ok(())
}
}
| impl Atom {
/// Execute a callback with the atom represented by `ptr`.
pub unsafe fn with<F, R>(ptr: *mut nsAtom, callback: F) -> R
where
F: FnOnce(&Atom) -> R,
{
let atom = Atom(WeakAtom::new(ptr));
let ret = callback(&atom);
mem::forget(atom);
ret
}
/// Creates an atom from a static atom pointer without checking in release
/// builds.
///
/// Right now it's only used by the atom macro, and ideally it should stay
/// that way; now that we have sugar for is_static, creating atoms using
/// Atom::from_raw should involve almost no overhead.
#[inline]
pub unsafe fn from_static(ptr: *mut nsStaticAtom) -> Self {
let atom = Atom(ptr as *mut WeakAtom);
debug_assert!(
atom.is_static(),
"Called from_static for a non-static atom!"
);
atom
}
/// Creates an atom from an atom pointer.
#[inline(always)]
pub unsafe fn from_raw(ptr: *mut nsAtom) -> Self {
let atom = Atom(ptr as *mut WeakAtom);
if !atom.is_static() {
Gecko_AddRefAtom(ptr);
}
atom
}
/// Creates an atom from a dynamic atom pointer that has already had AddRef
/// called on it.
#[inline]
pub unsafe fn from_addrefed(ptr: *mut nsAtom) -> Self {
assert!(!ptr.is_null());
Atom(WeakAtom::new(ptr))
}
/// Convert this atom into an addrefed nsAtom pointer.
#[inline]
pub fn into_addrefed(self) -> *mut nsAtom {
let ptr = self.as_ptr();
mem::forget(self);
ptr
}
}
impl Hash for Atom {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
state.write_u32(self.get_hash());
}
}
impl Hash for WeakAtom {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
state.write_u32(self.get_hash());
}
}
impl Clone for Atom {
#[inline(always)]
fn clone(&self) -> Atom {
unsafe { Atom::from_raw(self.as_ptr()) }
}
}
impl Drop for Atom {
#[inline]
fn drop(&mut self) {
if !self.is_static() {
unsafe {
Gecko_ReleaseAtom(self.as_ptr());
}
}
}
}
impl Default for Atom {
#[inline]
fn default() -> Self {
atom!("")
}
}
impl fmt::Debug for Atom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
write!(w, "Gecko Atom({:p}, {})", self.0, self)
}
}
impl fmt::Display for Atom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
unsafe { (&*self.0).fmt(w) }
}
}
impl<'a> From<&'a str> for Atom {
#[inline]
fn from(string: &str) -> Atom {
debug_assert!(string.len() <= u32::max_value() as usize);
unsafe {
Atom(WeakAtom::new(Gecko_Atomize(
string.as_ptr() as *const _,
string.len() as u32,
)))
}
}
}
impl<'a> From<&'a [u16]> for Atom {
#[inline]
fn from(slice: &[u16]) -> Atom {
Atom::from(&*nsStr::from(slice))
}
}
impl<'a> From<&'a nsAString> for Atom {
#[inline]
fn from(string: &nsAString) -> Atom {
unsafe { Atom(WeakAtom::new(Gecko_Atomize16(string))) }
}
}
impl<'a> From<Cow<'a, str>> for Atom {
#[inline]
fn from(string: Cow<'a, str>) -> Atom {
Atom::from(&*string)
}
}
impl From<String> for Atom {
#[inline]
fn from(string: String) -> Atom {
Atom::from(&*string)
}
}
malloc_size_of_is_0!(Atom);
impl SpecifiedValueInfo for Atom {} | random_line_split |
|
mod.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! A drop-in replacement for string_cache, but backed by Gecko `nsAtom`s.
use gecko_bindings::bindings::Gecko_AddRefAtom;
use gecko_bindings::bindings::Gecko_Atomize;
use gecko_bindings::bindings::Gecko_Atomize16;
use gecko_bindings::bindings::Gecko_ReleaseAtom;
use gecko_bindings::structs::{nsAtom, nsAtom_AtomKind, nsDynamicAtom, nsStaticAtom};
use nsstring::{nsAString, nsStr};
use precomputed_hash::PrecomputedHash;
use std::{mem, slice, str};
use std::borrow::{Borrow, Cow};
use std::char::{self, DecodeUtf16};
use std::fmt::{self, Write};
use std::hash::{Hash, Hasher};
use std::iter::Cloned;
use std::ops::Deref;
use style_traits::SpecifiedValueInfo;
#[macro_use]
#[allow(improper_ctypes, non_camel_case_types, missing_docs)]
pub mod atom_macro {
include!(concat!(env!("OUT_DIR"), "/gecko/atom_macro.rs"));
}
#[macro_use]
pub mod namespace;
pub use self::namespace::{Namespace, WeakNamespace};
macro_rules! local_name {
($s:tt) => {
atom!($s)
};
}
/// A strong reference to a Gecko atom.
#[derive(Eq, PartialEq)]
pub struct Atom(*mut WeakAtom);
/// An atom *without* a strong reference.
///
/// Only usable as `&'a WeakAtom`,
/// where `'a` is the lifetime of something that holds a strong reference to that atom.
pub struct WeakAtom(nsAtom);
/// A BorrowedAtom for Gecko is just a weak reference to an `nsAtom` whose
/// refcount hasn't been bumped.
pub type BorrowedAtom<'a> = &'a WeakAtom;
impl Deref for Atom {
type Target = WeakAtom;
#[inline]
fn deref(&self) -> &WeakAtom {
unsafe { &*self.0 }
}
}
impl PrecomputedHash for Atom {
#[inline]
fn precomputed_hash(&self) -> u32 {
self.get_hash()
}
}
impl Borrow<WeakAtom> for Atom {
#[inline]
fn borrow(&self) -> &WeakAtom {
self
}
}
impl Eq for WeakAtom {}
impl PartialEq for WeakAtom {
#[inline]
fn eq(&self, other: &Self) -> bool {
let weak: *const WeakAtom = self;
let other: *const WeakAtom = other;
weak == other
}
}
unsafe impl Send for Atom {}
unsafe impl Sync for Atom {}
unsafe impl Sync for WeakAtom {}
impl WeakAtom {
/// Construct a `WeakAtom` from a raw `nsAtom`.
#[inline]
pub unsafe fn | <'a>(atom: *const nsAtom) -> &'a mut Self {
&mut *(atom as *mut WeakAtom)
}
/// Clone this atom, bumping the refcount if the atom is not static.
#[inline]
pub fn clone(&self) -> Atom {
unsafe { Atom::from_raw(self.as_ptr()) }
}
/// Get the atom hash.
#[inline]
pub fn get_hash(&self) -> u32 {
self.0.mHash
}
/// Get the atom as a slice of utf-16 chars.
#[inline]
pub fn as_slice(&self) -> &[u16] {
let string = if self.is_static() {
let atom_ptr = self.as_ptr() as *const nsStaticAtom;
let string_offset = unsafe { (*atom_ptr).mStringOffset };
let string_offset = -(string_offset as isize);
let u8_ptr = atom_ptr as *const u8;
// It is safe to use offset() here because both addresses are within
// the same struct, e.g. mozilla::detail::gGkAtoms.
unsafe { u8_ptr.offset(string_offset) as *const u16 }
} else {
let atom_ptr = self.as_ptr() as *const nsDynamicAtom;
unsafe { (*(atom_ptr)).mString }
};
unsafe { slice::from_raw_parts(string, self.len() as usize) }
}
// NOTE: don't expose this, since it's slow and easy to misuse.
fn chars(&self) -> DecodeUtf16<Cloned<slice::Iter<u16>>> {
char::decode_utf16(self.as_slice().iter().cloned())
}
/// Execute `cb` with the string that this atom represents.
///
/// Find alternatives to this function when possible, please, since it's
/// pretty slow.
pub fn with_str<F, Output>(&self, cb: F) -> Output
where
F: FnOnce(&str) -> Output,
{
let mut buffer: [u8; 64] = unsafe { mem::uninitialized() };
// The total string length in utf16 is going to be less than or equal to
// the slice length (each utf16 character is going to take at least one
// and at most 2 items in the utf16 slice).
//
// Each of those characters will take at most four bytes in the utf8
// one. Thus if the slice is less than 64 / 4 (16) we can guarantee that
// we'll decode it in place.
let owned_string;
let len = self.len();
let utf8_slice = if len <= 16 {
let mut total_len = 0;
for c in self.chars() {
let c = c.unwrap_or(char::REPLACEMENT_CHARACTER);
let utf8_len = c.encode_utf8(&mut buffer[total_len..]).len();
total_len += utf8_len;
}
let slice = unsafe { str::from_utf8_unchecked(&buffer[..total_len]) };
debug_assert_eq!(slice, String::from_utf16_lossy(self.as_slice()));
slice
} else {
owned_string = String::from_utf16_lossy(self.as_slice());
&*owned_string
};
cb(utf8_slice)
}
/// Returns whether this atom is static.
#[inline]
pub fn is_static(&self) -> bool {
unsafe { (*self.as_ptr()).mKind() == nsAtom_AtomKind::Static as u32 }
}
/// Returns the length of the atom string.
#[inline]
pub fn len(&self) -> u32 {
unsafe { (*self.as_ptr()).mLength() }
}
/// Returns whether this atom is the empty string.
#[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns the atom as a mutable pointer.
#[inline]
pub fn as_ptr(&self) -> *mut nsAtom {
let const_ptr: *const nsAtom = &self.0;
const_ptr as *mut nsAtom
}
/// Convert this atom to ASCII lower-case
pub fn to_ascii_lowercase(&self) -> Atom {
let slice = self.as_slice();
match slice
.iter()
.position(|&char16| (b'A' as u16) <= char16 && char16 <= (b'Z' as u16))
{
None => self.clone(),
Some(i) => {
let mut buffer: [u16; 64] = unsafe { mem::uninitialized() };
let mut vec;
let mutable_slice = if let Some(buffer_prefix) = buffer.get_mut(..slice.len()) {
buffer_prefix.copy_from_slice(slice);
buffer_prefix
} else {
vec = slice.to_vec();
&mut vec
};
for char16 in &mut mutable_slice[i..] {
if *char16 <= 0x7F {
*char16 = (*char16 as u8).to_ascii_lowercase() as u16
}
}
Atom::from(&*mutable_slice)
},
}
}
/// Return whether two atoms are ASCII-case-insensitive matches
pub fn eq_ignore_ascii_case(&self, other: &Self) -> bool {
if self == other {
return true;
}
let a = self.as_slice();
let b = other.as_slice();
a.len() == b.len() && a.iter().zip(b).all(|(&a16, &b16)| {
if a16 <= 0x7F && b16 <= 0x7F {
(a16 as u8).eq_ignore_ascii_case(&(b16 as u8))
} else {
a16 == b16
}
})
}
/// Return whether this atom is an ASCII-case-insensitive match for the given string
pub fn eq_str_ignore_ascii_case(&self, other: &str) -> bool {
self.chars()
.map(|r| r.map(|c: char| c.to_ascii_lowercase()))
.eq(other.chars().map(|c: char| Ok(c.to_ascii_lowercase())))
}
}
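// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): the ASCII-only case
// folding that `eq_ignore_ascii_case` above applies to two atoms, written as
// a standalone (hypothetical) helper over plain UTF-16 slices. Only A-Z/a-z
// fold together; any non-ASCII unit must match exactly.
// ---------------------------------------------------------------------------
#[allow(dead_code)]
fn utf16_eq_ignore_ascii_case(a: &[u16], b: &[u16]) -> bool {
    a.len() == b.len() &&
        a.iter().zip(b).all(|(&x, &y)| {
            if x <= 0x7F && y <= 0x7F {
                (x as u8).eq_ignore_ascii_case(&(y as u8))
            } else {
                x == y
            }
        })
}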
impl fmt::Debug for WeakAtom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
write!(w, "Gecko WeakAtom({:p}, {})", self, self)
}
}
impl fmt::Display for WeakAtom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
for c in self.chars() {
w.write_char(c.unwrap_or(char::REPLACEMENT_CHARACTER))?
}
Ok(())
}
}
impl Atom {
/// Execute a callback with the atom represented by `ptr`.
pub unsafe fn with<F, R>(ptr: *mut nsAtom, callback: F) -> R
where
F: FnOnce(&Atom) -> R,
{
let atom = Atom(WeakAtom::new(ptr));
let ret = callback(&atom);
mem::forget(atom);
ret
}
/// Creates an atom from a static atom pointer without checking in release
/// builds.
///
/// Right now it's only used by the atom macro, and ideally it should stay
/// that way; now that we have sugar for is_static, creating atoms using
/// Atom::from_raw should involve almost no overhead.
#[inline]
pub unsafe fn from_static(ptr: *mut nsStaticAtom) -> Self {
let atom = Atom(ptr as *mut WeakAtom);
debug_assert!(
atom.is_static(),
"Called from_static for a non-static atom!"
);
atom
}
/// Creates an atom from an atom pointer.
#[inline(always)]
pub unsafe fn from_raw(ptr: *mut nsAtom) -> Self {
let atom = Atom(ptr as *mut WeakAtom);
if !atom.is_static() {
Gecko_AddRefAtom(ptr);
}
atom
}
/// Creates an atom from a dynamic atom pointer that has already had AddRef
/// called on it.
#[inline]
pub unsafe fn from_addrefed(ptr: *mut nsAtom) -> Self {
assert!(!ptr.is_null());
Atom(WeakAtom::new(ptr))
}
/// Convert this atom into an addrefed nsAtom pointer.
#[inline]
pub fn into_addrefed(self) -> *mut nsAtom {
let ptr = self.as_ptr();
mem::forget(self);
ptr
}
}
impl Hash for Atom {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
state.write_u32(self.get_hash());
}
}
impl Hash for WeakAtom {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
state.write_u32(self.get_hash());
}
}
impl Clone for Atom {
#[inline(always)]
fn clone(&self) -> Atom {
unsafe { Atom::from_raw(self.as_ptr()) }
}
}
impl Drop for Atom {
#[inline]
fn drop(&mut self) {
if !self.is_static() {
unsafe {
Gecko_ReleaseAtom(self.as_ptr());
}
}
}
}
impl Default for Atom {
#[inline]
fn default() -> Self {
atom!("")
}
}
impl fmt::Debug for Atom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
write!(w, "Gecko Atom({:p}, {})", self.0, self)
}
}
impl fmt::Display for Atom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
unsafe { (&*self.0).fmt(w) }
}
}
impl<'a> From<&'a str> for Atom {
#[inline]
fn from(string: &str) -> Atom {
debug_assert!(string.len() <= u32::max_value() as usize);
unsafe {
Atom(WeakAtom::new(Gecko_Atomize(
string.as_ptr() as *const _,
string.len() as u32,
)))
}
}
}
impl<'a> From<&'a [u16]> for Atom {
#[inline]
fn from(slice: &[u16]) -> Atom {
Atom::from(&*nsStr::from(slice))
}
}
impl<'a> From<&'a nsAString> for Atom {
#[inline]
fn from(string: &nsAString) -> Atom {
unsafe { Atom(WeakAtom::new(Gecko_Atomize16(string))) }
}
}
impl<'a> From<Cow<'a, str>> for Atom {
#[inline]
fn from(string: Cow<'a, str>) -> Atom {
Atom::from(&*string)
}
}
impl From<String> for Atom {
#[inline]
fn from(string: String) -> Atom {
Atom::from(&*string)
}
}
malloc_size_of_is_0!(Atom);
impl SpecifiedValueInfo for Atom {}
| new | identifier_name |
mod.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! A drop-in replacement for string_cache, but backed by Gecko `nsAtom`s.
use gecko_bindings::bindings::Gecko_AddRefAtom;
use gecko_bindings::bindings::Gecko_Atomize;
use gecko_bindings::bindings::Gecko_Atomize16;
use gecko_bindings::bindings::Gecko_ReleaseAtom;
use gecko_bindings::structs::{nsAtom, nsAtom_AtomKind, nsDynamicAtom, nsStaticAtom};
use nsstring::{nsAString, nsStr};
use precomputed_hash::PrecomputedHash;
use std::{mem, slice, str};
use std::borrow::{Borrow, Cow};
use std::char::{self, DecodeUtf16};
use std::fmt::{self, Write};
use std::hash::{Hash, Hasher};
use std::iter::Cloned;
use std::ops::Deref;
use style_traits::SpecifiedValueInfo;
#[macro_use]
#[allow(improper_ctypes, non_camel_case_types, missing_docs)]
pub mod atom_macro {
include!(concat!(env!("OUT_DIR"), "/gecko/atom_macro.rs"));
}
#[macro_use]
pub mod namespace;
pub use self::namespace::{Namespace, WeakNamespace};
macro_rules! local_name {
($s:tt) => {
atom!($s)
};
}
/// A strong reference to a Gecko atom.
#[derive(Eq, PartialEq)]
pub struct Atom(*mut WeakAtom);
/// An atom *without* a strong reference.
///
/// Only usable as `&'a WeakAtom`,
/// where `'a` is the lifetime of something that holds a strong reference to that atom.
pub struct WeakAtom(nsAtom);
/// A BorrowedAtom for Gecko is just a weak reference to an `nsAtom` whose
/// refcount hasn't been bumped.
pub type BorrowedAtom<'a> = &'a WeakAtom;
impl Deref for Atom {
type Target = WeakAtom;
#[inline]
fn deref(&self) -> &WeakAtom {
unsafe { &*self.0 }
}
}
impl PrecomputedHash for Atom {
#[inline]
fn precomputed_hash(&self) -> u32 {
self.get_hash()
}
}
impl Borrow<WeakAtom> for Atom {
#[inline]
fn borrow(&self) -> &WeakAtom {
self
}
}
impl Eq for WeakAtom {}
impl PartialEq for WeakAtom {
#[inline]
fn eq(&self, other: &Self) -> bool {
let weak: *const WeakAtom = self;
let other: *const WeakAtom = other;
weak == other
}
}
unsafe impl Send for Atom {}
unsafe impl Sync for Atom {}
unsafe impl Sync for WeakAtom {}
impl WeakAtom {
/// Construct a `WeakAtom` from a raw `nsAtom`.
#[inline]
pub unsafe fn new<'a>(atom: *const nsAtom) -> &'a mut Self {
&mut *(atom as *mut WeakAtom)
}
/// Clone this atom, bumping the refcount if the atom is not static.
#[inline]
pub fn clone(&self) -> Atom {
unsafe { Atom::from_raw(self.as_ptr()) }
}
/// Get the atom hash.
#[inline]
pub fn get_hash(&self) -> u32 {
self.0.mHash
}
/// Get the atom as a slice of utf-16 chars.
#[inline]
pub fn as_slice(&self) -> &[u16] {
let string = if self.is_static() {
let atom_ptr = self.as_ptr() as *const nsStaticAtom;
let string_offset = unsafe { (*atom_ptr).mStringOffset };
let string_offset = -(string_offset as isize);
let u8_ptr = atom_ptr as *const u8;
// It is safe to use offset() here because both addresses are within
// the same struct, e.g. mozilla::detail::gGkAtoms.
unsafe { u8_ptr.offset(string_offset) as *const u16 }
} else {
let atom_ptr = self.as_ptr() as *const nsDynamicAtom;
unsafe { (*(atom_ptr)).mString }
};
unsafe { slice::from_raw_parts(string, self.len() as usize) }
}
// NOTE: don't expose this, since it's slow and easy to misuse.
fn chars(&self) -> DecodeUtf16<Cloned<slice::Iter<u16>>> {
char::decode_utf16(self.as_slice().iter().cloned())
}
/// Execute `cb` with the string that this atom represents.
///
/// Find alternatives to this function when possible, please, since it's
/// pretty slow.
pub fn with_str<F, Output>(&self, cb: F) -> Output
where
F: FnOnce(&str) -> Output,
{
let mut buffer: [u8; 64] = unsafe { mem::uninitialized() };
// The total string length in utf16 is going to be less than or equal to
// the slice length (each utf16 character is going to take at least one
// and at most 2 items in the utf16 slice).
//
// Each of those characters will take at most four bytes in the utf8
// one. Thus if the slice is less than 64 / 4 (16) we can guarantee that
// we'll decode it in place.
let owned_string;
let len = self.len();
let utf8_slice = if len <= 16 {
let mut total_len = 0;
for c in self.chars() {
let c = c.unwrap_or(char::REPLACEMENT_CHARACTER);
let utf8_len = c.encode_utf8(&mut buffer[total_len..]).len();
total_len += utf8_len;
}
let slice = unsafe { str::from_utf8_unchecked(&buffer[..total_len]) };
debug_assert_eq!(slice, String::from_utf16_lossy(self.as_slice()));
slice
} else {
owned_string = String::from_utf16_lossy(self.as_slice());
&*owned_string
};
cb(utf8_slice)
}
/// Returns whether this atom is static.
#[inline]
pub fn is_static(&self) -> bool {
unsafe { (*self.as_ptr()).mKind() == nsAtom_AtomKind::Static as u32 }
}
/// Returns the length of the atom string.
#[inline]
pub fn len(&self) -> u32 {
unsafe { (*self.as_ptr()).mLength() }
}
/// Returns whether this atom is the empty string.
#[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns the atom as a mutable pointer.
#[inline]
pub fn as_ptr(&self) -> *mut nsAtom {
let const_ptr: *const nsAtom = &self.0;
const_ptr as *mut nsAtom
}
/// Convert this atom to ASCII lower-case
pub fn to_ascii_lowercase(&self) -> Atom {
let slice = self.as_slice();
match slice
.iter()
.position(|&char16| (b'A' as u16) <= char16 && char16 <= (b'Z' as u16))
{
None => self.clone(),
Some(i) => | ,
}
}
/// Return whether two atoms are ASCII-case-insensitive matches
pub fn eq_ignore_ascii_case(&self, other: &Self) -> bool {
if self == other {
return true;
}
let a = self.as_slice();
let b = other.as_slice();
a.len() == b.len() && a.iter().zip(b).all(|(&a16, &b16)| {
if a16 <= 0x7F && b16 <= 0x7F {
(a16 as u8).eq_ignore_ascii_case(&(b16 as u8))
} else {
a16 == b16
}
})
}
/// Return whether this atom is an ASCII-case-insensitive match for the given string
pub fn eq_str_ignore_ascii_case(&self, other: &str) -> bool {
self.chars()
.map(|r| r.map(|c: char| c.to_ascii_lowercase()))
.eq(other.chars().map(|c: char| Ok(c.to_ascii_lowercase())))
}
}
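// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): the "stack buffer if
// it fits, heap Vec otherwise" pattern that `to_ascii_lowercase` above uses
// once it has found the first ASCII upper-case unit. The helper name is
// hypothetical; unlike the method above, it always returns a fresh Vec.
// ---------------------------------------------------------------------------
#[allow(dead_code)]
fn utf16_to_ascii_lowercase(slice: &[u16]) -> Vec<u16> {
    let mut buffer = [0u16; 64];
    let mut vec;
    let mutable_slice = if let Some(prefix) = buffer.get_mut(..slice.len()) {
        prefix.copy_from_slice(slice);
        prefix
    } else {
        vec = slice.to_vec();
        &mut vec[..]
    };
    for unit in mutable_slice.iter_mut() {
        if *unit <= 0x7F {
            *unit = (*unit as u8).to_ascii_lowercase() as u16;
        }
    }
    mutable_slice.to_vec()
}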
impl fmt::Debug for WeakAtom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
write!(w, "Gecko WeakAtom({:p}, {})", self, self)
}
}
impl fmt::Display for WeakAtom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
for c in self.chars() {
w.write_char(c.unwrap_or(char::REPLACEMENT_CHARACTER))?
}
Ok(())
}
}
impl Atom {
/// Execute a callback with the atom represented by `ptr`.
pub unsafe fn with<F, R>(ptr: *mut nsAtom, callback: F) -> R
where
F: FnOnce(&Atom) -> R,
{
let atom = Atom(WeakAtom::new(ptr));
let ret = callback(&atom);
mem::forget(atom);
ret
}
/// Creates an atom from a static atom pointer without checking in release
/// builds.
///
/// Right now it's only used by the atom macro, and ideally it should stay
/// that way; now that we have sugar for is_static, creating atoms using
/// Atom::from_raw should involve almost no overhead.
#[inline]
pub unsafe fn from_static(ptr: *mut nsStaticAtom) -> Self {
let atom = Atom(ptr as *mut WeakAtom);
debug_assert!(
atom.is_static(),
"Called from_static for a non-static atom!"
);
atom
}
/// Creates an atom from an atom pointer.
#[inline(always)]
pub unsafe fn from_raw(ptr: *mut nsAtom) -> Self {
let atom = Atom(ptr as *mut WeakAtom);
if !atom.is_static() {
Gecko_AddRefAtom(ptr);
}
atom
}
/// Creates an atom from a dynamic atom pointer that has already had AddRef
/// called on it.
#[inline]
pub unsafe fn from_addrefed(ptr: *mut nsAtom) -> Self {
assert!(!ptr.is_null());
Atom(WeakAtom::new(ptr))
}
/// Convert this atom into an addrefed nsAtom pointer.
#[inline]
pub fn into_addrefed(self) -> *mut nsAtom {
let ptr = self.as_ptr();
mem::forget(self);
ptr
}
}
impl Hash for Atom {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
state.write_u32(self.get_hash());
}
}
impl Hash for WeakAtom {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
state.write_u32(self.get_hash());
}
}
impl Clone for Atom {
#[inline(always)]
fn clone(&self) -> Atom {
unsafe { Atom::from_raw(self.as_ptr()) }
}
}
impl Drop for Atom {
#[inline]
fn drop(&mut self) {
if !self.is_static() {
unsafe {
Gecko_ReleaseAtom(self.as_ptr());
}
}
}
}
impl Default for Atom {
#[inline]
fn default() -> Self {
atom!("")
}
}
impl fmt::Debug for Atom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
write!(w, "Gecko Atom({:p}, {})", self.0, self)
}
}
impl fmt::Display for Atom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
unsafe { (&*self.0).fmt(w) }
}
}
impl<'a> From<&'a str> for Atom {
#[inline]
fn from(string: &str) -> Atom {
debug_assert!(string.len() <= u32::max_value() as usize);
unsafe {
Atom(WeakAtom::new(Gecko_Atomize(
string.as_ptr() as *const _,
string.len() as u32,
)))
}
}
}
impl<'a> From<&'a [u16]> for Atom {
#[inline]
fn from(slice: &[u16]) -> Atom {
Atom::from(&*nsStr::from(slice))
}
}
impl<'a> From<&'a nsAString> for Atom {
#[inline]
fn from(string: &nsAString) -> Atom {
unsafe { Atom(WeakAtom::new(Gecko_Atomize16(string))) }
}
}
impl<'a> From<Cow<'a, str>> for Atom {
#[inline]
fn from(string: Cow<'a, str>) -> Atom {
Atom::from(&*string)
}
}
impl From<String> for Atom {
#[inline]
fn from(string: String) -> Atom {
Atom::from(&*string)
}
}
malloc_size_of_is_0!(Atom);
impl SpecifiedValueInfo for Atom {}
| {
let mut buffer: [u16; 64] = unsafe { mem::uninitialized() };
let mut vec;
let mutable_slice = if let Some(buffer_prefix) = buffer.get_mut(..slice.len()) {
buffer_prefix.copy_from_slice(slice);
buffer_prefix
} else {
vec = slice.to_vec();
&mut vec
};
for char16 in &mut mutable_slice[i..] {
if *char16 <= 0x7F {
*char16 = (*char16 as u8).to_ascii_lowercase() as u16
}
}
Atom::from(&*mutable_slice)
} | conditional_block |
mod.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![allow(unsafe_code)]
//! A drop-in replacement for string_cache, but backed by Gecko `nsAtom`s.
use gecko_bindings::bindings::Gecko_AddRefAtom;
use gecko_bindings::bindings::Gecko_Atomize;
use gecko_bindings::bindings::Gecko_Atomize16;
use gecko_bindings::bindings::Gecko_ReleaseAtom;
use gecko_bindings::structs::{nsAtom, nsAtom_AtomKind, nsDynamicAtom, nsStaticAtom};
use nsstring::{nsAString, nsStr};
use precomputed_hash::PrecomputedHash;
use std::{mem, slice, str};
use std::borrow::{Borrow, Cow};
use std::char::{self, DecodeUtf16};
use std::fmt::{self, Write};
use std::hash::{Hash, Hasher};
use std::iter::Cloned;
use std::ops::Deref;
use style_traits::SpecifiedValueInfo;
#[macro_use]
#[allow(improper_ctypes, non_camel_case_types, missing_docs)]
pub mod atom_macro {
include!(concat!(env!("OUT_DIR"), "/gecko/atom_macro.rs"));
}
#[macro_use]
pub mod namespace;
pub use self::namespace::{Namespace, WeakNamespace};
macro_rules! local_name {
($s:tt) => {
atom!($s)
};
}
/// A strong reference to a Gecko atom.
#[derive(Eq, PartialEq)]
pub struct Atom(*mut WeakAtom);
/// An atom *without* a strong reference.
///
/// Only usable as `&'a WeakAtom`,
/// where `'a` is the lifetime of something that holds a strong reference to that atom.
pub struct WeakAtom(nsAtom);
/// A BorrowedAtom for Gecko is just a weak reference to an `nsAtom` whose
/// refcount hasn't been bumped.
pub type BorrowedAtom<'a> = &'a WeakAtom;
impl Deref for Atom {
type Target = WeakAtom;
#[inline]
fn deref(&self) -> &WeakAtom {
unsafe { &*self.0 }
}
}
impl PrecomputedHash for Atom {
#[inline]
fn precomputed_hash(&self) -> u32 {
self.get_hash()
}
}
impl Borrow<WeakAtom> for Atom {
#[inline]
fn borrow(&self) -> &WeakAtom {
self
}
}
impl Eq for WeakAtom {}
impl PartialEq for WeakAtom {
#[inline]
fn eq(&self, other: &Self) -> bool {
let weak: *const WeakAtom = self;
let other: *const WeakAtom = other;
weak == other
}
}
unsafe impl Send for Atom {}
unsafe impl Sync for Atom {}
unsafe impl Sync for WeakAtom {}
impl WeakAtom {
/// Construct a `WeakAtom` from a raw `nsAtom`.
#[inline]
pub unsafe fn new<'a>(atom: *const nsAtom) -> &'a mut Self {
&mut *(atom as *mut WeakAtom)
}
/// Clone this atom, bumping the refcount if the atom is not static.
#[inline]
pub fn clone(&self) -> Atom {
unsafe { Atom::from_raw(self.as_ptr()) }
}
/// Get the atom hash.
#[inline]
pub fn get_hash(&self) -> u32 {
self.0.mHash
}
/// Get the atom as a slice of utf-16 chars.
#[inline]
pub fn as_slice(&self) -> &[u16] {
let string = if self.is_static() {
let atom_ptr = self.as_ptr() as *const nsStaticAtom;
let string_offset = unsafe { (*atom_ptr).mStringOffset };
let string_offset = -(string_offset as isize);
let u8_ptr = atom_ptr as *const u8;
// It is safe to use offset() here because both addresses are within
// the same struct, e.g. mozilla::detail::gGkAtoms.
unsafe { u8_ptr.offset(string_offset) as *const u16 }
} else {
let atom_ptr = self.as_ptr() as *const nsDynamicAtom;
unsafe { (*(atom_ptr)).mString }
};
unsafe { slice::from_raw_parts(string, self.len() as usize) }
}
// NOTE: don't expose this, since it's slow and easy to misuse.
fn chars(&self) -> DecodeUtf16<Cloned<slice::Iter<u16>>> {
char::decode_utf16(self.as_slice().iter().cloned())
}
/// Execute `cb` with the string that this atom represents.
///
/// Find alternatives to this function when possible, please, since it's
/// pretty slow.
pub fn with_str<F, Output>(&self, cb: F) -> Output
where
F: FnOnce(&str) -> Output,
{
let mut buffer: [u8; 64] = unsafe { mem::uninitialized() };
// The total string length in utf16 is going to be less than or equal to
// the slice length (each utf16 character is going to take at least one
// and at most 2 items in the utf16 slice).
//
// Each of those characters will take at most four bytes in the utf8
// one. Thus if the slice is less than 64 / 4 (16) we can guarantee that
// we'll decode it in place.
let owned_string;
let len = self.len();
let utf8_slice = if len <= 16 {
let mut total_len = 0;
for c in self.chars() {
let c = c.unwrap_or(char::REPLACEMENT_CHARACTER);
let utf8_len = c.encode_utf8(&mut buffer[total_len..]).len();
total_len += utf8_len;
}
let slice = unsafe { str::from_utf8_unchecked(&buffer[..total_len]) };
debug_assert_eq!(slice, String::from_utf16_lossy(self.as_slice()));
slice
} else {
owned_string = String::from_utf16_lossy(self.as_slice());
&*owned_string
};
cb(utf8_slice)
}
/// Returns whether this atom is static.
#[inline]
pub fn is_static(&self) -> bool {
unsafe { (*self.as_ptr()).mKind() == nsAtom_AtomKind::Static as u32 }
}
/// Returns the length of the atom string.
#[inline]
pub fn len(&self) -> u32 {
unsafe { (*self.as_ptr()).mLength() }
}
/// Returns whether this atom is the empty string.
#[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns the atom as a mutable pointer.
#[inline]
pub fn as_ptr(&self) -> *mut nsAtom {
let const_ptr: *const nsAtom = &self.0;
const_ptr as *mut nsAtom
}
/// Convert this atom to ASCII lower-case
pub fn to_ascii_lowercase(&self) -> Atom {
let slice = self.as_slice();
match slice
.iter()
.position(|&char16| (b'A' as u16) <= char16 && char16 <= (b'Z' as u16))
{
None => self.clone(),
Some(i) => {
let mut buffer: [u16; 64] = unsafe { mem::uninitialized() };
let mut vec;
let mutable_slice = if let Some(buffer_prefix) = buffer.get_mut(..slice.len()) {
buffer_prefix.copy_from_slice(slice);
buffer_prefix
} else {
vec = slice.to_vec();
&mut vec
};
for char16 in &mut mutable_slice[i..] {
if *char16 <= 0x7F {
*char16 = (*char16 as u8).to_ascii_lowercase() as u16
}
}
Atom::from(&*mutable_slice)
},
}
}
/// Return whether two atoms are ASCII-case-insensitive matches
pub fn eq_ignore_ascii_case(&self, other: &Self) -> bool {
if self == other {
return true;
}
let a = self.as_slice();
let b = other.as_slice();
a.len() == b.len() && a.iter().zip(b).all(|(&a16, &b16)| {
if a16 <= 0x7F && b16 <= 0x7F {
(a16 as u8).eq_ignore_ascii_case(&(b16 as u8))
} else {
a16 == b16
}
})
}
/// Return whether this atom is an ASCII-case-insensitive match for the given string
pub fn eq_str_ignore_ascii_case(&self, other: &str) -> bool {
self.chars()
.map(|r| r.map(|c: char| c.to_ascii_lowercase()))
.eq(other.chars().map(|c: char| Ok(c.to_ascii_lowercase())))
}
}
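// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): the lossy UTF-16
// printing loop used by the `fmt::Display` impls below, written as a
// standalone (hypothetical) helper so the replacement-character behaviour can
// be exercised with any `fmt::Write` sink.
// ---------------------------------------------------------------------------
#[allow(dead_code)]
fn write_utf16_lossy<W: std::fmt::Write>(out: &mut W, units: &[u16]) -> std::fmt::Result {
    for c in std::char::decode_utf16(units.iter().cloned()) {
        out.write_char(c.unwrap_or(std::char::REPLACEMENT_CHARACTER))?;
    }
    Ok(())
}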
impl fmt::Debug for WeakAtom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
write!(w, "Gecko WeakAtom({:p}, {})", self, self)
}
}
impl fmt::Display for WeakAtom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
for c in self.chars() {
w.write_char(c.unwrap_or(char::REPLACEMENT_CHARACTER))?
}
Ok(())
}
}
impl Atom {
/// Execute a callback with the atom represented by `ptr`.
pub unsafe fn with<F, R>(ptr: *mut nsAtom, callback: F) -> R
where
F: FnOnce(&Atom) -> R,
{
let atom = Atom(WeakAtom::new(ptr));
let ret = callback(&atom);
mem::forget(atom);
ret
}
/// Creates an atom from a static atom pointer without checking in release
/// builds.
///
/// Right now it's only used by the atom macro, and ideally it should stay
/// that way; now that we have sugar for is_static, creating atoms using
/// Atom::from_raw should involve almost no overhead.
#[inline]
pub unsafe fn from_static(ptr: *mut nsStaticAtom) -> Self {
let atom = Atom(ptr as *mut WeakAtom);
debug_assert!(
atom.is_static(),
"Called from_static for a non-static atom!"
);
atom
}
/// Creates an atom from an atom pointer.
#[inline(always)]
pub unsafe fn from_raw(ptr: *mut nsAtom) -> Self {
let atom = Atom(ptr as *mut WeakAtom);
if !atom.is_static() {
Gecko_AddRefAtom(ptr);
}
atom
}
/// Creates an atom from a dynamic atom pointer that has already had AddRef
/// called on it.
#[inline]
pub unsafe fn from_addrefed(ptr: *mut nsAtom) -> Self {
assert!(!ptr.is_null());
Atom(WeakAtom::new(ptr))
}
/// Convert this atom into an addrefed nsAtom pointer.
#[inline]
pub fn into_addrefed(self) -> *mut nsAtom {
let ptr = self.as_ptr();
mem::forget(self);
ptr
}
}
impl Hash for Atom {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
state.write_u32(self.get_hash());
}
}
impl Hash for WeakAtom {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
state.write_u32(self.get_hash());
}
}
impl Clone for Atom {
#[inline(always)]
fn clone(&self) -> Atom {
unsafe { Atom::from_raw(self.as_ptr()) }
}
}
impl Drop for Atom {
#[inline]
fn drop(&mut self) {
if !self.is_static() {
unsafe {
Gecko_ReleaseAtom(self.as_ptr());
}
}
}
}
impl Default for Atom {
#[inline]
fn default() -> Self {
atom!("")
}
}
impl fmt::Debug for Atom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result |
}
impl fmt::Display for Atom {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
unsafe { (&*self.0).fmt(w) }
}
}
impl<'a> From<&'a str> for Atom {
#[inline]
fn from(string: &str) -> Atom {
debug_assert!(string.len() <= u32::max_value() as usize);
unsafe {
Atom(WeakAtom::new(Gecko_Atomize(
string.as_ptr() as *const _,
string.len() as u32,
)))
}
}
}
impl<'a> From<&'a [u16]> for Atom {
#[inline]
fn from(slice: &[u16]) -> Atom {
Atom::from(&*nsStr::from(slice))
}
}
impl<'a> From<&'a nsAString> for Atom {
#[inline]
fn from(string: &nsAString) -> Atom {
unsafe { Atom(WeakAtom::new(Gecko_Atomize16(string))) }
}
}
impl<'a> From<Cow<'a, str>> for Atom {
#[inline]
fn from(string: Cow<'a, str>) -> Atom {
Atom::from(&*string)
}
}
impl From<String> for Atom {
#[inline]
fn from(string: String) -> Atom {
Atom::from(&*string)
}
}
malloc_size_of_is_0!(Atom);
impl SpecifiedValueInfo for Atom {}
| {
write!(w, "Gecko Atom({:p}, {})", self.0, self)
} | identifier_body |
__init__.py | # -*- coding: utf-8 -*-
#
## This file is part of Invenio.
## Copyright (C) 2011, 2012, 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
invenio.ext.sqlalchemy
----------------------
This module provides initialization and configuration for
the `flask.ext.sqlalchemy` module.
"""
import sqlalchemy
from flask.ext.registry import RegistryProxy, ModuleAutoDiscoveryRegistry
from flask.ext.sqlalchemy import SQLAlchemy as FlaskSQLAlchemy
from sqlalchemy import event
from sqlalchemy.ext.hybrid import hybrid_property, Comparator
from sqlalchemy.pool import Pool
from sqlalchemy_utils import JSONType
from invenio.utils.hash import md5
from .expressions import AsBINARY
from .types import MarshalBinary, PickleBinary, GUID
from .utils import get_model_type
def _include_sqlalchemy(obj, engine=None):
#for module in sqlalchemy, sqlalchemy.orm:
# for key in module.__all__:
# if not hasattr(obj, key):
# setattr(obj, key,
# getattr(module, key))
if engine == 'mysql':
from sqlalchemy.dialects import mysql as engine_types
else:
from sqlalchemy import types as engine_types
# Length is provided to JSONType to ensure MySQL uses LONGTEXT instead
# of TEXT which only provides for 64kb storage compared to 4gb for
# LONGTEXT.
setattr(obj, 'JSON', JSONType(length=2**32-2))
setattr(obj, 'Char', engine_types.CHAR)
try:
setattr(obj, 'TinyText', engine_types.TINYTEXT)
except:
setattr(obj, 'TinyText', engine_types.TEXT)
setattr(obj, 'hybrid_property', hybrid_property)
try:
setattr(obj, 'Double', engine_types.DOUBLE)
except:
setattr(obj, 'Double', engine_types.FLOAT)
setattr(obj, 'Integer', engine_types.INTEGER)
setattr(obj, 'SmallInteger', engine_types.SMALLINT)
try:
setattr(obj, 'MediumInteger', engine_types.MEDIUMINT)
except:
setattr(obj, 'MediumInteger', engine_types.INT)
setattr(obj, 'BigInteger', engine_types.BIGINT)
try:
setattr(obj, 'TinyInteger', engine_types.TINYINT)
except:
setattr(obj, 'TinyInteger', engine_types.INT)
setattr(obj, 'Binary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iLargeBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iMediumBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'UUID', GUID)
if engine == 'mysql':
from .engines import mysql as dummy_mysql # noqa
# module = invenio.sqlalchemyutils_mysql
# for key in module.__dict__:
# setattr(obj, key,
# getattr(module, key))
obj.AsBINARY = AsBINARY
obj.MarshalBinary = MarshalBinary
obj.PickleBinary = PickleBinary
## Overwrite :meth:`MutableDict.update` to detect changes.
from sqlalchemy.ext.mutable import MutableDict
def update_mutable_dict(self, *args, **kwargs):
super(MutableDict, self).update(*args, **kwargs)
self.changed()
MutableDict.update = update_mutable_dict
obj.MutableDict = MutableDict
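# Illustrative sketch (not part of the original module): each try/except above
# simply falls back to a portable column type when the dialect module lacks the
# MySQL-specific one. The helper below is hypothetical (not Invenio API) and
# only captures that pattern more compactly.
def _engine_type_or(engine_types, name, fallback):
    """Return ``getattr(engine_types, name)`` if it exists, else ``fallback``."""
    return getattr(engine_types, name, fallback)
# e.g. inside _include_sqlalchemy:
# setattr(obj, 'TinyText', _engine_type_or(engine_types, 'TINYTEXT', engine_types.TEXT))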
class PasswordComparator(Comparator):
def __eq__(self, other):
return self.__clause_element__() == self.hash(other)
def | (self, password):
if db.engine.name != 'mysql':
return md5(password).digest()
email = self.__clause_element__().table.columns.email
return db.func.aes_encrypt(email, password)
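# Illustrative sketch (not part of this module): how a model could attach the
# comparator above to a hybrid password attribute. The model, column names and
# query below are hypothetical, not the actual Invenio accounts schema.
#
#     class User(db.Model):
#         email = db.Column(db.String(255))
#         _password = db.Column('password', db.iBinary)
#
#         @db.hybrid_property
#         def password(self):
#             return self._password
#
#         @password.comparator
#         def password(cls):
#             return db.PasswordComparator(cls._password)
#
# With that in place, ``User.query.filter(User.password == 'secret')`` compares
# against ``aes_encrypt(email, 'secret')`` on MySQL and an MD5 digest on other
# engines, matching ``PasswordComparator.hash()`` above.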
def autocommit_on_checkin(dbapi_con, con_record):
"""Calls autocommit on raw mysql connection for fixing bug in MySQL 5.5"""
try:
dbapi_con.autocommit(True)
except:
pass
#FIXME
#from invenio.ext.logging import register_exception
#register_exception()
## Possibly register globally.
#event.listen(Pool, 'checkin', autocommit_on_checkin)
class SQLAlchemy(FlaskSQLAlchemy):
"""Database object."""
PasswordComparator = PasswordComparator
def init_app(self, app):
super(self.__class__, self).init_app(app)
engine = app.config.get('CFG_DATABASE_TYPE', 'mysql')
self.Model = get_model_type(self.Model)
if engine == 'mysql':
self.Model.__table_args__ = {'keep_existing': True,
'extend_existing': False,
'mysql_engine': 'MyISAM',
'mysql_charset': 'utf8'}
_include_sqlalchemy(self, engine=engine)
def __getattr__(self, name):
# This is only called when the normal mechanism fails, so in practice
# should never be called.
# It is only provided to satisfy pylint that it is okay not to
# raise E1101 errors in the client code.
# :see http://stackoverflow.com/a/3515234/780928
raise AttributeError("%r instance has no attribute %r" % (self, name))
def schemadiff(self, excludeTables=None):
from migrate.versioning import schemadiff
return schemadiff.getDiffOfModelAgainstDatabase(self.metadata,
self.engine,
excludeTables=excludeTables)
def apply_driver_hacks(self, app, info, options):
"""
This method is called before engine creation.
"""
# Don't forget to apply hacks defined on parent object.
super(self.__class__, self).apply_driver_hacks(app, info, options)
if info.drivername == 'mysql':
options.setdefault('execution_options', {'autocommit': True,
'use_unicode': False,
'charset': 'utf8mb4',
})
event.listen(Pool, 'checkin', autocommit_on_checkin)
db = SQLAlchemy()
"""
Provides access to :class:`~.SQLAlchemy` instance.
"""
models = RegistryProxy('models', ModuleAutoDiscoveryRegistry, 'models')
def setup_app(app):
"""Setup SQLAlchemy extension."""
if 'SQLALCHEMY_DATABASE_URI' not in app.config:
from sqlalchemy.engine.url import URL
cfg = app.config
app.config['SQLALCHEMY_DATABASE_URI'] = URL(
cfg.get('CFG_DATABASE_TYPE', 'mysql'),
username=cfg.get('CFG_DATABASE_USER'),
password=cfg.get('CFG_DATABASE_PASS'),
host=cfg.get('CFG_DATABASE_HOST'),
database=cfg.get('CFG_DATABASE_NAME'),
port=cfg.get('CFG_DATABASE_PORT'),
)
## Let's initialize database.
db.init_app(app)
return app
| hash | identifier_name |
__init__.py | # -*- coding: utf-8 -*-
#
## This file is part of Invenio.
## Copyright (C) 2011, 2012, 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
invenio.ext.sqlalchemy
----------------------
This module provides initialization and configuration for
the `flask.ext.sqlalchemy` module.
"""
import sqlalchemy
from flask.ext.registry import RegistryProxy, ModuleAutoDiscoveryRegistry
from flask.ext.sqlalchemy import SQLAlchemy as FlaskSQLAlchemy
from sqlalchemy import event
from sqlalchemy.ext.hybrid import hybrid_property, Comparator
from sqlalchemy.pool import Pool
from sqlalchemy_utils import JSONType
from invenio.utils.hash import md5
from .expressions import AsBINARY
from .types import MarshalBinary, PickleBinary, GUID
from .utils import get_model_type
def _include_sqlalchemy(obj, engine=None):
#for module in sqlalchemy, sqlalchemy.orm:
# for key in module.__all__:
# if not hasattr(obj, key):
# setattr(obj, key,
# getattr(module, key))
if engine == 'mysql':
from sqlalchemy.dialects import mysql as engine_types
else:
from sqlalchemy import types as engine_types
# Length is provided to JSONType to ensure MySQL uses LONGTEXT instead
# of TEXT which only provides for 64kb storage compared to 4gb for
# LONGTEXT.
setattr(obj, 'JSON', JSONType(length=2**32-2))
setattr(obj, 'Char', engine_types.CHAR)
try:
setattr(obj, 'TinyText', engine_types.TINYTEXT)
except:
setattr(obj, 'TinyText', engine_types.TEXT)
setattr(obj, 'hybrid_property', hybrid_property)
try:
setattr(obj, 'Double', engine_types.DOUBLE)
except:
setattr(obj, 'Double', engine_types.FLOAT)
setattr(obj, 'Integer', engine_types.INTEGER)
setattr(obj, 'SmallInteger', engine_types.SMALLINT)
try:
setattr(obj, 'MediumInteger', engine_types.MEDIUMINT)
except:
setattr(obj, 'MediumInteger', engine_types.INT)
setattr(obj, 'BigInteger', engine_types.BIGINT)
try:
setattr(obj, 'TinyInteger', engine_types.TINYINT)
except:
setattr(obj, 'TinyInteger', engine_types.INT)
setattr(obj, 'Binary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iLargeBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iMediumBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'UUID', GUID)
if engine == 'mysql':
from .engines import mysql as dummy_mysql # noqa
# module = invenio.sqlalchemyutils_mysql
# for key in module.__dict__:
# setattr(obj, key,
# getattr(module, key))
obj.AsBINARY = AsBINARY
obj.MarshalBinary = MarshalBinary
obj.PickleBinary = PickleBinary
## Overwrite :meth:`MutableDict.update` to detect changes.
from sqlalchemy.ext.mutable import MutableDict
def update_mutable_dict(self, *args, **kwargs):
super(MutableDict, self).update(*args, **kwargs)
self.changed()
MutableDict.update = update_mutable_dict
obj.MutableDict = MutableDict
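# Illustrative sketch (not part of the original module): the ``update`` patch
# above exists so that bulk updates on a mutable JSON column still mark the
# attribute dirty. Model and column names below are hypothetical.
#
#     class Record(db.Model):
#         id = db.Column(db.Integer, primary_key=True)
#         data = db.Column(MutableDict.as_mutable(db.JSON), default=dict)
#
#     record.data.update({'title': 'x'})  # triggers MutableDict.changed()
#     db.session.commit()                 # the change is actually flushed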
class PasswordComparator(Comparator):
def __eq__(self, other):
return self.__clause_element__() == self.hash(other)
def hash(self, password):
if db.engine.name != 'mysql':
return md5(password).digest()
email = self.__clause_element__().table.columns.email
return db.func.aes_encrypt(email, password)
def autocommit_on_checkin(dbapi_con, con_record):
"""Calls autocommit on raw mysql connection for fixing bug in MySQL 5.5"""
try:
dbapi_con.autocommit(True)
except:
pass
#FIXME
#from invenio.ext.logging import register_exception
#register_exception()
## Possibly register globally.
#event.listen(Pool, 'checkin', autocommit_on_checkin)
class SQLAlchemy(FlaskSQLAlchemy):
"""Database object."""
PasswordComparator = PasswordComparator
def init_app(self, app):
super(self.__class__, self).init_app(app)
engine = app.config.get('CFG_DATABASE_TYPE', 'mysql')
self.Model = get_model_type(self.Model)
if engine == 'mysql':
self.Model.__table_args__ = {'keep_existing': True,
'extend_existing': False,
'mysql_engine': 'MyISAM',
'mysql_charset': 'utf8'}
_include_sqlalchemy(self, engine=engine)
def __getattr__(self, name):
# This is only called when the normal mechanism fails, so in practice
# should never be called.
# It is only provided to satisfy pylint that it is okay not to
# raise E1101 errors in the client code.
# :see http://stackoverflow.com/a/3515234/780928
raise AttributeError("%r instance has no attribute %r" % (self, name))
def schemadiff(self, excludeTables=None):
from migrate.versioning import schemadiff
return schemadiff.getDiffOfModelAgainstDatabase(self.metadata,
self.engine,
excludeTables=excludeTables)
def apply_driver_hacks(self, app, info, options):
"""
This method is called before engine creation.
"""
# Don't forget to apply hacks defined on parent object.
super(self.__class__, self).apply_driver_hacks(app, info, options)
if info.drivername == 'mysql':
options.setdefault('execution_options', {'autocommit': True,
'use_unicode': False,
'charset': 'utf8mb4',
})
event.listen(Pool, 'checkin', autocommit_on_checkin)
db = SQLAlchemy()
"""
Provides access to :class:`~.SQLAlchemy` instance.
""" | models = RegistryProxy('models', ModuleAutoDiscoveryRegistry, 'models')
def setup_app(app):
"""Setup SQLAlchemy extension."""
if 'SQLALCHEMY_DATABASE_URI' not in app.config:
from sqlalchemy.engine.url import URL
cfg = app.config
app.config['SQLALCHEMY_DATABASE_URI'] = URL(
cfg.get('CFG_DATABASE_TYPE', 'mysql'),
username=cfg.get('CFG_DATABASE_USER'),
password=cfg.get('CFG_DATABASE_PASS'),
host=cfg.get('CFG_DATABASE_HOST'),
database=cfg.get('CFG_DATABASE_NAME'),
port=cfg.get('CFG_DATABASE_PORT'),
)
## Let's initialize database.
db.init_app(app)
return app | random_line_split |
|
__init__.py | # -*- coding: utf-8 -*-
#
## This file is part of Invenio.
## Copyright (C) 2011, 2012, 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
invenio.ext.sqlalchemy
----------------------
This module provides initialization and configuration for
the `flask.ext.sqlalchemy` module.
"""
import sqlalchemy
from flask.ext.registry import RegistryProxy, ModuleAutoDiscoveryRegistry
from flask.ext.sqlalchemy import SQLAlchemy as FlaskSQLAlchemy
from sqlalchemy import event
from sqlalchemy.ext.hybrid import hybrid_property, Comparator
from sqlalchemy.pool import Pool
from sqlalchemy_utils import JSONType
from invenio.utils.hash import md5
from .expressions import AsBINARY
from .types import MarshalBinary, PickleBinary, GUID
from .utils import get_model_type
def _include_sqlalchemy(obj, engine=None):
#for module in sqlalchemy, sqlalchemy.orm:
# for key in module.__all__:
# if not hasattr(obj, key):
# setattr(obj, key,
# getattr(module, key))
if engine == 'mysql':
from sqlalchemy.dialects import mysql as engine_types
else:
from sqlalchemy import types as engine_types
# Length is provided to JSONType to ensure MySQL uses LONGTEXT instead
# of TEXT which only provides for 64kb storage compared to 4gb for
# LONGTEXT.
setattr(obj, 'JSON', JSONType(length=2**32-2))
setattr(obj, 'Char', engine_types.CHAR)
try:
setattr(obj, 'TinyText', engine_types.TINYTEXT)
except:
setattr(obj, 'TinyText', engine_types.TEXT)
setattr(obj, 'hybrid_property', hybrid_property)
try:
setattr(obj, 'Double', engine_types.DOUBLE)
except:
setattr(obj, 'Double', engine_types.FLOAT)
setattr(obj, 'Integer', engine_types.INTEGER)
setattr(obj, 'SmallInteger', engine_types.SMALLINT)
try:
setattr(obj, 'MediumInteger', engine_types.MEDIUMINT)
except:
setattr(obj, 'MediumInteger', engine_types.INT)
setattr(obj, 'BigInteger', engine_types.BIGINT)
try:
setattr(obj, 'TinyInteger', engine_types.TINYINT)
except:
setattr(obj, 'TinyInteger', engine_types.INT)
setattr(obj, 'Binary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iLargeBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iMediumBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'UUID', GUID)
if engine == 'mysql':
from .engines import mysql as dummy_mysql # noqa
# module = invenio.sqlalchemyutils_mysql
# for key in module.__dict__:
# setattr(obj, key,
# getattr(module, key))
obj.AsBINARY = AsBINARY
obj.MarshalBinary = MarshalBinary
obj.PickleBinary = PickleBinary
## Overwrite :meth:`MutableDict.update` to detect changes.
from sqlalchemy.ext.mutable import MutableDict
def update_mutable_dict(self, *args, **kwargs):
super(MutableDict, self).update(*args, **kwargs)
self.changed()
MutableDict.update = update_mutable_dict
obj.MutableDict = MutableDict
class PasswordComparator(Comparator):
def __eq__(self, other):
return self.__clause_element__() == self.hash(other)
def hash(self, password):
if db.engine.name != 'mysql':
return md5(password).digest()
email = self.__clause_element__().table.columns.email
return db.func.aes_encrypt(email, password)
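# Illustrative sketch (not part of the original module): ``autocommit_on_checkin``
# below is meant to be registered as a pool event listener, as done in
# ``apply_driver_hacks``. A standalone (hypothetical) registration looks like:
#
#     from sqlalchemy import create_engine, event
#     from sqlalchemy.pool import Pool
#
#     event.listen(Pool, 'checkin', autocommit_on_checkin)
#     engine = create_engine('mysql://user:secret@localhost/invenio')
#     # every connection returned to the pool is switched back to autocommit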
def autocommit_on_checkin(dbapi_con, con_record):
"""Calls autocommit on raw mysql connection for fixing bug in MySQL 5.5"""
try:
dbapi_con.autocommit(True)
except:
pass
#FIXME
#from invenio.ext.logging import register_exception
#register_exception()
## Possibly register globally.
#event.listen(Pool, 'checkin', autocommit_on_checkin)
class SQLAlchemy(FlaskSQLAlchemy):
"""Database object."""
PasswordComparator = PasswordComparator
def init_app(self, app):
super(self.__class__, self).init_app(app)
engine = app.config.get('CFG_DATABASE_TYPE', 'mysql')
self.Model = get_model_type(self.Model)
if engine == 'mysql':
self.Model.__table_args__ = {'keep_existing': True,
'extend_existing': False,
'mysql_engine': 'MyISAM',
'mysql_charset': 'utf8'}
_include_sqlalchemy(self, engine=engine)
def __getattr__(self, name):
# This is only called when the normal mechanism fails, so in practice
# should never be called.
# It is only provided to satisfy pylint that it is okay not to
# raise E1101 errors in the client code.
# :see http://stackoverflow.com/a/3515234/780928
raise AttributeError("%r instance has no attribute %r" % (self, name))
def schemadiff(self, excludeTables=None):
from migrate.versioning import schemadiff
return schemadiff.getDiffOfModelAgainstDatabase(self.metadata,
self.engine,
excludeTables=excludeTables)
def apply_driver_hacks(self, app, info, options):
"""
This method is called before engine creation.
"""
# Don't forget to apply hacks defined on parent object.
super(self.__class__, self).apply_driver_hacks(app, info, options)
if info.drivername == 'mysql':
|
db = SQLAlchemy()
"""
Provides access to :class:`~.SQLAlchemy` instance.
"""
models = RegistryProxy('models', ModuleAutoDiscoveryRegistry, 'models')
def setup_app(app):
"""Setup SQLAlchemy extension."""
if 'SQLALCHEMY_DATABASE_URI' not in app.config:
from sqlalchemy.engine.url import URL
cfg = app.config
app.config['SQLALCHEMY_DATABASE_URI'] = URL(
cfg.get('CFG_DATABASE_TYPE', 'mysql'),
username=cfg.get('CFG_DATABASE_USER'),
password=cfg.get('CFG_DATABASE_PASS'),
host=cfg.get('CFG_DATABASE_HOST'),
database=cfg.get('CFG_DATABASE_NAME'),
port=cfg.get('CFG_DATABASE_PORT'),
)
## Let's initialize database.
db.init_app(app)
return app
| options.setdefault('execution_options', {'autocommit': True,
'use_unicode': False,
'charset': 'utf8mb4',
})
event.listen(Pool, 'checkin', autocommit_on_checkin) | conditional_block |
__init__.py | # -*- coding: utf-8 -*-
#
## This file is part of Invenio.
## Copyright (C) 2011, 2012, 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
invenio.ext.sqlalchemy
----------------------
This module provides initialization and configuration for
the `flask.ext.sqlalchemy` module.
"""
import sqlalchemy
from flask.ext.registry import RegistryProxy, ModuleAutoDiscoveryRegistry
from flask.ext.sqlalchemy import SQLAlchemy as FlaskSQLAlchemy
from sqlalchemy import event
from sqlalchemy.ext.hybrid import hybrid_property, Comparator
from sqlalchemy.pool import Pool
from sqlalchemy_utils import JSONType
from invenio.utils.hash import md5
from .expressions import AsBINARY
from .types import MarshalBinary, PickleBinary, GUID
from .utils import get_model_type
def _include_sqlalchemy(obj, engine=None):
#for module in sqlalchemy, sqlalchemy.orm:
# for key in module.__all__:
# if not hasattr(obj, key):
# setattr(obj, key,
# getattr(module, key))
if engine == 'mysql':
from sqlalchemy.dialects import mysql as engine_types
else:
from sqlalchemy import types as engine_types
# Length is provided to JSONType to ensure MySQL uses LONGTEXT instead
# of TEXT which only provides for 64kb storage compared to 4gb for
# LONGTEXT.
setattr(obj, 'JSON', JSONType(length=2**32-2))
setattr(obj, 'Char', engine_types.CHAR)
try:
setattr(obj, 'TinyText', engine_types.TINYTEXT)
except:
setattr(obj, 'TinyText', engine_types.TEXT)
setattr(obj, 'hybrid_property', hybrid_property)
try:
setattr(obj, 'Double', engine_types.DOUBLE)
except:
setattr(obj, 'Double', engine_types.FLOAT)
setattr(obj, 'Integer', engine_types.INTEGER)
setattr(obj, 'SmallInteger', engine_types.SMALLINT)
try:
setattr(obj, 'MediumInteger', engine_types.MEDIUMINT)
except:
setattr(obj, 'MediumInteger', engine_types.INT)
setattr(obj, 'BigInteger', engine_types.BIGINT)
try:
setattr(obj, 'TinyInteger', engine_types.TINYINT)
except:
setattr(obj, 'TinyInteger', engine_types.INT)
setattr(obj, 'Binary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iLargeBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iMediumBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'UUID', GUID)
if engine == 'mysql':
from .engines import mysql as dummy_mysql # noqa
# module = invenio.sqlalchemyutils_mysql
# for key in module.__dict__:
# setattr(obj, key,
# getattr(module, key))
obj.AsBINARY = AsBINARY
obj.MarshalBinary = MarshalBinary
obj.PickleBinary = PickleBinary
    ## Overwrite :meth:`MutableDict.update` to detect changes.
from sqlalchemy.ext.mutable import MutableDict
def update_mutable_dict(self, *args, **kwargs):
super(MutableDict, self).update(*args, **kwargs)
self.changed()
MutableDict.update = update_mutable_dict
obj.MutableDict = MutableDict
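    ## Illustrative sketch (an assumption, not part of the original module):
    ## with the patched ``update`` above, calling ``.update()`` on a mutable
    ## JSON column flags the record as dirty, so the change is flushed on
    ## commit.  The ``Record`` model and ``extra`` column are hypothetical
    ## names used only for this example.
    ##
    ##     class Record(db.Model):
    ##         id = db.Column(db.Integer, primary_key=True)
    ##         extra = db.Column(db.MutableDict.as_mutable(db.JSON), default=dict)
    ##
    ##     record = Record.query.get(1)
    ##     record.extra.update({'note': 'changed in place'})  # marks the row dirty
    ##     db.session.commit()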
class PasswordComparator(Comparator):
def __eq__(self, other):
return self.__clause_element__() == self.hash(other)
def hash(self, password):
if db.engine.name != 'mysql':
return md5(password).digest()
email = self.__clause_element__().table.columns.email
return db.func.aes_encrypt(email, password)
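## Illustrative sketch (an assumption, not part of the original module): a model
## would typically expose this comparator through a hybrid property so that
## ``User.password == 'secret'`` compares against the stored hash.  ``User`` and
## its columns are hypothetical names used only for this example.
##
##     class User(db.Model):
##         email = db.Column(db.String(255))
##         _password = db.Column('password', db.LargeBinary)
##
##         @db.hybrid_property
##         def password(self):
##             return self._password
##
##         @password.comparator
##         def password(cls):
##             return db.PasswordComparator(cls._password)
##
##     User.query.filter(User.password == 'secret')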
def autocommit_on_checkin(dbapi_con, con_record):
"""Calls autocommit on raw mysql connection for fixing bug in MySQL 5.5"""
try:
dbapi_con.autocommit(True)
except:
pass
#FIXME
#from invenio.ext.logging import register_exception
#register_exception()
## Possibly register globally.
#event.listen(Pool, 'checkin', autocommit_on_checkin)
class SQLAlchemy(FlaskSQLAlchemy):
"""Database object."""
PasswordComparator = PasswordComparator
def init_app(self, app):
super(self.__class__, self).init_app(app)
engine = app.config.get('CFG_DATABASE_TYPE', 'mysql')
self.Model = get_model_type(self.Model)
if engine == 'mysql':
self.Model.__table_args__ = {'keep_existing': True,
'extend_existing': False,
'mysql_engine': 'MyISAM',
'mysql_charset': 'utf8'}
_include_sqlalchemy(self, engine=engine)
def __getattr__(self, name):
# This is only called when the normal mechanism fails, so in practice
# should never be called.
# It is only provided to satisfy pylint that it is okay not to
# raise E1101 errors in the client code.
# :see http://stackoverflow.com/a/3515234/780928
raise AttributeError("%r instance has no attribute %r" % (self, name))
def schemadiff(self, excludeTables=None):
from migrate.versioning import schemadiff
return schemadiff.getDiffOfModelAgainstDatabase(self.metadata,
self.engine,
excludeTables=excludeTables)
def apply_driver_hacks(self, app, info, options):
|
db = SQLAlchemy()
"""
Provides access to the :class:`~.SQLAlchemy` instance.
"""
models = RegistryProxy('models', ModuleAutoDiscoveryRegistry, 'models')
def setup_app(app):
"""Setup SQLAlchemy extension."""
if 'SQLALCHEMY_DATABASE_URI' not in app.config:
from sqlalchemy.engine.url import URL
cfg = app.config
app.config['SQLALCHEMY_DATABASE_URI'] = URL(
cfg.get('CFG_DATABASE_TYPE', 'mysql'),
username=cfg.get('CFG_DATABASE_USER'),
password=cfg.get('CFG_DATABASE_PASS'),
host=cfg.get('CFG_DATABASE_HOST'),
database=cfg.get('CFG_DATABASE_NAME'),
port=cfg.get('CFG_DATABASE_PORT'),
)
    ## Let's initialize the database.
db.init_app(app)
return app
| """
This method is called before engine creation.
"""
# Don't forget to apply hacks defined on parent object.
super(self.__class__, self).apply_driver_hacks(app, info, options)
if info.drivername == 'mysql':
options.setdefault('execution_options', {'autocommit': True,
'use_unicode': False,
'charset': 'utf8mb4',
})
event.listen(Pool, 'checkin', autocommit_on_checkin) | identifier_body |
main.rs | use std::collections::HashMap;
use std::io::{self, BufRead};
use lazy_regex::regex;
use regex::Regex;
use structopt::StructOpt;
type H = HashMap<Header, Vec<(String, String, String)>>;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum Header {
Versioned { package: String, version: String },
Missing { package: String },
}
#[derive(Debug, StructOpt)]
#[structopt(
name = "commenter",
about = "Automates generation of bounds in build-constraints.yaml"
)]
enum Opt {
Clear,
Add,
Outdated,
}
fn main() {
let opt = Opt::from_args();
match opt {
Opt::Clear => clear(),
Opt::Add => add(),
Opt::Outdated => outdated(),
}
}
fn clear() {
commenter::clear();
}
fn outdated() {
commenter::outdated();
}
fn add() {
let mut lib_exes: H = Default::default();
let mut tests: H = Default::default();
let mut benches: H = Default::default();
let mut last_header: Option<Header> = None;
let header_versioned = regex!(
r#"^(?P<package>[a-zA-z]([a-zA-z0-9.-]*?))-(?P<version>(\d+(\.\d+)*)).+?is out of bounds for:$"#
);
    let header_missing = regex!(r#"^(?P<package>[a-zA-Z]([a-zA-Z0-9.-]*)).+?depended on by:$"#);
let package = regex!(
r#"^- \[ \] (?P<package>[a-zA-z]([a-zA-z0-9.-]*?))-(?P<version>(\d+(\.\d+)*)).+?Used by: (?P<component>.+)$"#
);
// Ignore everything until the bounds issues show up.
let mut process_line = false;
for line in io::stdin().lock().lines().flatten() {
if is_reg_match(&line, regex!(r#"^\s*$"#)) {
// noop
} else if line == "curator: Snapshot dependency graph contains errors:" {
process_line = true;
} else if !process_line {
println!("[INFO] {}", line);
} else if let Some(cap) = package.captures(&line) {
let root = last_header.clone().unwrap();
let package = cap.name("package").unwrap().as_str();
let version = cap.name("version").unwrap().as_str();
let component = cap.name("component").unwrap().as_str();
match component {
"library" | "executable" => {
insert(&mut lib_exes, root, package, version, component)
}
"benchmark" => insert(&mut benches, root, package, version, "benchmarks"),
"test-suite" => insert(&mut tests, root, package, version, component),
_ => panic!("Bad component: {}", component),
}
} else if let Some(cap) = header_versioned.captures(&line) {
let package = cap.name("package").unwrap().as_str().to_owned();
let version = cap.name("version").unwrap().as_str().to_owned();
last_header = Some(Header::Versioned { package, version });
} else if let Some(cap) = header_missing.captures(&line) {
let package = cap.name("package").unwrap().as_str().to_owned();
last_header = Some(Header::Missing { package });
} else {
panic!("Unhandled: {:?}", line);
}
}
let mut auto_lib_exes = vec![];
let mut auto_tests = vec![];
let mut auto_benches = vec![];
if !lib_exes.is_empty() {
println!("\nLIBS + EXES\n");
}
for (header, packages) in lib_exes {
for (package, version, component) in packages {
let s = printer(" ", &package, true, &version, &component, &header);
println!("{}", s);
auto_lib_exes.push(s);
} | println!("\nTESTS\n");
}
for (header, packages) in tests {
for (package, version, component) in packages {
let s = printer(" ", &package, false, &version, &component, &header);
println!("{}", s);
auto_tests.push(s);
}
}
if !benches.is_empty() {
println!("\nBENCHMARKS\n");
}
for (header, packages) in benches {
for (package, version, component) in packages {
let s = printer(" ", &package, false, &version, &component, &header);
println!("{}", s);
auto_benches.push(s);
}
}
println!();
println!(
"Adding {lib_exes} libs, {tests} tests, {benches} benches to build-constraints.yaml",
lib_exes = auto_lib_exes.len(),
tests = auto_tests.len(),
benches = auto_benches.len()
);
commenter::add(auto_lib_exes, auto_tests, auto_benches);
}
fn printer(
indent: &str,
package: &str,
lt0: bool,
version: &str,
component: &str,
header: &Header,
) -> String {
let lt0 = if lt0 { " < 0" } else { "" };
format!(
"{indent}- {package}{lt0} # tried {package}-{version}, but its *{component}* {cause}",
indent = indent,
package = package,
lt0 = lt0,
version = version,
component = component,
cause = match header {
Header::Versioned { package, version } => format!(
"does not support: {package}-{version}",
package = package,
version = version
),
Header::Missing { package } => format!(
"requires the disabled package: {package}",
package = package
),
},
)
}
fn insert(h: &mut H, header: Header, package: &str, version: &str, component: &str) {
(*h.entry(header).or_insert_with(Vec::new)).push((
package.to_owned(),
version.to_owned(),
component.to_owned(),
));
}
fn is_reg_match(s: &str, r: &Regex) -> bool {
r.captures(s).is_some()
} | }
if !tests.is_empty() { | random_line_split |
main.rs | use std::collections::HashMap;
use std::io::{self, BufRead};
use lazy_regex::regex;
use regex::Regex;
use structopt::StructOpt;
type H = HashMap<Header, Vec<(String, String, String)>>;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum Header {
Versioned { package: String, version: String },
Missing { package: String },
}
#[derive(Debug, StructOpt)]
#[structopt(
name = "commenter",
about = "Automates generation of bounds in build-constraints.yaml"
)]
enum Opt {
Clear,
Add,
Outdated,
}
fn main() {
let opt = Opt::from_args();
match opt {
Opt::Clear => clear(),
Opt::Add => add(),
Opt::Outdated => outdated(),
}
}
fn clear() {
commenter::clear();
}
fn outdated() |
fn add() {
let mut lib_exes: H = Default::default();
let mut tests: H = Default::default();
let mut benches: H = Default::default();
let mut last_header: Option<Header> = None;
let header_versioned = regex!(
r#"^(?P<package>[a-zA-z]([a-zA-z0-9.-]*?))-(?P<version>(\d+(\.\d+)*)).+?is out of bounds for:$"#
);
    let header_missing = regex!(r#"^(?P<package>[a-zA-Z]([a-zA-Z0-9.-]*)).+?depended on by:$"#);
let package = regex!(
r#"^- \[ \] (?P<package>[a-zA-z]([a-zA-z0-9.-]*?))-(?P<version>(\d+(\.\d+)*)).+?Used by: (?P<component>.+)$"#
);
// Ignore everything until the bounds issues show up.
let mut process_line = false;
for line in io::stdin().lock().lines().flatten() {
if is_reg_match(&line, regex!(r#"^\s*$"#)) {
// noop
} else if line == "curator: Snapshot dependency graph contains errors:" {
process_line = true;
} else if !process_line {
println!("[INFO] {}", line);
} else if let Some(cap) = package.captures(&line) {
let root = last_header.clone().unwrap();
let package = cap.name("package").unwrap().as_str();
let version = cap.name("version").unwrap().as_str();
let component = cap.name("component").unwrap().as_str();
match component {
"library" | "executable" => {
insert(&mut lib_exes, root, package, version, component)
}
"benchmark" => insert(&mut benches, root, package, version, "benchmarks"),
"test-suite" => insert(&mut tests, root, package, version, component),
_ => panic!("Bad component: {}", component),
}
} else if let Some(cap) = header_versioned.captures(&line) {
let package = cap.name("package").unwrap().as_str().to_owned();
let version = cap.name("version").unwrap().as_str().to_owned();
last_header = Some(Header::Versioned { package, version });
} else if let Some(cap) = header_missing.captures(&line) {
let package = cap.name("package").unwrap().as_str().to_owned();
last_header = Some(Header::Missing { package });
} else {
panic!("Unhandled: {:?}", line);
}
}
let mut auto_lib_exes = vec![];
let mut auto_tests = vec![];
let mut auto_benches = vec![];
if !lib_exes.is_empty() {
println!("\nLIBS + EXES\n");
}
for (header, packages) in lib_exes {
for (package, version, component) in packages {
let s = printer(" ", &package, true, &version, &component, &header);
println!("{}", s);
auto_lib_exes.push(s);
}
}
if !tests.is_empty() {
println!("\nTESTS\n");
}
for (header, packages) in tests {
for (package, version, component) in packages {
let s = printer(" ", &package, false, &version, &component, &header);
println!("{}", s);
auto_tests.push(s);
}
}
if !benches.is_empty() {
println!("\nBENCHMARKS\n");
}
for (header, packages) in benches {
for (package, version, component) in packages {
let s = printer(" ", &package, false, &version, &component, &header);
println!("{}", s);
auto_benches.push(s);
}
}
println!();
println!(
"Adding {lib_exes} libs, {tests} tests, {benches} benches to build-constraints.yaml",
lib_exes = auto_lib_exes.len(),
tests = auto_tests.len(),
benches = auto_benches.len()
);
commenter::add(auto_lib_exes, auto_tests, auto_benches);
}
fn printer(
indent: &str,
package: &str,
lt0: bool,
version: &str,
component: &str,
header: &Header,
) -> String {
let lt0 = if lt0 { " < 0" } else { "" };
format!(
"{indent}- {package}{lt0} # tried {package}-{version}, but its *{component}* {cause}",
indent = indent,
package = package,
lt0 = lt0,
version = version,
component = component,
cause = match header {
Header::Versioned { package, version } => format!(
"does not support: {package}-{version}",
package = package,
version = version
),
Header::Missing { package } => format!(
"requires the disabled package: {package}",
package = package
),
},
)
}
fn insert(h: &mut H, header: Header, package: &str, version: &str, component: &str) {
(*h.entry(header).or_insert_with(Vec::new)).push((
package.to_owned(),
version.to_owned(),
component.to_owned(),
));
}
fn is_reg_match(s: &str, r: &Regex) -> bool {
r.captures(s).is_some()
}
| {
commenter::outdated();
} | identifier_body |
main.rs | use std::collections::HashMap;
use std::io::{self, BufRead};
use lazy_regex::regex;
use regex::Regex;
use structopt::StructOpt;
type H = HashMap<Header, Vec<(String, String, String)>>;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum Header {
Versioned { package: String, version: String },
Missing { package: String },
}
#[derive(Debug, StructOpt)]
#[structopt(
name = "commenter",
about = "Automates generation of bounds in build-constraints.yaml"
)]
enum Opt {
Clear,
Add,
Outdated,
}
fn main() {
let opt = Opt::from_args();
match opt {
Opt::Clear => clear(),
Opt::Add => add(),
Opt::Outdated => outdated(),
}
}
fn clear() {
commenter::clear();
}
fn outdated() {
commenter::outdated();
}
fn add() {
let mut lib_exes: H = Default::default();
let mut tests: H = Default::default();
let mut benches: H = Default::default();
let mut last_header: Option<Header> = None;
let header_versioned = regex!(
r#"^(?P<package>[a-zA-z]([a-zA-z0-9.-]*?))-(?P<version>(\d+(\.\d+)*)).+?is out of bounds for:$"#
);
    let header_missing = regex!(r#"^(?P<package>[a-zA-Z]([a-zA-Z0-9.-]*)).+?depended on by:$"#);
let package = regex!(
r#"^- \[ \] (?P<package>[a-zA-z]([a-zA-z0-9.-]*?))-(?P<version>(\d+(\.\d+)*)).+?Used by: (?P<component>.+)$"#
);
// Ignore everything until the bounds issues show up.
let mut process_line = false;
for line in io::stdin().lock().lines().flatten() {
if is_reg_match(&line, regex!(r#"^\s*$"#)) {
// noop
} else if line == "curator: Snapshot dependency graph contains errors:" {
process_line = true;
} else if !process_line {
println!("[INFO] {}", line);
} else if let Some(cap) = package.captures(&line) {
let root = last_header.clone().unwrap();
let package = cap.name("package").unwrap().as_str();
let version = cap.name("version").unwrap().as_str();
let component = cap.name("component").unwrap().as_str();
match component {
"library" | "executable" => {
insert(&mut lib_exes, root, package, version, component)
}
"benchmark" => insert(&mut benches, root, package, version, "benchmarks"),
"test-suite" => insert(&mut tests, root, package, version, component),
_ => panic!("Bad component: {}", component),
}
} else if let Some(cap) = header_versioned.captures(&line) {
let package = cap.name("package").unwrap().as_str().to_owned();
let version = cap.name("version").unwrap().as_str().to_owned();
last_header = Some(Header::Versioned { package, version });
} else if let Some(cap) = header_missing.captures(&line) {
let package = cap.name("package").unwrap().as_str().to_owned();
last_header = Some(Header::Missing { package });
} else {
panic!("Unhandled: {:?}", line);
}
}
let mut auto_lib_exes = vec![];
let mut auto_tests = vec![];
let mut auto_benches = vec![];
if !lib_exes.is_empty() {
println!("\nLIBS + EXES\n");
}
for (header, packages) in lib_exes {
for (package, version, component) in packages {
let s = printer(" ", &package, true, &version, &component, &header);
println!("{}", s);
auto_lib_exes.push(s);
}
}
if !tests.is_empty() {
println!("\nTESTS\n");
}
for (header, packages) in tests {
for (package, version, component) in packages {
let s = printer(" ", &package, false, &version, &component, &header);
println!("{}", s);
auto_tests.push(s);
}
}
if !benches.is_empty() {
println!("\nBENCHMARKS\n");
}
for (header, packages) in benches {
for (package, version, component) in packages {
let s = printer(" ", &package, false, &version, &component, &header);
println!("{}", s);
auto_benches.push(s);
}
}
println!();
println!(
"Adding {lib_exes} libs, {tests} tests, {benches} benches to build-constraints.yaml",
lib_exes = auto_lib_exes.len(),
tests = auto_tests.len(),
benches = auto_benches.len()
);
commenter::add(auto_lib_exes, auto_tests, auto_benches);
}
fn printer(
indent: &str,
package: &str,
lt0: bool,
version: &str,
component: &str,
header: &Header,
) -> String {
let lt0 = if lt0 { " < 0" } else { "" };
format!(
"{indent}- {package}{lt0} # tried {package}-{version}, but its *{component}* {cause}",
indent = indent,
package = package,
lt0 = lt0,
version = version,
component = component,
cause = match header {
Header::Versioned { package, version } => format!(
"does not support: {package}-{version}",
package = package,
version = version
),
Header::Missing { package } => format!(
"requires the disabled package: {package}",
package = package
),
},
)
}
fn insert(h: &mut H, header: Header, package: &str, version: &str, component: &str) {
(*h.entry(header).or_insert_with(Vec::new)).push((
package.to_owned(),
version.to_owned(),
component.to_owned(),
));
}
fn | (s: &str, r: &Regex) -> bool {
r.captures(s).is_some()
}
| is_reg_match | identifier_name |
main.rs | use std::collections::HashMap;
use std::io::{self, BufRead};
use lazy_regex::regex;
use regex::Regex;
use structopt::StructOpt;
type H = HashMap<Header, Vec<(String, String, String)>>;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum Header {
Versioned { package: String, version: String },
Missing { package: String },
}
#[derive(Debug, StructOpt)]
#[structopt(
name = "commenter",
about = "Automates generation of bounds in build-constraints.yaml"
)]
enum Opt {
Clear,
Add,
Outdated,
}
fn main() {
let opt = Opt::from_args();
match opt {
Opt::Clear => clear(),
Opt::Add => add(),
Opt::Outdated => outdated(),
}
}
fn clear() {
commenter::clear();
}
fn outdated() {
commenter::outdated();
}
fn add() {
let mut lib_exes: H = Default::default();
let mut tests: H = Default::default();
let mut benches: H = Default::default();
let mut last_header: Option<Header> = None;
let header_versioned = regex!(
r#"^(?P<package>[a-zA-z]([a-zA-z0-9.-]*?))-(?P<version>(\d+(\.\d+)*)).+?is out of bounds for:$"#
);
    let header_missing = regex!(r#"^(?P<package>[a-zA-Z]([a-zA-Z0-9.-]*)).+?depended on by:$"#);
let package = regex!(
r#"^- \[ \] (?P<package>[a-zA-z]([a-zA-z0-9.-]*?))-(?P<version>(\d+(\.\d+)*)).+?Used by: (?P<component>.+)$"#
);
// Ignore everything until the bounds issues show up.
let mut process_line = false;
for line in io::stdin().lock().lines().flatten() {
if is_reg_match(&line, regex!(r#"^\s*$"#)) {
// noop
} else if line == "curator: Snapshot dependency graph contains errors:" {
process_line = true;
} else if !process_line {
println!("[INFO] {}", line);
} else if let Some(cap) = package.captures(&line) {
let root = last_header.clone().unwrap();
let package = cap.name("package").unwrap().as_str();
let version = cap.name("version").unwrap().as_str();
let component = cap.name("component").unwrap().as_str();
match component {
"library" | "executable" => {
insert(&mut lib_exes, root, package, version, component)
}
"benchmark" => insert(&mut benches, root, package, version, "benchmarks"),
"test-suite" => insert(&mut tests, root, package, version, component),
_ => panic!("Bad component: {}", component),
}
} else if let Some(cap) = header_versioned.captures(&line) {
let package = cap.name("package").unwrap().as_str().to_owned();
let version = cap.name("version").unwrap().as_str().to_owned();
last_header = Some(Header::Versioned { package, version });
} else if let Some(cap) = header_missing.captures(&line) {
let package = cap.name("package").unwrap().as_str().to_owned();
last_header = Some(Header::Missing { package });
} else {
panic!("Unhandled: {:?}", line);
}
}
let mut auto_lib_exes = vec![];
let mut auto_tests = vec![];
let mut auto_benches = vec![];
if !lib_exes.is_empty() {
println!("\nLIBS + EXES\n");
}
for (header, packages) in lib_exes {
for (package, version, component) in packages {
let s = printer(" ", &package, true, &version, &component, &header);
println!("{}", s);
auto_lib_exes.push(s);
}
}
if !tests.is_empty() {
println!("\nTESTS\n");
}
for (header, packages) in tests {
for (package, version, component) in packages {
let s = printer(" ", &package, false, &version, &component, &header);
println!("{}", s);
auto_tests.push(s);
}
}
if !benches.is_empty() {
println!("\nBENCHMARKS\n");
}
for (header, packages) in benches {
for (package, version, component) in packages {
let s = printer(" ", &package, false, &version, &component, &header);
println!("{}", s);
auto_benches.push(s);
}
}
println!();
println!(
"Adding {lib_exes} libs, {tests} tests, {benches} benches to build-constraints.yaml",
lib_exes = auto_lib_exes.len(),
tests = auto_tests.len(),
benches = auto_benches.len()
);
commenter::add(auto_lib_exes, auto_tests, auto_benches);
}
fn printer(
indent: &str,
package: &str,
lt0: bool,
version: &str,
component: &str,
header: &Header,
) -> String {
let lt0 = if lt0 { " < 0" } else | ;
format!(
"{indent}- {package}{lt0} # tried {package}-{version}, but its *{component}* {cause}",
indent = indent,
package = package,
lt0 = lt0,
version = version,
component = component,
cause = match header {
Header::Versioned { package, version } => format!(
"does not support: {package}-{version}",
package = package,
version = version
),
Header::Missing { package } => format!(
"requires the disabled package: {package}",
package = package
),
},
)
}
fn insert(h: &mut H, header: Header, package: &str, version: &str, component: &str) {
(*h.entry(header).or_insert_with(Vec::new)).push((
package.to_owned(),
version.to_owned(),
component.to_owned(),
));
}
fn is_reg_match(s: &str, r: &Regex) -> bool {
r.captures(s).is_some()
}
| { "" } | conditional_block |
devtools.ts | import { expectType } from 'tsd'
browser.enablePerformanceAudits()
browser.enablePerformanceAudits({
networkThrottling: 'online',
cpuThrottling: 0,
cacheEnabled: false,
formFactor: 'desktop'
})
browser.disablePerformanceAudits()
const metrics = browser.getMetrics()
expectType<number>(metrics.totalBlockingTime)
expectType<number>(metrics.maxPotentialFID)
const diagnostics = browser.getDiagnostics()
const mainThreadWorkBreakdown = browser.getMainThreadWorkBreakdown()
expectType<number>(mainThreadWorkBreakdown[0].duration)
const performanceScore: number = browser.getPerformanceScore()
expectType<number>(performanceScore)
const pwaCheck = browser.checkPWA()
pwaCheck.passed
expectType<number>(pwaCheck.details['foo'].score)
const pwaFilterdCheck = browser.checkPWA(['maskableIcon', 'isInstallable'])
expectType<boolean>(pwaFilterdCheck.passed)
browser.emulateDevice('iPad')
browser.emulateDevice({ viewport: { height: 10, width: 10 }, userAgent: 'test' }) |
const cdpResponse = browser.cdp('test', 'test')
expectType<number>(browser.getNodeId('selector'))
expectType<number[]>(browser.getNodeIds('selector'))
browser.startTracing()
browser.startTracing({ path: '/foo' })
browser.endTracing()
const traceLogs = browser.getTraceLogs()
expectType<string>(traceLogs[0].cat)
const pageWeight = browser.getPageWeight()
expectType<number>(pageWeight.requestCount)
const coverage = browser.getCoverageReport()
expectType<number>(coverage.lines.total) | random_line_split |
|
contentscript.js | (function() {
/* globals chrome */
'use strict';
| var itpubDownloader = {
//http://www.itpub.net/attachment.php?aid=OTIzNzAxfDQyYzhjNDRkfDE0MDQ0NjYwMjB8MzUwMTczfDE4NzU4NDA%3D&fid=61
//href = href.replace("attachment.php?", "forum.php?mod=attachment&");
attachmentRegexp: /attachment.php\?aid=[a-zA-Z0-9]+%3D&fid/,
mapElement: function(element) {
if (element.tagName.toLowerCase() === 'a') {
var href = element.href;
if (itpubDownloader.isAttachmentURL(href)) {
href = href.replace(/attachment.php\?/, "forum.php?mod=attachment&");
var text = element.text;
itpubDownloader.attachmentTexts[href] = text;
console.log(text + " | " + href);
itpubDownloader.linkedImages[href] = text;
return href;
}
}
return '';
},
isAttachmentURL: function(url) {
return itpubDownloader.attachmentRegexp.test(url);
},
removeDuplicateOrEmpty: function(images) {
var result = [],
hash = {};
for (var i = 0; i < images.length; i++) {
hash[images[i]] = 0;
}
for (var key in hash) {
if (key !== '') {
result.push(key);
}
}
return result;
}
};
itpubDownloader.attachments = {};
itpubDownloader.linkedImages = {};
itpubDownloader.attachmentTexts = {};
itpubDownloader.images = [].slice.apply(document.getElementsByTagName('*'));
itpubDownloader.images = itpubDownloader.images.map(itpubDownloader.mapElement);
itpubDownloader.images = itpubDownloader.removeDuplicateOrEmpty(itpubDownloader.images);
chrome.extension.sendMessage({
linkedImages: itpubDownloader.linkedImages,
attachmentTexts: itpubDownloader.attachmentTexts,
attachments: itpubDownloader.attachments,
images: itpubDownloader.images
});
itpubDownloader.attachments = null;
itpubDownloader.linkedImages = null;
itpubDownloader.attachmentTexts = null;
itpubDownloader.images = null;
}()); | random_line_split |
|
contentscript.js | (function() {
/* globals chrome */
'use strict';
var itpubDownloader = {
//http://www.itpub.net/attachment.php?aid=OTIzNzAxfDQyYzhjNDRkfDE0MDQ0NjYwMjB8MzUwMTczfDE4NzU4NDA%3D&fid=61
//href = href.replace("attachment.php?", "forum.php?mod=attachment&");
attachmentRegexp: /attachment.php\?aid=[a-zA-Z0-9]+%3D&fid/,
mapElement: function(element) {
if (element.tagName.toLowerCase() === 'a') |
return '';
},
isAttachmentURL: function(url) {
return itpubDownloader.attachmentRegexp.test(url);
},
removeDuplicateOrEmpty: function(images) {
var result = [],
hash = {};
for (var i = 0; i < images.length; i++) {
hash[images[i]] = 0;
}
for (var key in hash) {
if (key !== '') {
result.push(key);
}
}
return result;
}
};
itpubDownloader.attachments = {};
itpubDownloader.linkedImages = {};
itpubDownloader.attachmentTexts = {};
itpubDownloader.images = [].slice.apply(document.getElementsByTagName('*'));
itpubDownloader.images = itpubDownloader.images.map(itpubDownloader.mapElement);
itpubDownloader.images = itpubDownloader.removeDuplicateOrEmpty(itpubDownloader.images);
chrome.extension.sendMessage({
linkedImages: itpubDownloader.linkedImages,
attachmentTexts: itpubDownloader.attachmentTexts,
attachments: itpubDownloader.attachments,
images: itpubDownloader.images
});
itpubDownloader.attachments = null;
itpubDownloader.linkedImages = null;
itpubDownloader.attachmentTexts = null;
itpubDownloader.images = null;
}()); | {
var href = element.href;
if (itpubDownloader.isAttachmentURL(href)) {
href = href.replace(/attachment.php\?/, "forum.php?mod=attachment&");
var text = element.text;
itpubDownloader.attachmentTexts[href] = text;
console.log(text + " | " + href);
itpubDownloader.linkedImages[href] = text;
return href;
}
} | conditional_block |
ozone.py | # -*- coding: utf-8 -*-
"""
Ozone Bricklet Plugin
Copyright (C) 2015 Olaf Lüke <[email protected]>
ozone.py: Ozone Bricklet Plugin Implementation
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
"""
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QVBoxLayout, QLabel, QHBoxLayout, QSpinBox
from brickv.plugin_system.plugin_base import PluginBase
from brickv.bindings.bricklet_ozone import BrickletOzone
from brickv.plot_widget import PlotWidget
from brickv.async_call import async_call
from brickv.callback_emulator import CallbackEmulator
class OzoneConcentrationLabel(QLabel):
def setText(self, text):
text = "Ozone Concentration: " + text + " ppb (parts per billion)"
super(OzoneConcentrationLabel, self).setText(text)
class Ozone(PluginBase):
def __init__(self, *args):
PluginBase.__init__(self, BrickletOzone, *args)
self.ozone = self.device
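        # CallbackEmulator polls get_ozone_concentration at the period set in
        # start() and passes each reading to cb_ozone_concentration.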
self.cbe_ozone_concentration = CallbackEmulator(self.ozone.get_ozone_concentration,
self.cb_ozone_concentration,
self.increase_error_count)
self.ozone_concentration_label = OzoneConcentrationLabel('Ozone Concentration: ')
self.current_value = None
plot_list = [['', Qt.red, self.get_current_value]]
self.plot_widget = PlotWidget('Ozone Concentration [ppb]', plot_list)
layout_h2 = QHBoxLayout()
layout_h2.addStretch()
layout_h2.addWidget(self.ozone_concentration_label)
layout_h2.addStretch()
layout = QVBoxLayout(self)
layout.addLayout(layout_h2)
layout.addWidget(self.plot_widget)
self.spin_average = QSpinBox()
self.spin_average.setMinimum(1)
self.spin_average.setMaximum(50)
self.spin_average.setSingleStep(1)
self.spin_average.setValue(50)
self.spin_average.editingFinished.connect(self.spin_average_finished)
layout_h1 = QHBoxLayout()
layout_h1.addWidget(QLabel('Length of moving average:'))
layout_h1.addWidget(self.spin_average)
layout_h1.addStretch()
layout.addLayout(layout_h1)
def get_moving_average_async(self, average):
self.spin_average.setValue(average)
def start(self):
async_call(self.ozone.get_moving_average, None, self.get_moving_average_async, self.increase_error_count)
async_call(self.ozone.get_ozone_concentration, None, self.cb_ozone_concentration, self.increase_error_count)
self.cbe_ozone_concentration.set_period(100)
self.plot_widget.stop = False
def stop(self):
self.cbe_ozone_concentration.set_period(0)
self.plot_widget.stop = True
def d | self):
pass
def get_url_part(self):
return 'ozone'
@staticmethod
def has_device_identifier(device_identifier):
return device_identifier == BrickletOzone.DEVICE_IDENTIFIER
def get_current_value(self):
return self.current_value
def cb_ozone_concentration(self, ozone_concentration):
self.current_value = ozone_concentration
self.ozone_concentration_label.setText(str(ozone_concentration))
def spin_average_finished(self):
self.ozone.set_moving_average(self.spin_average.value())
| estroy( | identifier_name |
ozone.py | # -*- coding: utf-8 -*-
"""
Ozone Bricklet Plugin
Copyright (C) 2015 Olaf Lüke <[email protected]>
ozone.py: Ozone Bricklet Plugin Implementation
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
"""
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QVBoxLayout, QLabel, QHBoxLayout, QSpinBox
from brickv.plugin_system.plugin_base import PluginBase
from brickv.bindings.bricklet_ozone import BrickletOzone
from brickv.plot_widget import PlotWidget
from brickv.async_call import async_call
from brickv.callback_emulator import CallbackEmulator
class OzoneConcentrationLabel(QLabel):
def setText(self, text):
text = "Ozone Concentration: " + text + " ppb (parts per billion)"
super(OzoneConcentrationLabel, self).setText(text)
class Ozone(PluginBase):
def __init__(self, *args):
PluginBase.__init__(self, BrickletOzone, *args)
self.ozone = self.device
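        # CallbackEmulator polls get_ozone_concentration at the period set in
        # start() and passes each reading to cb_ozone_concentration.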
self.cbe_ozone_concentration = CallbackEmulator(self.ozone.get_ozone_concentration,
self.cb_ozone_concentration,
self.increase_error_count)
self.ozone_concentration_label = OzoneConcentrationLabel('Ozone Concentration: ')
self.current_value = None
plot_list = [['', Qt.red, self.get_current_value]]
self.plot_widget = PlotWidget('Ozone Concentration [ppb]', plot_list)
layout_h2 = QHBoxLayout()
layout_h2.addStretch()
layout_h2.addWidget(self.ozone_concentration_label)
layout_h2.addStretch()
layout = QVBoxLayout(self)
layout.addLayout(layout_h2)
layout.addWidget(self.plot_widget)
self.spin_average = QSpinBox()
self.spin_average.setMinimum(1)
self.spin_average.setMaximum(50)
self.spin_average.setSingleStep(1)
self.spin_average.setValue(50)
self.spin_average.editingFinished.connect(self.spin_average_finished)
layout_h1 = QHBoxLayout()
layout_h1.addWidget(QLabel('Length of moving average:'))
layout_h1.addWidget(self.spin_average)
layout_h1.addStretch()
layout.addLayout(layout_h1)
def get_moving_average_async(self, average):
self.spin_average.setValue(average)
def start(self):
async_call(self.ozone.get_moving_average, None, self.get_moving_average_async, self.increase_error_count)
async_call(self.ozone.get_ozone_concentration, None, self.cb_ozone_concentration, self.increase_error_count)
self.cbe_ozone_concentration.set_period(100)
self.plot_widget.stop = False
def stop(self):
s |
def destroy(self):
pass
def get_url_part(self):
return 'ozone'
@staticmethod
def has_device_identifier(device_identifier):
return device_identifier == BrickletOzone.DEVICE_IDENTIFIER
def get_current_value(self):
return self.current_value
def cb_ozone_concentration(self, ozone_concentration):
self.current_value = ozone_concentration
self.ozone_concentration_label.setText(str(ozone_concentration))
def spin_average_finished(self):
self.ozone.set_moving_average(self.spin_average.value())
| elf.cbe_ozone_concentration.set_period(0)
self.plot_widget.stop = True
| identifier_body |
ozone.py | # -*- coding: utf-8 -*-
"""
Ozone Bricklet Plugin
Copyright (C) 2015 Olaf Lüke <[email protected]>
ozone.py: Ozone Bricklet Plugin Implementation
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
"""
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QVBoxLayout, QLabel, QHBoxLayout, QSpinBox
from brickv.plugin_system.plugin_base import PluginBase
from brickv.bindings.bricklet_ozone import BrickletOzone
from brickv.plot_widget import PlotWidget
from brickv.async_call import async_call
from brickv.callback_emulator import CallbackEmulator
class OzoneConcentrationLabel(QLabel):
def setText(self, text):
text = "Ozone Concentration: " + text + " ppb (parts per billion)"
super(OzoneConcentrationLabel, self).setText(text)
class Ozone(PluginBase):
def __init__(self, *args):
PluginBase.__init__(self, BrickletOzone, *args)
self.ozone = self.device
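        # CallbackEmulator polls get_ozone_concentration at the period set in
        # start() and passes each reading to cb_ozone_concentration.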
self.cbe_ozone_concentration = CallbackEmulator(self.ozone.get_ozone_concentration,
self.cb_ozone_concentration,
self.increase_error_count)
self.ozone_concentration_label = OzoneConcentrationLabel('Ozone Concentration: ')
self.current_value = None
plot_list = [['', Qt.red, self.get_current_value]]
self.plot_widget = PlotWidget('Ozone Concentration [ppb]', plot_list)
|
layout = QVBoxLayout(self)
layout.addLayout(layout_h2)
layout.addWidget(self.plot_widget)
self.spin_average = QSpinBox()
self.spin_average.setMinimum(1)
self.spin_average.setMaximum(50)
self.spin_average.setSingleStep(1)
self.spin_average.setValue(50)
self.spin_average.editingFinished.connect(self.spin_average_finished)
layout_h1 = QHBoxLayout()
layout_h1.addWidget(QLabel('Length of moving average:'))
layout_h1.addWidget(self.spin_average)
layout_h1.addStretch()
layout.addLayout(layout_h1)
def get_moving_average_async(self, average):
self.spin_average.setValue(average)
def start(self):
async_call(self.ozone.get_moving_average, None, self.get_moving_average_async, self.increase_error_count)
async_call(self.ozone.get_ozone_concentration, None, self.cb_ozone_concentration, self.increase_error_count)
self.cbe_ozone_concentration.set_period(100)
self.plot_widget.stop = False
def stop(self):
self.cbe_ozone_concentration.set_period(0)
self.plot_widget.stop = True
def destroy(self):
pass
def get_url_part(self):
return 'ozone'
@staticmethod
def has_device_identifier(device_identifier):
return device_identifier == BrickletOzone.DEVICE_IDENTIFIER
def get_current_value(self):
return self.current_value
def cb_ozone_concentration(self, ozone_concentration):
self.current_value = ozone_concentration
self.ozone_concentration_label.setText(str(ozone_concentration))
def spin_average_finished(self):
self.ozone.set_moving_average(self.spin_average.value()) | layout_h2 = QHBoxLayout()
layout_h2.addStretch()
layout_h2.addWidget(self.ozone_concentration_label)
layout_h2.addStretch() | random_line_split |
mcapi.py | print('Importing command definitions...')
from jycraft.plugin.interpreter import PyContext
from org.bukkit import Bukkit
from org.bukkit import Location
from org.bukkit import Material
from org.bukkit import Effect
from org.bukkit.command import Command
from org.bukkit.event import Listener, EventPriority
from random import *
SERVER = Bukkit.getServer()
WORLD = SERVER.getWorlds().get(0)
MORNING = 2000
NOON = 6000
EVENING = 14000
NIGHT = 18000
# reflection to get command map
_commandMapField = SERVER.getClass().getDeclaredField("commandMap")
_commandMapField.setAccessible(True)
_commandMap = _commandMapField.get(SERVER)
# Full list of BlockTypes is available in the JavaDocs on canarymod.net.
AIR = Material.AIR
STONE = Material.STONE
GRASS = Material.GRASS
DIRT = Material.DIRT
COBBLESTONE = Material.COBBLESTONE
WOOD_PLANKS = Material.WOOD
# SAPLING = BlockType.OakSapling
# BEDROCK = BlockType.Bedrock
# WATER_FLOWING = BlockType.WaterFlowing
# WATER = WATER_FLOWING
# WATER_STATIONARY = BlockType.Water
# LAVA_FLOWING = BlockType.LavaFlowing
# LAVA = LAVA_FLOWING
# LAVA_STATIONARY = BlockType.Lava
# SAND = BlockType.Sand
# GRAVEL = BlockType.Gravel
# GOLD_ORE = BlockType.GoldOre
# IRON_ORE = BlockType.IronOre
# COAL_ORE = BlockType.CoalOre
# WOOD = BlockType.OakLog
# LEAVES = BlockType.OakLeaves
# GLASS = BlockType.Glass
# LAPIS_LAZULI_ORE = BlockType.LapisOre
# LAPIS_LAZULI_BLOCK = BlockType.LapisBlock
# SANDSTONE = BlockType.Sandstone
# BED = BlockType.Bed
# COBWEB = BlockType.Web
# GRASS_TALL = BlockType.TallGrass
# WOOL = BlockType.WhiteWool
# FLOWER_YELLOW = BlockType.Dandelion
# FLOWER_CYAN = BlockType.BlueOrchid
# MUSHROOM_BROWN = BlockType.BrownMushroom
# MUSHROOM_RED = BlockType.RedMushroom
# GOLD_BLOCK = BlockType.GoldBlock
# IRON_BLOCK = BlockType.IronBlock
# STONE_SLAB_DOUBLE = BlockType.DoubleStoneSlab
# STONE_SLAB = BlockType.StoneSlab
# BRICK_BLOCK = BlockType.BrickBlock
# TNT = BlockType.TNT
# BOOKSHELF = BlockType.Bookshelf
# MOSS_STONE = BlockType.MossyCobble
# OBSIDIAN = BlockType.Obsidian
# TORCH = BlockType.Torch
# FIRE = BlockType.FireBlock
# STAIRS_WOOD = BlockType.OakStairs
# CHEST = BlockType.Chest
# DIAMOND_ORE = BlockType.DiamondOre
# DIAMOND_BLOCK = BlockType.DiamondBlock
# CRAFTING_TABLE = BlockType.Workbench
# FARMLAND = BlockType.Farmland
# FURNACE_INACTIVE = BlockType.Furnace
# FURNACE_ACTIVE = BlockType.BurningFurnace
# DOOR_WOOD = BlockType.WoodenDoor
# LADDER = BlockType.Ladder
# STAIRS_COBBLESTONE = BlockType.StoneStairs
# DOOR_IRON = BlockType.IronDoor
# REDSTONE_ORE = BlockType.RedstoneOre
# SNOW = BlockType.Snow
# ICE = BlockType.Ice
# SNOW_BLOCK = BlockType.SnowBlock
# CACTUS = BlockType.Cactus
# CLAY = BlockType.Clay
# SUGAR_CANE = BlockType.Reed
# FENCE = BlockType.Fence
# GLOWSTONE_BLOCK = BlockType.GlowStone
# STONE_BRICK = BlockType.StoneBrick
# GLASS_PANE = BlockType.GlassPane
# MELON = BlockType.Melon
# FENCE_GATE = BlockType.FenceGate
def pos(*args):
return Location(WORLD, *args)
def parseargswithpos(args, kwargs, asint=True, ledger={}):
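    """Parse x, y, z from the first three positional args, plus optional extras.

    The coordinates are cast to int when ``asint`` is true. ``ledger`` maps a
    result key to ``[keyword name, positional offset after x/y/z, default]``;
    each extra is looked up in ``kwargs`` first, then in the remaining
    positional args, and finally falls back to its default.
    """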
results = {}
base = 3
tr = [args[0], args[1], args[2]]
    if asint:
        tr = [int(tr[0]), int(tr[1]), int(tr[2])]
    results['x'] = tr[0]
    results['y'] = tr[1]
    results['z'] = tr[2]
for k,v in ledger.iteritems():
results[k] = kwargs.get(v[0], None)
if results[k] is None:
if len(args) > base+v[1]:
results[k] = args[base+v[1]]
else:
results[k] = v[2]
return results
def getplayer(name):
return SERVER.getPlayer(name)
def randomplayer():
pl = SERVER.getOnlinePlayers()
return choice(pl)
def yell(message):
SERVER.broadcastMessage(message)
def time(time):
WORLD.setTime(time)
def weather(rainsnow, thunder):
|
def explosion(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'power':['power', 0, 8]})
WORLD.createExplosion(r['x'], r['y'], r['z'], r['power'], True)
def teleport(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'whom':['whom', 0, 'GameStartSchool']})
someone = getplayer(r['whom'])
someone.teleport(pos(r['x'], r['y'], r['z']))
def setblock(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'type':['type', 0, COBBLESTONE]})
WORLD.getBlockAt(r['x'], r['y'], r['z']).setType(r['type'])
def cube(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={
'type':['type', 0, COBBLESTONE],
'size':['size', 1, 4]})
size = min(r['size'], 12)
for x in range(size):
for y in range(size):
for z in range(size):
setblock(x + r['x'], y + r['y'], z + r['z'], r['type'])
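# Example usage (coordinates chosen only for illustration):
# >>> cube(10, 65, 10, type=STONE, size=4)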
def bolt(*args, **kwargs):
r = parseargswithpos(args, kwargs)
WORLD.strikeLightning(pos(r['x'], r['y'], r['z']))
def bless(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={
'type':['type', 0, Effect.COLOURED_DUST],
'vx':['vx', 1, 1],
'vy':['vy', 2, 1],
'vz':['vz', 3, 1],
'sp':['sp', 4, 100],
'q':['q', 5, 100],
'r':['r', 6, 20],
'block':['block', 7, COBBLESTONE],
'data':['data', 8, 0]})
WORLD.spigot().playEffect(pos(r['x'], r['y'], r['z']),
r['type'], r['block'].getId(),
r['data'], r['vx'], r['vy'], r['vz'],
r['sp'], r['q'], r['r'])
# don't know how to do this in spigot
# def lookingat(player):
# return LineTracer(player).getTargetBlock()
class SpigotCommand(Command):
def __init__(self, name, execfunc):
super(SpigotCommand, self).__init__(name)
self.execfunc = execfunc
def execute(self, caller, label, parameters):
self.execfunc(caller, parameters)
def registercommand(name, execfunc):
# Use like this:
# >>> def functiontest(caller, params):
# ... yell(params[0])
# >>> registercommand("test", functiontest)
_commandMap.register("jycraft", SpigotCommand(name, execfunc))
class EventListener(Listener):
def __init__(self, func):
self.func = func
def execute(self, event):
self.func(event)
def execute(listener, event):
listener.execute(event)
def registerhook(hookCls, execfunc, priority=EventPriority.NORMAL):
# Use like this:
# >>> from mcapi import *
# >>> from org.bukkit.event.block import BlockPlaceEvent
# >>> def place(e):
# ... yell("Placed {}".format(e.getBlockPlaced()))
# >>> registerhook(BlockPlaceEvent, place)
SERVER.getPluginManager().registerEvent(hookCls, EventListener(execfunc), priority, execute, PyContext.getPlugin())
| WORLD.setStorm(rainsnow)
WORLD.setThundering(thunder) | identifier_body |
mcapi.py | print('Importing command definitions...')
from jycraft.plugin.interpreter import PyContext
from org.bukkit import Bukkit
from org.bukkit import Location
from org.bukkit import Material
from org.bukkit import Effect
from org.bukkit.command import Command
from org.bukkit.event import Listener, EventPriority
from random import *
SERVER = Bukkit.getServer()
WORLD = SERVER.getWorlds().get(0)
MORNING = 2000
NOON = 6000
EVENING = 14000
NIGHT = 18000
# reflection to get command map
_commandMapField = SERVER.getClass().getDeclaredField("commandMap")
_commandMapField.setAccessible(True)
_commandMap = _commandMapField.get(SERVER)
# Full list of BlockTypes is available in the JavaDocs on canarymod.net.
AIR = Material.AIR
STONE = Material.STONE
GRASS = Material.GRASS
DIRT = Material.DIRT
COBBLESTONE = Material.COBBLESTONE
WOOD_PLANKS = Material.WOOD
# SAPLING = BlockType.OakSapling
# BEDROCK = BlockType.Bedrock
# WATER_FLOWING = BlockType.WaterFlowing
# WATER = WATER_FLOWING
# WATER_STATIONARY = BlockType.Water
# LAVA_FLOWING = BlockType.LavaFlowing
# LAVA = LAVA_FLOWING
# LAVA_STATIONARY = BlockType.Lava
# SAND = BlockType.Sand
# GRAVEL = BlockType.Gravel
# GOLD_ORE = BlockType.GoldOre
# IRON_ORE = BlockType.IronOre
# COAL_ORE = BlockType.CoalOre
# WOOD = BlockType.OakLog
# LEAVES = BlockType.OakLeaves
# GLASS = BlockType.Glass
# LAPIS_LAZULI_ORE = BlockType.LapisOre
# LAPIS_LAZULI_BLOCK = BlockType.LapisBlock
# SANDSTONE = BlockType.Sandstone
# BED = BlockType.Bed
# COBWEB = BlockType.Web
# GRASS_TALL = BlockType.TallGrass
# WOOL = BlockType.WhiteWool
# FLOWER_YELLOW = BlockType.Dandelion
# FLOWER_CYAN = BlockType.BlueOrchid
# MUSHROOM_BROWN = BlockType.BrownMushroom
# MUSHROOM_RED = BlockType.RedMushroom
# GOLD_BLOCK = BlockType.GoldBlock
# IRON_BLOCK = BlockType.IronBlock
# STONE_SLAB_DOUBLE = BlockType.DoubleStoneSlab
# STONE_SLAB = BlockType.StoneSlab
# BRICK_BLOCK = BlockType.BrickBlock
# TNT = BlockType.TNT
# BOOKSHELF = BlockType.Bookshelf
# MOSS_STONE = BlockType.MossyCobble
# OBSIDIAN = BlockType.Obsidian
# TORCH = BlockType.Torch
# FIRE = BlockType.FireBlock
# STAIRS_WOOD = BlockType.OakStairs
# CHEST = BlockType.Chest
# DIAMOND_ORE = BlockType.DiamondOre
# DIAMOND_BLOCK = BlockType.DiamondBlock
# CRAFTING_TABLE = BlockType.Workbench
# FARMLAND = BlockType.Farmland
# FURNACE_INACTIVE = BlockType.Furnace
# FURNACE_ACTIVE = BlockType.BurningFurnace
# DOOR_WOOD = BlockType.WoodenDoor
# LADDER = BlockType.Ladder
# STAIRS_COBBLESTONE = BlockType.StoneStairs
# DOOR_IRON = BlockType.IronDoor
# REDSTONE_ORE = BlockType.RedstoneOre
# SNOW = BlockType.Snow
# ICE = BlockType.Ice
# SNOW_BLOCK = BlockType.SnowBlock
# CACTUS = BlockType.Cactus
# CLAY = BlockType.Clay
# SUGAR_CANE = BlockType.Reed
# FENCE = BlockType.Fence
# GLOWSTONE_BLOCK = BlockType.GlowStone
# STONE_BRICK = BlockType.StoneBrick
# GLASS_PANE = BlockType.GlassPane
# MELON = BlockType.Melon
# FENCE_GATE = BlockType.FenceGate
def pos(*args):
return Location(WORLD, *args)
def parseargswithpos(args, kwargs, asint=True, ledger={}):
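    """Parse x, y, z from the first three positional args, plus optional extras.

    The coordinates are cast to int when ``asint`` is true. ``ledger`` maps a
    result key to ``[keyword name, positional offset after x/y/z, default]``;
    each extra is looked up in ``kwargs`` first, then in the remaining
    positional args, and finally falls back to its default.
    """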
results = {}
base = 3
tr = [args[0], args[1], args[2]]
    if asint:
        tr = [int(tr[0]), int(tr[1]), int(tr[2])]
    results['x'] = tr[0]
    results['y'] = tr[1]
    results['z'] = tr[2]
for k,v in ledger.iteritems():
results[k] = kwargs.get(v[0], None)
if results[k] is None:
if len(args) > base+v[1]:
results[k] = args[base+v[1]]
else:
results[k] = v[2]
return results
def getplayer(name):
return SERVER.getPlayer(name)
def randomplayer():
pl = SERVER.getOnlinePlayers()
return choice(pl)
def yell(message):
SERVER.broadcastMessage(message)
def time(time):
WORLD.setTime(time)
def | (rainsnow, thunder):
WORLD.setStorm(rainsnow)
WORLD.setThundering(thunder)
def explosion(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'power':['power', 0, 8]})
WORLD.createExplosion(r['x'], r['y'], r['z'], r['power'], True)
def teleport(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'whom':['whom', 0, 'GameStartSchool']})
someone = getplayer(r['whom'])
someone.teleport(pos(r['x'], r['y'], r['z']))
def setblock(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'type':['type', 0, COBBLESTONE]})
WORLD.getBlockAt(r['x'], r['y'], r['z']).setType(r['type'])
def cube(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={
'type':['type', 0, COBBLESTONE],
'size':['size', 1, 4]})
size = min(r['size'], 12)
for x in range(size):
for y in range(size):
for z in range(size):
setblock(x + r['x'], y + r['y'], z + r['z'], r['type'])
def bolt(*args, **kwargs):
r = parseargswithpos(args, kwargs)
WORLD.strikeLightning(pos(r['x'], r['y'], r['z']))
def bless(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={
'type':['type', 0, Effect.COLOURED_DUST],
'vx':['vx', 1, 1],
'vy':['vy', 2, 1],
'vz':['vz', 3, 1],
'sp':['sp', 4, 100],
'q':['q', 5, 100],
'r':['r', 6, 20],
'block':['block', 7, COBBLESTONE],
'data':['data', 8, 0]})
WORLD.spigot().playEffect(pos(r['x'], r['y'], r['z']),
r['type'], r['block'].getId(),
r['data'], r['vx'], r['vy'], r['vz'],
r['sp'], r['q'], r['r'])
# don't know how to do this in spigot
# def lookingat(player):
# return LineTracer(player).getTargetBlock()
class SpigotCommand(Command):
def __init__(self, name, execfunc):
super(SpigotCommand, self).__init__(name)
self.execfunc = execfunc
def execute(self, caller, label, parameters):
self.execfunc(caller, parameters)
def registercommand(name, execfunc):
# Use like this:
# >>> def functiontest(caller, params):
# ... yell(params[0])
# >>> registercommand("test", functiontest)
_commandMap.register("jycraft", SpigotCommand(name, execfunc))
class EventListener(Listener):
def __init__(self, func):
self.func = func
def execute(self, event):
self.func(event)
def execute(listener, event):
listener.execute(event)
def registerhook(hookCls, execfunc, priority=EventPriority.NORMAL):
# Use like this:
# >>> from mcapi import *
# >>> from org.bukkit.event.block import BlockPlaceEvent
# >>> def place(e):
# ... yell("Placed {}".format(e.getBlockPlaced()))
# >>> registerhook(BlockPlaceEvent, place)
SERVER.getPluginManager().registerEvent(hookCls, EventListener(execfunc), priority, execute, PyContext.getPlugin())
| weather | identifier_name |
mcapi.py | print('Importing command definitions...')
from jycraft.plugin.interpreter import PyContext
from org.bukkit import Bukkit
from org.bukkit import Location
from org.bukkit import Material
from org.bukkit import Effect
from org.bukkit.command import Command
from org.bukkit.event import Listener, EventPriority
from random import *
SERVER = Bukkit.getServer()
WORLD = SERVER.getWorlds().get(0)
MORNING = 2000
NOON = 6000
EVENING = 14000
NIGHT = 18000
# reflection to get command map
_commandMapField = SERVER.getClass().getDeclaredField("commandMap")
_commandMapField.setAccessible(True)
_commandMap = _commandMapField.get(SERVER)
# Full list of BlockTypes is available in the JavaDocs on canarymod.net.
AIR = Material.AIR
STONE = Material.STONE
GRASS = Material.GRASS
DIRT = Material.DIRT
COBBLESTONE = Material.COBBLESTONE
WOOD_PLANKS = Material.WOOD
# SAPLING = BlockType.OakSapling
# BEDROCK = BlockType.Bedrock
# WATER_FLOWING = BlockType.WaterFlowing
# WATER = WATER_FLOWING
# WATER_STATIONARY = BlockType.Water
# LAVA_FLOWING = BlockType.LavaFlowing
# LAVA = LAVA_FLOWING
# LAVA_STATIONARY = BlockType.Lava
# SAND = BlockType.Sand
# GRAVEL = BlockType.Gravel
# GOLD_ORE = BlockType.GoldOre
# IRON_ORE = BlockType.IronOre
# COAL_ORE = BlockType.CoalOre
# WOOD = BlockType.OakLog
# LEAVES = BlockType.OakLeaves
# GLASS = BlockType.Glass
# LAPIS_LAZULI_ORE = BlockType.LapisOre
# LAPIS_LAZULI_BLOCK = BlockType.LapisBlock
# SANDSTONE = BlockType.Sandstone
# BED = BlockType.Bed
# COBWEB = BlockType.Web
# GRASS_TALL = BlockType.TallGrass
# WOOL = BlockType.WhiteWool
# FLOWER_YELLOW = BlockType.Dandelion
# FLOWER_CYAN = BlockType.BlueOrchid
# MUSHROOM_BROWN = BlockType.BrownMushroom
# MUSHROOM_RED = BlockType.RedMushroom
# GOLD_BLOCK = BlockType.GoldBlock
# IRON_BLOCK = BlockType.IronBlock
# STONE_SLAB_DOUBLE = BlockType.DoubleStoneSlab
# STONE_SLAB = BlockType.StoneSlab
# BRICK_BLOCK = BlockType.BrickBlock
# TNT = BlockType.TNT
# BOOKSHELF = BlockType.Bookshelf
# MOSS_STONE = BlockType.MossyCobble
# OBSIDIAN = BlockType.Obsidian
# TORCH = BlockType.Torch
# FIRE = BlockType.FireBlock
# STAIRS_WOOD = BlockType.OakStairs
# CHEST = BlockType.Chest
# DIAMOND_ORE = BlockType.DiamondOre
# DIAMOND_BLOCK = BlockType.DiamondBlock
# CRAFTING_TABLE = BlockType.Workbench
# FARMLAND = BlockType.Farmland
# FURNACE_INACTIVE = BlockType.Furnace
# FURNACE_ACTIVE = BlockType.BurningFurnace
# DOOR_WOOD = BlockType.WoodenDoor
# LADDER = BlockType.Ladder
# STAIRS_COBBLESTONE = BlockType.StoneStairs
# DOOR_IRON = BlockType.IronDoor
# REDSTONE_ORE = BlockType.RedstoneOre
# SNOW = BlockType.Snow
# ICE = BlockType.Ice
# SNOW_BLOCK = BlockType.SnowBlock
# CACTUS = BlockType.Cactus
# CLAY = BlockType.Clay
# SUGAR_CANE = BlockType.Reed
# FENCE = BlockType.Fence
# GLOWSTONE_BLOCK = BlockType.GlowStone
# STONE_BRICK = BlockType.StoneBrick
# GLASS_PANE = BlockType.GlassPane
# MELON = BlockType.Melon
# FENCE_GATE = BlockType.FenceGate
def pos(*args):
return Location(WORLD, *args)
def parseargswithpos(args, kwargs, asint=True, ledger={}):
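    """Parse x, y, z from the first three positional args, plus optional extras.

    The coordinates are cast to int when ``asint`` is true. ``ledger`` maps a
    result key to ``[keyword name, positional offset after x/y/z, default]``;
    each extra is looked up in ``kwargs`` first, then in the remaining
    positional args, and finally falls back to its default.
    """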
results = {}
base = 3
tr = [args[0], args[1], args[2]]
if asint:
|
results['x'] = pos[0]
results['y'] = pos[1]
results['z'] = pos[2]
for k,v in ledger.iteritems():
results[k] = kwargs.get(v[0], None)
if results[k] is None:
if len(args) > base+v[1]:
results[k] = args[base+v[1]]
else:
results[k] = v[2]
return results
def getplayer(name):
return SERVER.getPlayer(name)
def randomplayer():
pl = SERVER.getOnlinePlayers()
return choice(pl)
def yell(message):
SERVER.broadcastMessage(message)
def time(time):
WORLD.setTime(time)
def weather(rainsnow, thunder):
WORLD.setStorm(rainsnow)
WORLD.setThundering(thunder)
def explosion(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'power':['power', 0, 8]})
WORLD.createExplosion(r['x'], r['y'], r['z'], r['power'], True)
def teleport(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'whom':['whom', 0, 'GameStartSchool']})
someone = getplayer(r['whom'])
someone.teleport(pos(r['x'], r['y'], r['z']))
def setblock(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'type':['type', 0, COBBLESTONE]})
WORLD.getBlockAt(r['x'], r['y'], r['z']).setType(r['type'])
def cube(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={
'type':['type', 0, COBBLESTONE],
'size':['size', 1, 4]})
size = min(r['size'], 12)
for x in range(size):
for y in range(size):
for z in range(size):
setblock(x + r['x'], y + r['y'], z + r['z'], r['type'])
def bolt(*args, **kwargs):
r = parseargswithpos(args, kwargs)
WORLD.strikeLightning(pos(r['x'], r['y'], r['z']))
def bless(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={
'type':['type', 0, Effect.COLOURED_DUST],
'vx':['vx', 1, 1],
'vy':['vy', 2, 1],
'vz':['vz', 3, 1],
'sp':['sp', 4, 100],
'q':['q', 5, 100],
'r':['r', 6, 20],
'block':['block', 7, COBBLESTONE],
'data':['data', 8, 0]})
WORLD.spigot().playEffect(pos(r['x'], r['y'], r['z']),
r['type'], r['block'].getId(),
r['data'], r['vx'], r['vy'], r['vz'],
r['sp'], r['q'], r['r'])
# don't know how to do this in spigot
# def lookingat(player):
# return LineTracer(player).getTargetBlock()
class SpigotCommand(Command):
def __init__(self, name, execfunc):
super(SpigotCommand, self).__init__(name)
self.execfunc = execfunc
def execute(self, caller, label, parameters):
self.execfunc(caller, parameters)
def registercommand(name, execfunc):
# Use like this:
# >>> def functiontest(caller, params):
# ... yell(params[0])
# >>> registercommand("test", functiontest)
_commandMap.register("jycraft", SpigotCommand(name, execfunc))
class EventListener(Listener):
def __init__(self, func):
self.func = func
def execute(self, event):
self.func(event)
def execute(listener, event):
listener.execute(event)
def registerhook(hookCls, execfunc, priority=EventPriority.NORMAL):
# Use like this:
# >>> from mcapi import *
# >>> from org.bukkit.event.block import BlockPlaceEvent
# >>> def place(e):
# ... yell("Placed {}".format(e.getBlockPlaced()))
# >>> registerhook(BlockPlaceEvent, place)
SERVER.getPluginManager().registerEvent(hookCls, EventListener(execfunc), priority, execute, PyContext.getPlugin())
| pos = (int(tr[0]), int(tr[1]), int(tr[2])) | conditional_block |
mcapi.py | print('Importing command definitions...')
from jycraft.plugin.interpreter import PyContext
from org.bukkit import Bukkit
from org.bukkit import Location
from org.bukkit import Material
from org.bukkit import Effect
from org.bukkit.command import Command
from org.bukkit.event import Listener, EventPriority
from random import *
SERVER = Bukkit.getServer()
WORLD = SERVER.getWorlds().get(0)
MORNING = 2000
NOON = 6000
EVENING = 14000
NIGHT = 18000
# reflection to get command map
_commandMapField = SERVER.getClass().getDeclaredField("commandMap")
_commandMapField.setAccessible(True)
_commandMap = _commandMapField.get(SERVER)
# Full list of BlockTypes is available in the JavaDocs on canarymod.net.
AIR = Material.AIR
STONE = Material.STONE
GRASS = Material.GRASS
DIRT = Material.DIRT
COBBLESTONE = Material.COBBLESTONE
WOOD_PLANKS = Material.WOOD
# SAPLING = BlockType.OakSapling
# BEDROCK = BlockType.Bedrock
# WATER_FLOWING = BlockType.WaterFlowing
# WATER = WATER_FLOWING
# WATER_STATIONARY = BlockType.Water
# LAVA_FLOWING = BlockType.LavaFlowing
# LAVA = LAVA_FLOWING
# LAVA_STATIONARY = BlockType.Lava
# SAND = BlockType.Sand
# GRAVEL = BlockType.Gravel
# GOLD_ORE = BlockType.GoldOre
# IRON_ORE = BlockType.IronOre
# COAL_ORE = BlockType.CoalOre
# WOOD = BlockType.OakLog
# LEAVES = BlockType.OakLeaves
# GLASS = BlockType.Glass
# LAPIS_LAZULI_ORE = BlockType.LapisOre
# LAPIS_LAZULI_BLOCK = BlockType.LapisBlock
# SANDSTONE = BlockType.Sandstone
# BED = BlockType.Bed
# COBWEB = BlockType.Web
# GRASS_TALL = BlockType.TallGrass
# WOOL = BlockType.WhiteWool
# FLOWER_YELLOW = BlockType.Dandelion
# FLOWER_CYAN = BlockType.BlueOrchid
# MUSHROOM_BROWN = BlockType.BrownMushroom
# MUSHROOM_RED = BlockType.RedMushroom
# GOLD_BLOCK = BlockType.GoldBlock
# IRON_BLOCK = BlockType.IronBlock
# STONE_SLAB_DOUBLE = BlockType.DoubleStoneSlab
# STONE_SLAB = BlockType.StoneSlab
# BRICK_BLOCK = BlockType.BrickBlock
# TNT = BlockType.TNT
# BOOKSHELF = BlockType.Bookshelf
# MOSS_STONE = BlockType.MossyCobble
# OBSIDIAN = BlockType.Obsidian
# TORCH = BlockType.Torch
# FIRE = BlockType.FireBlock
# STAIRS_WOOD = BlockType.OakStairs
# CHEST = BlockType.Chest
# DIAMOND_ORE = BlockType.DiamondOre
# DIAMOND_BLOCK = BlockType.DiamondBlock
# CRAFTING_TABLE = BlockType.Workbench
# FARMLAND = BlockType.Farmland
# FURNACE_INACTIVE = BlockType.Furnace
# FURNACE_ACTIVE = BlockType.BurningFurnace
# DOOR_WOOD = BlockType.WoodenDoor
# LADDER = BlockType.Ladder
# STAIRS_COBBLESTONE = BlockType.StoneStairs
# DOOR_IRON = BlockType.IronDoor
# REDSTONE_ORE = BlockType.RedstoneOre
# SNOW = BlockType.Snow
# ICE = BlockType.Ice
# SNOW_BLOCK = BlockType.SnowBlock
# CACTUS = BlockType.Cactus
# CLAY = BlockType.Clay
# SUGAR_CANE = BlockType.Reed
# FENCE = BlockType.Fence
# GLOWSTONE_BLOCK = BlockType.GlowStone
# STONE_BRICK = BlockType.StoneBrick
# GLASS_PANE = BlockType.GlassPane
# MELON = BlockType.Melon
# FENCE_GATE = BlockType.FenceGate
def pos(*args):
return Location(WORLD, *args)
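# parseargswithpos is the shared argument parser for the world-editing helpers
# below. The first three positional arguments are always the x, y, z coordinates
# (converted to int when asint is True). Every extra parameter is described by a
# ledger entry of the form name: [keyword_name, positional_offset, default]; the
# value is taken from kwargs first, then from the positional argument at index
# 3 + positional_offset, and finally falls back to the default.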
def parseargswithpos(args, kwargs, asint=True, ledger={}):
results = {}
base = 3
tr = [args[0], args[1], args[2]]
if asint:
pos = (int(tr[0]), int(tr[1]), int(tr[2]))
results['x'] = pos[0]
results['y'] = pos[1]
results['z'] = pos[2]
for k,v in ledger.iteritems():
results[k] = kwargs.get(v[0], None)
if results[k] is None:
if len(args) > base+v[1]:
results[k] = args[base+v[1]]
else:
results[k] = v[2]
return results
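# Example (illustrative): with the ledger used by cube() below,
# cube(0, 64, 0, STONE, size=5) resolves to
# {'x': 0, 'y': 64, 'z': 0, 'type': STONE, 'size': 5},
# while cube(0, 64, 0) falls back to the ledger defaults COBBLESTONE and 4.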
def getplayer(name):
return SERVER.getPlayer(name)
def randomplayer():
pl = SERVER.getOnlinePlayers()
return choice(pl)
def yell(message):
SERVER.broadcastMessage(message)
| WORLD.setTime(time)
def weather(rainsnow, thunder):
WORLD.setStorm(rainsnow)
WORLD.setThundering(thunder)
def explosion(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'power':['power', 0, 8]})
WORLD.createExplosion(r['x'], r['y'], r['z'], r['power'], True)
def teleport(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'whom':['whom', 0, 'GameStartSchool']})
someone = getplayer(r['whom'])
someone.teleport(pos(r['x'], r['y'], r['z']))
def setblock(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={'type':['type', 0, COBBLESTONE]})
WORLD.getBlockAt(r['x'], r['y'], r['z']).setType(r['type'])
def cube(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={
'type':['type', 0, COBBLESTONE],
'size':['size', 1, 4]})
size = min(r['size'], 12)
for x in range(size):
for y in range(size):
for z in range(size):
setblock(x + r['x'], y + r['y'], z + r['z'], r['type'])
def bolt(*args, **kwargs):
r = parseargswithpos(args, kwargs)
WORLD.strikeLightning(pos(r['x'], r['y'], r['z']))
def bless(*args, **kwargs):
r = parseargswithpos(args, kwargs, ledger={
'type':['type', 0, Effect.COLOURED_DUST],
'vx':['vx', 1, 1],
'vy':['vy', 2, 1],
'vz':['vz', 3, 1],
'sp':['sp', 4, 100],
'q':['q', 5, 100],
'r':['r', 6, 20],
'block':['block', 7, COBBLESTONE],
'data':['data', 8, 0]})
WORLD.spigot().playEffect(pos(r['x'], r['y'], r['z']),
r['type'], r['block'].getId(),
r['data'], r['vx'], r['vy'], r['vz'],
r['sp'], r['q'], r['r'])
# don't know how to do this in spigot
# def lookingat(player):
# return LineTracer(player).getTargetBlock()
class SpigotCommand(Command):
def __init__(self, name, execfunc):
super(SpigotCommand, self).__init__(name)
self.execfunc = execfunc
def execute(self, caller, label, parameters):
self.execfunc(caller, parameters)
def registercommand(name, execfunc):
# Use like this:
# >>> def functiontest(caller, params):
# ... yell(params[0])
# >>> registercommand("test", functiontest)
_commandMap.register("jycraft", SpigotCommand(name, execfunc))
class EventListener(Listener):
def __init__(self, func):
self.func = func
def execute(self, event):
self.func(event)
def execute(listener, event):
listener.execute(event)
def registerhook(hookCls, execfunc, priority=EventPriority.NORMAL):
# Use like this:
# >>> from mcapi import *
# >>> from org.bukkit.event.block import BlockPlaceEvent
# >>> def place(e):
# ... yell("Placed {}".format(e.getBlockPlaced()))
# >>> registerhook(BlockPlaceEvent, place)
SERVER.getPluginManager().registerEvent(hookCls, EventListener(execfunc), priority, execute, PyContext.getPlugin()) |
def time(time): | random_line_split |
id.test.ts | import validNodeId from '../../../src/validations/node/id'
describe('Node Id Validations', () => {
describe('When node does not have "id"', () => {
const node = { states: ['T', 'F'] }
it('throws an error that node id is required', () => {
expect(() => {
// @ts-ignore
validNodeId(node)
}).toThrow(`The node id is required and must be a string.
Node: {"states": ["T", "F"]}`)
})
})
describe('When node has "id" but is not a string', () => {
const node = { id: 123 }
it('throws an error that node id must be a string', () => {
expect(() => {
// @ts-ignore
validNodeId(node)
}).toThrow(`The node id must be a string.
Node id type: Number | Node id: 123
Node: {"id": 123}`)
})
})
describe('When node has "id" and is a string', () => {
const node = { id: 'node-id' }
it('does not throw an error', () => {
expect(() => {
// @ts-ignore
validNodeId(node)
}).not.toThrow()
})
})
}) | random_line_split |
|
libraryExtractor.ts | ///<reference path='refs.ts'/>
module TDev
{
// BUGS:
// - Refs not handled correctly (see TD Junior)
// - we are not generating extension methods whenever possible (see from_string in TD Junior)
// - does not show LibraryAbstractKind in dependencies
// TODOs:
// - automated testing
// - move rewriting to ast, so we can test on node
export class LibraryExtractor {
private split: AST.SplitAppIntoAppAndLibrary;
constructor() {
this.reset();
}
public reset() {
this.split = new AST.SplitAppIntoAppAndLibrary();
}
private getAllDeclsToMove() : AST.DeclAndDeps[] {
return this.split.getAllDeclsToMove();
}
private getRemainingDecls(): AST.DeclAndDeps[] {
return this.split.getRemainingDecls();
}
// everything starts off with the user requesting to move a declaration D
// from a script/app into a library. The identification of a pair
// of app A and library L is needed before we can start the process. Certain
// declarations can't be moved.
public moveDecl(d: AST.Decl) {
this.split.invalidate();
if (d instanceof AST.App) {
HTML.showErrorNotification(lf("can't add a script to a library"));
return;
} else if (d instanceof AST.Action && (<AST.Action>d).isEvent()) {
HTML.showErrorNotification(lf("can't add an event function to a library (use event handlers instead)"));
return;
} else if (d instanceof AST.LibraryRef) {
HTML.showErrorNotification(lf("can't move a library reference directly"));
return;
}
if (this.split.getLib() == null) {
this.selectLibraryFromScript(() => {
if (this.split.getLib() != null) {
this.processDecl(d);
}
});
} else {
this.processDecl(d);
}
}
static defaultLibraryTemplate =
"meta version 'v2.2';\n" +
"meta isLibrary 'yes';\n" +
"meta hasIds 'yes';\n"
;
// this dialog prompts the user to select an existing library in the app
// or create a new one
private selectLibraryFromScript(next: () => void) {
var m = new ModalDialog();
m.add(div("wall-dialog-header", lf("move to library (identify target library)")));
if (Script.hasLibraries()) {
m.add(div("wall-dialog-body", lf("select a referenced library:")));
m.add(
Script.libraries().map((lib) => {
var b = DeclRender.mkBox(lib);
b.withClick((e) => {
tick(Ticks.toLibExistingLib);
this.split.setAppAndLib(Script,lib);
m.dismiss();
});
return b;
})
);
}
if (Script.hasLibraries()) {
m.add(div("wall-body-body", lf("or create a new library:")));
} else {
m.add(div("wall-body-body", lf("create a new library:")));
}
m.add([
div("wall-dialog-buttons",
HTML.mkButton(lf("new library"), () => {
// make a library with default name and select it;
// again, a TD-specific feature not for external editors
var stub: World.ScriptStub = {
scriptName: "mylib",
editorName: "touchdevelop",
scriptText: LibraryExtractor.defaultLibraryTemplate,
};
TheEditor.newScriptAsync(stub)
.then((newLibrary) => {
var header = <Cloud.Header>newLibrary;
var lib = TheEditor.freshLibrary();
this.split.setAppAndLib(Script, lib);
Script.addDecl(lib);
TheEditor.bindLibrary(lib, Browser.TheHost.createInstalled(header));
tick(Ticks.toLibNewLib);
m.dismiss();
});
}),
HTML.mkButton(lf("cancel"), () => m.dismiss())),
]);
|
m.setScroll();
m.fullWhite();
m.show();
}
// if the declaration has not been added to the "pending" set
// then prompt the user if it's OK to move the declaration and all
// declarations in its downward closure
private processDecl(d: AST.Decl) {
if (this.getAllDeclsToMove().indexOf(this.split.getter(d)) < 0) {
var newOne = this.split.getter(d);
this.askOKtoMove(newOne, (moveOK) => {
if (moveOK) {
this.split.addToDeclsToMove([newOne]);
this.selectFromRemaining();
}
});
} else
this.selectFromRemaining();
}
private filterUnwanted(dl: AST.DeclAndDeps[]) {
return dl.filter(dd => !(dd.decl instanceof AST.LibraryRef));
}
private askOKtoMove(toMove: AST.DeclAndDeps, next:(ok:boolean) => void) {
var m = new ModalDialog();
var moveOK = false;
var theRest = this.filterUnwanted(this.split.computeClosure([toMove]));
if (theRest.length > 0) {
m.add(
div("wall-dialog-header", "moving",
DeclRender.mkBox(toMove.decl),
"to library",
DeclRender.mkBox(this.split.getLib()),
"requires moving",
theRest.map(dd => DeclRender.mkBox(dd.decl))
)
);
m.add(div("wall-dialog-buttons",
HTML.mkButton(lf("ok"), () => { tick(Ticks.toLibOKtoMove); moveOK = true; m.dismiss(); }),
HTML.mkButton(lf("cancel"), () => m.dismiss())
));
m.onDismiss = () => { next(moveOK); };
m.setScroll();
m.fullWhite();
m.show();
} else {
next(true);
}
}
// once there are some declarations selected to move to the library, we help
// the user by identifying other declarations they might want to consider moving:
//
// 1. subsetOfDeclsToMove: these are declarations whose direct accesses
// are a subset of the declsToMove; that is, the downward closure of D in this set
// is guaranteed to be in declsToMove (won't drag anything else in).
//
// 2. someFromDeclsToMove: these declarations access something from declsToMove, but
// access some things outside the set.
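// For example (illustrative): if action A is already pending and action B only
// calls A, B falls into subsetOfDeclsToMove; if action C calls A but also some
// declaration that is not pending, C falls into someFromDeclsToMove.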
private selectFromRemaining() {
var remaining = this.getRemainingDecls();
// we are going to rate the remainingDecls with respect to the current
// set of declarations pending to move to library
remaining.forEach((a) => AST.DeclAndDeps.rateTargetAgainstDecls(this.getAllDeclsToMove(), a));
remaining.sort(AST.DeclAndDeps.compareSize);
var subsetOfDeclsToMove =
remaining.filter((dd) => dd.numberDirectDeclsToMove > 0 &&
dd.numberDirectDeclsToMove == dd.getAllDirectAccesses().length);
var someFromDeclsToMove =
remaining.filter((dd) => dd.numberDirectDeclsToMove > 0 &&
dd.numberDirectDeclsToMove < dd.getAllDirectAccesses().length);
this.showDeclAndDeps(subsetOfDeclsToMove, someFromDeclsToMove);
}
private updateBoxes(dd: AST.DeclAndDeps) {
if ((<any>dd).boxes != null) {
(<any>dd).boxes.forEach(b => {
(<HTMLElement>b).style.backgroundColor = dd.count > 0 ? "lightblue" : "white";
});
}
}
private mkCheckBox(dd: AST.DeclAndDeps) {
var b = DeclRender.mkBox(dd.decl);
(<any>dd).myCheckBox = b;
b.setFlag("selected", dd.getInclude());
b.onclick = (e) => {
dd.setInclude(!dd.getInclude());
b.setFlag("selected", dd.getInclude());
dd.count += (dd.getInclude() ? 1 : -1);
this.updateBoxes(dd);
dd.getTransitiveClosure().forEach(ee => { ee.count += (dd.getInclude() ? 1 : -1); });
dd.getTransitiveClosure().forEach(ee => this.updateBoxes(ee));
};
return b;
}
private showDeclAndDep(dd: AST.DeclAndDeps, showTrans:boolean) {
var rest = [div("wall-dialog-body", " ")];
if (showTrans) {
var tc = this.filterUnwanted(dd.getTransitiveClosure());
var upto10 = tc.length >= 10 ? tc.slice(0, 10) : tc;
rest = upto10.map(ee => {
var ret = div("smallDecl", DeclRender.mkNameSpaceDecl(ee.decl));
ret.style.backgroundColor = ee.count > 0 ? "lightblue" : "white";
if ((<any>ee).boxes == null) {
(<any>ee).boxes = [ret];
} else {
(<any>ee).boxes.push(ret);
}
return div("smallDecl", ret, " ");
});
rest.unshift(div("smallDecl", lf("accesses: ")));
if (tc.length > 10) {
var more = tc.length - 10;
// TODO: make a button to expose the rest of the list
rest.push(div("smallDecl", "load (" + more.toString() + " more)"));
}
rest.push(div("smallDecl", " "));
}
// WARNING: keep the returned expression on the same line as `return`; otherwise a semicolon is implicitly inserted (JavaScript ASI semantics)
return [this.mkCheckBox(dd)].concat(rest);
}
// shows the remaining declarations that fall into the subset/notsubset classification
// (note that this doesn't include all remaining declarations)
private showDeclAndDeps(subset: AST.DeclAndDeps[], notsubset: AST.DeclAndDeps[]) {
this.split.getAll().forEach(dd => { dd.count = 0; (<any>dd).boxes = null; });
this.getAllDeclsToMove().forEach(dd => dd.count = 1);
var closureBoxes: HTMLElement[] = [];
var m = new ModalDialog();
m.add([
div("wall-dialog-header",
lf("move to library (pending)"),
DeclRender.mkBox(this.split.getLib()))
]);
m.add([div("wall-dialog-body", lf("elements pending to move:"))]);
m.add(this.filterUnwanted(this.getAllDeclsToMove()).map(dd => {
var name = div("smallDecl",DeclRender.mkNameSpaceDecl(dd.decl));
name.style.backgroundColor = "lightblue";
return div("smallDecl", name, " ");
}));
m.add([
div("wall-dialog-buttons",
((subset.length == 0 && notsubset.length == 0) ? <any>"" :
<any>HTML.mkButton(lf("advance selected to pending"), () => {
tick(Ticks.toLibAdvanceSelectedToPending);
// TODO: don't dismiss, reconfigure modal dialog in place instead.
m.dismiss();
var selectedToAdd = subset.concat(notsubset).filter(dd => dd.getInclude());
this.split.addToDeclsToMove(selectedToAdd);
this.selectFromRemaining();
})),
HTML.mkButton(lf("make the move"), () => {
var errors = AST.TypeChecker.tcApp(this.split.getApp());
if (errors == 0) {
m.dismiss();
ModalDialog.ask(lf("Confirm rewrite (no undo)?"), lf("confirm"), () => {
tick(Ticks.toLibMakeTheMove);
this.moveToLibrary();
});
} else {
HTML.showErrorNotification(lf("You must correct errors in script before rewriting can proceed."));
}
}),
HTML.mkButton(lf("discard pending"), () => { tick(Ticks.toLibDiscardPending); this.reset(); m.dismiss(); }),
HTML.mkButton(lf("exit to editor"), () => { tick(Ticks.toLibExitToEditor); m.dismiss(); })
)
]);
if (subset.length > 0) {
m.add(div("wall-dialog-header", lf("elements you should think about moving to library (they only access pending elements).")));
subset.forEach(dd => m.add(this.showDeclAndDep(dd, true)));
}
if (notsubset.length > 0) {
m.add(div("wall-dialog-header", lf("elements that access pending elements and other elements:")));
notsubset.forEach(dd => m.add(this.showDeclAndDep(dd,true)));
}
m.onDismiss = () => { };
m.setScroll();
m.fullWhite();
m.show();
}
private moveToLibrary() {
// perform the split of application into application/library
this.split.makeSplit();
// make the new library (TODO: most of this code should be elsewhere)
World.getInstalledHeaderAsync(this.split.getLib().guid)
.then((hd: Cloud.Header) => {
if (!hd) Util.userError("no such script " + this.split.getLib().guid);
return World.updateInstalledScriptAsync(hd, this.split.library, null)
})
.then(() => {
this.split.getApp().notifyChangeAll();
TheEditor.typeCheckNow();
TheEditor.undoMgr.pushMainUndoState();
// TODO: get rid of this horrible hack
Util.setTimeout(1, () => {
this.reset();
TheEditor.reload();
});
})
.done()
}
}
} | m.onDismiss = () => {
next();
}; | random_line_split |
libraryExtractor.ts | ///<reference path='refs.ts'/>
module TDev
{
// BUGS:
// - Refs not handled correctly (see TD Junior)
// - we are not generating extension methods whenever possible (see from_string in TD Junior)
// - does not show LibraryAbstractKind in dependencies
// TODOs:
// - automated testing
// - move rewriting to ast, so we can test on node
export class LibraryExtractor {
private split: AST.SplitAppIntoAppAndLibrary;
constructor() {
this.reset();
}
public reset() {
this.split = new AST.SplitAppIntoAppAndLibrary();
}
private getAllDeclsToMove() : AST.DeclAndDeps[] {
return this.split.getAllDeclsToMove();
}
private | (): AST.DeclAndDeps[] {
return this.split.getRemainingDecls();
}
// everything starts off with the user requesting to move a declaration D
// from a script/app into a library. The identification of a pair
// of app A and library L is needed before we can start the process. Certain
// declarations can't be moved.
public moveDecl(d: AST.Decl) {
this.split.invalidate();
if (d instanceof AST.App) {
HTML.showErrorNotification(lf("can't add a script to a library"));
return;
} else if (d instanceof AST.Action && (<AST.Action>d).isEvent()) {
HTML.showErrorNotification(lf("can't add an event function to a library (use event handlers instead)"));
return;
} else if (d instanceof AST.LibraryRef) {
HTML.showErrorNotification(lf("can't move a library reference directly"));
return;
}
if (this.split.getLib() == null) {
this.selectLibraryFromScript(() => {
if (this.split.getLib() != null) {
this.processDecl(d);
}
});
} else {
this.processDecl(d);
}
}
static defaultLibraryTemplate =
"meta version 'v2.2';\n" +
"meta isLibrary 'yes';\n" +
"meta hasIds 'yes';\n"
;
// this dialog prompts the user to select an existing library in the app
// or create a new one
private selectLibraryFromScript(next: () => void) {
var m = new ModalDialog();
m.add(div("wall-dialog-header", lf("move to library (identify target library)")));
if (Script.hasLibraries()) {
m.add(div("wall-dialog-body", lf("select a referenced library:")));
m.add(
Script.libraries().map((lib) => {
var b = DeclRender.mkBox(lib);
b.withClick((e) => {
tick(Ticks.toLibExistingLib);
this.split.setAppAndLib(Script,lib);
m.dismiss();
});
return b;
})
);
}
if (Script.hasLibraries()) {
m.add(div("wall-body-body", lf("or create a new library:")));
} else {
m.add(div("wall-body-body", lf("create a new library:")));
}
m.add([
div("wall-dialog-buttons",
HTML.mkButton(lf("new library"), () => {
// make a library with default name and select it;
// again, a TD-specific feature not for external editors
var stub: World.ScriptStub = {
scriptName: "mylib",
editorName: "touchdevelop",
scriptText: LibraryExtractor.defaultLibraryTemplate,
};
TheEditor.newScriptAsync(stub)
.then((newLibrary) => {
var header = <Cloud.Header>newLibrary;
var lib = TheEditor.freshLibrary();
this.split.setAppAndLib(Script, lib);
Script.addDecl(lib);
TheEditor.bindLibrary(lib, Browser.TheHost.createInstalled(header));
tick(Ticks.toLibNewLib);
m.dismiss();
});
}),
HTML.mkButton(lf("cancel"), () => m.dismiss())),
]);
m.onDismiss = () => {
next();
};
m.setScroll();
m.fullWhite();
m.show();
}
// if the declaration has not been added to the "pending" set
// then prompt the user if it's OK to move the declaration and all
// declarations in its downward closure
private processDecl(d: AST.Decl) {
if (this.getAllDeclsToMove().indexOf(this.split.getter(d)) < 0) {
var newOne = this.split.getter(d);
this.askOKtoMove(newOne, (moveOK) => {
if (moveOK) {
this.split.addToDeclsToMove([newOne]);
this.selectFromRemaining();
}
});
} else
this.selectFromRemaining();
}
private filterUnwanted(dl: AST.DeclAndDeps[]) {
return dl.filter(dd => !(dd.decl instanceof AST.LibraryRef));
}
private askOKtoMove(toMove: AST.DeclAndDeps, next:(ok:boolean) => void) {
var m = new ModalDialog();
var moveOK = false;
var theRest = this.filterUnwanted(this.split.computeClosure([toMove]));
if (theRest.length > 0) {
m.add(
div("wall-dialog-header", "moving",
DeclRender.mkBox(toMove.decl),
"to library",
DeclRender.mkBox(this.split.getLib()),
"requires moving",
theRest.map(dd => DeclRender.mkBox(dd.decl))
)
);
m.add(div("wall-dialog-buttons",
HTML.mkButton(lf("ok"), () => { tick(Ticks.toLibOKtoMove); moveOK = true; m.dismiss(); }),
HTML.mkButton(lf("cancel"), () => m.dismiss())
));
m.onDismiss = () => { next(moveOK); };
m.setScroll();
m.fullWhite();
m.show();
} else {
next(true);
}
}
// once there are some declarations selected to move to the library, we help
// the user by identifying other declarations they might want to consider moving:
//
// 1. subsetOfDeclsToMove: these are declarations whose direct accesses
// are a subset of the declsToMove; that is, the downward closure of D in this set
// is guaranteed to be in declsToMove (won't drag anything else in).
//
// 2. someFromDeclsToMove: these declarations access something from declsToMove, but
// access some things outside the set.
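// For example (illustrative): if action A is already pending and action B only
// calls A, B falls into subsetOfDeclsToMove; if action C calls A but also some
// declaration that is not pending, C falls into someFromDeclsToMove.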
private selectFromRemaining() {
var remaining = this.getRemainingDecls();
// we are going to rate the remainingDecls with respect to the current
// set of declarations pending to move to library
remaining.forEach((a) => AST.DeclAndDeps.rateTargetAgainstDecls(this.getAllDeclsToMove(), a));
remaining.sort(AST.DeclAndDeps.compareSize);
var subsetOfDeclsToMove =
remaining.filter((dd) => dd.numberDirectDeclsToMove > 0 &&
dd.numberDirectDeclsToMove == dd.getAllDirectAccesses().length);
var someFromDeclsToMove =
remaining.filter((dd) => dd.numberDirectDeclsToMove > 0 &&
dd.numberDirectDeclsToMove < dd.getAllDirectAccesses().length);
this.showDeclAndDeps(subsetOfDeclsToMove, someFromDeclsToMove);
}
private updateBoxes(dd: AST.DeclAndDeps) {
if ((<any>dd).boxes != null) {
(<any>dd).boxes.forEach(b => {
(<HTMLElement>b).style.backgroundColor = dd.count > 0 ? "lightblue" : "white";
});
}
}
private mkCheckBox(dd: AST.DeclAndDeps) {
var b = DeclRender.mkBox(dd.decl);
(<any>dd).myCheckBox = b;
b.setFlag("selected", dd.getInclude());
b.onclick = (e) => {
dd.setInclude(!dd.getInclude());
b.setFlag("selected", dd.getInclude());
dd.count += (dd.getInclude() ? 1 : -1);
this.updateBoxes(dd);
dd.getTransitiveClosure().forEach(ee => { ee.count += (dd.getInclude() ? 1 : -1); });
dd.getTransitiveClosure().forEach(ee => this.updateBoxes(ee));
};
return b;
}
private showDeclAndDep(dd: AST.DeclAndDeps, showTrans:boolean) {
var rest = [div("wall-dialog-body", " ")];
if (showTrans) {
var tc = this.filterUnwanted(dd.getTransitiveClosure());
var upto10 = tc.length >= 10 ? tc.slice(0, 10) : tc;
rest = upto10.map(ee => {
var ret = div("smallDecl", DeclRender.mkNameSpaceDecl(ee.decl));
ret.style.backgroundColor = ee.count > 0 ? "lightblue" : "white";
if ((<any>ee).boxes == null) {
(<any>ee).boxes = [ret];
} else {
(<any>ee).boxes.push(ret);
}
return div("smallDecl", ret, " ");
});
rest.unshift(div("smallDecl", lf("accesses: ")));
if (tc.length > 10) {
var more = tc.length - 10;
// TODO: make a button to expose the rest of the list
rest.push(div("smallDecl", "load (" + more.toString() + " more)"));
}
rest.push(div("smallDecl", " "));
}
// WARNING: keep the returned expression on the same line as `return`; otherwise a semicolon is implicitly inserted (JavaScript ASI semantics)
return [this.mkCheckBox(dd)].concat(rest);
}
// shows the remaining declarations that fall into the subset/notsubset classification
// (note that this doesn't include all remaining declarations)
private showDeclAndDeps(subset: AST.DeclAndDeps[], notsubset: AST.DeclAndDeps[]) {
this.split.getAll().forEach(dd => { dd.count = 0; (<any>dd).boxes = null; });
this.getAllDeclsToMove().forEach(dd => dd.count = 1);
var closureBoxes: HTMLElement[] = [];
var m = new ModalDialog();
m.add([
div("wall-dialog-header",
lf("move to library (pending)"),
DeclRender.mkBox(this.split.getLib()))
]);
m.add([div("wall-dialog-body", lf("elements pending to move:"))]);
m.add(this.filterUnwanted(this.getAllDeclsToMove()).map(dd => {
var name = div("smallDecl",DeclRender.mkNameSpaceDecl(dd.decl));
name.style.backgroundColor = "lightblue";
return div("smallDecl", name, " ");
}));
m.add([
div("wall-dialog-buttons",
((subset.length == 0 && notsubset.length == 0) ? <any>"" :
<any>HTML.mkButton(lf("advance selected to pending"), () => {
tick(Ticks.toLibAdvanceSelectedToPending);
// TODO: don't dismiss, reconfigure modal dialog in place instead.
m.dismiss();
var selectedToAdd = subset.concat(notsubset).filter(dd => dd.getInclude());
this.split.addToDeclsToMove(selectedToAdd);
this.selectFromRemaining();
})),
HTML.mkButton(lf("make the move"), () => {
var errors = AST.TypeChecker.tcApp(this.split.getApp());
if (errors == 0) {
m.dismiss();
ModalDialog.ask(lf("Confirm rewrite (no undo)?"), lf("confirm"), () => {
tick(Ticks.toLibMakeTheMove);
this.moveToLibrary();
});
} else {
HTML.showErrorNotification(lf("You must correct errors in script before rewriting can proceed."));
}
}),
HTML.mkButton(lf("discard pending"), () => { tick(Ticks.toLibDiscardPending); this.reset(); m.dismiss(); }),
HTML.mkButton(lf("exit to editor"), () => { tick(Ticks.toLibExitToEditor); m.dismiss(); })
)
]);
if (subset.length > 0) {
m.add(div("wall-dialog-header", lf("elements you should think about moving to library (they only access pending elements).")));
subset.forEach(dd => m.add(this.showDeclAndDep(dd, true)));
}
if (notsubset.length > 0) {
m.add(div("wall-dialog-header", lf("elements that access pending elements and other elements:")));
notsubset.forEach(dd => m.add(this.showDeclAndDep(dd,true)));
}
m.onDismiss = () => { };
m.setScroll();
m.fullWhite();
m.show();
}
private moveToLibrary() {
// perform the split of application into application/library
this.split.makeSplit();
// make the new library (TODO: most of this code should be elsewhere)
World.getInstalledHeaderAsync(this.split.getLib().guid)
.then((hd: Cloud.Header) => {
if (!hd) Util.userError("no such script " + this.split.getLib().guid);
return World.updateInstalledScriptAsync(hd, this.split.library, null)
})
.then(() => {
this.split.getApp().notifyChangeAll();
TheEditor.typeCheckNow();
TheEditor.undoMgr.pushMainUndoState();
// TODO: get rid of this horrible hack
Util.setTimeout(1, () => {
this.reset();
TheEditor.reload();
});
})
.done()
}
}
}
| getRemainingDecls | identifier_name |
libraryExtractor.ts | ///<reference path='refs.ts'/>
module TDev
{
// BUGS:
// - Refs not handled correctly (see TD Junior)
// - we are not generating extension methods whenever possible (see from_string in TD Junior)
// - does not show LibraryAbstractKind in dependencies
// TODOs:
// - automated testing
// - move rewriting to ast, so we can test on node
export class LibraryExtractor {
private split: AST.SplitAppIntoAppAndLibrary;
constructor() {
this.reset();
}
public reset() {
this.split = new AST.SplitAppIntoAppAndLibrary();
}
private getAllDeclsToMove() : AST.DeclAndDeps[] {
return this.split.getAllDeclsToMove();
}
private getRemainingDecls(): AST.DeclAndDeps[] {
return this.split.getRemainingDecls();
}
// everything starts off with the user requesting to move a declaration D
// from a script/app into a library. The identification of a pair
// of app A and library L is needed before we can start the process. Certain
// declarations can't be moved.
public moveDecl(d: AST.Decl) {
this.split.invalidate();
if (d instanceof AST.App) {
HTML.showErrorNotification(lf("can't add a script to a library"));
return;
} else if (d instanceof AST.Action && (<AST.Action>d).isEvent()) {
HTML.showErrorNotification(lf("can't add an event function to a library (use event handlers instead)"));
return;
} else if (d instanceof AST.LibraryRef) {
HTML.showErrorNotification(lf("can't move a library reference directly"));
return;
}
if (this.split.getLib() == null) {
this.selectLibraryFromScript(() => {
if (this.split.getLib() != null) {
this.processDecl(d);
}
});
} else {
this.processDecl(d);
}
}
static defaultLibraryTemplate =
"meta version 'v2.2';\n" +
"meta isLibrary 'yes';\n" +
"meta hasIds 'yes';\n"
;
// this dialog prompts the user to select an existing library in the app
// or create a new one
private selectLibraryFromScript(next: () => void) {
var m = new ModalDialog();
m.add(div("wall-dialog-header", lf("move to library (identify target library)")));
if (Script.hasLibraries()) {
m.add(div("wall-dialog-body", lf("select a referenced library:")));
m.add(
Script.libraries().map((lib) => {
var b = DeclRender.mkBox(lib);
b.withClick((e) => {
tick(Ticks.toLibExistingLib);
this.split.setAppAndLib(Script,lib);
m.dismiss();
});
return b;
})
);
}
if (Script.hasLibraries()) {
m.add(div("wall-body-body", lf("or create a new library:")));
} else {
m.add(div("wall-body-body", lf("create a new library:")));
}
m.add([
div("wall-dialog-buttons",
HTML.mkButton(lf("new library"), () => {
// make a library with default name and select it;
// again, a TD-specific feature not for external editors
var stub: World.ScriptStub = {
scriptName: "mylib",
editorName: "touchdevelop",
scriptText: LibraryExtractor.defaultLibraryTemplate,
};
TheEditor.newScriptAsync(stub)
.then((newLibrary) => {
var header = <Cloud.Header>newLibrary;
var lib = TheEditor.freshLibrary();
this.split.setAppAndLib(Script, lib);
Script.addDecl(lib);
TheEditor.bindLibrary(lib, Browser.TheHost.createInstalled(header));
tick(Ticks.toLibNewLib);
m.dismiss();
});
}),
HTML.mkButton(lf("cancel"), () => m.dismiss())),
]);
m.onDismiss = () => {
next();
};
m.setScroll();
m.fullWhite();
m.show();
}
// if the declaration has not been added to the "pending" set
// then prompt the user if it's OK to move the declaration and all
// declarations in its downward closure
private processDecl(d: AST.Decl) {
if (this.getAllDeclsToMove().indexOf(this.split.getter(d)) < 0) {
var newOne = this.split.getter(d);
this.askOKtoMove(newOne, (moveOK) => {
if (moveOK) |
});
} else
this.selectFromRemaining();
}
private filterUnwanted(dl: AST.DeclAndDeps[]) {
return dl.filter(dd => !(dd.decl instanceof AST.LibraryRef));
}
private askOKtoMove(toMove: AST.DeclAndDeps, next:(ok:boolean) => void) {
var m = new ModalDialog();
var moveOK = false;
var theRest = this.filterUnwanted(this.split.computeClosure([toMove]));
if (theRest.length > 0) {
m.add(
div("wall-dialog-header", "moving",
DeclRender.mkBox(toMove.decl),
"to library",
DeclRender.mkBox(this.split.getLib()),
"requires moving",
theRest.map(dd => DeclRender.mkBox(dd.decl))
)
);
m.add(div("wall-dialog-buttons",
HTML.mkButton(lf("ok"), () => { tick(Ticks.toLibOKtoMove); moveOK = true; m.dismiss(); }),
HTML.mkButton(lf("cancel"), () => m.dismiss())
));
m.onDismiss = () => { next(moveOK); };
m.setScroll();
m.fullWhite();
m.show();
} else {
next(true);
}
}
// once there are some declarations selected to move to the library, we help
// the user by identifying other declarations they might want to consider moving:
//
// 1. subsetOfDeclsToMove: these are declarations whose direct accesses
// are a subset of the declsToMove; that is, the downward closure of D in this set
// is guaranteed to be in declsToMove (won't drag anything else in).
//
// 2. someFromDeclsToMove: these declarations access something from declsToMove, but
// access some things outside the set.
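// For example (illustrative): if action A is already pending and action B only
// calls A, B falls into subsetOfDeclsToMove; if action C calls A but also some
// declaration that is not pending, C falls into someFromDeclsToMove.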
private selectFromRemaining() {
var remaining = this.getRemainingDecls();
// we are going to rate the remainingDecls with respect to the current
// set of declarations pending to move to library
remaining.forEach((a) => AST.DeclAndDeps.rateTargetAgainstDecls(this.getAllDeclsToMove(), a));
remaining.sort(AST.DeclAndDeps.compareSize);
var subsetOfDeclsToMove =
remaining.filter((dd) => dd.numberDirectDeclsToMove > 0 &&
dd.numberDirectDeclsToMove == dd.getAllDirectAccesses().length);
var someFromDeclsToMove =
remaining.filter((dd) => dd.numberDirectDeclsToMove > 0 &&
dd.numberDirectDeclsToMove < dd.getAllDirectAccesses().length);
this.showDeclAndDeps(subsetOfDeclsToMove, someFromDeclsToMove);
}
private updateBoxes(dd: AST.DeclAndDeps) {
if ((<any>dd).boxes != null) {
(<any>dd).boxes.forEach(b => {
(<HTMLElement>b).style.backgroundColor = dd.count > 0 ? "lightblue" : "white";
});
}
}
private mkCheckBox(dd: AST.DeclAndDeps) {
var b = DeclRender.mkBox(dd.decl);
(<any>dd).myCheckBox = b;
b.setFlag("selected", dd.getInclude());
b.onclick = (e) => {
dd.setInclude(!dd.getInclude());
b.setFlag("selected", dd.getInclude());
dd.count += (dd.getInclude() ? 1 : -1);
this.updateBoxes(dd);
dd.getTransitiveClosure().forEach(ee => { ee.count += (dd.getInclude() ? 1 : -1); });
dd.getTransitiveClosure().forEach(ee => this.updateBoxes(ee));
};
return b;
}
private showDeclAndDep(dd: AST.DeclAndDeps, showTrans:boolean) {
var rest = [div("wall-dialog-body", " ")];
if (showTrans) {
var tc = this.filterUnwanted(dd.getTransitiveClosure());
var upto10 = tc.length >= 10 ? tc.slice(0, 10) : tc;
rest = upto10.map(ee => {
var ret = div("smallDecl", DeclRender.mkNameSpaceDecl(ee.decl));
ret.style.backgroundColor = ee.count > 0 ? "lightblue" : "white";
if ((<any>ee).boxes == null) {
(<any>ee).boxes = [ret];
} else {
(<any>ee).boxes.push(ret);
}
return div("smallDecl", ret, " ");
});
rest.unshift(div("smallDecl", lf("accesses: ")));
if (tc.length > 10) {
var more = tc.length - 10;
// TODO: make a button to expose the rest of the list
rest.push(div("smallDecl", "load (" + more.toString() + " more)"));
}
rest.push(div("smallDecl", " "));
}
// WARNING: keep the returned expression on the same line as `return`; otherwise a semicolon is implicitly inserted (JavaScript ASI semantics)
return [this.mkCheckBox(dd)].concat(rest);
}
// shows the remaining declarations that fall into the subset/notsubset classification
// (note that this doesn't include all remaining declarations)
private showDeclAndDeps(subset: AST.DeclAndDeps[], notsubset: AST.DeclAndDeps[]) {
this.split.getAll().forEach(dd => { dd.count = 0; (<any>dd).boxes = null; });
this.getAllDeclsToMove().forEach(dd => dd.count = 1);
var closureBoxes: HTMLElement[] = [];
var m = new ModalDialog();
m.add([
div("wall-dialog-header",
lf("move to library (pending)"),
DeclRender.mkBox(this.split.getLib()))
]);
m.add([div("wall-dialog-body", lf("elements pending to move:"))]);
m.add(this.filterUnwanted(this.getAllDeclsToMove()).map(dd => {
var name = div("smallDecl",DeclRender.mkNameSpaceDecl(dd.decl));
name.style.backgroundColor = "lightblue";
return div("smallDecl", name, " ");
}));
m.add([
div("wall-dialog-buttons",
((subset.length == 0 && notsubset.length == 0) ? <any>"" :
<any>HTML.mkButton(lf("advance selected to pending"), () => {
tick(Ticks.toLibAdvanceSelectedToPending);
// TODO: don't dismiss, reconfigure modal dialog in place instead.
m.dismiss();
var selectedToAdd = subset.concat(notsubset).filter(dd => dd.getInclude());
this.split.addToDeclsToMove(selectedToAdd);
this.selectFromRemaining();
})),
HTML.mkButton(lf("make the move"), () => {
var errors = AST.TypeChecker.tcApp(this.split.getApp());
if (errors == 0) {
m.dismiss();
ModalDialog.ask(lf("Confirm rewrite (no undo)?"), lf("confirm"), () => {
tick(Ticks.toLibMakeTheMove);
this.moveToLibrary();
});
} else {
HTML.showErrorNotification(lf("You must correct errors in script before rewriting can proceed."));
}
}),
HTML.mkButton(lf("discard pending"), () => { tick(Ticks.toLibDiscardPending); this.reset(); m.dismiss(); }),
HTML.mkButton(lf("exit to editor"), () => { tick(Ticks.toLibExitToEditor); m.dismiss(); })
)
]);
if (subset.length > 0) {
m.add(div("wall-dialog-header", lf("elements you should think about moving to library (they only access pending elements).")));
subset.forEach(dd => m.add(this.showDeclAndDep(dd, true)));
}
if (notsubset.length > 0) {
m.add(div("wall-dialog-header", lf("elements that access pending elements and other elements:")));
notsubset.forEach(dd => m.add(this.showDeclAndDep(dd,true)));
}
m.onDismiss = () => { };
m.setScroll();
m.fullWhite();
m.show();
}
private moveToLibrary() {
// perform the split of application into application/library
this.split.makeSplit();
// make the new library (TODO: most of this code should be elsewhere)
World.getInstalledHeaderAsync(this.split.getLib().guid)
.then((hd: Cloud.Header) => {
if (!hd) Util.userError("no such script " + this.split.getLib().guid);
return World.updateInstalledScriptAsync(hd, this.split.library, null)
})
.then(() => {
this.split.getApp().notifyChangeAll();
TheEditor.typeCheckNow();
TheEditor.undoMgr.pushMainUndoState();
// TODO: get rid of this horrible hack
Util.setTimeout(1, () => {
this.reset();
TheEditor.reload();
});
})
.done()
}
}
}
| {
this.split.addToDeclsToMove([newOne]);
this.selectFromRemaining();
} | conditional_block |
libraryExtractor.ts | ///<reference path='refs.ts'/>
module TDev
{
// BUGS:
// - Refs not handled correctly (see TD Junior)
// - we are not generating extension methods whenever possible (see from_string in TD Junior)
// - does not show LibraryAbstractKind in dependencies
// TODOs:
// - automated testing
// - move rewriting to ast, so we can test on node
export class LibraryExtractor {
private split: AST.SplitAppIntoAppAndLibrary;
constructor() {
this.reset();
}
public reset() {
this.split = new AST.SplitAppIntoAppAndLibrary();
}
private getAllDeclsToMove() : AST.DeclAndDeps[] |
private getRemainingDecls(): AST.DeclAndDeps[] {
return this.split.getRemainingDecls();
}
// everything starts off with the user requesting to move a declaration D
// from a script/app into a library. The identification of a pair
// of app A and library L is needed before we can start the process. Certain
// declarations can't be moved.
public moveDecl(d: AST.Decl) {
this.split.invalidate();
if (d instanceof AST.App) {
HTML.showErrorNotification(lf("can't add a script to a library"));
return;
} else if (d instanceof AST.Action && (<AST.Action>d).isEvent()) {
HTML.showErrorNotification(lf("can't add an event function to a library (use event handlers instead)"));
return;
} else if (d instanceof AST.LibraryRef) {
HTML.showErrorNotification(lf("can't move a library reference directly"));
return;
}
if (this.split.getLib() == null) {
this.selectLibraryFromScript(() => {
if (this.split.getLib() != null) {
this.processDecl(d);
}
});
} else {
this.processDecl(d);
}
}
static defaultLibraryTemplate =
"meta version 'v2.2';\n" +
"meta isLibrary 'yes';\n" +
"meta hasIds 'yes';\n"
;
// this dialog prompts the user to select an existing library in the app
// or create a new one
private selectLibraryFromScript(next: () => void) {
var m = new ModalDialog();
m.add(div("wall-dialog-header", lf("move to library (identify target library)")));
if (Script.hasLibraries()) {
m.add(div("wall-dialog-body", lf("select a referenced library:")));
m.add(
Script.libraries().map((lib) => {
var b = DeclRender.mkBox(lib);
b.withClick((e) => {
tick(Ticks.toLibExistingLib);
this.split.setAppAndLib(Script,lib);
m.dismiss();
});
return b;
})
);
}
if (Script.hasLibraries()) {
m.add(div("wall-body-body", lf("or create a new library:")));
} else {
m.add(div("wall-body-body", lf("create a new library:")));
}
m.add([
div("wall-dialog-buttons",
HTML.mkButton(lf("new library"), () => {
// make a library with default name and select it;
// again, a TD-specific feature not for external editors
var stub: World.ScriptStub = {
scriptName: "mylib",
editorName: "touchdevelop",
scriptText: LibraryExtractor.defaultLibraryTemplate,
};
TheEditor.newScriptAsync(stub)
.then((newLibrary) => {
var header = <Cloud.Header>newLibrary;
var lib = TheEditor.freshLibrary();
this.split.setAppAndLib(Script, lib);
Script.addDecl(lib);
TheEditor.bindLibrary(lib, Browser.TheHost.createInstalled(header));
tick(Ticks.toLibNewLib);
m.dismiss();
});
}),
HTML.mkButton(lf("cancel"), () => m.dismiss())),
]);
m.onDismiss = () => {
next();
};
m.setScroll();
m.fullWhite();
m.show();
}
// if the declaration has not been added to the "pending" set
// then prompt the user if it's OK to move the declaration and all
// declarations in its downward closure
private processDecl(d: AST.Decl) {
if (this.getAllDeclsToMove().indexOf(this.split.getter(d)) < 0) {
var newOne = this.split.getter(d);
this.askOKtoMove(newOne, (moveOK) => {
if (moveOK) {
this.split.addToDeclsToMove([newOne]);
this.selectFromRemaining();
}
});
} else
this.selectFromRemaining();
}
private filterUnwanted(dl: AST.DeclAndDeps[]) {
return dl.filter(dd => !(dd.decl instanceof AST.LibraryRef));
}
private askOKtoMove(toMove: AST.DeclAndDeps, next:(ok:boolean) => void) {
var m = new ModalDialog();
var moveOK = false;
var theRest = this.filterUnwanted(this.split.computeClosure([toMove]));
if (theRest.length > 0) {
m.add(
div("wall-dialog-header", "moving",
DeclRender.mkBox(toMove.decl),
"to library",
DeclRender.mkBox(this.split.getLib()),
"requires moving",
theRest.map(dd => DeclRender.mkBox(dd.decl))
)
);
m.add(div("wall-dialog-buttons",
HTML.mkButton(lf("ok"), () => { tick(Ticks.toLibOKtoMove); moveOK = true; m.dismiss(); }),
HTML.mkButton(lf("cancel"), () => m.dismiss())
));
m.onDismiss = () => { next(moveOK); };
m.setScroll();
m.fullWhite();
m.show();
} else {
next(true);
}
}
// once there are some declarations selected to move to the library, we help
// the user by identifying other declarations they might want to consider moving:
//
// 1. subsetOfDeclsToMove: these are declarations whose direct accesses
// are a subset of the declsToMove; that is, the downward closure of D in this set
// is guaranteed to be in declsToMove (won't drag anything else in).
//
// 2. someFromDeclsToMove: these declarations access something from declsToMove, but
// access some things outside the set.
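// For example (illustrative): if action A is already pending and action B only
// calls A, B falls into subsetOfDeclsToMove; if action C calls A but also some
// declaration that is not pending, C falls into someFromDeclsToMove.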
private selectFromRemaining() {
var remaining = this.getRemainingDecls();
// we are going to rate the remainingDecls with respect to the current
// set of declarations pending to move to library
remaining.forEach((a) => AST.DeclAndDeps.rateTargetAgainstDecls(this.getAllDeclsToMove(), a));
remaining.sort(AST.DeclAndDeps.compareSize);
var subsetOfDeclsToMove =
remaining.filter((dd) => dd.numberDirectDeclsToMove > 0 &&
dd.numberDirectDeclsToMove == dd.getAllDirectAccesses().length);
var someFromDeclsToMove =
remaining.filter((dd) => dd.numberDirectDeclsToMove > 0 &&
dd.numberDirectDeclsToMove < dd.getAllDirectAccesses().length);
this.showDeclAndDeps(subsetOfDeclsToMove, someFromDeclsToMove);
}
private updateBoxes(dd: AST.DeclAndDeps) {
if ((<any>dd).boxes != null) {
(<any>dd).boxes.forEach(b => {
(<HTMLElement>b).style.backgroundColor = dd.count > 0 ? "lightblue" : "white";
});
}
}
private mkCheckBox(dd: AST.DeclAndDeps) {
var b = DeclRender.mkBox(dd.decl);
(<any>dd).myCheckBox = b;
b.setFlag("selected", dd.getInclude());
b.onclick = (e) => {
dd.setInclude(!dd.getInclude());
b.setFlag("selected", dd.getInclude());
dd.count += (dd.getInclude() ? 1 : -1);
this.updateBoxes(dd);
dd.getTransitiveClosure().forEach(ee => { ee.count += (dd.getInclude() ? 1 : -1); });
dd.getTransitiveClosure().forEach(ee => this.updateBoxes(ee));
};
return b;
}
private showDeclAndDep(dd: AST.DeclAndDeps, showTrans:boolean) {
var rest = [div("wall-dialog-body", " ")];
if (showTrans) {
var tc = this.filterUnwanted(dd.getTransitiveClosure());
var upto10 = tc.length >= 10 ? tc.slice(0, 10) : tc;
rest = upto10.map(ee => {
var ret = div("smallDecl", DeclRender.mkNameSpaceDecl(ee.decl));
ret.style.backgroundColor = ee.count > 0 ? "lightblue" : "white";
if ((<any>ee).boxes == null) {
(<any>ee).boxes = [ret];
} else {
(<any>ee).boxes.push(ret);
}
return div("smallDecl", ret, " ");
});
rest.unshift(div("smallDecl", lf("accesses: ")));
if (tc.length > 10) {
var more = tc.length - 10;
// TODO: make a button to expose the rest of the list
rest.push(div("smallDecl", "load (" + more.toString() + " more)"));
}
rest.push(div("smallDecl", " "));
}
// WARNING: keep the returned expression on the same line as `return`; otherwise a semicolon is implicitly inserted (JavaScript ASI semantics)
return [this.mkCheckBox(dd)].concat(rest);
}
// shows the remaining declarations that fall into the subset/notsubset classification
// (note that this doesn't include all remaining declarations)
private showDeclAndDeps(subset: AST.DeclAndDeps[], notsubset: AST.DeclAndDeps[]) {
this.split.getAll().forEach(dd => { dd.count = 0; (<any>dd).boxes = null; });
this.getAllDeclsToMove().forEach(dd => dd.count = 1);
var closureBoxes: HTMLElement[] = [];
var m = new ModalDialog();
m.add([
div("wall-dialog-header",
lf("move to library (pending)"),
DeclRender.mkBox(this.split.getLib()))
]);
m.add([div("wall-dialog-body", lf("elements pending to move:"))]);
m.add(this.filterUnwanted(this.getAllDeclsToMove()).map(dd => {
var name = div("smallDecl",DeclRender.mkNameSpaceDecl(dd.decl));
name.style.backgroundColor = "lightblue";
return div("smallDecl", name, " ");
}));
m.add([
div("wall-dialog-buttons",
((subset.length == 0 && notsubset.length == 0) ? <any>"" :
<any>HTML.mkButton(lf("advance selected to pending"), () => {
tick(Ticks.toLibAdvanceSelectedToPending);
// TODO: don't dismiss, reconfigure modal dialog in place instead.
m.dismiss();
var selectedToAdd = subset.concat(notsubset).filter(dd => dd.getInclude());
this.split.addToDeclsToMove(selectedToAdd);
this.selectFromRemaining();
})),
HTML.mkButton(lf("make the move"), () => {
var errors = AST.TypeChecker.tcApp(this.split.getApp());
if (errors == 0) {
m.dismiss();
ModalDialog.ask(lf("Confirm rewrite (no undo)?"), lf("confirm"), () => {
tick(Ticks.toLibMakeTheMove);
this.moveToLibrary();
});
} else {
HTML.showErrorNotification(lf("You must correct errors in script before rewriting can proceed."));
}
}),
HTML.mkButton(lf("discard pending"), () => { tick(Ticks.toLibDiscardPending); this.reset(); m.dismiss(); }),
HTML.mkButton(lf("exit to editor"), () => { tick(Ticks.toLibExitToEditor); m.dismiss(); })
)
]);
if (subset.length > 0) {
m.add(div("wall-dialog-header", lf("elements you should think about moving to library (they only access pending elements).")));
subset.forEach(dd => m.add(this.showDeclAndDep(dd, true)));
}
if (notsubset.length > 0) {
m.add(div("wall-dialog-header", lf("elements that access pending elements and other elements:")));
notsubset.forEach(dd => m.add(this.showDeclAndDep(dd,true)));
}
m.onDismiss = () => { };
m.setScroll();
m.fullWhite();
m.show();
}
private moveToLibrary() {
// perform the split of application into application/library
this.split.makeSplit();
// make the new library (TODO: most of this code should be elsewhere)
World.getInstalledHeaderAsync(this.split.getLib().guid)
.then((hd: Cloud.Header) => {
if (!hd) Util.userError("no such script " + this.split.getLib().guid);
return World.updateInstalledScriptAsync(hd, this.split.library, null)
})
.then(() => {
this.split.getApp().notifyChangeAll();
TheEditor.typeCheckNow();
TheEditor.undoMgr.pushMainUndoState();
// TODO: get rid of this horrible hack
Util.setTimeout(1, () => {
this.reset();
TheEditor.reload();
});
})
.done()
}
}
}
| {
return this.split.getAllDeclsToMove();
} | identifier_body |
backups.py | # encoding: utf-8
import datetime
import os
from pysteam import shortcuts
import paths
from logs import logger
def | ():
return os.path.join(paths.application_data_directory(), 'Backups')
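# NOTE: backup_filename accepts user and timestamp_format but currently ignores
# both; the timestamp format below is hardcoded.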
def backup_filename(user, timestamp_format):
timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
return "shortcuts." + timestamp + ".vdf"
def shortcuts_backup_path(directory, user, timestamp_format="%Y%m%d%H%M%S"):
"""
Returns the path for a shortcuts.vdf backup file.
This path is in the designated backup directory, and includes a timestamp
before the extension to allow many backups to exist at once.
"""
assert(directory is not None)
return os.path.join(
directory,
str(user.user_id),
backup_filename(user, timestamp_format)
)
def backup_directory(config):
backup_dir = config.backup_directory
if backup_dir is None:
return None
if backup_dir == "":
backup_dir = default_backups_directory()
logger.debug("Specified empty string as backup directory. Defaulting to %s" % backup_dir)
return backup_dir
def create_backup_of_shortcuts(config, user, dry_run=False):
def _create_directory_if_needed(directory):
if os.path.exists(directory):
return
logger.debug("Creating directory: %s" % directory)
os.makedirs(directory)
backup_dir = backup_directory(config)
if backup_dir is None:
logger.info("No backups directory specified, so not backing up shortcuts.vdf before overwriting. See config.txt for more info")
return
_create_directory_if_needed(backup_dir)
if not os.path.isdir(backup_dir):
logger.warning("Backup directory path is something other than a directory. Skipping backups")
return
backup_path = shortcuts_backup_path(backup_dir, user)
# Make sure the user-specific backups dir exists
_create_directory_if_needed(os.path.dirname(backup_path))
shortcuts.write_shortcuts(backup_path, shortcuts.get_shortcuts(user))
| default_backups_directory | identifier_name |
backups.py | # encoding: utf-8
import datetime
import os
from pysteam import shortcuts
import paths
from logs import logger
def default_backups_directory():
return os.path.join(paths.application_data_directory(), 'Backups')
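# NOTE: backup_filename accepts user and timestamp_format but currently ignores
# both; the timestamp format below is hardcoded.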
def backup_filename(user, timestamp_format):
timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
return "shortcuts." + timestamp + ".vdf"
def shortcuts_backup_path(directory, user, timestamp_format="%Y%m%d%H%M%S"):
"""
Returns the path for a shortcuts.vdf backup file.
This path is in the designated backup directory, and includes a timestamp
before the extension to allow many backups to exist at once.
"""
assert(directory is not None)
return os.path.join(
directory,
str(user.user_id),
backup_filename(user, timestamp_format)
)
def backup_directory(config):
backup_dir = config.backup_directory
if backup_dir is None:
return None
if backup_dir == "":
backup_dir = default_backups_directory()
logger.debug("Specified empty string as backup directory. Defaulting to %s" % backup_dir)
return backup_dir |
logger.debug("Creating directory: %s" % directory)
os.makedirs(directory)
backup_dir = backup_directory(config)
if backup_dir is None:
logger.info("No backups directory specified, so not backing up shortcuts.vdf before overwriting. See config.txt for more info")
return
_create_directory_if_needed(backup_dir)
if not os.path.isdir(backup_dir):
logger.warning("Backup directory path is something other than a directory. Skipping backups")
return
backup_path = shortcuts_backup_path(backup_dir, user)
# Make sure the user-specific backups dir exists
_create_directory_if_needed(os.path.dirname(backup_path))
shortcuts.write_shortcuts(backup_path, shortcuts.get_shortcuts(user)) |
def create_backup_of_shortcuts(config, user, dry_run=False):
def _create_directory_if_needed(directory):
if os.path.exists(directory):
return | random_line_split |
backups.py | # encoding: utf-8
import datetime
import os
from pysteam import shortcuts
import paths
from logs import logger
def default_backups_directory():
return os.path.join(paths.application_data_directory(), 'Backups')
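# NOTE: backup_filename accepts user and timestamp_format but currently ignores
# both; the timestamp format below is hardcoded.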
def backup_filename(user, timestamp_format):
timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
return "shortcuts." + timestamp + ".vdf"
def shortcuts_backup_path(directory, user, timestamp_format="%Y%m%d%H%M%S"):
"""
Returns the path for a shortcuts.vdf backup file.
This path is in the designated backup directory, and includes a timestamp
before the extension to allow many backups to exist at once.
"""
assert(directory is not None)
return os.path.join(
directory,
str(user.user_id),
backup_filename(user, timestamp_format)
)
def backup_directory(config):
backup_dir = config.backup_directory
if backup_dir is None:
return None
if backup_dir == "":
|
return backup_dir
def create_backup_of_shortcuts(config, user, dry_run=False):
def _create_directory_if_needed(directory):
if os.path.exists(directory):
return
logger.debug("Creating directory: %s" % directory)
os.makedirs(directory)
backup_dir = backup_directory(config)
if backup_dir is None:
logger.info("No backups directory specified, so not backing up shortcuts.vdf before overwriting. See config.txt for more info")
return
_create_directory_if_needed(backup_dir)
if not os.path.isdir(backup_dir):
logger.warning("Backup directory path is something other than a directory. Skipping backups")
return
backup_path = shortcuts_backup_path(backup_dir, user)
# Make sure the user-specific backups dir exists
_create_directory_if_needed(os.path.dirname(backup_path))
shortcuts.write_shortcuts(backup_path, shortcuts.get_shortcuts(user))
| backup_dir = default_backups_directory()
logger.debug("Specified empty string as backup directory. Defaulting to %s" % backup_dir) | conditional_block |
backups.py | # encoding: utf-8
import datetime
import os
from pysteam import shortcuts
import paths
from logs import logger
def default_backups_directory():
return os.path.join(paths.application_data_directory(), 'Backups')
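# NOTE: backup_filename accepts user and timestamp_format but currently ignores
# both; the timestamp format below is hardcoded.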
def backup_filename(user, timestamp_format):
timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
return "shortcuts." + timestamp + ".vdf"
def shortcuts_backup_path(directory, user, timestamp_format="%Y%m%d%H%M%S"):
"""
Returns the path for a shortcuts.vdf backup file.
This path is in the designated backup directory, and includes a timestamp
before the extension to allow many backups to exist at once.
"""
assert(directory is not None)
return os.path.join(
directory,
str(user.user_id),
backup_filename(user, timestamp_format)
)
def backup_directory(config):
|
def create_backup_of_shortcuts(config, user, dry_run=False):
def _create_directory_if_needed(directory):
if os.path.exists(directory):
return
logger.debug("Creating directory: %s" % directory)
os.makedirs(directory)
backup_dir = backup_directory(config)
if backup_dir is None:
logger.info("No backups directory specified, so not backing up shortcuts.vdf before overwriting. See config.txt for more info")
return
_create_directory_if_needed(backup_dir)
if not os.path.isdir(backup_dir):
logger.warning("Backup directory path is something other than a directory. Skipping backups")
return
backup_path = shortcuts_backup_path(backup_dir, user)
# Make sure the user-specific backups dir exists
_create_directory_if_needed(os.path.dirname(backup_path))
shortcuts.write_shortcuts(backup_path, shortcuts.get_shortcuts(user))
| backup_dir = config.backup_directory
if backup_dir is None:
return None
if backup_dir == "":
backup_dir = default_backups_directory()
logger.debug("Specified empty string as backup directory. Defaulting to %s" % backup_dir)
return backup_dir | identifier_body |
trait-cast-generic.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Testing casting of a generic Struct to a Trait with a generic method.
// This is a test for issue 10955.
#[allow(unused_variable)];
trait Foo {
fn f<A>(a: A) -> A {
a
}
}
struct Bar<T> {
x: T,
}
impl<T> Foo for Bar<T> { }
pub fn main() | {
let a = Bar { x: 1 };
let b = &a as &Foo;
} | identifier_body |
|
trait-cast-generic.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Testing casting of a generic Struct to a Trait with a generic method.
// This is a test for issue 10955.
#[allow(unused_variable)];
trait Foo {
fn f<A>(a: A) -> A {
a
}
}
struct | <T> {
x: T,
}
impl<T> Foo for Bar<T> { }
pub fn main() {
let a = Bar { x: 1 };
let b = &a as &Foo;
}
| Bar | identifier_name |
trait-cast-generic.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Testing casting of a generic Struct to a Trait with a generic method.
// This is a test for issue 10955.
#[allow(unused_variable)];
trait Foo {
fn f<A>(a: A) -> A {
a
}
}
struct Bar<T> {
x: T,
}
impl<T> Foo for Bar<T> { }
pub fn main() { | let b = &a as &Foo;
} | let a = Bar { x: 1 }; | random_line_split |
takeWhile.ts | /*!
Copyright 2018 Ron Buckton ([email protected])
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/** @module "iterable-query/fn" */
import { assert, ToIterable, FlowHierarchy, ToStringTag} from "../internal";
import { Queryable, HierarchyIterable } from "../types";
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<TNode, T extends TNode, U extends T>(source: HierarchyIterable<TNode, T>, predicate: (element: T) => element is U): HierarchyIterable<TNode, U>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<TNode, T extends TNode>(source: HierarchyIterable<TNode, T>, predicate: (element: T) => boolean): HierarchyIterable<TNode, T>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<T, U extends T>(source: Queryable<T>, predicate: (element: T) => element is U): Iterable<U>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<T>(source: Queryable<T>, predicate: (element: T) => boolean): Iterable<T>;
export function takeWhile<T>(source: Queryable<T>, predicate: (element: T) => boolean): Iterable<T> {
assert.mustBeQueryable(source, "source");
assert.mustBeFunction(predicate, "predicate");
return FlowHierarchy(new TakeWhileIterable(ToIterable(source), predicate), source);
}
@ToStringTag("TakeWhileIterable")
class TakeWhileIterable<T> implements Iterable<T> {
private _source: Iterable<T>;
private _predicate: (element: T) => boolean;
constructor(source: Iterable<T>, predicate: (element: T) => boolean) {
this._source = source;
this._predicate = predicate;
}
* | (): Iterator<T> {
const predicate = this._predicate;
for (const element of this._source) {
if (!predicate(element)) {
break;
}
yield element;
}
}
}
| [Symbol.iterator] | identifier_name |
takeWhile.ts | /*!
Copyright 2018 Ron Buckton ([email protected])
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/** @module "iterable-query/fn" */
import { assert, ToIterable, FlowHierarchy, ToStringTag} from "../internal";
import { Queryable, HierarchyIterable } from "../types";
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<TNode, T extends TNode, U extends T>(source: HierarchyIterable<TNode, T>, predicate: (element: T) => element is U): HierarchyIterable<TNode, U>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<TNode, T extends TNode>(source: HierarchyIterable<TNode, T>, predicate: (element: T) => boolean): HierarchyIterable<TNode, T>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<T, U extends T>(source: Queryable<T>, predicate: (element: T) => element is U): Iterable<U>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<T>(source: Queryable<T>, predicate: (element: T) => boolean): Iterable<T>;
export function takeWhile<T>(source: Queryable<T>, predicate: (element: T) => boolean): Iterable<T> {
assert.mustBeQueryable(source, "source");
assert.mustBeFunction(predicate, "predicate");
return FlowHierarchy(new TakeWhileIterable(ToIterable(source), predicate), source);
}
@ToStringTag("TakeWhileIterable")
class TakeWhileIterable<T> implements Iterable<T> {
private _source: Iterable<T>;
private _predicate: (element: T) => boolean;
constructor(source: Iterable<T>, predicate: (element: T) => boolean) {
this._source = source;
this._predicate = predicate;
}
*[Symbol.iterator](): Iterator<T> |
}
| {
const predicate = this._predicate;
for (const element of this._source) {
if (!predicate(element)) {
break;
}
yield element;
}
} | identifier_body |
takeWhile.ts | /*!
Copyright 2018 Ron Buckton ([email protected])
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/** @module "iterable-query/fn" */
import { assert, ToIterable, FlowHierarchy, ToStringTag} from "../internal";
import { Queryable, HierarchyIterable } from "../types";
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<TNode, T extends TNode, U extends T>(source: HierarchyIterable<TNode, T>, predicate: (element: T) => element is U): HierarchyIterable<TNode, U>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<TNode, T extends TNode>(source: HierarchyIterable<TNode, T>, predicate: (element: T) => boolean): HierarchyIterable<TNode, T>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<T, U extends T>(source: Queryable<T>, predicate: (element: T) => element is U): Iterable<U>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<T>(source: Queryable<T>, predicate: (element: T) => boolean): Iterable<T>;
export function takeWhile<T>(source: Queryable<T>, predicate: (element: T) => boolean): Iterable<T> {
assert.mustBeQueryable(source, "source");
assert.mustBeFunction(predicate, "predicate");
return FlowHierarchy(new TakeWhileIterable(ToIterable(source), predicate), source);
}
@ToStringTag("TakeWhileIterable")
class TakeWhileIterable<T> implements Iterable<T> {
private _source: Iterable<T>;
private _predicate: (element: T) => boolean;
constructor(source: Iterable<T>, predicate: (element: T) => boolean) {
this._source = source;
this._predicate = predicate;
}
| for (const element of this._source) {
if (!predicate(element)) {
break;
}
yield element;
}
}
} |
*[Symbol.iterator](): Iterator<T> {
const predicate = this._predicate;
| random_line_split |
takeWhile.ts | /*!
Copyright 2018 Ron Buckton ([email protected])
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/** @module "iterable-query/fn" */
import { assert, ToIterable, FlowHierarchy, ToStringTag} from "../internal";
import { Queryable, HierarchyIterable } from "../types";
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<TNode, T extends TNode, U extends T>(source: HierarchyIterable<TNode, T>, predicate: (element: T) => element is U): HierarchyIterable<TNode, U>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<TNode, T extends TNode>(source: HierarchyIterable<TNode, T>, predicate: (element: T) => boolean): HierarchyIterable<TNode, T>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<T, U extends T>(source: Queryable<T>, predicate: (element: T) => element is U): Iterable<U>;
/**
* Creates a subquery containing the first elements that match the supplied predicate.
*
* @param source A [[Queryable]] object.
* @param predicate A callback used to match each element.
* @category Subquery
*/
export function takeWhile<T>(source: Queryable<T>, predicate: (element: T) => boolean): Iterable<T>;
export function takeWhile<T>(source: Queryable<T>, predicate: (element: T) => boolean): Iterable<T> {
assert.mustBeQueryable(source, "source");
assert.mustBeFunction(predicate, "predicate");
return FlowHierarchy(new TakeWhileIterable(ToIterable(source), predicate), source);
}
@ToStringTag("TakeWhileIterable")
class TakeWhileIterable<T> implements Iterable<T> {
private _source: Iterable<T>;
private _predicate: (element: T) => boolean;
constructor(source: Iterable<T>, predicate: (element: T) => boolean) {
this._source = source;
this._predicate = predicate;
}
*[Symbol.iterator](): Iterator<T> {
const predicate = this._predicate;
for (const element of this._source) {
if (!predicate(element)) |
yield element;
}
}
}
| {
break;
} | conditional_block |
S3_to_FS.py | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from airflow.hooks.S3_hook import S3Hook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class S3ToFileSystem(BaseOperator):
@apply_defaults
def __init__(
self,
s3_bucket,
s3_key,
download_file_location,
s3_conn_id='s3_default',
* args, **kwargs):
super(S3ToFileSystem, self).__init__(*args, **kwargs)
self.local_location = download_file_location
self.s3_bucket = s3_bucket
self.s3_key = s3_key
self.s3_conn_id = s3_conn_id
def execute(self, context):
self.s3 = S3Hook(s3_conn_id=self.s3_conn_id)
file_paths = []
for k in self.s3.list_keys(self.s3_bucket, prefix=self.s3_key):
kpath = os.path.join(self.local_location, os.path.basename(k))
# Download the file
self.s3.download_file(self.s3_bucket, k, kpath) | # insert in respective SQS operators | file_paths.append(kpath)
context['ti'].xcom_push(key=kpath, value="")
context['ti'].xcom_push(key="files_added", value=file_paths)
# read in chunks
# start reading from the file. | random_line_split |
S3_to_FS.py | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from airflow.hooks.S3_hook import S3Hook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class S3ToFileSystem(BaseOperator):
@apply_defaults
def __init__(
self,
s3_bucket,
s3_key,
download_file_location,
s3_conn_id='s3_default',
* args, **kwargs):
super(S3ToFileSystem, self).__init__(*args, **kwargs)
self.local_location = download_file_location
self.s3_bucket = s3_bucket
self.s3_key = s3_key
self.s3_conn_id = s3_conn_id
def execute(self, context):
self.s3 = S3Hook(s3_conn_id=self.s3_conn_id)
file_paths = []
for k in self.s3.list_keys(self.s3_bucket, prefix=self.s3_key):
|
context['ti'].xcom_push(key="files_added", value=file_paths)
# read in chunks
# start reading from the file.
# insert in respective SQS operators
| kpath = os.path.join(self.local_location, os.path.basename(k))
# Download the file
self.s3.download_file(self.s3_bucket, k, kpath)
file_paths.append(kpath)
context['ti'].xcom_push(key=kpath, value="") | conditional_block |
S3_to_FS.py | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from airflow.hooks.S3_hook import S3Hook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class | (BaseOperator):
@apply_defaults
def __init__(
self,
s3_bucket,
s3_key,
download_file_location,
s3_conn_id='s3_default',
* args, **kwargs):
super(S3ToFileSystem, self).__init__(*args, **kwargs)
self.local_location = download_file_location
self.s3_bucket = s3_bucket
self.s3_key = s3_key
self.s3_conn_id = s3_conn_id
def execute(self, context):
self.s3 = S3Hook(s3_conn_id=self.s3_conn_id)
file_paths = []
for k in self.s3.list_keys(self.s3_bucket, prefix=self.s3_key):
kpath = os.path.join(self.local_location, os.path.basename(k))
# Download the file
self.s3.download_file(self.s3_bucket, k, kpath)
file_paths.append(kpath)
context['ti'].xcom_push(key=kpath, value="")
context['ti'].xcom_push(key="files_added", value=file_paths)
# read in chunks
# start reading from the file.
# insert in respective SQS operators
| S3ToFileSystem | identifier_name |
S3_to_FS.py | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from airflow.hooks.S3_hook import S3Hook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class S3ToFileSystem(BaseOperator):
| @apply_defaults
def __init__(
self,
s3_bucket,
s3_key,
download_file_location,
s3_conn_id='s3_default',
* args, **kwargs):
super(S3ToFileSystem, self).__init__(*args, **kwargs)
self.local_location = download_file_location
self.s3_bucket = s3_bucket
self.s3_key = s3_key
self.s3_conn_id = s3_conn_id
def execute(self, context):
self.s3 = S3Hook(s3_conn_id=self.s3_conn_id)
file_paths = []
for k in self.s3.list_keys(self.s3_bucket, prefix=self.s3_key):
kpath = os.path.join(self.local_location, os.path.basename(k))
# Download the file
self.s3.download_file(self.s3_bucket, k, kpath)
file_paths.append(kpath)
context['ti'].xcom_push(key=kpath, value="")
context['ti'].xcom_push(key="files_added", value=file_paths)
# read in chunks
# start reading from the file.
# insert in respective SQS operators | identifier_body |
|
code_section.py | # coding=utf-8
"""This module, code_section.py, is an abstraction for code sections. Needed for ordering code chunks."""
class CodeSection(object):
"""Represents a single code section of a source code file."""
def __init__(self, section_name):
self._section_name = section_name
self._code_chunks = []
def | (self, code_chunk):
"""Adds a code chunk to the start of this code section."""
self._code_chunks.insert(0, code_chunk)
def add_code_chunk(self, code_chunk):
"""Adds a code chunk to this code section."""
self._code_chunks.append(code_chunk)
def get_all_code_chunks(self):
"""Returns a list of all the code chunks in this code section."""
return self._code_chunks
@property
def empty(self) -> bool:
"""Returns a boolean indicating if this code section is empty or not."""
return len(self._code_chunks) == 0
@property
def name(self) -> str:
"""Returns the name of this code section."""
return self._section_name
| add_code_chunk_at_start | identifier_name |
code_section.py | # coding=utf-8
"""This module, code_section.py, is an abstraction for code sections. Needed for ordering code chunks."""
class CodeSection(object):
"""Represents a single code section of a source code file."""
| def add_code_chunk_at_start(self, code_chunk):
"""Adds a code chunk to the start of this code section."""
self._code_chunks.insert(0, code_chunk)
def add_code_chunk(self, code_chunk):
"""Adds a code chunk to this code section."""
self._code_chunks.append(code_chunk)
def get_all_code_chunks(self):
"""Returns a list of all the code chunks in this code section."""
return self._code_chunks
@property
def empty(self) -> bool:
"""Returns a boolean indicating if this code section is empty or not."""
return len(self._code_chunks) == 0
@property
def name(self) -> str:
"""Returns the name of this code section."""
return self._section_name | def __init__(self, section_name):
self._section_name = section_name
self._code_chunks = []
| random_line_split |
code_section.py | # coding=utf-8
"""This module, code_section.py, is an abstraction for code sections. Needed for ordering code chunks."""
class CodeSection(object):
"""Represents a single code section of a source code file."""
def __init__(self, section_name):
self._section_name = section_name
self._code_chunks = []
def add_code_chunk_at_start(self, code_chunk):
"""Adds a code chunk to the start of this code section."""
self._code_chunks.insert(0, code_chunk)
def add_code_chunk(self, code_chunk):
|
def get_all_code_chunks(self):
"""Returns a list of all the code chunks in this code section."""
return self._code_chunks
@property
def empty(self) -> bool:
"""Returns a boolean indicating if this code section is empty or not."""
return len(self._code_chunks) == 0
@property
def name(self) -> str:
"""Returns the name of this code section."""
return self._section_name
| """Adds a code chunk to this code section."""
self._code_chunks.append(code_chunk) | identifier_body |
autosaveAction.service.ts | import { Injectable } from '@angular/core';
import { Observable, BehaviorSubject } from 'rxjs';
import { services } from 'typescript-angular-utilities';
import TimeoutService = services.timeout.TimeoutService;
import { AsyncHelper, IWaitValue } from '../async/async.service';
export const COMPLETE_MESSAGE_DURATION: number = 1000;
export interface IAutosaveActionService {
waitOn(waitOn: IWaitValue<any>): Observable<any>;
saving$: Observable<boolean>;
complete$: Observable<boolean>;
successful$: Observable<boolean>;
}
@Injectable()
export class AutosaveActionService implements IAutosaveActionService {
timeoutService: TimeoutService;
asyncService: AsyncHelper;
| (timeoutService: TimeoutService
, asyncService: AsyncHelper) {
this.timeoutService = timeoutService;
this.asyncService = asyncService;
this._saving$ = new BehaviorSubject(false);
this._complete$ = new BehaviorSubject(false);
this._successful$ = new BehaviorSubject(false);
}
private _saving$: BehaviorSubject<boolean>;
private _complete$: BehaviorSubject<boolean>;
private _successful$: BehaviorSubject<boolean>;
get saving$(): Observable<boolean> {
return this._saving$.asObservable();
}
get complete$(): Observable<boolean> {
return this._complete$.asObservable();
}
get successful$(): Observable<boolean> {
return this._successful$.asObservable();
}
waitOn<T>(waitOn: IWaitValue<T>): Observable<T> {
this._saving$.next(true);
return this.asyncService.waitAsObservable(waitOn)
.do(this.autosaveSuccessful, this.autosaveFailed);
}
private autosaveSuccessful = (): void => {
this.resolveAutosave(true);
}
private autosaveFailed = (): void => {
this.resolveAutosave(false);
}
private resolveAutosave = (success: boolean): void => {
this._saving$.next(false);
this._complete$.next(true);
this._successful$.next(success);
this.timeoutService.setTimeout(() => this._complete$.next(false), COMPLETE_MESSAGE_DURATION);
}
}
| constructor | identifier_name |
autosaveAction.service.ts | import { Injectable } from '@angular/core';
import { Observable, BehaviorSubject } from 'rxjs';
import { services } from 'typescript-angular-utilities';
import TimeoutService = services.timeout.TimeoutService;
import { AsyncHelper, IWaitValue } from '../async/async.service';
export const COMPLETE_MESSAGE_DURATION: number = 1000;
export interface IAutosaveActionService {
waitOn(waitOn: IWaitValue<any>): Observable<any>;
saving$: Observable<boolean>;
complete$: Observable<boolean>;
successful$: Observable<boolean>;
}
@Injectable()
export class AutosaveActionService implements IAutosaveActionService {
timeoutService: TimeoutService;
asyncService: AsyncHelper;
| constructor(timeoutService: TimeoutService
, asyncService: AsyncHelper) {
this.timeoutService = timeoutService;
this.asyncService = asyncService;
this._saving$ = new BehaviorSubject(false);
this._complete$ = new BehaviorSubject(false);
this._successful$ = new BehaviorSubject(false);
}
private _saving$: BehaviorSubject<boolean>;
private _complete$: BehaviorSubject<boolean>;
private _successful$: BehaviorSubject<boolean>;
get saving$(): Observable<boolean> {
return this._saving$.asObservable();
}
get complete$(): Observable<boolean> {
return this._complete$.asObservable();
}
get successful$(): Observable<boolean> {
return this._successful$.asObservable();
}
waitOn<T>(waitOn: IWaitValue<T>): Observable<T> {
this._saving$.next(true);
return this.asyncService.waitAsObservable(waitOn)
.do(this.autosaveSuccessful, this.autosaveFailed);
}
private autosaveSuccessful = (): void => {
this.resolveAutosave(true);
}
private autosaveFailed = (): void => {
this.resolveAutosave(false);
}
private resolveAutosave = (success: boolean): void => {
this._saving$.next(false);
this._complete$.next(true);
this._successful$.next(success);
this.timeoutService.setTimeout(() => this._complete$.next(false), COMPLETE_MESSAGE_DURATION);
}
} | random_line_split |
|
pants_run_integration_test.py | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import subprocess
import unittest
from collections import namedtuple
from operator import eq, ne
from pants.base.build_environment import get_buildroot
from pants.fs.archive import ZIP
from pants.util.contextutil import temporary_dir
from pants.util.dirutil import safe_mkdir, safe_open
PantsResult = namedtuple('PantsResult', ['command', 'returncode', 'stdout_data', 'stderr_data'])
class PantsRunIntegrationTest(unittest.TestCase):
"""A base class useful for integration tests for targets in the same repo."""
PANTS_SUCCESS_CODE = 0
PANTS_SCRIPT_NAME = 'pants'
@classmethod
def has_python_version(cls, version):
"""Returns true if the current system has the specified version of python.
:param version: A python version string, such as 2.6, 3.
"""
try:
subprocess.call(['python%s' % version, '-V'])
return True
except OSError:
return False
def workdir_root(self):
# We can hard-code '.pants.d' here because we know that will always be its value
# in the pantsbuild/pants repo (e.g., that's what we .gitignore in that repo).
# Grabbing the pants_workdir config would require this pants's config object,
# which we don't have a reference to here.
root = os.path.join(get_buildroot(), '.pants.d', 'tmp')
safe_mkdir(root)
return root
def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, extra_env=None,
**kwargs):
config = config.copy() if config else {}
# We add workdir to the DEFAULT section, and also ensure that it's emitted first.
default_section = config.pop('DEFAULT', {})
default_section['pants_workdir'] = '%s' % workdir
ini = ''
for section, section_config in [('DEFAULT', default_section)] + config.items():
ini += '\n[%s]\n' % section
for key, val in section_config.items():
ini += '%s: %s\n' % (key, val)
ini_file_name = os.path.join(workdir, 'pants.ini')
with safe_open(ini_file_name, mode='w') as fp:
fp.write(ini)
env = os.environ.copy()
env.update(extra_env or {})
pants_script = os.path.join(get_buildroot(), self.PANTS_SCRIPT_NAME)
pants_command = [pants_script,
'--no-lock',
'--kill-nailguns',
'--no-pantsrc',
'--config-override={0}'.format(ini_file_name),
'--print-exception-stacktrace'] + command
proc = subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
(stdout_data, stderr_data) = proc.communicate(stdin_data)
return PantsResult(pants_command, proc.returncode, stdout_data.decode("utf-8"),
stderr_data.decode("utf-8"))
def run_pants(self, command, config=None, stdin_data=None, extra_env=None, **kwargs):
"""Runs pants in a subprocess.
:param list command: A list of command line arguments coming after `./pants`.
:param config: Optional data for a generated ini file. A map of <section-name> ->
map of key -> value. If order in the ini file matters, this should be an OrderedDict.
:param kwargs: Extra keyword args to pass to `subprocess.Popen`.
:returns: a tuple (returncode, stdout_data, stderr_data).
IMPORTANT NOTE: The subprocess will be run with --no-lock, so that it doesn't deadlock waiting
for this process to release the workspace lock. It's the caller's responsibility to ensure
that the invoked pants doesn't interact badly with this one.
"""
with temporary_dir(root_dir=self.workdir_root()) as workdir:
return self.run_pants_with_workdir(command, workdir, config, stdin_data, extra_env, **kwargs)
def | (self, target, bundle_name, args=None):
"""Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.
:param target: target name to compile
:param bundle_name: resulting bundle filename (minus .jar extension)
:param args: optional arguments to pass to executable
:return: stdout as a string on success, raises an Exception on error
"""
pants_run = self.run_pants(['bundle', '--archive=zip', target])
self.assert_success(pants_run)
# TODO(John Sirois): We need a zip here to suck in external library classpath elements
# pointed to by symlinks in the run_pants ephemeral tmpdir. Switch run_pants to be a
# contextmanager that yields its results while the tmpdir workdir is still active and change
# this test back to using an un-archived bundle.
with temporary_dir() as workdir:
ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
optional_args = []
if args:
optional_args = args
java_run = subprocess.Popen(['java',
'-jar',
'{bundle_name}.jar'.format(bundle_name=bundle_name)]
+ optional_args,
stdout=subprocess.PIPE,
cwd=workdir)
stdout, _ = java_run.communicate()
java_returncode = java_run.returncode
self.assertEquals(java_returncode, 0)
return stdout
def assert_success(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=True, msg=msg)
def assert_failure(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=False, msg=msg)
def assert_result(self, pants_run, value, expected=True, msg=None):
check, assertion = (eq, self.assertEqual) if expected else (ne, self.assertNotEqual)
if check(pants_run.returncode, value):
return
details = [msg] if msg else []
details.append(' '.join(pants_run.command))
details.append('returncode: {returncode}'.format(returncode=pants_run.returncode))
def indent(content):
return '\n\t'.join(content.splitlines())
if pants_run.stdout_data:
details.append('stdout:\n\t{stdout}'.format(stdout=indent(pants_run.stdout_data)))
if pants_run.stderr_data:
details.append('stderr:\n\t{stderr}'.format(stderr=indent(pants_run.stderr_data)))
error_msg = '\n'.join(details)
assertion(value, pants_run.returncode, error_msg)
| bundle_and_run | identifier_name |
pants_run_integration_test.py | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import subprocess
import unittest
from collections import namedtuple
from operator import eq, ne
from pants.base.build_environment import get_buildroot
from pants.fs.archive import ZIP
from pants.util.contextutil import temporary_dir
from pants.util.dirutil import safe_mkdir, safe_open
PantsResult = namedtuple('PantsResult', ['command', 'returncode', 'stdout_data', 'stderr_data'])
class PantsRunIntegrationTest(unittest.TestCase):
"""A base class useful for integration tests for targets in the same repo."""
PANTS_SUCCESS_CODE = 0
PANTS_SCRIPT_NAME = 'pants'
@classmethod
def has_python_version(cls, version):
"""Returns true if the current system has the specified version of python.
:param version: A python version string, such as 2.6, 3.
"""
try:
subprocess.call(['python%s' % version, '-V'])
return True
except OSError:
return False
def workdir_root(self):
# We can hard-code '.pants.d' here because we know that will always be its value
# in the pantsbuild/pants repo (e.g., that's what we .gitignore in that repo).
# Grabbing the pants_workdir config would require this pants's config object,
# which we don't have a reference to here.
root = os.path.join(get_buildroot(), '.pants.d', 'tmp')
safe_mkdir(root)
return root
def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, extra_env=None,
**kwargs):
config = config.copy() if config else {}
# We add workdir to the DEFAULT section, and also ensure that it's emitted first.
default_section = config.pop('DEFAULT', {})
default_section['pants_workdir'] = '%s' % workdir
ini = ''
for section, section_config in [('DEFAULT', default_section)] + config.items():
ini += '\n[%s]\n' % section
for key, val in section_config.items():
ini += '%s: %s\n' % (key, val)
ini_file_name = os.path.join(workdir, 'pants.ini')
with safe_open(ini_file_name, mode='w') as fp:
fp.write(ini)
env = os.environ.copy()
env.update(extra_env or {})
pants_script = os.path.join(get_buildroot(), self.PANTS_SCRIPT_NAME)
pants_command = [pants_script,
'--no-lock',
'--kill-nailguns',
'--no-pantsrc',
'--config-override={0}'.format(ini_file_name),
'--print-exception-stacktrace'] + command
proc = subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
(stdout_data, stderr_data) = proc.communicate(stdin_data)
return PantsResult(pants_command, proc.returncode, stdout_data.decode("utf-8"),
stderr_data.decode("utf-8"))
def run_pants(self, command, config=None, stdin_data=None, extra_env=None, **kwargs):
"""Runs pants in a subprocess.
:param list command: A list of command line arguments coming after `./pants`.
:param config: Optional data for a generated ini file. A map of <section-name> ->
map of key -> value. If order in the ini file matters, this should be an OrderedDict.
:param kwargs: Extra keyword args to pass to `subprocess.Popen`.
:returns: a tuple (returncode, stdout_data, stderr_data).
IMPORTANT NOTE: The subprocess will be run with --no-lock, so that it doesn't deadlock waiting
for this process to release the workspace lock. It's the caller's responsibility to ensure
that the invoked pants doesn't interact badly with this one.
"""
with temporary_dir(root_dir=self.workdir_root()) as workdir:
return self.run_pants_with_workdir(command, workdir, config, stdin_data, extra_env, **kwargs)
def bundle_and_run(self, target, bundle_name, args=None):
"""Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.
:param target: target name to compile
:param bundle_name: resulting bundle filename (minus .jar extension)
:param args: optional arguments to pass to executable
:return: stdout as a string on success, raises an Exception on error
"""
pants_run = self.run_pants(['bundle', '--archive=zip', target])
self.assert_success(pants_run)
# TODO(John Sirois): We need a zip here to suck in external library classpath elements
# pointed to by symlinks in the run_pants ephemeral tmpdir. Switch run_pants to be a
# contextmanager that yields its results while the tmpdir workdir is still active and change
# this test back to using an un-archived bundle.
with temporary_dir() as workdir:
ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
optional_args = []
if args:
optional_args = args
java_run = subprocess.Popen(['java',
'-jar',
'{bundle_name}.jar'.format(bundle_name=bundle_name)]
+ optional_args,
stdout=subprocess.PIPE,
cwd=workdir)
stdout, _ = java_run.communicate()
java_returncode = java_run.returncode
self.assertEquals(java_returncode, 0)
return stdout
def assert_success(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=True, msg=msg)
def assert_failure(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=False, msg=msg)
def assert_result(self, pants_run, value, expected=True, msg=None):
check, assertion = (eq, self.assertEqual) if expected else (ne, self.assertNotEqual)
if check(pants_run.returncode, value):
return
details = [msg] if msg else []
details.append(' '.join(pants_run.command))
details.append('returncode: {returncode}'.format(returncode=pants_run.returncode))
def indent(content):
return '\n\t'.join(content.splitlines())
if pants_run.stdout_data:
details.append('stdout:\n\t{stdout}'.format(stdout=indent(pants_run.stdout_data)))
if pants_run.stderr_data:
details.append('stderr:\n\t{stderr}'.format(stderr=indent(pants_run.stderr_data)))
error_msg = '\n'.join(details)
| assertion(value, pants_run.returncode, error_msg) | random_line_split |
|
pants_run_integration_test.py | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import subprocess
import unittest
from collections import namedtuple
from operator import eq, ne
from pants.base.build_environment import get_buildroot
from pants.fs.archive import ZIP
from pants.util.contextutil import temporary_dir
from pants.util.dirutil import safe_mkdir, safe_open
PantsResult = namedtuple('PantsResult', ['command', 'returncode', 'stdout_data', 'stderr_data'])
class PantsRunIntegrationTest(unittest.TestCase):
"""A base class useful for integration tests for targets in the same repo."""
PANTS_SUCCESS_CODE = 0
PANTS_SCRIPT_NAME = 'pants'
@classmethod
def has_python_version(cls, version):
"""Returns true if the current system has the specified version of python.
:param version: A python version string, such as 2.6, 3.
"""
try:
subprocess.call(['python%s' % version, '-V'])
return True
except OSError:
return False
def workdir_root(self):
# We can hard-code '.pants.d' here because we know that will always be its value
# in the pantsbuild/pants repo (e.g., that's what we .gitignore in that repo).
# Grabbing the pants_workdir config would require this pants's config object,
# which we don't have a reference to here.
root = os.path.join(get_buildroot(), '.pants.d', 'tmp')
safe_mkdir(root)
return root
def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, extra_env=None,
**kwargs):
|
def run_pants(self, command, config=None, stdin_data=None, extra_env=None, **kwargs):
"""Runs pants in a subprocess.
:param list command: A list of command line arguments coming after `./pants`.
:param config: Optional data for a generated ini file. A map of <section-name> ->
map of key -> value. If order in the ini file matters, this should be an OrderedDict.
:param kwargs: Extra keyword args to pass to `subprocess.Popen`.
:returns: a tuple (returncode, stdout_data, stderr_data).
IMPORTANT NOTE: The subprocess will be run with --no-lock, so that it doesn't deadlock waiting
for this process to release the workspace lock. It's the caller's responsibility to ensure
that the invoked pants doesn't interact badly with this one.
"""
with temporary_dir(root_dir=self.workdir_root()) as workdir:
return self.run_pants_with_workdir(command, workdir, config, stdin_data, extra_env, **kwargs)
def bundle_and_run(self, target, bundle_name, args=None):
"""Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.
:param target: target name to compile
:param bundle_name: resulting bundle filename (minus .jar extension)
:param args: optional arguments to pass to executable
:return: stdout as a string on success, raises an Exception on error
"""
pants_run = self.run_pants(['bundle', '--archive=zip', target])
self.assert_success(pants_run)
# TODO(John Sirois): We need a zip here to suck in external library classpath elements
# pointed to by symlinks in the run_pants ephemeral tmpdir. Switch run_pants to be a
# contextmanager that yields its results while the tmpdir workdir is still active and change
# this test back to using an un-archived bundle.
with temporary_dir() as workdir:
ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
optional_args = []
if args:
optional_args = args
java_run = subprocess.Popen(['java',
'-jar',
'{bundle_name}.jar'.format(bundle_name=bundle_name)]
+ optional_args,
stdout=subprocess.PIPE,
cwd=workdir)
stdout, _ = java_run.communicate()
java_returncode = java_run.returncode
self.assertEquals(java_returncode, 0)
return stdout
def assert_success(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=True, msg=msg)
def assert_failure(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=False, msg=msg)
def assert_result(self, pants_run, value, expected=True, msg=None):
check, assertion = (eq, self.assertEqual) if expected else (ne, self.assertNotEqual)
if check(pants_run.returncode, value):
return
details = [msg] if msg else []
details.append(' '.join(pants_run.command))
details.append('returncode: {returncode}'.format(returncode=pants_run.returncode))
def indent(content):
return '\n\t'.join(content.splitlines())
if pants_run.stdout_data:
details.append('stdout:\n\t{stdout}'.format(stdout=indent(pants_run.stdout_data)))
if pants_run.stderr_data:
details.append('stderr:\n\t{stderr}'.format(stderr=indent(pants_run.stderr_data)))
error_msg = '\n'.join(details)
assertion(value, pants_run.returncode, error_msg)
| config = config.copy() if config else {}
# We add workdir to the DEFAULT section, and also ensure that it's emitted first.
default_section = config.pop('DEFAULT', {})
default_section['pants_workdir'] = '%s' % workdir
ini = ''
for section, section_config in [('DEFAULT', default_section)] + config.items():
ini += '\n[%s]\n' % section
for key, val in section_config.items():
ini += '%s: %s\n' % (key, val)
ini_file_name = os.path.join(workdir, 'pants.ini')
with safe_open(ini_file_name, mode='w') as fp:
fp.write(ini)
env = os.environ.copy()
env.update(extra_env or {})
pants_script = os.path.join(get_buildroot(), self.PANTS_SCRIPT_NAME)
pants_command = [pants_script,
'--no-lock',
'--kill-nailguns',
'--no-pantsrc',
'--config-override={0}'.format(ini_file_name),
'--print-exception-stacktrace'] + command
proc = subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
(stdout_data, stderr_data) = proc.communicate(stdin_data)
return PantsResult(pants_command, proc.returncode, stdout_data.decode("utf-8"),
stderr_data.decode("utf-8")) | identifier_body |
pants_run_integration_test.py | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import subprocess
import unittest
from collections import namedtuple
from operator import eq, ne
from pants.base.build_environment import get_buildroot
from pants.fs.archive import ZIP
from pants.util.contextutil import temporary_dir
from pants.util.dirutil import safe_mkdir, safe_open
PantsResult = namedtuple('PantsResult', ['command', 'returncode', 'stdout_data', 'stderr_data'])
class PantsRunIntegrationTest(unittest.TestCase):
"""A base class useful for integration tests for targets in the same repo."""
PANTS_SUCCESS_CODE = 0
PANTS_SCRIPT_NAME = 'pants'
@classmethod
def has_python_version(cls, version):
"""Returns true if the current system has the specified version of python.
:param version: A python version string, such as 2.6, 3.
"""
try:
subprocess.call(['python%s' % version, '-V'])
return True
except OSError:
return False
def workdir_root(self):
# We can hard-code '.pants.d' here because we know that will always be its value
# in the pantsbuild/pants repo (e.g., that's what we .gitignore in that repo).
# Grabbing the pants_workdir config would require this pants's config object,
# which we don't have a reference to here.
root = os.path.join(get_buildroot(), '.pants.d', 'tmp')
safe_mkdir(root)
return root
def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, extra_env=None,
**kwargs):
config = config.copy() if config else {}
# We add workdir to the DEFAULT section, and also ensure that it's emitted first.
default_section = config.pop('DEFAULT', {})
default_section['pants_workdir'] = '%s' % workdir
ini = ''
for section, section_config in [('DEFAULT', default_section)] + config.items():
ini += '\n[%s]\n' % section
for key, val in section_config.items():
ini += '%s: %s\n' % (key, val)
ini_file_name = os.path.join(workdir, 'pants.ini')
with safe_open(ini_file_name, mode='w') as fp:
fp.write(ini)
env = os.environ.copy()
env.update(extra_env or {})
pants_script = os.path.join(get_buildroot(), self.PANTS_SCRIPT_NAME)
pants_command = [pants_script,
'--no-lock',
'--kill-nailguns',
'--no-pantsrc',
'--config-override={0}'.format(ini_file_name),
'--print-exception-stacktrace'] + command
proc = subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
(stdout_data, stderr_data) = proc.communicate(stdin_data)
return PantsResult(pants_command, proc.returncode, stdout_data.decode("utf-8"),
stderr_data.decode("utf-8"))
def run_pants(self, command, config=None, stdin_data=None, extra_env=None, **kwargs):
"""Runs pants in a subprocess.
:param list command: A list of command line arguments coming after `./pants`.
:param config: Optional data for a generated ini file. A map of <section-name> ->
map of key -> value. If order in the ini file matters, this should be an OrderedDict.
:param kwargs: Extra keyword args to pass to `subprocess.Popen`.
:returns: a tuple (returncode, stdout_data, stderr_data).
IMPORTANT NOTE: The subprocess will be run with --no-lock, so that it doesn't deadlock waiting
for this process to release the workspace lock. It's the caller's responsibility to ensure
that the invoked pants doesn't interact badly with this one.
"""
with temporary_dir(root_dir=self.workdir_root()) as workdir:
return self.run_pants_with_workdir(command, workdir, config, stdin_data, extra_env, **kwargs)
def bundle_and_run(self, target, bundle_name, args=None):
"""Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.
:param target: target name to compile
:param bundle_name: resulting bundle filename (minus .jar extension)
:param args: optional arguments to pass to executable
:return: stdout as a string on success, raises an Exception on error
"""
pants_run = self.run_pants(['bundle', '--archive=zip', target])
self.assert_success(pants_run)
# TODO(John Sirois): We need a zip here to suck in external library classpath elements
# pointed to by symlinks in the run_pants ephemeral tmpdir. Switch run_pants to be a
# contextmanager that yields its results while the tmpdir workdir is still active and change
# this test back to using an un-archived bundle.
with temporary_dir() as workdir:
ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
optional_args = []
if args:
|
java_run = subprocess.Popen(['java',
'-jar',
'{bundle_name}.jar'.format(bundle_name=bundle_name)]
+ optional_args,
stdout=subprocess.PIPE,
cwd=workdir)
stdout, _ = java_run.communicate()
java_returncode = java_run.returncode
self.assertEquals(java_returncode, 0)
return stdout
def assert_success(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=True, msg=msg)
def assert_failure(self, pants_run, msg=None):
self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=False, msg=msg)
def assert_result(self, pants_run, value, expected=True, msg=None):
check, assertion = (eq, self.assertEqual) if expected else (ne, self.assertNotEqual)
if check(pants_run.returncode, value):
return
details = [msg] if msg else []
details.append(' '.join(pants_run.command))
details.append('returncode: {returncode}'.format(returncode=pants_run.returncode))
def indent(content):
return '\n\t'.join(content.splitlines())
if pants_run.stdout_data:
details.append('stdout:\n\t{stdout}'.format(stdout=indent(pants_run.stdout_data)))
if pants_run.stderr_data:
details.append('stderr:\n\t{stderr}'.format(stderr=indent(pants_run.stderr_data)))
error_msg = '\n'.join(details)
assertion(value, pants_run.returncode, error_msg)
| optional_args = args | conditional_block |
config.rs | use std::io::prelude::*;
use std::fs::File;
use std::str::FromStr;
use docopt::Docopt;
use toml;
use gobjects;
use version::Version;
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum WorkMode {
Normal, //generate widgets etc.
Sys, //generate -sys with ffi
}
impl Default for WorkMode {
fn default() -> WorkMode { WorkMode::Normal }
}
impl FromStr for WorkMode {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"normal" => Ok(WorkMode::Normal),
"sys" => Ok(WorkMode::Sys),
_ => Err("Wrong work mode".into())
}
}
}
static USAGE: &'static str = "
Usage: gir [options] [<library> <version>]
gir --help
Options:
-h, --help Show this message.
-c CONFIG Config file path (default: Gir.toml)
-d GIRSPATH Directory for girs
-m MODE Work mode: normal or sys
-o PATH Target path
-b, --make_backup Make backup before generating
";
#[derive(Debug)]
pub struct Config {
pub work_mode: WorkMode,
pub girs_dir: String,
pub library_name: String,
pub library_version: String,
pub target_path: String,
pub external_libraries: Vec<String>,
pub objects: gobjects::GObjects,
pub min_cfg_version: Version,
pub make_backup: bool,
}
impl Config {
pub fn new() -> Config {
let args = Docopt::new(USAGE)
.and_then(|dopt| dopt.parse())
.unwrap_or_else(|e| e.exit());
let config_file = match args.get_str("-c") {
"" => "Gir.toml",
a => a,
};
//TODO: add check file existence when stable std::fs::PathExt
let toml = read_toml(config_file);
let work_mode_str = match args.get_str("-m") {
"" => toml.lookup("options.work_mode")
.expect("No options.work_mode in config")
.as_str().unwrap(),
a => a,
};
let work_mode = WorkMode::from_str(work_mode_str)
.unwrap_or_else(|e| panic!(e));
let girs_dir = match args.get_str("-d") {
"" => toml.lookup("options.girs_dir")
.expect("No options.girs_dir in config")
.as_str().unwrap(),
a => a
};
let (library_name, library_version) =
match (args.get_str("<library>"), args.get_str("<version>")) {
("", "") => (
toml.lookup("options.library")
.expect("No options.library in config")
.as_str().unwrap(),
toml.lookup("options.version")
.expect("No options.version in config")
.as_str().unwrap()
),
("", _) | (_, "") => panic!("Library and version can not be specified separately"),
(a, b) => (a, b)
};
let target_path = match args.get_str("-o") {
"" => toml.lookup("options.target_path")
.expect("No target path specified")
.as_str().unwrap(),
a => a
}; |
let external_libraries = toml.lookup("options.external_libraries")
.map(|a| a.as_slice().unwrap().iter()
.filter_map(|v|
if let &toml::Value::String(ref s) = v { Some(s.clone()) } else { None } )
.collect())
.unwrap_or_else(|| Vec::new());
let min_cfg_version = toml.lookup("options.min_cfg_version")
.map_or_else(|| Ok(Default::default()), |t| t.as_str().unwrap().parse())
.unwrap_or_else(|e| panic!(e));
let make_backup = args.get_bool("-b");
Config {
work_mode: work_mode,
girs_dir: girs_dir.into(),
library_name: library_name.into(),
library_version: library_version.into(),
target_path: target_path.into(),
external_libraries: external_libraries,
objects: objects,
min_cfg_version: min_cfg_version,
make_backup: make_backup,
}
}
pub fn library_full_name(&self) -> String {
format!("{}-{}", self.library_name, self.library_version)
}
}
fn read_toml(filename: &str) -> toml::Value {
let mut input = String::new();
File::open(filename).and_then(|mut f| {
f.read_to_string(&mut input)
}).unwrap();
let mut parser = toml::Parser::new(&input);
match parser.parse() {
Some(toml) => toml::Value::Table(toml),
None => {
for err in &parser.errors {
let (loline, locol) = parser.to_linecol(err.lo);
let (hiline, hicol) = parser.to_linecol(err.hi);
println!("{}:{}:{}-{}:{} error: {}",
filename, loline, locol, hiline, hicol, err.desc);
}
panic!("Errors in config")
}
}
} |
let mut objects = toml.lookup("object").map(|t| gobjects::parse_toml(t))
.unwrap_or_else(|| Default::default());
gobjects::parse_status_shorthands(&mut objects, &toml); | random_line_split |
config.rs | use std::io::prelude::*;
use std::fs::File;
use std::str::FromStr;
use docopt::Docopt;
use toml;
use gobjects;
use version::Version;
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum WorkMode {
Normal, //generate widgets etc.
Sys, //generate -sys with ffi
}
impl Default for WorkMode {
fn default() -> WorkMode { WorkMode::Normal }
}
impl FromStr for WorkMode {
type Err = String;
fn | (s: &str) -> Result<Self, Self::Err> {
match s {
"normal" => Ok(WorkMode::Normal),
"sys" => Ok(WorkMode::Sys),
_ => Err("Wrong work mode".into())
}
}
}
static USAGE: &'static str = "
Usage: gir [options] [<library> <version>]
gir --help
Options:
-h, --help Show this message.
-c CONFIG Config file path (default: Gir.toml)
-d GIRSPATH Directory for girs
-m MODE Work mode: normal or sys
-o PATH Target path
-b, --make_backup Make backup before generating
";
#[derive(Debug)]
pub struct Config {
pub work_mode: WorkMode,
pub girs_dir: String,
pub library_name: String,
pub library_version: String,
pub target_path: String,
pub external_libraries: Vec<String>,
pub objects: gobjects::GObjects,
pub min_cfg_version: Version,
pub make_backup: bool,
}
impl Config {
pub fn new() -> Config {
let args = Docopt::new(USAGE)
.and_then(|dopt| dopt.parse())
.unwrap_or_else(|e| e.exit());
let config_file = match args.get_str("-c") {
"" => "Gir.toml",
a => a,
};
//TODO: add check file existence when stable std::fs::PathExt
let toml = read_toml(config_file);
let work_mode_str = match args.get_str("-m") {
"" => toml.lookup("options.work_mode")
.expect("No options.work_mode in config")
.as_str().unwrap(),
a => a,
};
let work_mode = WorkMode::from_str(work_mode_str)
.unwrap_or_else(|e| panic!(e));
let girs_dir = match args.get_str("-d") {
"" => toml.lookup("options.girs_dir")
.expect("No options.girs_dir in config")
.as_str().unwrap(),
a => a
};
let (library_name, library_version) =
match (args.get_str("<library>"), args.get_str("<version>")) {
("", "") => (
toml.lookup("options.library")
.expect("No options.library in config")
.as_str().unwrap(),
toml.lookup("options.version")
.expect("No options.version in config")
.as_str().unwrap()
),
("", _) | (_, "") => panic!("Library and version can not be specified separately"),
(a, b) => (a, b)
};
let target_path = match args.get_str("-o") {
"" => toml.lookup("options.target_path")
.expect("No target path specified")
.as_str().unwrap(),
a => a
};
let mut objects = toml.lookup("object").map(|t| gobjects::parse_toml(t))
.unwrap_or_else(|| Default::default());
gobjects::parse_status_shorthands(&mut objects, &toml);
let external_libraries = toml.lookup("options.external_libraries")
.map(|a| a.as_slice().unwrap().iter()
.filter_map(|v|
if let &toml::Value::String(ref s) = v { Some(s.clone()) } else { None } )
.collect())
.unwrap_or_else(|| Vec::new());
let min_cfg_version = toml.lookup("options.min_cfg_version")
.map_or_else(|| Ok(Default::default()), |t| t.as_str().unwrap().parse())
.unwrap_or_else(|e| panic!(e));
let make_backup = args.get_bool("-b");
Config {
work_mode: work_mode,
girs_dir: girs_dir.into(),
library_name: library_name.into(),
library_version: library_version.into(),
target_path: target_path.into(),
external_libraries: external_libraries,
objects: objects,
min_cfg_version: min_cfg_version,
make_backup: make_backup,
}
}
pub fn library_full_name(&self) -> String {
format!("{}-{}", self.library_name, self.library_version)
}
}
fn read_toml(filename: &str) -> toml::Value {
let mut input = String::new();
File::open(filename).and_then(|mut f| {
f.read_to_string(&mut input)
}).unwrap();
let mut parser = toml::Parser::new(&input);
match parser.parse() {
Some(toml) => toml::Value::Table(toml),
None => {
for err in &parser.errors {
let (loline, locol) = parser.to_linecol(err.lo);
let (hiline, hicol) = parser.to_linecol(err.hi);
println!("{}:{}:{}-{}:{} error: {}",
filename, loline, locol, hiline, hicol, err.desc);
}
panic!("Errors in config")
}
}
}
| from_str | identifier_name |
config.rs | use std::io::prelude::*;
use std::fs::File;
use std::str::FromStr;
use docopt::Docopt;
use toml;
use gobjects;
use version::Version;
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum WorkMode {
Normal, //generate widgets etc.
Sys, //generate -sys with ffi
}
impl Default for WorkMode {
fn default() -> WorkMode { WorkMode::Normal }
}
impl FromStr for WorkMode {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> |
}
static USAGE: &'static str = "
Usage: gir [options] [<library> <version>]
gir --help
Options:
-h, --help Show this message.
-c CONFIG Config file path (default: Gir.toml)
-d GIRSPATH Directory for girs
-m MODE Work mode: normal or sys
-o PATH Target path
-b, --make_backup Make backup before generating
";
#[derive(Debug)]
pub struct Config {
pub work_mode: WorkMode,
pub girs_dir: String,
pub library_name: String,
pub library_version: String,
pub target_path: String,
pub external_libraries: Vec<String>,
pub objects: gobjects::GObjects,
pub min_cfg_version: Version,
pub make_backup: bool,
}
impl Config {
pub fn new() -> Config {
let args = Docopt::new(USAGE)
.and_then(|dopt| dopt.parse())
.unwrap_or_else(|e| e.exit());
let config_file = match args.get_str("-c") {
"" => "Gir.toml",
a => a,
};
//TODO: add check file existence when stable std::fs::PathExt
let toml = read_toml(config_file);
let work_mode_str = match args.get_str("-m") {
"" => toml.lookup("options.work_mode")
.expect("No options.work_mode in config")
.as_str().unwrap(),
a => a,
};
let work_mode = WorkMode::from_str(work_mode_str)
.unwrap_or_else(|e| panic!(e));
let girs_dir = match args.get_str("-d") {
"" => toml.lookup("options.girs_dir")
.expect("No options.girs_dir in config")
.as_str().unwrap(),
a => a
};
let (library_name, library_version) =
match (args.get_str("<library>"), args.get_str("<version>")) {
("", "") => (
toml.lookup("options.library")
.expect("No options.library in config")
.as_str().unwrap(),
toml.lookup("options.version")
.expect("No options.version in config")
.as_str().unwrap()
),
("", _) | (_, "") => panic!("Library and version can not be specified separately"),
(a, b) => (a, b)
};
let target_path = match args.get_str("-o") {
"" => toml.lookup("options.target_path")
.expect("No target path specified")
.as_str().unwrap(),
a => a
};
let mut objects = toml.lookup("object").map(|t| gobjects::parse_toml(t))
.unwrap_or_else(|| Default::default());
gobjects::parse_status_shorthands(&mut objects, &toml);
let external_libraries = toml.lookup("options.external_libraries")
.map(|a| a.as_slice().unwrap().iter()
.filter_map(|v|
if let &toml::Value::String(ref s) = v { Some(s.clone()) } else { None } )
.collect())
.unwrap_or_else(|| Vec::new());
let min_cfg_version = toml.lookup("options.min_cfg_version")
.map_or_else(|| Ok(Default::default()), |t| t.as_str().unwrap().parse())
.unwrap_or_else(|e| panic!(e));
let make_backup = args.get_bool("-b");
Config {
work_mode: work_mode,
girs_dir: girs_dir.into(),
library_name: library_name.into(),
library_version: library_version.into(),
target_path: target_path.into(),
external_libraries: external_libraries,
objects: objects,
min_cfg_version: min_cfg_version,
make_backup: make_backup,
}
}
pub fn library_full_name(&self) -> String {
format!("{}-{}", self.library_name, self.library_version)
}
}
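/// Reads `filename` and parses it as TOML; on parse failure, prints each error
/// with its line/column range and panics.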
fn read_toml(filename: &str) -> toml::Value {
let mut input = String::new();
File::open(filename).and_then(|mut f| {
f.read_to_string(&mut input)
}).unwrap();
let mut parser = toml::Parser::new(&input);
match parser.parse() {
Some(toml) => toml::Value::Table(toml),
None => {
for err in &parser.errors {
let (loline, locol) = parser.to_linecol(err.lo);
let (hiline, hicol) = parser.to_linecol(err.hi);
println!("{}:{}:{}-{}:{} error: {}",
filename, loline, locol, hiline, hicol, err.desc);
}
panic!("Errors in config")
}
}
}
| {
match s {
"normal" => Ok(WorkMode::Normal),
"sys" => Ok(WorkMode::Sys),
_ => Err("Wrong work mode".into())
}
} | identifier_body |
karma.conf.js | // Karma configuration
// Generated on Wed Jun 01 2016 16:04:37 GMT-0400 (EDT)
module.exports = function (config) {
config.set({
basePath: '',
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
frameworks: ['mocha', 'browserify'],
// include only tests here; browserify will find the rest
files: [
'test/**/*-spec.+(js|jsx)'
],
exclude: [],
// available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
preprocessors: {
'test/**/*-spec.+(js|jsx)': ['browserify']
},
browserify: {
debug: true,
transform: ['babelify'],
extensions: ['.js', '.jsx'],
// needed for enzyme
configure: function (bundle) {
bundle.on('prebundle', function () {
bundle.external('react/addons');
bundle.external('react/lib/ReactContext');
bundle.external('react/lib/ExecutionEnvironment');
});
}
},
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
reporters: ['mocha'], |
// web server port
port: 9876,
// enable / disable colors in the output (reporters and logs)
colors: false,
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
logLevel: config.LOG_INFO,
// enable / disable watching files and executing tests whenever any file changes
autoWatch: false,
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
browsers: ['PhantomJS'],
// if true, Karma captures browsers, runs the tests and exits
singleRun: true,
// how many browsers should be started simultaneously
concurrency: Infinity
})
}; | random_line_split |
|
dompoint.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DOMPointBinding::{DOMPointInit, DOMPointMethods, Wrap};
use dom::bindings::codegen::Bindings::DOMPointReadOnlyBinding::DOMPointReadOnlyMethods;
use dom::bindings::error::Fallible;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::dompointreadonly::{DOMPointReadOnly, DOMPointWriteMethods};
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
// http://dev.w3.org/fxtf/geometry/Overview.html#dompoint
#[dom_struct]
pub struct DOMPoint {
point: DOMPointReadOnly,
}
impl DOMPoint {
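// DOMPoint is a mutable wrapper around DOMPointReadOnly; the accessors below
// simply delegate to the inner `point`.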
fn new_inherited(x: f64, y: f64, z: f64, w: f64) -> DOMPoint {
DOMPoint {
point: DOMPointReadOnly::new_inherited(x, y, z, w),
}
}
pub fn new(global: &GlobalScope, x: f64, y: f64, z: f64, w: f64) -> DomRoot<DOMPoint> {
reflect_dom_object(Box::new(DOMPoint::new_inherited(x, y, z, w)), global, Wrap)
}
pub fn Constructor(
global: &GlobalScope,
x: f64,
y: f64,
z: f64,
w: f64,
) -> Fallible<DomRoot<DOMPoint>> {
Ok(DOMPoint::new(global, x, y, z, w))
}
pub fn new_from_init(global: &GlobalScope, p: &DOMPointInit) -> DomRoot<DOMPoint> |
}
impl DOMPointMethods for DOMPoint {
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-x
fn X(&self) -> f64 {
self.point.X()
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-x
fn SetX(&self, value: f64) {
self.point.SetX(value);
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-y
fn Y(&self) -> f64 {
self.point.Y()
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-y
fn SetY(&self, value: f64) {
self.point.SetY(value);
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-z
fn Z(&self) -> f64 {
self.point.Z()
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-z
fn SetZ(&self, value: f64) {
self.point.SetZ(value);
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-w
fn W(&self) -> f64 {
self.point.W()
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-w
fn SetW(&self, value: f64) {
self.point.SetW(value);
}
}
| {
DOMPoint::new(global, p.x, p.y, p.z, p.w)
} | identifier_body |
dompoint.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DOMPointBinding::{DOMPointInit, DOMPointMethods, Wrap};
use dom::bindings::codegen::Bindings::DOMPointReadOnlyBinding::DOMPointReadOnlyMethods;
use dom::bindings::error::Fallible;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::dompointreadonly::{DOMPointReadOnly, DOMPointWriteMethods};
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
// http://dev.w3.org/fxtf/geometry/Overview.html#dompoint
#[dom_struct]
pub struct DOMPoint {
point: DOMPointReadOnly,
}
impl DOMPoint {
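// DOMPoint is a mutable wrapper around DOMPointReadOnly; the accessors below
// simply delegate to the inner `point`.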
fn new_inherited(x: f64, y: f64, z: f64, w: f64) -> DOMPoint {
DOMPoint {
point: DOMPointReadOnly::new_inherited(x, y, z, w),
}
}
pub fn new(global: &GlobalScope, x: f64, y: f64, z: f64, w: f64) -> DomRoot<DOMPoint> {
reflect_dom_object(Box::new(DOMPoint::new_inherited(x, y, z, w)), global, Wrap)
}
pub fn Constructor(
global: &GlobalScope,
x: f64,
y: f64,
z: f64,
w: f64,
) -> Fallible<DomRoot<DOMPoint>> {
Ok(DOMPoint::new(global, x, y, z, w))
}
pub fn new_from_init(global: &GlobalScope, p: &DOMPointInit) -> DomRoot<DOMPoint> {
DOMPoint::new(global, p.x, p.y, p.z, p.w)
}
}
impl DOMPointMethods for DOMPoint {
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-x
fn X(&self) -> f64 {
self.point.X()
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-x
fn SetX(&self, value: f64) {
self.point.SetX(value);
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-y
fn Y(&self) -> f64 {
self.point.Y()
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-y
fn SetY(&self, value: f64) {
self.point.SetY(value);
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-z
fn Z(&self) -> f64 {
self.point.Z()
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-z
fn SetZ(&self, value: f64) {
self.point.SetZ(value);
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-w
fn W(&self) -> f64 { | }
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-w
fn SetW(&self, value: f64) {
self.point.SetW(value);
}
} | self.point.W() | random_line_split |
dompoint.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DOMPointBinding::{DOMPointInit, DOMPointMethods, Wrap};
use dom::bindings::codegen::Bindings::DOMPointReadOnlyBinding::DOMPointReadOnlyMethods;
use dom::bindings::error::Fallible;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::dompointreadonly::{DOMPointReadOnly, DOMPointWriteMethods};
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
// http://dev.w3.org/fxtf/geometry/Overview.html#dompoint
#[dom_struct]
pub struct DOMPoint {
point: DOMPointReadOnly,
}
impl DOMPoint {
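// DOMPoint is a mutable wrapper around DOMPointReadOnly; the accessors below
// simply delegate to the inner `point`.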
fn new_inherited(x: f64, y: f64, z: f64, w: f64) -> DOMPoint {
DOMPoint {
point: DOMPointReadOnly::new_inherited(x, y, z, w),
}
}
pub fn new(global: &GlobalScope, x: f64, y: f64, z: f64, w: f64) -> DomRoot<DOMPoint> {
reflect_dom_object(Box::new(DOMPoint::new_inherited(x, y, z, w)), global, Wrap)
}
pub fn Constructor(
global: &GlobalScope,
x: f64,
y: f64,
z: f64,
w: f64,
) -> Fallible<DomRoot<DOMPoint>> {
Ok(DOMPoint::new(global, x, y, z, w))
}
pub fn new_from_init(global: &GlobalScope, p: &DOMPointInit) -> DomRoot<DOMPoint> {
DOMPoint::new(global, p.x, p.y, p.z, p.w)
}
}
impl DOMPointMethods for DOMPoint {
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-x
fn X(&self) -> f64 {
self.point.X()
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-x
fn SetX(&self, value: f64) {
self.point.SetX(value);
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-y
fn Y(&self) -> f64 {
self.point.Y()
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-y
fn SetY(&self, value: f64) {
self.point.SetY(value);
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-z
fn | (&self) -> f64 {
self.point.Z()
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-z
fn SetZ(&self, value: f64) {
self.point.SetZ(value);
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-w
fn W(&self) -> f64 {
self.point.W()
}
// https://dev.w3.org/fxtf/geometry/Overview.html#dom-dompointreadonly-w
fn SetW(&self, value: f64) {
self.point.SetW(value);
}
}
| Z | identifier_name |
NestedBinding.ts | import array = require('dojo/_base/array');
import binding = require('../interfaces');
import Binding = require('../Binding');
import lang = require('dojo/_base/lang');
import util = require('../../util');
var SEPARATOR:string = '.';
/**
* The NestedBinding class enables binding to arbitrarily deep children of a source object. It can bind to properties
* that may not exist at the time the object is initially bound, or whose parents change during the course of the
* lifetime of the root object.
*/
class NestedBinding<T> extends Binding<T> {
static test(kwArgs:binding.IBindingArguments):boolean {
return kwArgs.object != null && kwArgs.path && util.escapedIndexOf(kwArgs.path, SEPARATOR) > -1;
}
/**
* The string that identifies the sub-property to be bound.
*/
private _path:string[];
/**
* The watch handles for each binding.
*/
private _bindings:binding.IBinding<any>[] = [];
/**
* The property at the end of the bound chain of properties.
*/
private _source:binding.IBinding<T>;
constructor(kwArgs:binding.IBindingArguments) {
super(kwArgs);
this._binder = kwArgs.binder;
this._path = util.escapedSplit(kwArgs.path, SEPARATOR);
this._rebind(kwArgs.object, 0);
}
destroy():void {
super.destroy();
var bindings = this._bindings;
for (var i = 0, binding:binding.IBinding<any>; (binding = bindings[i]); ++i) {
binding.destroy();
}
this._source = this._bindings = this._path = null;
}
get():T {
return this._source && this._source.get ? this._source.get() : undefined;
}
getObject():{} {
return this._source ? this._source.getObject() : undefined;
}
/**
* Removes and rebinds to all objects in the object chain.
*/
private _rebind(fromObject:Object, fromIndex:number):void {
var bindings = this._bindings;
// Stop watching objects that are no longer part of this binding's object chain because a parent object
// was replaced
array.forEach(bindings.splice(fromIndex), function (binding:binding.IBinding<any>):void {
binding.destroy();
});
var self = this;
var path:string;
var index:number = fromIndex;
var object:any = fromObject;
var binding:binding.IBinding<any>;
var length:number = this._path.length;
// If any of the intermediate objects between `object` and the property we are actually binding
// change, we need to rebind the entire object chain starting from the changed object
for (; index < length - 1 && object; ++index) |
// If `object` exists, it will be the final object in the chain, the one on which we are actually looking
// for values
var value:any;
if (object) {
// If the values on this final object change we only need to update the value, not rebind
// any intermediate objects
binding = this._binder.createBinding(object, this._path[index], { useScheduler: false });
binding.observe(function (change:binding.IChangeRecord<T>):void {
self.notify(change);
});
bindings.push(binding);
value = binding.get();
}
else {
binding = null;
}
this._source = binding;
this.notify({ value: value });
}
set(value:T):void {
this._source && this._source.set && this._source.set(value);
}
}
export = NestedBinding;
| {
path = this._path[index];
binding = this._binder.createBinding(object, path, { useScheduler: false });
binding.observe(<binding.IObserver<any>> lang.partial(function (index:number, change:binding.IChangeRecord<T>):void {
self._rebind(change.value, index + 1);
}, index));
bindings.push(binding);
// If there is no object here, we cannot rebind any further; presumably, at some point in the future, an
// object will exist here and then binding can continue
if ((object = binding.get()) == null) {
break;
}
// If object is a promise resolve it and rebind
// TODO: Should probably use an explicit syntax for resolving promises instead of doing it implicitly
if (typeof object.then === 'function') {
object.then(function (value:any):void {
self._rebind(value, index + 1);
});
return;
}
} | conditional_block |
NestedBinding.ts | import array = require('dojo/_base/array');
import binding = require('../interfaces');
import Binding = require('../Binding');
import lang = require('dojo/_base/lang');
import util = require('../../util');
var SEPARATOR:string = '.';
/**
* The NestedBinding class enables binding to arbitrarily deep children of a source object. It can bind to properties
* that may not exist at the time the object is initially bound, or whose parents change during the course of the
* lifetime of the root object.
*/
class NestedBinding<T> extends Binding<T> {
static test(kwArgs:binding.IBindingArguments):boolean {
return kwArgs.object != null && kwArgs.path && util.escapedIndexOf(kwArgs.path, SEPARATOR) > -1;
}
/**
* The string that identifies the sub-property to be bound.
*/
private _path:string[];
/**
* The watch handles for each binding.
*/
private _bindings:binding.IBinding<any>[] = [];
/**
* The property at the end of the bound chain of properties.
*/
private _source:binding.IBinding<T>;
constructor(kwArgs:binding.IBindingArguments) {
super(kwArgs);
this._binder = kwArgs.binder;
this._path = util.escapedSplit(kwArgs.path, SEPARATOR);
this._rebind(kwArgs.object, 0);
}
destroy():void {
super.destroy();
var bindings = this._bindings;
for (var i = 0, binding:binding.IBinding<any>; (binding = bindings[i]); ++i) {
binding.destroy();
}
this._source = this._bindings = this._path = null;
}
get():T {
return this._source && this._source.get ? this._source.get() : undefined;
}
getObject():{} {
return this._source ? this._source.getObject() : undefined;
}
/**
* Removes and rebinds to all objects in the object chain.
*/
private _rebind(fromObject:Object, fromIndex:number):void {
var bindings = this._bindings;
// Stop watching objects that are no longer part of this binding's object chain because a parent object
// was replaced
array.forEach(bindings.splice(fromIndex), function (binding:binding.IBinding<any>):void {
binding.destroy();
});
var self = this;
var path:string;
var index:number = fromIndex;
var object:any = fromObject;
var binding:binding.IBinding<any>;
var length:number = this._path.length; |
// If any of the intermediate objects between `object` and the property we are actually binding
// change, we need to rebind the entire object chain starting from the changed object
for (; index < length - 1 && object; ++index) {
path = this._path[index];
binding = this._binder.createBinding(object, path, { useScheduler: false });
binding.observe(<binding.IObserver<any>> lang.partial(function (index:number, change:binding.IChangeRecord<T>):void {
self._rebind(change.value, index + 1);
}, index));
bindings.push(binding);
// If there is no object here, we cannot rebind any further; presumably, at some point in the future, an
// object will exist here and then binding can continue
if ((object = binding.get()) == null) {
break;
}
// If object is a promise resolve it and rebind
// TODO: Should probably use an explicit syntax for resolving promises instead of doing it implicitly
if (typeof object.then === 'function') {
object.then(function (value:any):void {
self._rebind(value, index + 1);
});
return;
}
}
// If `object` exists, it will be the final object in the chain, the one on which we are actually looking
// for values
var value:any;
if (object) {
// If the values on this final object change we only need to update the value, not rebind
// any intermediate objects
binding = this._binder.createBinding(object, this._path[index], { useScheduler: false });
binding.observe(function (change:binding.IChangeRecord<T>):void {
self.notify(change);
});
bindings.push(binding);
value = binding.get();
}
else {
binding = null;
}
this._source = binding;
this.notify({ value: value });
}
set(value:T):void {
this._source && this._source.set && this._source.set(value);
}
}
export = NestedBinding; | random_line_split |
|
NestedBinding.ts | import array = require('dojo/_base/array');
import binding = require('../interfaces');
import Binding = require('../Binding');
import lang = require('dojo/_base/lang');
import util = require('../../util');
var SEPARATOR:string = '.';
/**
* The NestedBinding class enables binding to arbitrarily deep children of a source object. It can bind to properties
* that may not exist at the time the object is initially bound, or whose parents change during the course of the
* lifetime of the root object.
*/
class NestedBinding<T> extends Binding<T> {
static test(kwArgs:binding.IBindingArguments):boolean {
return kwArgs.object != null && kwArgs.path && util.escapedIndexOf(kwArgs.path, SEPARATOR) > -1;
}
/**
* The string that identifies the sub-property to be bound.
*/
private _path:string[];
/**
* The watch handles for each binding.
*/
private _bindings:binding.IBinding<any>[] = [];
/**
* The property at the end of the bound chain of properties.
*/
private _source:binding.IBinding<T>;
constructor(kwArgs:binding.IBindingArguments) {
super(kwArgs);
this._binder = kwArgs.binder;
this._path = util.escapedSplit(kwArgs.path, SEPARATOR);
this._rebind(kwArgs.object, 0);
}
destroy():void {
super.destroy();
var bindings = this._bindings;
for (var i = 0, binding:binding.IBinding<any>; (binding = bindings[i]); ++i) {
binding.destroy();
}
this._source = this._bindings = this._path = null;
}
get():T {
return this._source && this._source.get ? this._source.get() : undefined;
}
getObject():{} {
return this._source ? this._source.getObject() : undefined;
}
/**
* Removes and rebinds to all objects in the object chain.
*/
private | (fromObject:Object, fromIndex:number):void {
var bindings = this._bindings;
// Stop watching objects that are no longer part of this binding's object chain because a parent object
// was replaced
array.forEach(bindings.splice(fromIndex), function (binding:binding.IBinding<any>):void {
binding.destroy();
});
var self = this;
var path:string;
var index:number = fromIndex;
var object:any = fromObject;
var binding:binding.IBinding<any>;
var length:number = this._path.length;
// If any of the intermediate objects between `object` and the property we are actually binding
// change, we need to rebind the entire object chain starting from the changed object
for (; index < length - 1 && object; ++index) {
path = this._path[index];
binding = this._binder.createBinding(object, path, { useScheduler: false });
binding.observe(<binding.IObserver<any>> lang.partial(function (index:number, change:binding.IChangeRecord<T>):void {
self._rebind(change.value, index + 1);
}, index));
bindings.push(binding);
// If there is no object here, we cannot rebind any further; presumably, at some point in the future, an
// object will exist here and then binding can continue
if ((object = binding.get()) == null) {
break;
}
// If object is a promise resolve it and rebind
// TODO: Should probably use an explicit syntax for resolving promises instead of doing it implicitly
if (typeof object.then === 'function') {
object.then(function (value:any):void {
self._rebind(value, index + 1);
});
return;
}
}
// If `object` exists, it will be the final object in the chain, the one on which we are actually looking
// for values
var value:any;
if (object) {
// If the values on this final object change we only need to update the value, not rebind
// any intermediate objects
binding = this._binder.createBinding(object, this._path[index], { useScheduler: false });
binding.observe(function (change:binding.IChangeRecord<T>):void {
self.notify(change);
});
bindings.push(binding);
value = binding.get();
}
else {
binding = null;
}
this._source = binding;
this.notify({ value: value });
}
set(value:T):void {
this._source && this._source.set && this._source.set(value);
}
}
export = NestedBinding;
| _rebind | identifier_name |
base_test_onramp.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Yannick Vaucher, Emanuel Cino
#
# The licence is in the file __openerp__.py
#
##############################################################################
import base64
import urllib2
import httplib
import simplejson
from openerp.tests import common
class TestOnramp(common.HttpCase):
""" Base class for all Onramp tests. """
def setUp(self):
super(TestOnramp, self).setUp()
self.server_url = self.env['ir.config_parameter'].get_param(
'web.base.url',
default='http://localhost:8069'
)
api_client_secret = base64.b64encode("client:secret")
self.rest_url = '{0}/onramp?secret={1}'.format(
self.server_url, api_client_secret)
params_post = 'grant_type=client_credentials&scope=read+write'
header_post = {
"Authorization": "Basic " + api_client_secret,
"Content-type": "application/x-www-form-urlencoded",
"Content-Length": 46,
"Expect": "100-continue",
"Connection": "Keep-Alive",
}
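# Request an OAuth2 access token (client-credentials grant) from the
# Compassion Connect API; the token is stored in self.headers and reused
# by every request the tests send.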
conn = httplib.HTTPSConnection('api2.compassion.com')
conn.request("POST", "/core/connect/token", params_post, header_post)
response = conn.getresponse()
data_token = simplejson.loads(response.read())
conn.close()
self.headers = {
'Content-type': 'application/json',
'Authorization': '{token_type} {access_token}'.format(
**data_token),
"x-cim-MessageType": "http://schemas.ci.org/ci/services/"
"communications/2015/09/SBCStructured",
"x-cim-FromAddress": "CHTest",
"x-cim-ToAddress": "CH",
}
def _test_no_token(self):
""" Check we have an access denied if token is not provided
""" | self.assertEqual(e.exception.code, 401)
self.assertEqual(e.exception.msg, 'UNAUTHORIZED')
def _test_bad_token(self):
""" Check we have an access denied if token is not valid
"""
self.headers['Authorization'] = 'Bearer notarealtoken'
with self.assertRaises(urllib2.HTTPError) as e:
self._send_post({'nothing': 'nothing'})
self.assertEqual(e.exception.code, 401)
self.assertEqual(e.exception.msg, 'UNAUTHORIZED')
def _test_body_no_json(self):
req = urllib2.Request(self.rest_url, "This is not json", self.headers)
with self.assertRaises(urllib2.HTTPError):
urllib2.urlopen(req)
def _send_post(self, vals):
data = simplejson.dumps(vals)
req = urllib2.Request(self.rest_url, data, self.headers)
return urllib2.urlopen(req) | del self.headers['Authorization']
with self.assertRaises(urllib2.HTTPError) as e:
self._send_post({'nothing': 'nothing'}) | random_line_split |
base_test_onramp.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Yannick Vaucher, Emanuel Cino
#
# The licence is in the file __openerp__.py
#
##############################################################################
import base64
import urllib2
import httplib
import simplejson
from openerp.tests import common
class TestOnramp(common.HttpCase):
""" Base class for all Onramp tests. """
def setUp(self):
super(TestOnramp, self).setUp()
self.server_url = self.env['ir.config_parameter'].get_param(
'web.base.url',
default='http://localhost:8069'
)
api_client_secret = base64.b64encode("client:secret")
self.rest_url = '{0}/onramp?secret={1}'.format(
self.server_url, api_client_secret)
params_post = 'grant_type=client_credentials&scope=read+write'
header_post = {
"Authorization": "Basic " + api_client_secret,
"Content-type": "application/x-www-form-urlencoded",
"Content-Length": 46,
"Expect": "100-continue",
"Connection": "Keep-Alive",
}
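# Request an OAuth2 access token (client-credentials grant) from the
# Compassion Connect API; the token is stored in self.headers and reused
# by every request the tests send.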
conn = httplib.HTTPSConnection('api2.compassion.com')
conn.request("POST", "/core/connect/token", params_post, header_post)
response = conn.getresponse()
data_token = simplejson.loads(response.read())
conn.close()
self.headers = {
'Content-type': 'application/json',
'Authorization': '{token_type} {access_token}'.format(
**data_token),
"x-cim-MessageType": "http://schemas.ci.org/ci/services/"
"communications/2015/09/SBCStructured",
"x-cim-FromAddress": "CHTest",
"x-cim-ToAddress": "CH",
}
def _test_no_token(self):
|
def _test_bad_token(self):
""" Check we have an access denied if token is not valid
"""
self.headers['Authorization'] = 'Bearer notarealtoken'
with self.assertRaises(urllib2.HTTPError) as e:
self._send_post({'nothing': 'nothing'})
self.assertEqual(e.exception.code, 401)
self.assertEqual(e.exception.msg, 'UNAUTHORIZED')
def _test_body_no_json(self):
req = urllib2.Request(self.rest_url, "This is not json", self.headers)
with self.assertRaises(urllib2.HTTPError):
urllib2.urlopen(req)
def _send_post(self, vals):
data = simplejson.dumps(vals)
req = urllib2.Request(self.rest_url, data, self.headers)
return urllib2.urlopen(req)
| """ Check we have an access denied if token is not provided
"""
del self.headers['Authorization']
with self.assertRaises(urllib2.HTTPError) as e:
self._send_post({'nothing': 'nothing'})
self.assertEqual(e.exception.code, 401)
self.assertEqual(e.exception.msg, 'UNAUTHORIZED') | identifier_body |
base_test_onramp.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Yannick Vaucher, Emanuel Cino
#
# The licence is in the file __openerp__.py
#
##############################################################################
import base64
import urllib2
import httplib
import simplejson
from openerp.tests import common
class TestOnramp(common.HttpCase):
""" Base class for all Onramp tests. """
def | (self):
super(TestOnramp, self).setUp()
self.server_url = self.env['ir.config_parameter'].get_param(
'web.base.url',
default='http://localhost:8069'
)
api_client_secret = base64.b64encode("client:secret")
self.rest_url = '{0}/onramp?secret={1}'.format(
self.server_url, api_client_secret)
params_post = 'grant_type=client_credentials&scope=read+write'
header_post = {
"Authorization": "Basic " + api_client_secret,
"Content-type": "application/x-www-form-urlencoded",
"Content-Length": 46,
"Expect": "100-continue",
"Connection": "Keep-Alive",
}
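# Request an OAuth2 access token (client-credentials grant) from the
# Compassion Connect API; the token is stored in self.headers and reused
# by every request the tests send.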
conn = httplib.HTTPSConnection('api2.compassion.com')
conn.request("POST", "/core/connect/token", params_post, header_post)
response = conn.getresponse()
data_token = simplejson.loads(response.read())
conn.close()
self.headers = {
'Content-type': 'application/json',
'Authorization': '{token_type} {access_token}'.format(
**data_token),
"x-cim-MessageType": "http://schemas.ci.org/ci/services/"
"communications/2015/09/SBCStructured",
"x-cim-FromAddress": "CHTest",
"x-cim-ToAddress": "CH",
}
def _test_no_token(self):
""" Check we have an access denied if token is not provided
"""
del self.headers['Authorization']
with self.assertRaises(urllib2.HTTPError) as e:
self._send_post({'nothing': 'nothing'})
self.assertEqual(e.exception.code, 401)
self.assertEqual(e.exception.msg, 'UNAUTHORIZED')
def _test_bad_token(self):
""" Check we have an access denied if token is not valid
"""
self.headers['Authorization'] = 'Bearer notarealtoken'
with self.assertRaises(urllib2.HTTPError) as e:
self._send_post({'nothing': 'nothing'})
self.assertEqual(e.exception.code, 401)
self.assertEqual(e.exception.msg, 'UNAUTHORIZED')
def _test_body_no_json(self):
req = urllib2.Request(self.rest_url, "This is not json", self.headers)
with self.assertRaises(urllib2.HTTPError):
urllib2.urlopen(req)
def _send_post(self, vals):
data = simplejson.dumps(vals)
req = urllib2.Request(self.rest_url, data, self.headers)
return urllib2.urlopen(req)
| setUp | identifier_name |
resources.component.ts | import { Component, OnInit } from '@angular/core';
import { IF_result } from '../shared/index';
import { ResourceService } from '../shared/api/resource.service';
/**
* Ambient declaration for the global jQuery object
* @type {any}
*/
declare var $:any;
/**
* interface - resource listing
*/
interface IF_resources {
isLoader : boolean;
isLast : boolean;
page : number;
limit : number;
total: number;
cityid : string;
adjusting : string;
created : string;
category : string;
keyword : string;
result : any[];
}
/**
* This class represents the lazy loaded ResourcesComponent.
*/
@Component({
moduleId: module.id,
selector: 'sd-resources',
templateUrl: 'resources.component.html',
styleUrls: ['resources.component.css'],
// providers: [ MobiscrollDirective ],
// directives: [ MobiscrollDirective ]
})
export class ResourcesComponent implements OnInit {
/**
* Properties
*/
errorMessage: string;
Categorys:any[] = [{id: 0,name: "全部",sortname: ""}];
Citys:any[] = [{id:0,name:'全部地区'}];
IsWeiXin: boolean = false;
Resources:IF_resources = {
isLoader : true,
isLast : false,
page : 1,
limit : 6,
total: 0,
cityid : "",
adjusting : "0",
created : "0",
category : "",
keyword : "",
result : []
};
/**
* Constructor - creates the service instance
* @param {ResourceService} public resourceService [description]
*/
constructor(
public resourceService: ResourceService
) {}
/**
* Initialization
*/
ngOnInit() {
$('#Resources').css({
'min-height' : $(window).height()
});
this.getResourceData();
this.getResourceList();
}
/**
* Fetch product categories and regions
*/
getResourceData() {
this.resourceService.getResourceData()
.subscribe(
result => {
// console.log(result);
if (result.success == "0") {
for (var key of Object.keys(result.data.cities)) {
this.Citys.push({
id: key,
name: result.data.cities[key]
});
}
for (var key of Object.keys(result.data.categories)) {
this.Categorys.push(result.data.categories[key]);
}
}
},
error => this.errorMessage = <any>error
);
}
/**
* Fetch the list of resource listings
*/
getResourceList(isGetMore: boolean = false) {
if(!isGetMore){
this.Resources.result = [];
this.Resources.page = 1;
}
this.Res | ice.getResourceList(this.Resources)
.subscribe(
result => {
// console.log(result);
if (result.success == "0") {
this.Resources.total = Math.ceil(result.data.count / this.Resources.limit) ;
if(!isGetMore){
for (let value of result.data.Respurces) {
value.isMarquee = (this.realLength(value.description)*19) > (window.innerWidth-145);
}
this.Resources.result = result.data.Respurces;
}else{
for (let value of result.data.Respurces) {
value.isMarquee = (this.realLength(value.description)*19) > (window.innerWidth-145);
this.Resources.result.push(value);
}
}
this.Resources.isLoader = false;
this.Resources.isLast = (this.Resources.page >= this.Resources.total);
if (this.Resources.result.length > 0) {
this.renderScroll();
}
} else {
alert(result.message);
}
// console.log(this.Resources)
},
);
}
/**
* Real (display) length of a string (full-width characters count as 1, others as 0.5)
* @param {any} str [description]
* @return {number} [description]
*/
realLength(str:any): number {
var L=0.0;
for(var i in str){
L+=(str.charCodeAt(i)>255)?1.0:0.5;
}
return Math.ceil(L);
}
/**
* Search box: clear text event
* @param {any} e [description]
*/
seachTextClear(e:any) {
e.stopPropagation();
this.Resources.category = "";
}
/**
* Search box: focus event
* @param {any} e [description]
*/
seachTextFocus(e:any) {
$(e.target).parent().animate({width:"192px"},'fast');
}
/**
* Search box: blur event
* @param {any} e [description]
*/
seachTextBlur(e:any) {
$(e.target).parent().animate({width:"100px"},'fast');
setTimeout(()=> {
if ($.trim(this.Resources.category) == "") {
this.Resources.category = "";
}
this.getResourceList();
},300);
}
/**
* Sort & filter change event
* @param {boolean = false} type [description]
*/
changeList(type:boolean = false) {
if (type) {
this.Resources.created = "0";
this.Resources.adjusting = "0";
$('#sel-default').mobiscroll('clear');
$('#sel-date').mobiscroll('clear');
}
this.getResourceList();
}
/**
* Load more resource listings
*/
getMoreList() {
this.Resources.page+=1;
this.getResourceList(true);
}
renderScroll() {
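// Wait for the list to render, then start two sets of timers: one vertically
// cycles the <p> entries of each .main_product card, the other horizontally
// scrolls (marquee) descriptions that overflow their container.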
setTimeout(()=> {
let intervalScroll: any[] = [];
$('.main_product').each(function(i:any,item:any) {
if ($(item).find('p').length > 1) {
intervalScroll[i] = setInterval(function() {
var $firstP = $(item).find('p:first'),
height = $firstP.height();
$firstP.animate({
height:0
},1000,'swing',function() {
$firstP.height(height);
$(item).append($firstP.clone());
$firstP.remove();
})
}, 3000);
}
});
let scrollMarquee: any[] = [];
$('.marquee span:not(.active)').each(function(i:any,item:any){
$(item).addClass('active');
var spanwidth = $(item).get(0).offsetWidth,
pWidth = $(item).parent().width(),
left = -18;
scrollMarquee[i] = setInterval(function(){
if (left <= spanwidth) {
left+=2;
$(item).css('left',-left);
} else {
$(item).css('left',pWidth);
left= -pWidth;
}
},50)
});
},300);
}
}
| ources.isLoader = true;
console.log(this.Resources)
this.resourceServ | conditional_block |
resources.component.ts | import { Component, OnInit } from '@angular/core';
import { IF_result } from '../shared/index';
import { ResourceService } from '../shared/api/resource.service';
/**
* Ambient declaration for the global jQuery object
* @type {any}
*/
declare var $:any;
/**
* interface - resource listing
*/
interface IF_resources {
isLoader : boolean;
isLast : boolean;
page : number;
limit : number;
total: number;
cityid : string;
adjusting : string;
created : string;
category : string;
keyword : string;
result : any[];
}
/**
* This class represents the lazy loaded ResourcesComponent.
*/
@Component({
moduleId: module.id,
selector: 'sd-resources',
templateUrl: 'resources.component.html',
styleUrls: ['resources.component.css'],
// providers: [ MobiscrollDirective ],
// directives: [ MobiscrollDirective ]
})
export class ResourcesComponent implements OnInit {
/**
* Properties
*/
errorMessage: string;
Categorys:any[] = [{id: 0,name: "全部",sortname: ""}];
Citys:any[] = [{id:0,name:'全部地区'}];
IsWeiXin: boolean = false;
Resources:IF_resources = {
isLoader : true,
isLast : false,
page : 1,
limit : 6,
total: 0,
cityid : "",
adjusting : "0",
created : "0",
category : "",
keyword : "",
result : []
};
/**
* Constructor - creates the service instance
* @param {ResourceService} public resourceService [description]
*/
constructor(
public resourceService: ResourceService
) {}
/**
* Initialization
*/
ngOnInit() {
$('#Resources').css({
'min-height' : $(window).height()
});
this.getResourceData();
this.getResourceList();
}
/**
* Fetch product categories and regions
*/
getResourceData() {
this.resourceService.getResourceData()
.subscribe(
result => {
// console.log(result);
if (result.success == "0") {
for (var key of Object.keys(result.data.cities)) {
this.Citys.push({
id: key,
name: result.data.cities[key]
});
}
for (var key of Object.keys(result.data.categories)) {
this.Categorys.push(result.data.categories[key]);
}
}
},
error => this.errorMessage = <any>error
);
}
/**
* Fetch the list of resource listings
*/
getResourceList(isGetMore: boolean = false) {
if(!isGetMore){
this.Resources.r | this.Resources.page = 1;
}
this.Resources.isLoader = true;
console.log(this.Resources)
this.resourceService.getResourceList(this.Resources)
.subscribe(
result => {
// console.log(result);
if (result.success == "0") {
this.Resources.total = Math.ceil(result.data.count / this.Resources.limit) ;
if(!isGetMore){
for (let value of result.data.Respurces) {
value.isMarquee = (this.realLength(value.description)*19) > (window.innerWidth-145);
}
this.Resources.result = result.data.Respurces;
}else{
for (let value of result.data.Respurces) {
value.isMarquee = (this.realLength(value.description)*19) > (window.innerWidth-145);
this.Resources.result.push(value);
}
}
this.Resources.isLoader = false;
this.Resources.isLast = (this.Resources.page >= this.Resources.total);
if (this.Resources.result.length > 0) {
this.renderScroll();
}
} else {
alert(result.message);
}
// console.log(this.Resources)
},
);
}
/**
* Real (display) length of a string (full-width characters count as 1, others as 0.5)
* @param {any} str [description]
* @return {number} [description]
*/
realLength(str:any): number {
var L=0.0;
for(var i in str){
L+=(str.charCodeAt(i)>255)?1.0:0.5;
}
return Math.ceil(L);
}
/**
* Search box: clear text event
* @param {any} e [description]
*/
seachTextClear(e:any) {
e.stopPropagation();
this.Resources.category = "";
}
/**
* Search box: focus event
* @param {any} e [description]
*/
seachTextFocus(e:any) {
$(e.target).parent().animate({width:"192px"},'fast');
}
/**
* Search box: blur event
* @param {any} e [description]
*/
seachTextBlur(e:any) {
$(e.target).parent().animate({width:"100px"},'fast');
setTimeout(()=> {
if ($.trim(this.Resources.category) == "") {
this.Resources.category = "";
}
this.getResourceList();
},300);
}
/**
* Sort & filter change event
* @param {boolean = false} type [description]
*/
changeList(type:boolean = false) {
if (type) {
this.Resources.created = "0";
this.Resources.adjusting = "0";
$('#sel-default').mobiscroll('clear');
$('#sel-date').mobiscroll('clear');
}
this.getResourceList();
}
/**
* Load more resource listings
*/
getMoreList() {
this.Resources.page+=1;
this.getResourceList(true);
}
renderScroll() {
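// Wait for the list to render, then start two sets of timers: one vertically
// cycles the <p> entries of each .main_product card, the other horizontally
// scrolls (marquee) descriptions that overflow their container.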
setTimeout(()=> {
let intervalScroll: any[] = [];
$('.main_product').each(function(i:any,item:any) {
if ($(item).find('p').length > 1) {
intervalScroll[i] = setInterval(function() {
var $firstP = $(item).find('p:first'),
height = $firstP.height();
$firstP.animate({
height:0
},1000,'swing',function() {
$firstP.height(height);
$(item).append($firstP.clone());
$firstP.remove();
})
}, 3000);
}
});
let scrollMarquee: any[] = [];
$('.marquee span:not(.active)').each(function(i:any,item:any){
$(item).addClass('active');
var spanwidth = $(item).get(0).offsetWidth,
pWidth = $(item).parent().width(),
left = -18;
scrollMarquee[i] = setInterval(function(){
if (left <= spanwidth) {
left+=2;
$(item).css('left',-left);
} else {
$(item).css('left',pWidth);
left= -pWidth;
}
},50)
});
},300);
}
}
| esult = [];
| identifier_name |
resources.component.ts | import { Component, OnInit } from '@angular/core';
import { IF_result } from '../shared/index';
import { ResourceService } from '../shared/api/resource.service';
/**
* Ambient declaration for the global jQuery object
* @type {any}
*/
declare var $:any;
/**
* interface - resource listing
*/
interface IF_resources {
isLoader : boolean;
isLast : boolean;
page : number;
limit : number;
total: number;
cityid : string;
adjusting : string;
created : string;
category : string;
keyword : string;
result : any[];
}
/**
* This class represents the lazy loaded ResourcesComponent.
*/
@Component({
moduleId: module.id,
selector: 'sd-resources',
templateUrl: 'resources.component.html',
styleUrls: ['resources.component.css'],
// providers: [ MobiscrollDirective ],
// directives: [ MobiscrollDirective ]
})
export class ResourcesComponent implements OnInit {
/**
* Properties
*/
errorMessage: string;
Categorys:any[] = [{id: 0,name: "全部",sortname: ""}];
Citys:any[] = [{id:0,name:'全部地区'}];
IsWeiXin: boolean = false;
Resources:IF_resources = {
isLoader : true,
isLast : false,
page : 1,
limit : 6,
total: 0,
cityid : "",
adjusting : "0",
created : "0",
category : "",
keyword : "",
result : []
};
/**
* Constructor - creates the service instance
* @param {ResourceService} public resourceService [description]
*/
constructor(
public resourceService: ResourceService
) {}
/**
* Initialization
*/
ngOnInit() {
$('#Resources').css({
'min-height' : $(window).height()
});
this.getResourceData();
this.getResourceList();
}
/**
* Fetch product categories and regions
*/
getResourceData() {
this.resourceService.getResourceData()
.subscribe(
result => {
// console.log(result);
if (result.success == "0") {
for (var key of Object.keys(result.data.cities)) {
this.Citys.push({
id: key,
name: result.data.cities[key]
});
}
for (var key of Object.keys(result.data.categories)) {
this.Categorys.push(result.data.categories[key]);
}
}
},
error => this.errorMessage = <any>error
);
}
/**
* Fetch the list of resource listings
*/
getResourceList(isGetMore: boolean = false) {
if(!isGetMore){
this.Resources.result = [];
this.Resources.page = 1;
}
this.Resources.isLoader = true;
console.log(this.Resources)
this.resourceService.getResourceList(this.Resources)
.subscribe(
result => {
// console.log(result);
if (result.success == "0") {
this.Resources.total = Math.ceil(result.data.count / this.Resources.limit) ;
if(!isGetMore){
for (let value of result.data.Respurces) {
value.isMarquee = (this.realLength(value.description)*19) > (window.innerWidth-145);
}
this.Resources.result = result.data.Respurces;
}else{
for (let value of result.data.Respurces) {
value.isMarquee = (this.realLength(value.description)*19) > (window.innerWidth-145);
this.Resources.result.push(value);
}
}
this.Resources.isLoader = false;
this.Resources.isLast = (this.Resources.page >= this.Resources.total);
if (this.Resources.result.length > 0) {
this.renderScroll();
}
} else {
alert(result.message);
}
// console.log(this.Resources)
},
);
}
/**
* Real (display) length of a string (full-width characters count as 1, others as 0.5)
* @param {any} str [description]
* @return {number} [description]
*/
realLength(str:any): number {
var L=0.0;
for(var i in str){
L+=(str.charCodeAt(i)>255)?1.0:0.5;
}
return Math.ceil(L);
}
/**
* Search box: clear text event
* @param {any} e [description]
*/
seachTextClear(e:any) {
e.stopPropagation();
this.Resources.category = "";
}
/**
* Search box: focus event
* @param {any} e [description]
*/
seachTextFocus(e:any) {
$(e.target).parent().animate({width:"192px"},'fast');
}
/**
* Search box: blur event
* @param {any} e [description]
*/
seachTextBlur(e:any) {
$(e.target).parent().animate({width:"100px"},'fast'); | this.getResourceList();
},300);
}
/**
* Sort & filter change event
* @param {boolean = false} type [description]
*/
changeList(type:boolean = false) {
if (type) {
this.Resources.created = "0";
this.Resources.adjusting = "0";
$('#sel-default').mobiscroll('clear');
$('#sel-date').mobiscroll('clear');
}
this.getResourceList();
}
/**
* Load more resource listings
*/
getMoreList() {
this.Resources.page+=1;
this.getResourceList(true);
}
renderScroll() {
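// Wait for the list to render, then start two sets of timers: one vertically
// cycles the <p> entries of each .main_product card, the other horizontally
// scrolls (marquee) descriptions that overflow their container.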
setTimeout(()=> {
let intervalScroll: any[] = [];
$('.main_product').each(function(i:any,item:any) {
if ($(item).find('p').length > 1) {
intervalScroll[i] = setInterval(function() {
var $firstP = $(item).find('p:first'),
height = $firstP.height();
$firstP.animate({
height:0
},1000,'swing',function() {
$firstP.height(height);
$(item).append($firstP.clone());
$firstP.remove();
})
}, 3000);
}
});
let scrollMarquee: any[] = [];
$('.marquee span:not(.active)').each(function(i:any,item:any){
$(item).addClass('active');
var spanwidth = $(item).get(0).offsetWidth,
pWidth = $(item).parent().width(),
left = -18;
scrollMarquee[i] = setInterval(function(){
if (left <= spanwidth) {
left+=2;
$(item).css('left',-left);
} else {
$(item).css('left',pWidth);
left= -pWidth;
}
},50)
});
},300);
}
} | setTimeout(()=> {
if ($.trim(this.Resources.category) == "") {
this.Resources.category = "";
} | random_line_split |
_configuration_async.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
| from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ArtifactsClientConfiguration(Configuration):
"""Configuration for ArtifactsClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.
:type endpoint: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
endpoint: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if endpoint is None:
raise ValueError("Parameter 'endpoint' must not be None.")
super(ArtifactsClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.endpoint = endpoint
self.api_version = "2019-06-01-preview"
self.credential_scopes = ['https://dev.azuresynapse.net/.default']
self.credential_scopes.extend(kwargs.pop('credential_scopes', []))
kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) | random_line_split |
|
_configuration_async.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ArtifactsClientConfiguration(Configuration):
"""Configuration for ArtifactsClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.
:type endpoint: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
endpoint: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if endpoint is None:
raise ValueError("Parameter 'endpoint' must not be None.")
super(ArtifactsClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.endpoint = endpoint
self.api_version = "2019-06-01-preview"
self.credential_scopes = ['https://dev.azuresynapse.net/.default']
self.credential_scopes.extend(kwargs.pop('credential_scopes', []))
kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION))
self._configure(**kwargs)
def | (
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| _configure | identifier_name |
_configuration_async.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ArtifactsClientConfiguration(Configuration):
| """Configuration for ArtifactsClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.
:type endpoint: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
endpoint: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if endpoint is None:
raise ValueError("Parameter 'endpoint' must not be None.")
super(ArtifactsClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.endpoint = endpoint
self.api_version = "2019-06-01-preview"
self.credential_scopes = ['https://dev.azuresynapse.net/.default']
self.credential_scopes.extend(kwargs.pop('credential_scopes', []))
kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) | identifier_body |
|
_configuration_async.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ArtifactsClientConfiguration(Configuration):
"""Configuration for ArtifactsClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.
:type endpoint: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
endpoint: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if endpoint is None:
raise ValueError("Parameter 'endpoint' must not be None.")
super(ArtifactsClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.endpoint = endpoint
self.api_version = "2019-06-01-preview"
self.credential_scopes = ['https://dev.azuresynapse.net/.default']
self.credential_scopes.extend(kwargs.pop('credential_scopes', []))
kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
| self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) | conditional_block |
|
S15.10.6.3_A1_T22.js | // Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
info: |
RegExp.prototype.test behavior depends on the lastIndex property:
ToLength(lastIndex) is the starting point for the search, so
negative numbers result in searching from 0.
es5id: 15.10.6.3_A1_T22
description: "Set lastIndex to -1 and call /(?:ab|cd)\\d?/g.test(\"aacd22 \")"
---*/
var __re = /(?:ab|cd)\d?/g;
__re.lastIndex=-1;
var __executed = __re.test("aacd22 ");
//CHECK#1
if (!__executed) {
$ERROR('#1: __re = /(?:ab|cd)\\d?/g; __re.lastIndex=-1; __executed = __re.test("aacd22 "); __executed === true');
} | if (__re.lastIndex !== 5) {
$ERROR('#2: __re = /(?:ab|cd)\\d?/g; __re.lastIndex=-1; __executed = __re.test("aacd22 "); __re.lastIndex === 5. Actual: ' + (__re.lastIndex));
}
__re.lastIndex=-100;
__executed = __re.test("aacd22 ");
//CHECK#3
if (!__executed) {
$ERROR('#3: __re = /(?:ab|cd)\\d?/g; __re.lastIndex=-1; __executed = __re.test("aacd22 "); __re.lastIndex=-100; __executed = __re.test("aacd22 "); __executed === true');
}
//CHECK#4
if (__re.lastIndex !== 5) {
$ERROR('#4: __re = /(?:ab|cd)\\d?/g; __re.lastIndex=-1; __executed = __re.test("aacd22 "); __re.lastIndex=-100; __executed = __re.test("aacd22 "); __re.lastIndex === 5. Actual: ' + (__re.lastIndex));
} |
//CHECK#2 | random_line_split |
S15.10.6.3_A1_T22.js | // Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
info: |
RegExp.prototype.test behavior depends on the lastIndex property:
ToLength(lastIndex) is the starting point for the search, so
negative numbers result in searching from 0.
es5id: 15.10.6.3_A1_T22
description: "Set lastIndex to -1 and call /(?:ab|cd)\\d?/g.test(\"aacd22 \")"
---*/
var __re = /(?:ab|cd)\d?/g;
__re.lastIndex=-1;
var __executed = __re.test("aacd22 ");
//CHECK#1
if (!__executed) {
$ERROR('#1: __re = /(?:ab|cd)\\d?/g; __re.lastIndex=-1; __executed = __re.test("aacd22 "); __executed === true');
}
//CHECK#2
if (__re.lastIndex !== 5) {
$ERROR('#2: __re = /(?:ab|cd)\\d?/g; __re.lastIndex=-1; __executed = __re.test("aacd22 "); __re.lastIndex === 5. Actual: ' + (__re.lastIndex));
}
__re.lastIndex=-100;
__executed = __re.test("aacd22 ");
//CHECK#3
if (!__executed) |
//CHECK#4
if (__re.lastIndex !== 5) {
$ERROR('#4: __re = /(?:ab|cd)\\d?/g; __re.lastIndex=-1; __executed = __re.test("aacd22 "); __re.lastIndex=-100; __executed = __re.test("aacd22 "); __re.lastIndex === 5. Actual: ' + (__re.lastIndex));
}
| {
$ERROR('#3: __re = /(?:ab|cd)\\d?/g; __re.lastIndex=-1; __executed = __re.test("aacd22 "); __re.lastIndex=-100; __executed = __re.test("aacd22 "); __executed === true');
} | conditional_block |
DragDrop.ts | /*
* This file is part of the TYPO3 CMS project.
*
* It is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License, either version 2
* of the License, or any later version.
*
* For the full copyright and license information, please read the
* LICENSE.txt file that was distributed with this source code.
*
* The TYPO3 project - inspiring people to share!
*/
/**
* Module: TYPO3/CMS/Backend/LayoutModule/DragDrop
* this JS code does the drag+drop logic for the Layout module (Web => Page)
* based on jQuery UI
*/
import $ from 'jquery';
import 'jquery-ui/droppable';
import DataHandler = require('../AjaxDataHandler');
import Icons = require('../Icons');
import ResponseInterface from '../AjaxDataHandler/ResponseInterface';
interface Parameters {
cmd?: { tt_content: { [key: string]: any } };
data?: { tt_content: { [key: string]: any } };
CB?: { paste: string, update: { colPos: number | boolean, sys_language_uid: number }};
}
interface DroppableEventUIParam {
draggable: JQuery;
helper: JQuery;
position: { top: number; left: number; };
offset: { top: number; left: number; };
}
class DragDrop {
private static readonly contentIdentifier: string = '.t3js-page-ce';
private static readonly dragIdentifier: string = '.t3-page-ce-dragitem';
private static readonly dragHeaderIdentifier: string = '.t3js-page-ce-draghandle';
private static readonly dropZoneIdentifier: string = '.t3js-page-ce-dropzone-available';
private static readonly columnIdentifier: string = '.t3js-page-column';
private static readonly validDropZoneClass: string = 'active';
private static readonly dropPossibleHoverClass: string = 't3-page-ce-dropzone-possible';
private static readonly addContentIdentifier: string = '.t3js-page-new-ce';
private static originalStyles: string = '';
/**
* initializes Drag+Drop for all content elements on the page
*/
public static initialize(): void {
$(DragDrop.contentIdentifier).draggable({
handle: DragDrop.dragHeaderIdentifier,
scope: 'tt_content',
cursor: 'move',
distance: 20,
// removed because of incompatible types:
// addClasses: 'active-drag',
revert: 'invalid',
zIndex: 100,
start: (evt: JQueryEventObject): void => {
DragDrop.onDragStart($(evt.target));
},
stop: (evt: JQueryEventObject): void => {
DragDrop.onDragStop($(evt.target));
},
});
$(DragDrop.dropZoneIdentifier).droppable({
accept: this.contentIdentifier,
scope: 'tt_content',
tolerance: 'pointer',
over: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDropHoverOver($(ui.draggable), $(evt.target));
},
out: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDropHoverOut($(ui.draggable), $(evt.target));
},
drop: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDrop($(ui.draggable), $(evt.target), evt);
},
});
}
/**
* called when a draggable is selected to be moved
* @param $element a jQuery object for the draggable
* @private
*/
public static onDragStart($element: JQuery): void |
/**
* called when a draggable is released
* @param $element a jQuery object for the draggable
* @private
*/
public static onDragStop($element: JQuery): void {
// Remove css class for the drag shadow
$element.children(DragDrop.dragIdentifier).removeClass('dragitem-shadow');
// Show create new element button
$element.children(DragDrop.dropZoneIdentifier).removeClass('drag-start');
$element.closest(DragDrop.columnIdentifier).addClass('active');
// TODO decide what to do with this
// $element.parents(DragDrop.columnHolderIdentifier).find(DragDrop.addContentIdentifier).show();
$element.find(DragDrop.dropZoneIdentifier).show();
$element.find('.ui-draggable-copy-message').remove();
// Reset inline style
$element.get(0).style.cssText = DragDrop.originalStyles;
$(DragDrop.dropZoneIdentifier + '.' + DragDrop.validDropZoneClass).removeClass(DragDrop.validDropZoneClass);
}
/**
* adds CSS classes when hovering over a dropzone
* @param $draggableElement
* @param $droppableElement
* @private
*/
public static onDropHoverOver($draggableElement: JQuery, $droppableElement: JQuery): void {
if ($droppableElement.hasClass(DragDrop.validDropZoneClass)) {
$droppableElement.addClass(DragDrop.dropPossibleHoverClass);
}
}
/**
* removes the CSS classes after hovering out of a dropzone again
* @param $draggableElement
* @param $droppableElement
* @private
*/
public static onDropHoverOut($draggableElement: JQuery, $droppableElement: JQuery): void {
$droppableElement.removeClass(DragDrop.dropPossibleHoverClass);
}
/**
   * this method does the whole logic when a draggable is dropped onto a dropzone:
   * it sends out the request and afterwards moves the HTML element into the right place.
*
* @param $draggableElement
* @param $droppableElement
* @param {Event} evt the event
* @private
*/
public static onDrop($draggableElement: JQuery, $droppableElement: JQuery, evt: JQueryEventObject): void {
const newColumn = DragDrop.getColumnPositionForElement($droppableElement);
$droppableElement.removeClass(DragDrop.dropPossibleHoverClass);
// send an AJAX request via the AjaxDataHandler
const contentElementUid: number = parseInt($draggableElement.data('uid'), 10);
if (typeof(contentElementUid) === 'number' && contentElementUid > 0) {
let parameters: Parameters = {};
// add the information about a possible column position change
const targetFound = $droppableElement.closest(DragDrop.contentIdentifier).data('uid');
// the item was moved to the top of the colPos, so the page ID is used here
let targetPid = 0;
if (typeof targetFound === 'undefined') {
// the actual page is needed. Read it from the container into which the element was dropped.
targetPid = parseInt((<HTMLElement>evt.target).offsetParent.getAttribute('data-page'), 10);
} else {
// the negative value of the content element after where it should be moved
targetPid = 0 - parseInt(targetFound, 10);
}
// the dragged elements language uid
let language: number = parseInt($draggableElement.data('language-uid'), 10);
if (language !== -1) {
// new elements language must be the same as the column the element is dropped in if element is not -1
language = parseInt($droppableElement.closest('[data-language-uid]').data('language-uid'), 10);
}
let colPos: number | boolean = 0;
if (targetPid !== 0) {
colPos = newColumn;
}
parameters.cmd = {tt_content: {}};
parameters.data = {tt_content: {}};
const copyAction = (evt && (<JQueryInputEventObject>evt.originalEvent).ctrlKey || $droppableElement.hasClass('t3js-paste-copy'));
if (copyAction) {
parameters.cmd.tt_content[contentElementUid] = {
copy: {
action: 'paste',
target: targetPid,
update: {
colPos: colPos,
sys_language_uid: language,
},
},
};
} else {
parameters.data.tt_content[contentElementUid] = {
colPos: colPos,
sys_language_uid: language,
};
parameters.cmd.tt_content[contentElementUid] = {move: targetPid};
}
DragDrop.ajaxAction($droppableElement, $draggableElement, parameters, copyAction).then((): void => {
const $languageDescriber = $(`.t3-page-column-lang-name[data-language-uid="${language}"]`);
if ($languageDescriber.length === 0) {
return;
}
const newFlagIdentifier = $languageDescriber.data('flagIdentifier');
const newLanguageTitle = $languageDescriber.data('languageTitle');
$draggableElement.find('.t3js-language-title').text(newLanguageTitle);
Icons.getIcon(newFlagIdentifier, Icons.sizes.small).then((markup: string): void => {
const $flagIcon = $draggableElement.find('.t3js-flag');
$flagIcon.attr('title', newLanguageTitle).html(markup);
});
});
}
}
/**
   * this method does the actual AJAX request for both the move and the copy action.
*
* @param {JQuery} $droppableElement
* @param {JQuery} $draggableElement
* @param {Parameters} parameters
* @param {boolean} copyAction
* @private
*/
public static ajaxAction($droppableElement: JQuery, $draggableElement: JQuery, parameters: Parameters, copyAction: boolean): Promise<any> {
return DataHandler.process(parameters).then((result: ResponseInterface): void => {
if (result.hasErrors) {
throw result.messages;
}
// insert draggable on the new position
if (!$droppableElement.parent().hasClass(DragDrop.contentIdentifier.substring(1))) {
$draggableElement.detach().css({top: 0, left: 0})
.insertAfter($droppableElement.closest(DragDrop.dropZoneIdentifier));
} else {
$draggableElement.detach().css({top: 0, left: 0})
.insertAfter($droppableElement.closest(DragDrop.contentIdentifier));
}
if (copyAction) {
self.location.reload();
}
});
}
/**
* returns the next "upper" container colPos parameter inside the code
* @param $element
   * @return number|boolean the colPos, or false when no column container is found
*/
public static getColumnPositionForElement($element: JQuery): number | boolean {
const $columnContainer = $element.closest('[data-colpos]');
if ($columnContainer.length && $columnContainer.data('colpos') !== 'undefined') {
return $columnContainer.data('colpos');
} else {
return false;
}
}
}
export default DragDrop;
$(DragDrop.initialize);
| {
// Add css class for the drag shadow
DragDrop.originalStyles = $element.get(0).style.cssText;
$element.children(DragDrop.dragIdentifier).addClass('dragitem-shadow');
$element.append('<div class="ui-draggable-copy-message">' + TYPO3.lang['dragdrop.copy.message'] + '</div>');
// Hide create new element button
$element.children(DragDrop.dropZoneIdentifier).addClass('drag-start');
$element.closest(DragDrop.columnIdentifier).removeClass('active');
// TODO decide what to do with this
// $element.parents(DragDrop.columnHolderIdentifier).find(DragDrop.addContentIdentifier).hide();
$element.find(DragDrop.dropZoneIdentifier).hide();
$(DragDrop.dropZoneIdentifier).each((index: number, element: HTMLElement): void => {
const $me = $(element);
if ($me.parent().find('.t3js-toggle-new-content-element-wizard').length) {
$me.addClass(DragDrop.validDropZoneClass);
} else {
$me.closest(DragDrop.contentIdentifier)
.find('> ' + DragDrop.addContentIdentifier + ', > > ' + DragDrop.addContentIdentifier).show();
}
});
} | identifier_body |
DragDrop.ts | /*
* This file is part of the TYPO3 CMS project.
*
* It is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License, either version 2
* of the License, or any later version.
*
* For the full copyright and license information, please read the
* LICENSE.txt file that was distributed with this source code.
*
* The TYPO3 project - inspiring people to share!
*/
/**
* Module: TYPO3/CMS/Backend/LayoutModule/DragDrop
* this JS code does the drag+drop logic for the Layout module (Web => Page)
* based on jQuery UI
*/
import $ from 'jquery';
import 'jquery-ui/droppable';
import DataHandler = require('../AjaxDataHandler');
import Icons = require('../Icons');
import ResponseInterface from '../AjaxDataHandler/ResponseInterface';
interface Parameters {
cmd?: { tt_content: { [key: string]: any } };
data?: { tt_content: { [key: string]: any } };
CB?: { paste: string, update: { colPos: number | boolean, sys_language_uid: number }};
}
interface DroppableEventUIParam {
draggable: JQuery;
helper: JQuery;
position: { top: number; left: number; };
offset: { top: number; left: number; };
}
class DragDrop {
private static readonly contentIdentifier: string = '.t3js-page-ce';
private static readonly dragIdentifier: string = '.t3-page-ce-dragitem';
private static readonly dragHeaderIdentifier: string = '.t3js-page-ce-draghandle';
private static readonly dropZoneIdentifier: string = '.t3js-page-ce-dropzone-available';
private static readonly columnIdentifier: string = '.t3js-page-column';
private static readonly validDropZoneClass: string = 'active';
private static readonly dropPossibleHoverClass: string = 't3-page-ce-dropzone-possible';
private static readonly addContentIdentifier: string = '.t3js-page-new-ce';
private static originalStyles: string = '';
/**
* initializes Drag+Drop for all content elements on the page
*/
public static initialize(): void {
$(DragDrop.contentIdentifier).draggable({
handle: DragDrop.dragHeaderIdentifier,
scope: 'tt_content',
cursor: 'move',
distance: 20,
// removed because of incompatible types:
// addClasses: 'active-drag',
revert: 'invalid',
zIndex: 100,
start: (evt: JQueryEventObject): void => {
DragDrop.onDragStart($(evt.target));
},
stop: (evt: JQueryEventObject): void => {
DragDrop.onDragStop($(evt.target));
},
});
$(DragDrop.dropZoneIdentifier).droppable({
accept: this.contentIdentifier,
scope: 'tt_content',
tolerance: 'pointer',
over: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDropHoverOver($(ui.draggable), $(evt.target));
},
out: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDropHoverOut($(ui.draggable), $(evt.target));
},
drop: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDrop($(ui.draggable), $(evt.target), evt);
},
});
}
/**
* called when a draggable is selected to be moved
* @param $element a jQuery object for the draggable
* @private
*/
public static onDragStart($element: JQuery): void {
// Add css class for the drag shadow
DragDrop.originalStyles = $element.get(0).style.cssText;
$element.children(DragDrop.dragIdentifier).addClass('dragitem-shadow');
$element.append('<div class="ui-draggable-copy-message">' + TYPO3.lang['dragdrop.copy.message'] + '</div>');
// Hide create new element button
$element.children(DragDrop.dropZoneIdentifier).addClass('drag-start');
$element.closest(DragDrop.columnIdentifier).removeClass('active');
// TODO decide what to do with this
// $element.parents(DragDrop.columnHolderIdentifier).find(DragDrop.addContentIdentifier).hide();
$element.find(DragDrop.dropZoneIdentifier).hide();
$(DragDrop.dropZoneIdentifier).each((index: number, element: HTMLElement): void => {
const $me = $(element);
if ($me.parent().find('.t3js-toggle-new-content-element-wizard').length) {
$me.addClass(DragDrop.validDropZoneClass);
} else {
$me.closest(DragDrop.contentIdentifier)
.find('> ' + DragDrop.addContentIdentifier + ', > > ' + DragDrop.addContentIdentifier).show();
}
});
}
/**
* called when a draggable is released
* @param $element a jQuery object for the draggable
* @private
*/
public static onDragStop($element: JQuery): void {
// Remove css class for the drag shadow
$element.children(DragDrop.dragIdentifier).removeClass('dragitem-shadow');
// Show create new element button
$element.children(DragDrop.dropZoneIdentifier).removeClass('drag-start');
$element.closest(DragDrop.columnIdentifier).addClass('active');
// TODO decide what to do with this
// $element.parents(DragDrop.columnHolderIdentifier).find(DragDrop.addContentIdentifier).show();
$element.find(DragDrop.dropZoneIdentifier).show();
$element.find('.ui-draggable-copy-message').remove();
// Reset inline style
$element.get(0).style.cssText = DragDrop.originalStyles;
$(DragDrop.dropZoneIdentifier + '.' + DragDrop.validDropZoneClass).removeClass(DragDrop.validDropZoneClass);
}
/**
* adds CSS classes when hovering over a dropzone
* @param $draggableElement
* @param $droppableElement
* @private
*/
public static onDropHoverOver($draggableElement: JQuery, $droppableElement: JQuery): void {
if ($droppableElement.hasClass(DragDrop.validDropZoneClass)) {
$droppableElement.addClass(DragDrop.dropPossibleHoverClass);
}
}
/**
* removes the CSS classes after hovering out of a dropzone again
* @param $draggableElement
* @param $droppableElement
* @private
*/
public static onDropHoverOut($draggableElement: JQuery, $droppableElement: JQuery): void {
$droppableElement.removeClass(DragDrop.dropPossibleHoverClass);
}
/**
   * this method does the whole logic when a draggable is dropped onto a dropzone:
   * it sends out the request and afterwards moves the HTML element into the right place.
*
* @param $draggableElement
* @param $droppableElement
* @param {Event} evt the event
* @private
*/
public static onDrop($draggableElement: JQuery, $droppableElement: JQuery, evt: JQueryEventObject): void {
const newColumn = DragDrop.getColumnPositionForElement($droppableElement);
$droppableElement.removeClass(DragDrop.dropPossibleHoverClass);
// send an AJAX request via the AjaxDataHandler
const contentElementUid: number = parseInt($draggableElement.data('uid'), 10);
if (typeof(contentElementUid) === 'number' && contentElementUid > 0) {
let parameters: Parameters = {};
// add the information about a possible column position change
const targetFound = $droppableElement.closest(DragDrop.contentIdentifier).data('uid');
// the item was moved to the top of the colPos, so the page ID is used here
let targetPid = 0;
if (typeof targetFound === 'undefined') {
// the actual page is needed. Read it from the container into which the element was dropped.
targetPid = parseInt((<HTMLElement>evt.target).offsetParent.getAttribute('data-page'), 10);
} else {
// the negative value of the content element after where it should be moved
targetPid = 0 - parseInt(targetFound, 10);
}
// the dragged elements language uid
let language: number = parseInt($draggableElement.data('language-uid'), 10);
if (language !== -1) {
// new elements language must be the same as the column the element is dropped in if element is not -1
language = parseInt($droppableElement.closest('[data-language-uid]').data('language-uid'), 10);
}
let colPos: number | boolean = 0;
if (targetPid !== 0) {
colPos = newColumn;
}
parameters.cmd = {tt_content: {}};
parameters.data = {tt_content: {}};
const copyAction = (evt && (<JQueryInputEventObject>evt.originalEvent).ctrlKey || $droppableElement.hasClass('t3js-paste-copy'));
if (copyAction) | else {
parameters.data.tt_content[contentElementUid] = {
colPos: colPos,
sys_language_uid: language,
};
parameters.cmd.tt_content[contentElementUid] = {move: targetPid};
}
DragDrop.ajaxAction($droppableElement, $draggableElement, parameters, copyAction).then((): void => {
const $languageDescriber = $(`.t3-page-column-lang-name[data-language-uid="${language}"]`);
if ($languageDescriber.length === 0) {
return;
}
const newFlagIdentifier = $languageDescriber.data('flagIdentifier');
const newLanguageTitle = $languageDescriber.data('languageTitle');
$draggableElement.find('.t3js-language-title').text(newLanguageTitle);
Icons.getIcon(newFlagIdentifier, Icons.sizes.small).then((markup: string): void => {
const $flagIcon = $draggableElement.find('.t3js-flag');
$flagIcon.attr('title', newLanguageTitle).html(markup);
});
});
}
}
/**
   * this method does the actual AJAX request for both the move and the copy action.
*
* @param {JQuery} $droppableElement
* @param {JQuery} $draggableElement
* @param {Parameters} parameters
* @param {boolean} copyAction
* @private
*/
public static ajaxAction($droppableElement: JQuery, $draggableElement: JQuery, parameters: Parameters, copyAction: boolean): Promise<any> {
return DataHandler.process(parameters).then((result: ResponseInterface): void => {
if (result.hasErrors) {
throw result.messages;
}
// insert draggable on the new position
if (!$droppableElement.parent().hasClass(DragDrop.contentIdentifier.substring(1))) {
$draggableElement.detach().css({top: 0, left: 0})
.insertAfter($droppableElement.closest(DragDrop.dropZoneIdentifier));
} else {
$draggableElement.detach().css({top: 0, left: 0})
.insertAfter($droppableElement.closest(DragDrop.contentIdentifier));
}
if (copyAction) {
self.location.reload();
}
});
}
/**
* returns the next "upper" container colPos parameter inside the code
* @param $element
   * @return number|boolean the colPos, or false when no column container is found
*/
public static getColumnPositionForElement($element: JQuery): number | boolean {
const $columnContainer = $element.closest('[data-colpos]');
if ($columnContainer.length && $columnContainer.data('colpos') !== 'undefined') {
return $columnContainer.data('colpos');
} else {
return false;
}
}
}
export default DragDrop;
$(DragDrop.initialize);
| {
parameters.cmd.tt_content[contentElementUid] = {
copy: {
action: 'paste',
target: targetPid,
update: {
colPos: colPos,
sys_language_uid: language,
},
},
};
} | conditional_block |
DragDrop.ts | /*
* This file is part of the TYPO3 CMS project.
*
* It is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License, either version 2
* of the License, or any later version.
*
* For the full copyright and license information, please read the
* LICENSE.txt file that was distributed with this source code.
*
* The TYPO3 project - inspiring people to share!
*/
/**
* Module: TYPO3/CMS/Backend/LayoutModule/DragDrop
* this JS code does the drag+drop logic for the Layout module (Web => Page) | import $ from 'jquery';
import 'jquery-ui/droppable';
import DataHandler = require('../AjaxDataHandler');
import Icons = require('../Icons');
import ResponseInterface from '../AjaxDataHandler/ResponseInterface';
interface Parameters {
cmd?: { tt_content: { [key: string]: any } };
data?: { tt_content: { [key: string]: any } };
CB?: { paste: string, update: { colPos: number | boolean, sys_language_uid: number }};
}
interface DroppableEventUIParam {
draggable: JQuery;
helper: JQuery;
position: { top: number; left: number; };
offset: { top: number; left: number; };
}
class DragDrop {
private static readonly contentIdentifier: string = '.t3js-page-ce';
private static readonly dragIdentifier: string = '.t3-page-ce-dragitem';
private static readonly dragHeaderIdentifier: string = '.t3js-page-ce-draghandle';
private static readonly dropZoneIdentifier: string = '.t3js-page-ce-dropzone-available';
private static readonly columnIdentifier: string = '.t3js-page-column';
private static readonly validDropZoneClass: string = 'active';
private static readonly dropPossibleHoverClass: string = 't3-page-ce-dropzone-possible';
private static readonly addContentIdentifier: string = '.t3js-page-new-ce';
private static originalStyles: string = '';
/**
* initializes Drag+Drop for all content elements on the page
*/
public static initialize(): void {
$(DragDrop.contentIdentifier).draggable({
handle: DragDrop.dragHeaderIdentifier,
scope: 'tt_content',
cursor: 'move',
distance: 20,
// removed because of incompatible types:
// addClasses: 'active-drag',
revert: 'invalid',
zIndex: 100,
start: (evt: JQueryEventObject): void => {
DragDrop.onDragStart($(evt.target));
},
stop: (evt: JQueryEventObject): void => {
DragDrop.onDragStop($(evt.target));
},
});
$(DragDrop.dropZoneIdentifier).droppable({
accept: this.contentIdentifier,
scope: 'tt_content',
tolerance: 'pointer',
over: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDropHoverOver($(ui.draggable), $(evt.target));
},
out: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDropHoverOut($(ui.draggable), $(evt.target));
},
drop: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDrop($(ui.draggable), $(evt.target), evt);
},
});
}
/**
* called when a draggable is selected to be moved
* @param $element a jQuery object for the draggable
* @private
*/
public static onDragStart($element: JQuery): void {
// Add css class for the drag shadow
DragDrop.originalStyles = $element.get(0).style.cssText;
$element.children(DragDrop.dragIdentifier).addClass('dragitem-shadow');
$element.append('<div class="ui-draggable-copy-message">' + TYPO3.lang['dragdrop.copy.message'] + '</div>');
// Hide create new element button
$element.children(DragDrop.dropZoneIdentifier).addClass('drag-start');
$element.closest(DragDrop.columnIdentifier).removeClass('active');
// TODO decide what to do with this
// $element.parents(DragDrop.columnHolderIdentifier).find(DragDrop.addContentIdentifier).hide();
$element.find(DragDrop.dropZoneIdentifier).hide();
$(DragDrop.dropZoneIdentifier).each((index: number, element: HTMLElement): void => {
const $me = $(element);
if ($me.parent().find('.t3js-toggle-new-content-element-wizard').length) {
$me.addClass(DragDrop.validDropZoneClass);
} else {
$me.closest(DragDrop.contentIdentifier)
.find('> ' + DragDrop.addContentIdentifier + ', > > ' + DragDrop.addContentIdentifier).show();
}
});
}
/**
* called when a draggable is released
* @param $element a jQuery object for the draggable
* @private
*/
public static onDragStop($element: JQuery): void {
// Remove css class for the drag shadow
$element.children(DragDrop.dragIdentifier).removeClass('dragitem-shadow');
// Show create new element button
$element.children(DragDrop.dropZoneIdentifier).removeClass('drag-start');
$element.closest(DragDrop.columnIdentifier).addClass('active');
// TODO decide what to do with this
// $element.parents(DragDrop.columnHolderIdentifier).find(DragDrop.addContentIdentifier).show();
$element.find(DragDrop.dropZoneIdentifier).show();
$element.find('.ui-draggable-copy-message').remove();
// Reset inline style
$element.get(0).style.cssText = DragDrop.originalStyles;
$(DragDrop.dropZoneIdentifier + '.' + DragDrop.validDropZoneClass).removeClass(DragDrop.validDropZoneClass);
}
/**
* adds CSS classes when hovering over a dropzone
* @param $draggableElement
* @param $droppableElement
* @private
*/
public static onDropHoverOver($draggableElement: JQuery, $droppableElement: JQuery): void {
if ($droppableElement.hasClass(DragDrop.validDropZoneClass)) {
$droppableElement.addClass(DragDrop.dropPossibleHoverClass);
}
}
/**
* removes the CSS classes after hovering out of a dropzone again
* @param $draggableElement
* @param $droppableElement
* @private
*/
public static onDropHoverOut($draggableElement: JQuery, $droppableElement: JQuery): void {
$droppableElement.removeClass(DragDrop.dropPossibleHoverClass);
}
/**
   * this method does the whole logic when a draggable is dropped onto a dropzone:
   * it sends out the request and afterwards moves the HTML element into the right place.
*
* @param $draggableElement
* @param $droppableElement
* @param {Event} evt the event
* @private
*/
public static onDrop($draggableElement: JQuery, $droppableElement: JQuery, evt: JQueryEventObject): void {
const newColumn = DragDrop.getColumnPositionForElement($droppableElement);
$droppableElement.removeClass(DragDrop.dropPossibleHoverClass);
// send an AJAX request via the AjaxDataHandler
const contentElementUid: number = parseInt($draggableElement.data('uid'), 10);
if (typeof(contentElementUid) === 'number' && contentElementUid > 0) {
let parameters: Parameters = {};
// add the information about a possible column position change
const targetFound = $droppableElement.closest(DragDrop.contentIdentifier).data('uid');
// the item was moved to the top of the colPos, so the page ID is used here
let targetPid = 0;
if (typeof targetFound === 'undefined') {
// the actual page is needed. Read it from the container into which the element was dropped.
targetPid = parseInt((<HTMLElement>evt.target).offsetParent.getAttribute('data-page'), 10);
} else {
// the negative value of the content element after where it should be moved
targetPid = 0 - parseInt(targetFound, 10);
}
// the dragged elements language uid
let language: number = parseInt($draggableElement.data('language-uid'), 10);
if (language !== -1) {
// new elements language must be the same as the column the element is dropped in if element is not -1
language = parseInt($droppableElement.closest('[data-language-uid]').data('language-uid'), 10);
}
let colPos: number | boolean = 0;
if (targetPid !== 0) {
colPos = newColumn;
}
parameters.cmd = {tt_content: {}};
parameters.data = {tt_content: {}};
const copyAction = (evt && (<JQueryInputEventObject>evt.originalEvent).ctrlKey || $droppableElement.hasClass('t3js-paste-copy'));
if (copyAction) {
parameters.cmd.tt_content[contentElementUid] = {
copy: {
action: 'paste',
target: targetPid,
update: {
colPos: colPos,
sys_language_uid: language,
},
},
};
} else {
parameters.data.tt_content[contentElementUid] = {
colPos: colPos,
sys_language_uid: language,
};
parameters.cmd.tt_content[contentElementUid] = {move: targetPid};
}
DragDrop.ajaxAction($droppableElement, $draggableElement, parameters, copyAction).then((): void => {
const $languageDescriber = $(`.t3-page-column-lang-name[data-language-uid="${language}"]`);
if ($languageDescriber.length === 0) {
return;
}
const newFlagIdentifier = $languageDescriber.data('flagIdentifier');
const newLanguageTitle = $languageDescriber.data('languageTitle');
$draggableElement.find('.t3js-language-title').text(newLanguageTitle);
Icons.getIcon(newFlagIdentifier, Icons.sizes.small).then((markup: string): void => {
const $flagIcon = $draggableElement.find('.t3js-flag');
$flagIcon.attr('title', newLanguageTitle).html(markup);
});
});
}
}
/**
   * this method does the actual AJAX request for both the move and the copy action.
*
* @param {JQuery} $droppableElement
* @param {JQuery} $draggableElement
* @param {Parameters} parameters
* @param {boolean} copyAction
* @private
*/
public static ajaxAction($droppableElement: JQuery, $draggableElement: JQuery, parameters: Parameters, copyAction: boolean): Promise<any> {
return DataHandler.process(parameters).then((result: ResponseInterface): void => {
if (result.hasErrors) {
throw result.messages;
}
// insert draggable on the new position
if (!$droppableElement.parent().hasClass(DragDrop.contentIdentifier.substring(1))) {
$draggableElement.detach().css({top: 0, left: 0})
.insertAfter($droppableElement.closest(DragDrop.dropZoneIdentifier));
} else {
$draggableElement.detach().css({top: 0, left: 0})
.insertAfter($droppableElement.closest(DragDrop.contentIdentifier));
}
if (copyAction) {
self.location.reload();
}
});
}
/**
* returns the next "upper" container colPos parameter inside the code
* @param $element
   * @return number|boolean the colPos, or false when no column container is found
*/
public static getColumnPositionForElement($element: JQuery): number | boolean {
const $columnContainer = $element.closest('[data-colpos]');
if ($columnContainer.length && $columnContainer.data('colpos') !== 'undefined') {
return $columnContainer.data('colpos');
} else {
return false;
}
}
}
export default DragDrop;
$(DragDrop.initialize); | * based on jQuery UI
*/ | random_line_split |
DragDrop.ts | /*
* This file is part of the TYPO3 CMS project.
*
* It is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License, either version 2
* of the License, or any later version.
*
* For the full copyright and license information, please read the
* LICENSE.txt file that was distributed with this source code.
*
* The TYPO3 project - inspiring people to share!
*/
/**
* Module: TYPO3/CMS/Backend/LayoutModule/DragDrop
* this JS code does the drag+drop logic for the Layout module (Web => Page)
* based on jQuery UI
*/
import $ from 'jquery';
import 'jquery-ui/droppable';
import DataHandler = require('../AjaxDataHandler');
import Icons = require('../Icons');
import ResponseInterface from '../AjaxDataHandler/ResponseInterface';
interface Parameters {
cmd?: { tt_content: { [key: string]: any } };
data?: { tt_content: { [key: string]: any } };
CB?: { paste: string, update: { colPos: number | boolean, sys_language_uid: number }};
}
interface DroppableEventUIParam {
draggable: JQuery;
helper: JQuery;
position: { top: number; left: number; };
offset: { top: number; left: number; };
}
class DragDrop {
private static readonly contentIdentifier: string = '.t3js-page-ce';
private static readonly dragIdentifier: string = '.t3-page-ce-dragitem';
private static readonly dragHeaderIdentifier: string = '.t3js-page-ce-draghandle';
private static readonly dropZoneIdentifier: string = '.t3js-page-ce-dropzone-available';
private static readonly columnIdentifier: string = '.t3js-page-column';
private static readonly validDropZoneClass: string = 'active';
private static readonly dropPossibleHoverClass: string = 't3-page-ce-dropzone-possible';
private static readonly addContentIdentifier: string = '.t3js-page-new-ce';
private static originalStyles: string = '';
/**
* initializes Drag+Drop for all content elements on the page
*/
public static initialize(): void {
$(DragDrop.contentIdentifier).draggable({
handle: DragDrop.dragHeaderIdentifier,
scope: 'tt_content',
cursor: 'move',
distance: 20,
// removed because of incompatible types:
// addClasses: 'active-drag',
revert: 'invalid',
zIndex: 100,
start: (evt: JQueryEventObject): void => {
DragDrop.onDragStart($(evt.target));
},
stop: (evt: JQueryEventObject): void => {
DragDrop.onDragStop($(evt.target));
},
});
$(DragDrop.dropZoneIdentifier).droppable({
accept: this.contentIdentifier,
scope: 'tt_content',
tolerance: 'pointer',
over: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDropHoverOver($(ui.draggable), $(evt.target));
},
out: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDropHoverOut($(ui.draggable), $(evt.target));
},
drop: (evt: JQueryEventObject, ui: DroppableEventUIParam): void => {
DragDrop.onDrop($(ui.draggable), $(evt.target), evt);
},
});
}
/**
* called when a draggable is selected to be moved
* @param $element a jQuery object for the draggable
* @private
*/
public static onDragStart($element: JQuery): void {
// Add css class for the drag shadow
DragDrop.originalStyles = $element.get(0).style.cssText;
$element.children(DragDrop.dragIdentifier).addClass('dragitem-shadow');
$element.append('<div class="ui-draggable-copy-message">' + TYPO3.lang['dragdrop.copy.message'] + '</div>');
// Hide create new element button
$element.children(DragDrop.dropZoneIdentifier).addClass('drag-start');
$element.closest(DragDrop.columnIdentifier).removeClass('active');
// TODO decide what to do with this
// $element.parents(DragDrop.columnHolderIdentifier).find(DragDrop.addContentIdentifier).hide();
$element.find(DragDrop.dropZoneIdentifier).hide();
$(DragDrop.dropZoneIdentifier).each((index: number, element: HTMLElement): void => {
const $me = $(element);
if ($me.parent().find('.t3js-toggle-new-content-element-wizard').length) {
$me.addClass(DragDrop.validDropZoneClass);
} else {
$me.closest(DragDrop.contentIdentifier)
.find('> ' + DragDrop.addContentIdentifier + ', > > ' + DragDrop.addContentIdentifier).show();
}
});
}
/**
* called when a draggable is released
* @param $element a jQuery object for the draggable
* @private
*/
public static onDragStop($element: JQuery): void {
// Remove css class for the drag shadow
$element.children(DragDrop.dragIdentifier).removeClass('dragitem-shadow');
// Show create new element button
$element.children(DragDrop.dropZoneIdentifier).removeClass('drag-start');
$element.closest(DragDrop.columnIdentifier).addClass('active');
// TODO decide what to do with this
// $element.parents(DragDrop.columnHolderIdentifier).find(DragDrop.addContentIdentifier).show();
$element.find(DragDrop.dropZoneIdentifier).show();
$element.find('.ui-draggable-copy-message').remove();
// Reset inline style
$element.get(0).style.cssText = DragDrop.originalStyles;
$(DragDrop.dropZoneIdentifier + '.' + DragDrop.validDropZoneClass).removeClass(DragDrop.validDropZoneClass);
}
/**
* adds CSS classes when hovering over a dropzone
* @param $draggableElement
* @param $droppableElement
* @private
*/
public static | ($draggableElement: JQuery, $droppableElement: JQuery): void {
if ($droppableElement.hasClass(DragDrop.validDropZoneClass)) {
$droppableElement.addClass(DragDrop.dropPossibleHoverClass);
}
}
/**
* removes the CSS classes after hovering out of a dropzone again
* @param $draggableElement
* @param $droppableElement
* @private
*/
public static onDropHoverOut($draggableElement: JQuery, $droppableElement: JQuery): void {
$droppableElement.removeClass(DragDrop.dropPossibleHoverClass);
}
/**
   * this method does the whole logic when a draggable is dropped onto a dropzone:
   * it sends out the request and afterwards moves the HTML element into the right place.
*
* @param $draggableElement
* @param $droppableElement
* @param {Event} evt the event
* @private
*/
public static onDrop($draggableElement: JQuery, $droppableElement: JQuery, evt: JQueryEventObject): void {
const newColumn = DragDrop.getColumnPositionForElement($droppableElement);
$droppableElement.removeClass(DragDrop.dropPossibleHoverClass);
// send an AJAX request via the AjaxDataHandler
const contentElementUid: number = parseInt($draggableElement.data('uid'), 10);
if (typeof(contentElementUid) === 'number' && contentElementUid > 0) {
let parameters: Parameters = {};
// add the information about a possible column position change
const targetFound = $droppableElement.closest(DragDrop.contentIdentifier).data('uid');
// the item was moved to the top of the colPos, so the page ID is used here
let targetPid = 0;
if (typeof targetFound === 'undefined') {
// the actual page is needed. Read it from the container into which the element was dropped.
targetPid = parseInt((<HTMLElement>evt.target).offsetParent.getAttribute('data-page'), 10);
} else {
// the negative value of the content element after where it should be moved
targetPid = 0 - parseInt(targetFound, 10);
}
// the dragged elements language uid
let language: number = parseInt($draggableElement.data('language-uid'), 10);
if (language !== -1) {
// new elements language must be the same as the column the element is dropped in if element is not -1
language = parseInt($droppableElement.closest('[data-language-uid]').data('language-uid'), 10);
}
let colPos: number | boolean = 0;
if (targetPid !== 0) {
colPos = newColumn;
}
parameters.cmd = {tt_content: {}};
parameters.data = {tt_content: {}};
const copyAction = (evt && (<JQueryInputEventObject>evt.originalEvent).ctrlKey || $droppableElement.hasClass('t3js-paste-copy'));
if (copyAction) {
parameters.cmd.tt_content[contentElementUid] = {
copy: {
action: 'paste',
target: targetPid,
update: {
colPos: colPos,
sys_language_uid: language,
},
},
};
} else {
parameters.data.tt_content[contentElementUid] = {
colPos: colPos,
sys_language_uid: language,
};
parameters.cmd.tt_content[contentElementUid] = {move: targetPid};
}
DragDrop.ajaxAction($droppableElement, $draggableElement, parameters, copyAction).then((): void => {
const $languageDescriber = $(`.t3-page-column-lang-name[data-language-uid="${language}"]`);
if ($languageDescriber.length === 0) {
return;
}
const newFlagIdentifier = $languageDescriber.data('flagIdentifier');
const newLanguageTitle = $languageDescriber.data('languageTitle');
$draggableElement.find('.t3js-language-title').text(newLanguageTitle);
Icons.getIcon(newFlagIdentifier, Icons.sizes.small).then((markup: string): void => {
const $flagIcon = $draggableElement.find('.t3js-flag');
$flagIcon.attr('title', newLanguageTitle).html(markup);
});
});
}
}
/**
   * this method does the actual AJAX request for both the move and the copy action.
*
* @param {JQuery} $droppableElement
* @param {JQuery} $draggableElement
* @param {Parameters} parameters
* @param {boolean} copyAction
* @private
*/
public static ajaxAction($droppableElement: JQuery, $draggableElement: JQuery, parameters: Parameters, copyAction: boolean): Promise<any> {
return DataHandler.process(parameters).then((result: ResponseInterface): void => {
if (result.hasErrors) {
throw result.messages;
}
// insert draggable on the new position
if (!$droppableElement.parent().hasClass(DragDrop.contentIdentifier.substring(1))) {
$draggableElement.detach().css({top: 0, left: 0})
.insertAfter($droppableElement.closest(DragDrop.dropZoneIdentifier));
} else {
$draggableElement.detach().css({top: 0, left: 0})
.insertAfter($droppableElement.closest(DragDrop.contentIdentifier));
}
if (copyAction) {
self.location.reload();
}
});
}
/**
* returns the next "upper" container colPos parameter inside the code
* @param $element
   * @return number|boolean the colPos, or false when no column container is found
*/
public static getColumnPositionForElement($element: JQuery): number | boolean {
const $columnContainer = $element.closest('[data-colpos]');
if ($columnContainer.length && $columnContainer.data('colpos') !== 'undefined') {
return $columnContainer.data('colpos');
} else {
return false;
}
}
}
export default DragDrop;
$(DragDrop.initialize);
| onDropHoverOver | identifier_name |
xdatcar2xyz.1.04.py | # The MIT License (MIT)
#
# Copyright (c) 2014 Muratahan Aykol
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE
import numpy as np
xdatcar = open('XDATCAR', 'r')
xyz = open('XDATCAR.xyz', 'w')
xyz_fract = open('XDATCAR_fract.xyz', 'w')
system = xdatcar.readline()
scale = float(xdatcar.readline().rstrip('\n'))
print scale
#get lattice vectors
a1 = np.array([ float(s)*scale for s in xdatcar.readline().rstrip('\n').split() ])
a2 = np.array([ float(s)*scale for s in xdatcar.readline().rstrip('\n').split() ])
a3 = np.array([ float(s)*scale for s in xdatcar.readline().rstrip('\n').split() ])
print a1
print a2
print a3
#Save scaled lattice vectors
lat_rec = open('lattice.vectors', 'w')
lat_rec.write(str(a1[0])+' '+str(a1[1])+' '+str(a1[2])+'\n')
lat_rec.write(str(a2[0])+' '+str(a2[1])+' '+str(a2[2])+'\n')
lat_rec.write(str(a3[0])+' '+str(a3[1])+' '+str(a3[2]))
lat_rec.close()
#Read xdatcar
element_names = xdatcar.readline().rstrip('\n').split()
element_dict = {}
element_numbers = xdatcar.readline().rstrip('\n').split()
i = 0
N = 0
for t in range(len(element_names)):
|
print element_dict
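# Loop over the trajectory frames: for each frame read one line per atom of
# fractional coordinates, convert them to Cartesian using the lattice vectors,
# and write both the Cartesian and the fractional .xyz files.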
while True:
line = xdatcar.readline()
if len(line) == 0:
break
xyz.write(str(N) + "\ncomment\n")
xyz_fract.write(str(N)+"\ncomment\n")
for el in element_names:
for i in range(element_dict[el]):
p = xdatcar.readline().rstrip('\n').split()
coords = np.array([ float(s) for s in p ])
# print coords
cartesian_coords = coords[0]*a1+coords[1]*a2+coords[2]*a3
xyz.write(el+ " " + str(cartesian_coords[0])+ " " + str(cartesian_coords[1]) + " " + str(cartesian_coords[2]) +"\n")
xyz_fract.write(el+ " " + str(coords[0])+ " " + str(coords[1]) + " " + str(coords[2]) +"\n")
xdatcar.close()
xyz.close()
xyz_fract.close()
| element_dict[element_names[t]] = int(element_numbers[i])
N += int(element_numbers[i])
i += 1 | conditional_block |
xdatcar2xyz.1.04.py | # The MIT License (MIT)
#
# Copyright (c) 2014 Muratahan Aykol
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE
import numpy as np
xdatcar = open('XDATCAR', 'r')
xyz = open('XDATCAR.xyz', 'w')
xyz_fract = open('XDATCAR_fract.xyz', 'w')
system = xdatcar.readline()
scale = float(xdatcar.readline().rstrip('\n'))
print scale
#get lattice vectors
a1 = np.array([ float(s)*scale for s in xdatcar.readline().rstrip('\n').split() ])
a2 = np.array([ float(s)*scale for s in xdatcar.readline().rstrip('\n').split() ])
a3 = np.array([ float(s)*scale for s in xdatcar.readline().rstrip('\n').split() ])
print a1
print a2
print a3
#Save scaled lattice vectors
lat_rec = open('lattice.vectors', 'w')
lat_rec.write(str(a1[0])+' '+str(a1[1])+' '+str(a1[2])+'\n')
lat_rec.write(str(a2[0])+' '+str(a2[1])+' '+str(a2[2])+'\n')
lat_rec.write(str(a3[0])+' '+str(a3[1])+' '+str(a3[2]))
lat_rec.close()
#Read xdatcar
element_names = xdatcar.readline().rstrip('\n').split()
element_dict = {}
element_numbers = xdatcar.readline().rstrip('\n').split()
i = 0
N = 0
for t in range(len(element_names)):
element_dict[element_names[t]] = int(element_numbers[i])
N += int(element_numbers[i])
i += 1
print element_dict
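# Loop over the trajectory frames: for each frame read one line per atom of
# fractional coordinates, convert them to Cartesian using the lattice vectors,
# and write both the Cartesian and the fractional .xyz files.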
while True:
line = xdatcar.readline()
if len(line) == 0:
break
xyz.write(str(N) + "\ncomment\n")
xyz_fract.write(str(N)+"\ncomment\n") | cartesian_coords = coords[0]*a1+coords[1]*a2+coords[2]*a3
xyz.write(el+ " " + str(cartesian_coords[0])+ " " + str(cartesian_coords[1]) + " " + str(cartesian_coords[2]) +"\n")
xyz_fract.write(el+ " " + str(coords[0])+ " " + str(coords[1]) + " " + str(coords[2]) +"\n")
xdatcar.close()
xyz.close()
xyz_fract.close() | for el in element_names:
for i in range(element_dict[el]):
p = xdatcar.readline().rstrip('\n').split()
coords = np.array([ float(s) for s in p ])
# print coords | random_line_split |
mknet_vd2d3d.py | from __future__ import print_function
import sys, os, math
import numpy as np
from numpy import float32, int32, uint8, dtype
# Load PyGreentea
# Relative path to where PyGreentea resides
pygt_path = '../..'
sys.path.append(pygt_path)
import pygreentea.pygreentea as pygt
import caffe
from caffe import layers as L |
net = caffe.NetSpec()
net.data = L.MemoryData(dim=[1, 1], ntop=1)
net.label = L.MemoryData(dim=[1, 1], ntop=1, include=[dict(phase=0)])
fmaps_vd2d3d = [24, 24, 36, 36, 48, 48, 60, 60]
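# The SKNet below wires up the vd2d3d hybrid: the 1xKxK convolutions and pools keep
# the early stages 2D, the trailing 2xKxK stages add 3D context, and the per-stage
# feature-map widths are popped from fmaps_vd2d3d via fmap_inc_rule.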
net.sknet = ML.SKNet(net.data,
fmap_start=24,
conv=[[1,3,3],[1,3,3],[1,2,2],[1,3,3],[1,3,3],[1,3,3],[1,3,3],[2,3,3],[2,3,3]],
activation=['relu', 'relu', 'tanh', 'relu', 'tanh', 'relu', 'tanh', 'relu', 'tanh', 'relu'],
pool=[[1,1,1],[1,1,1],[1,2,2],[1,1,1],[1,2,2],[1,1,1],[2,2,2],[1,1,1],[1,1,1]],
padding=[4,84,84],
fmap_inc_rule = lambda x: fmaps_vd2d3d.pop(0),
fmap_bridge_rule = lambda x: 100,
fmap_dec_rule = lambda x: 0,
ip_depth = 1,
hybrid_dimensions = [0],
dropout = 0.0)
net.out = L.Convolution(net.sknet, kernel_size=[1,1,1], num_output=2, weight_filler=dict(type='msra'), bias_filler=dict(type='constant'))
net.prob = L.Softmax(net.out, ntop=1, in_place=False, include=[dict(phase=1)])
net.loss = L.SoftmaxWithLoss(net.out, net.label, ntop=0, loss_weight=1.0, include=[dict(phase=0)])
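# Let PyGreentea derive consistent shapes for the data/label blobs, bounded by max_shapes.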
pygt.fix_input_dims(net,
[net.data, net.label],
max_shapes = [[100,250,250],[100,250,250]],
shape_coupled = [-1, -1, 1])
protonet = net.to_proto()
protonet.name = 'net_vd2d3d';
# Store the network as prototxt
with open(protonet.name + '.prototxt', 'w') as f:
print(protonet, file=f) | from caffe import params as P
from caffe import to_proto
from pygreentea.pygreentea import metalayers as ML
| random_line_split |
season.py | #------------------------------------------------------------------------------
# Copyright (C) 2009 Richard Lincoln
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation; version 2 dated June, 1991.
#
# This software is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#------------------------------------------------------------------------------
""" A specified time period of the year, e.g., Spring, Summer, Fall, Winter
"""
# <<< imports
# @generated
from cpsm.element import Element
from cpsm.load_model import SeasonName
from google.appengine.ext import db
# >>> imports
class | (Element):
""" A specified time period of the year, e.g., Spring, Summer, Fall, Winter
"""
# <<< season.attributes
# @generated
# Date season ends
end_date = db.DateTimeProperty()
# Date season starts
start_date = db.DateTimeProperty()
# Name of the Season
name = SeasonName
# >>> season.attributes
# <<< season.references
# @generated
# Virtual property. Schedules that use this Season.
pass # season_day_type_schedules
# >>> season.references
# <<< season.operations
# @generated
# >>> season.operations
# EOF -------------------------------------------------------------------------
| Season | identifier_name |
season.py | #------------------------------------------------------------------------------
# Copyright (C) 2009 Richard Lincoln
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation; version 2 dated June, 1991.
#
# This software is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#------------------------------------------------------------------------------
""" A specified time period of the year, e.g., Spring, Summer, Fall, Winter
"""
# <<< imports
# @generated
from cpsm.element import Element
from cpsm.load_model import SeasonName
| """ A specified time period of the year, e.g., Spring, Summer, Fall, Winter
"""
# <<< season.attributes
# @generated
# Date season ends
end_date = db.DateTimeProperty()
# Date season starts
start_date = db.DateTimeProperty()
# Name of the Season
name = SeasonName
# >>> season.attributes
# <<< season.references
# @generated
# Virtual property. Schedules that use this Season.
pass # season_day_type_schedules
# >>> season.references
# <<< season.operations
# @generated
# >>> season.operations
# EOF ------------------------------------------------------------------------- | from google.appengine.ext import db
# >>> imports
class Season(Element): | random_line_split |
season.py | #------------------------------------------------------------------------------
# Copyright (C) 2009 Richard Lincoln
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation; version 2 dated June, 1991.
#
# This software is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#------------------------------------------------------------------------------
""" A specified time period of the year, e.g., Spring, Summer, Fall, Winter
"""
# <<< imports
# @generated
from cpsm.element import Element
from cpsm.load_model import SeasonName
from google.appengine.ext import db
# >>> imports
class Season(Element):
|
# EOF -------------------------------------------------------------------------
| """ A specified time period of the year, e.g., Spring, Summer, Fall, Winter
"""
# <<< season.attributes
# @generated
# Date season ends
end_date = db.DateTimeProperty()
# Date season starts
start_date = db.DateTimeProperty()
# Name of the Season
name = SeasonName
# >>> season.attributes
# <<< season.references
# @generated
# Virtual property. Schedules that use this Season.
pass # season_day_type_schedules
# >>> season.references
# <<< season.operations
# @generated
# >>> season.operations | identifier_body |
handlers.py | """App related signal handlers."""
import redis
from django.conf import settings
from django.db.models import signals
from django.dispatch import receiver
from modoboa.admin import models as admin_models
from . import constants
def set_message_limit(instance, key):
"""Store message limit in Redis."""
old_message_limit = instance._loaded_values.get("message_limit")
if old_message_limit == instance.message_limit:
return
rclient = redis.Redis(
host=settings.REDIS_HOST,
port=settings.REDIS_PORT,
db=settings.REDIS_QUOTA_DB
)
if instance.message_limit is None:
# delete existing key
if rclient.hexists(constants.REDIS_HASHNAME, key):
|
return
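# A limit was set or changed: adjust the stored counter by the difference between
# the new and the old limit (or by the full limit if none was set before).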
if old_message_limit is not None:
diff = instance.message_limit - old_message_limit
else:
diff = instance.message_limit
rclient.hincrby(constants.REDIS_HASHNAME, key, diff)
@receiver(signals.post_save, sender=admin_models.Domain)
def set_domain_message_limit(sender, instance, created, **kwargs):
"""Store domain message limit in Redis."""
set_message_limit(instance, instance.name)
@receiver(signals.post_save, sender=admin_models.Mailbox)
def set_mailbox_message_limit(sender, instance, created, **kwargs):
"""Store mailbox message limit in Redis."""
set_message_limit(instance, instance.full_address)
| rclient.hdel(constants.REDIS_HASHNAME, key) | conditional_block |