file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
url_extractor.py | """
The consumer's code.
It takes HTML from the queue and outputs the URIs found in it.
"""
import asyncio
import json
import logging
from typing import List
from urllib.parse import urljoin
import aioredis
from bs4 import BeautifulSoup
from . import app_cli, redis_queue
_log = logging.getLogger('url_extractor')
def _scrape_urls(html: str, base_url: str) -> List[str]:
"""Gets all valid links from a site and returns them as URIs (some links may be relative.
If the URIs scraped here would go back into the system to have more URIs scraped from their
HTML, we would need to filter out all those who are not HTTP or HTTPS.
Also, assuming that many consumers and many producers would be running at the same time,
connected to one Redis instance, we would need to cache normalized versions or visited URIs
without fragments (https://tools.ietf.org/html/rfc3986#section-3.5) so we don't fall into loops.
For example two sites referencing each other.
The cached entries could have time-to-live (Redis EXPIRE command), so we could refresh our
knowledge about a site eventually.
"""
soup = BeautifulSoup(html, 'html.parser')
href = 'href'
return [urljoin(base_url, link.get(href))
for link in soup.find_all('a') if link.has_attr(href)]
async def _scrape_urls_from_queued_html(redis_pool: aioredis.RedisPool):
_log.info('Processing HTML from queue...')
while True:
try:
html_payload = await redis_queue.pop(redis_pool)
_log.info('Processing HTML from URL %s', html_payload.url)
scraped_urls = _scrape_urls(html_payload.html, html_payload.url)
_log.info('Scraped URIs from URL %s', html_payload.url)
output_json = {html_payload.url: scraped_urls}
# flush for anyone who is watching the stream
print(json.dumps(output_json), flush=True)
except redis_queue.QueueEmptyError:
# wait for work to become available | def main():
"""Run the URL extractor (the consumer).
"""
app_cli.setup_logging()
args_parser = app_cli.get_redis_args_parser(
'Start a worker that will get URL/HTML pairs from a Redis queue and for each of those '
'pairs output (on separate lines) a JSON in format {ORIGINATING_URL: [FOUND_URLS_LIST]}')
args = args_parser.parse_args()
loop = app_cli.get_event_loop()
_log.info('Creating a pool of connections to Redis at %s:%d.',
args.redis_host, args.redis_port)
# the pool won't be closed explicitly, since the process needs to be terminated to stop anyway
redis_pool = loop.run_until_complete(
aioredis.create_pool((args.redis_host, args.redis_port)))
loop.run_until_complete(_scrape_urls_from_queued_html(redis_pool))
if __name__ == '__main__':
main() | await asyncio.sleep(1) # pragma: no cover
| random_line_split |
url_extractor.py | """
The consumer's code.
It takes HTML from the queue and outputs the URIs found in it.
"""
import asyncio
import json
import logging
from typing import List
from urllib.parse import urljoin
import aioredis
from bs4 import BeautifulSoup
from . import app_cli, redis_queue
_log = logging.getLogger('url_extractor')
def _scrape_urls(html: str, base_url: str) -> List[str]:
"""Gets all valid links from a site and returns them as URIs (some links may be relative.
If the URIs scraped here would go back into the system to have more URIs scraped from their
HTML, we would need to filter out all those who are not HTTP or HTTPS.
Also, assuming that many consumers and many producers would be running at the same time,
connected to one Redis instance, we would need to cache normalized versions or visited URIs
without fragments (https://tools.ietf.org/html/rfc3986#section-3.5) so we don't fall into loops.
For example two sites referencing each other.
The cached entries could have time-to-live (Redis EXPIRE command), so we could refresh our
knowledge about a site eventually.
"""
soup = BeautifulSoup(html, 'html.parser')
href = 'href'
return [urljoin(base_url, link.get(href))
for link in soup.find_all('a') if link.has_attr(href)]
async def _scrape_urls_from_queued_html(redis_pool: aioredis.RedisPool):
_log.info('Processing HTML from queue...')
while True:
|
def main():
"""Run the URL extractor (the consumer).
"""
app_cli.setup_logging()
args_parser = app_cli.get_redis_args_parser(
'Start a worker that will get URL/HTML pairs from a Redis queue and for each of those '
'pairs output (on separate lines) a JSON in format {ORIGINATING_URL: [FOUND_URLS_LIST]}')
args = args_parser.parse_args()
loop = app_cli.get_event_loop()
_log.info('Creating a pool of connections to Redis at %s:%d.',
args.redis_host, args.redis_port)
# the pool won't be closed explicitly, since the process needs to be terminated to stop anyway
redis_pool = loop.run_until_complete(
aioredis.create_pool((args.redis_host, args.redis_port)))
loop.run_until_complete(_scrape_urls_from_queued_html(redis_pool))
if __name__ == '__main__':
main()
| try:
html_payload = await redis_queue.pop(redis_pool)
_log.info('Processing HTML from URL %s', html_payload.url)
scraped_urls = _scrape_urls(html_payload.html, html_payload.url)
_log.info('Scraped URIs from URL %s', html_payload.url)
output_json = {html_payload.url: scraped_urls}
# flush for anyone who is watching the stream
print(json.dumps(output_json), flush=True)
except redis_queue.QueueEmptyError:
# wait for work to become available
await asyncio.sleep(1) # pragma: no cover | conditional_block |
0004_copy_exif_data_to_model.py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.gis.geos import geometry
from PIL import Image
from PIL.ExifTags import TAGS
from ..util import point_from_exif
class Migration(DataMigration):
def forwards(self, orm):
for photo in orm['photomap.Photo'].objects.all():
photo.location = point_from_exif(photo.image.path)
photo.save()
def backwards(self, orm):
raise NotImplementedError('Too lazy to write a method to write the'
' coordinates to the EXIF of the files')
models = {
u'photomap.photo': {
'Meta': {'object_name': 'Photo'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True'}) | }
}
complete_apps = ['photomap']
symmetrical = True | random_line_split |
|
0004_copy_exif_data_to_model.py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.gis.geos import geometry
from PIL import Image
from PIL.ExifTags import TAGS
from ..util import point_from_exif
class Migration(DataMigration):
def forwards(self, orm):
for photo in orm['photomap.Photo'].objects.all():
|
def backwards(self, orm):
raise NotImplementedError('Too lazy to write a method to write the'
' coordinates to the EXIF of the files')
models = {
u'photomap.photo': {
'Meta': {'object_name': 'Photo'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True'})
}
}
complete_apps = ['photomap']
symmetrical = True
| photo.location = point_from_exif(photo.image.path)
photo.save() | conditional_block |
0004_copy_exif_data_to_model.py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.gis.geos import geometry
from PIL import Image
from PIL.ExifTags import TAGS
from ..util import point_from_exif
class Migration(DataMigration):
| def forwards(self, orm):
for photo in orm['photomap.Photo'].objects.all():
photo.location = point_from_exif(photo.image.path)
photo.save()
def backwards(self, orm):
raise NotImplementedError('Too lazy to write a method to write the'
' coordinates to the EXIF of the files')
models = {
u'photomap.photo': {
'Meta': {'object_name': 'Photo'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True'})
}
}
complete_apps = ['photomap']
symmetrical = True | identifier_body |
|
0004_copy_exif_data_to_model.py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.gis.geos import geometry
from PIL import Image
from PIL.ExifTags import TAGS
from ..util import point_from_exif
class Migration(DataMigration):
def | (self, orm):
for photo in orm['photomap.Photo'].objects.all():
photo.location = point_from_exif(photo.image.path)
photo.save()
def backwards(self, orm):
raise NotImplementedError('Too lazy to write a method to write the'
' coordinates to the EXIF of the files')
models = {
u'photomap.photo': {
'Meta': {'object_name': 'Photo'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True'})
}
}
complete_apps = ['photomap']
symmetrical = True
| forwards | identifier_name |
CleanMoviePrefix.py | # coding=gbk
import os
import re
import string
def isMov(filename):
# ÅжÏÊÇ·ñΪµçÓ°Îļþ
suffix = filename.split('.')[-1].lower() # ÌáÈ¡ºó׺
pattern = re.compile(r'mpg|mpeg|m2v|mkv|dat|vob|avi|wmv|rm|ram|rmvb|mov|avi|mp4|qt|viv')
if pattern.search(suffix): # Æ¥ÅäÊÇ·ñΪµçÓ°¸ñʽ
return True
else:
return False
if __name__=='__main__':
# | ¼
print '´¦ÀíÖС¡'
cnt = 1
for fp in os.listdir(os.getcwd()):
if os.path.isfile(fp) and isMov(fp): # ÊǵçÓ°Îļþ
if fp[0]=='[': # È¥µô¿ªÍ·µÄ[]
index = fp.find(']')
if index!=-1:
print '[%d] %s ==> %s'%(cnt,fp,fp[index+1:])
os.rename(fp,fp[index+1:])
fp = fp[index+1:]
cnt+=1
elif fp[:2]=='¡¾': # È¥µô¿ªÍ·µÄ¡¾¡¿
index = fp.find('¡¿')
if index!=-1:
print '[%d] %s ==> %s'%(cnt,fp,fp[index+2:])
os.rename(fp,fp[index+2:])
fp = fp[index+2:]
cnt+=1
if fp[0] =='.' or fp[0]=='-': # È¥µô¿ªÍ·µÄ'.' »ò '-'
print '[%d] %s ==> %s'%(cnt,fp,fp[1:])
os.rename(fp,fp[1:])
if cnt==1:
print 'ûÓÐÐèÒª´¦ÀíµÄµçÓ°Îļþ'
else:
print '´¦ÀíÍê±Ï'
| ±éÀúµ±Ç°Ä¿ | conditional_block |
CleanMoviePrefix.py | # coding=gbk
import os
import re
import string
def | (filename):
# ÅжÏÊÇ·ñΪµçÓ°Îļþ
suffix = filename.split('.')[-1].lower() # ÌáÈ¡ºó׺
pattern = re.compile(r'mpg|mpeg|m2v|mkv|dat|vob|avi|wmv|rm|ram|rmvb|mov|avi|mp4|qt|viv')
if pattern.search(suffix): # Æ¥ÅäÊÇ·ñΪµçÓ°¸ñʽ
return True
else:
return False
if __name__=='__main__':
# ±éÀúµ±Ç°Ä¿Â¼
print '´¦ÀíÖС¡'
cnt = 1
for fp in os.listdir(os.getcwd()):
if os.path.isfile(fp) and isMov(fp): # ÊǵçÓ°Îļþ
if fp[0]=='[': # È¥µô¿ªÍ·µÄ[]
index = fp.find(']')
if index!=-1:
print '[%d] %s ==> %s'%(cnt,fp,fp[index+1:])
os.rename(fp,fp[index+1:])
fp = fp[index+1:]
cnt+=1
elif fp[:2]=='¡¾': # È¥µô¿ªÍ·µÄ¡¾¡¿
index = fp.find('¡¿')
if index!=-1:
print '[%d] %s ==> %s'%(cnt,fp,fp[index+2:])
os.rename(fp,fp[index+2:])
fp = fp[index+2:]
cnt+=1
if fp[0] =='.' or fp[0]=='-': # È¥µô¿ªÍ·µÄ'.' »ò '-'
print '[%d] %s ==> %s'%(cnt,fp,fp[1:])
os.rename(fp,fp[1:])
if cnt==1:
print 'ûÓÐÐèÒª´¦ÀíµÄµçÓ°Îļþ'
else:
print '´¦ÀíÍê±Ï'
| isMov | identifier_name |
CleanMoviePrefix.py | # coding=gbk
import os
import re
import string
def isMov(filename):
# ÅжÏÊÇ·ñΪµçÓ°Îļþ
suffix = filename.split('.')[-1].lower() # ÌáÈ¡ºó׺
pattern = re.compile(r'mpg|mpeg|m2v|mkv|dat|vob|avi|wmv|rm|ram|rmvb|mov|avi|mp4|qt|viv')
if pattern.search(suffix): # Æ¥ÅäÊÇ·ñΪµçÓ°¸ñʽ
return True
else:
return False
if __name__=='__main__':
# ±éÀúµ±Ç°Ä¿Â¼
print '´¦ÀíÖС¡'
cnt = 1
for fp in os.listdir(os.getcwd()):
if os.path.isfile(fp) and isMov(fp): # ÊǵçÓ°Îļþ
if fp[0]=='[': # È¥µô¿ªÍ·µÄ[]
index = fp.find(']')
if index!=-1:
print '[%d] %s ==> %s'%(cnt,fp,fp[index+1:])
os.rename(fp,fp[index+1:])
fp = fp[index+1:]
cnt+=1
elif fp[:2]=='¡¾': # È¥µô¿ªÍ·µÄ¡¾¡¿
index = fp.find('¡¿')
if index!=-1:
print '[%d] %s ==> %s'%(cnt,fp,fp[index+2:])
os.rename(fp,fp[index+2:])
fp = fp[index+2:]
cnt+=1
if fp[0] =='.' or fp[0]=='-': # È¥µô¿ªÍ·µÄ'.' »ò '-'
print '[%d] %s ==> %s'%(cnt,fp,fp[1:])
os.rename(fp,fp[1:]) |
if cnt==1:
print 'ûÓÐÐèÒª´¦ÀíµÄµçÓ°Îļþ'
else:
print '´¦ÀíÍê±Ï' | random_line_split |
|
CleanMoviePrefix.py | # coding=gbk
import os
import re
import string
def isMov(filename):
# ÅжÏÊÇ·ñΪµçÓ°Îļþ
suffix = filename. | ¼
print '´¦ÀíÖС¡'
cnt = 1
for fp in os.listdir(os.getcwd()):
if os.path.isfile(fp) and isMov(fp): # ÊǵçÓ°Îļþ
if fp[0]=='[': # È¥µô¿ªÍ·µÄ[]
index = fp.find(']')
if index!=-1:
print '[%d] %s ==> %s'%(cnt,fp,fp[index+1:])
os.rename(fp,fp[index+1:])
fp = fp[index+1:]
cnt+=1
elif fp[:2]=='¡¾': # È¥µô¿ªÍ·µÄ¡¾¡¿
index = fp.find('¡¿')
if index!=-1:
print '[%d] %s ==> %s'%(cnt,fp,fp[index+2:])
os.rename(fp,fp[index+2:])
fp = fp[index+2:]
cnt+=1
if fp[0] =='.' or fp[0]=='-': # È¥µô¿ªÍ·µÄ'.' »ò '-'
print '[%d] %s ==> %s'%(cnt,fp,fp[1:])
os.rename(fp,fp[1:])
if cnt==1:
print 'ûÓÐÐèÒª´¦ÀíµÄµçÓ°Îļþ'
else:
print '´¦ÀíÍê±Ï'
| split('.')[-1].lower() # ÌáÈ¡ºó׺
pattern = re.compile(r'mpg|mpeg|m2v|mkv|dat|vob|avi|wmv|rm|ram|rmvb|mov|avi|mp4|qt|viv')
if pattern.search(suffix): # Æ¥ÅäÊÇ·ñΪµçÓ°¸ñʽ
return True
else:
return False
if __name__=='__main__':
# ±éÀúµ±Ç°Ä¿ | identifier_body |
anysex.py | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
)
class AnySexIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?anysex\.com/(?P<id>\d+)'
_TEST = {
'url': 'http://anysex.com/156592/',
'md5': '023e9fbb7f7987f5529a394c34ad3d3d',
'info_dict': {
'id': '156592',
'ext': 'mp4',
'title': 'Busty and sexy blondie in her bikini strips for you',
'description': 'md5:de9e418178e2931c10b62966474e1383',
'categories': ['Erotic'],
'duration': 270,
'age_limit': 18,
}
}
def _real_extract(self, url):
| mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
video_url = self._html_search_regex(r"video_url\s*:\s*'([^']+)'", webpage, 'video URL')
title = self._html_search_regex(r'<title>(.*?)</title>', webpage, 'title')
description = self._html_search_regex(
r'<div class="description"[^>]*>([^<]+)</div>', webpage, 'description', fatal=False)
thumbnail = self._html_search_regex(
r'preview_url\s*:\s*\'(.*?)\'', webpage, 'thumbnail', fatal=False)
categories = re.findall(
r'<a href="http://anysex\.com/categories/[^"]+" title="[^"]*">([^<]+)</a>', webpage)
duration = parse_duration(self._search_regex(
r'<b>Duration:</b> (?:<q itemprop="duration">)?(\d+:\d+)', webpage, 'duration', fatal=False))
view_count = int_or_none(self._html_search_regex(
r'<b>Views:</b> (\d+)', webpage, 'view count', fatal=False))
return {
'id': video_id,
'url': video_url,
'ext': 'mp4',
'title': title,
'description': description,
'thumbnail': thumbnail,
'categories': categories,
'duration': duration,
'view_count': view_count,
'age_limit': 18,
} | identifier_body |
|
anysex.py | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
)
class | (InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?anysex\.com/(?P<id>\d+)'
_TEST = {
'url': 'http://anysex.com/156592/',
'md5': '023e9fbb7f7987f5529a394c34ad3d3d',
'info_dict': {
'id': '156592',
'ext': 'mp4',
'title': 'Busty and sexy blondie in her bikini strips for you',
'description': 'md5:de9e418178e2931c10b62966474e1383',
'categories': ['Erotic'],
'duration': 270,
'age_limit': 18,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
video_url = self._html_search_regex(r"video_url\s*:\s*'([^']+)'", webpage, 'video URL')
title = self._html_search_regex(r'<title>(.*?)</title>', webpage, 'title')
description = self._html_search_regex(
r'<div class="description"[^>]*>([^<]+)</div>', webpage, 'description', fatal=False)
thumbnail = self._html_search_regex(
r'preview_url\s*:\s*\'(.*?)\'', webpage, 'thumbnail', fatal=False)
categories = re.findall(
r'<a href="http://anysex\.com/categories/[^"]+" title="[^"]*">([^<]+)</a>', webpage)
duration = parse_duration(self._search_regex(
r'<b>Duration:</b> (?:<q itemprop="duration">)?(\d+:\d+)', webpage, 'duration', fatal=False))
view_count = int_or_none(self._html_search_regex(
r'<b>Views:</b> (\d+)', webpage, 'view count', fatal=False))
return {
'id': video_id,
'url': video_url,
'ext': 'mp4',
'title': title,
'description': description,
'thumbnail': thumbnail,
'categories': categories,
'duration': duration,
'view_count': view_count,
'age_limit': 18,
}
| AnySexIE | identifier_name |
anysex.py | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
)
| 'url': 'http://anysex.com/156592/',
'md5': '023e9fbb7f7987f5529a394c34ad3d3d',
'info_dict': {
'id': '156592',
'ext': 'mp4',
'title': 'Busty and sexy blondie in her bikini strips for you',
'description': 'md5:de9e418178e2931c10b62966474e1383',
'categories': ['Erotic'],
'duration': 270,
'age_limit': 18,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
video_url = self._html_search_regex(r"video_url\s*:\s*'([^']+)'", webpage, 'video URL')
title = self._html_search_regex(r'<title>(.*?)</title>', webpage, 'title')
description = self._html_search_regex(
r'<div class="description"[^>]*>([^<]+)</div>', webpage, 'description', fatal=False)
thumbnail = self._html_search_regex(
r'preview_url\s*:\s*\'(.*?)\'', webpage, 'thumbnail', fatal=False)
categories = re.findall(
r'<a href="http://anysex\.com/categories/[^"]+" title="[^"]*">([^<]+)</a>', webpage)
duration = parse_duration(self._search_regex(
r'<b>Duration:</b> (?:<q itemprop="duration">)?(\d+:\d+)', webpage, 'duration', fatal=False))
view_count = int_or_none(self._html_search_regex(
r'<b>Views:</b> (\d+)', webpage, 'view count', fatal=False))
return {
'id': video_id,
'url': video_url,
'ext': 'mp4',
'title': title,
'description': description,
'thumbnail': thumbnail,
'categories': categories,
'duration': duration,
'view_count': view_count,
'age_limit': 18,
} | class AnySexIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?anysex\.com/(?P<id>\d+)'
_TEST = { | random_line_split |
typeReference.tsx | import { ITypeReference, isDynamic, isKeyword, isGenericParameter, isRequiredModifier, isPointer, isArray, IArrayDimension, isGenericInstance, isSimpleOrOpenGeneric } from "../../structure";
import { ReactFragment, FormatContext, locationDnaid, join, array, keyword, location, concreteTypeReference } from ".";
function arrayDimension(dim: IArrayDimension): string {
return dim.u ? dim.u.toString() : '';
}
export function | (context: FormatContext, value: ITypeReference): ReactFragment {
if (isDynamic(value))
return keyword('dynamic');
else if (isKeyword(value))
return location(context, value.l, keyword(value.n));
else if (isGenericParameter(value))
return value.n; // TODO: syntax highlighting for this?
else if (isRequiredModifier(value))
return locationDnaid(value.l) === 'System.Runtime.CompilerServices.IsVolatile' ?
[location(context, value.l, keyword('volatile')), ' ', typeReference(context, value.t)] :
typeReference(context, value.t);
else if (isPointer(value))
return [typeReference(context, value.t), '*'];
else if (isArray(value))
return [typeReference(context, value.t), '[', array(value.d).map(x => arrayDimension(x)).join(','), ']'];
else if (isGenericInstance(value))
return join(value.t.map(x => concreteTypeReference(context, x)), '.');
else if (isSimpleOrOpenGeneric(value))
return [
value.t ? [typeReference(context, value.t), '.'] : null,
location(context, value.l, value.n),
value.a ? ['<', new Array(value.a).join(','), '>'] : null
];
return null;
}
| typeReference | identifier_name |
typeReference.tsx | import { ITypeReference, isDynamic, isKeyword, isGenericParameter, isRequiredModifier, isPointer, isArray, IArrayDimension, isGenericInstance, isSimpleOrOpenGeneric } from "../../structure";
import { ReactFragment, FormatContext, locationDnaid, join, array, keyword, location, concreteTypeReference } from ".";
function arrayDimension(dim: IArrayDimension): string {
return dim.u ? dim.u.toString() : '';
}
export function typeReference(context: FormatContext, value: ITypeReference): ReactFragment {
if (isDynamic(value))
return keyword('dynamic');
else if (isKeyword(value))
return location(context, value.l, keyword(value.n));
else if (isGenericParameter(value))
return value.n; // TODO: syntax highlighting for this?
else if (isRequiredModifier(value))
return locationDnaid(value.l) === 'System.Runtime.CompilerServices.IsVolatile' ?
[location(context, value.l, keyword('volatile')), ' ', typeReference(context, value.t)] :
typeReference(context, value.t);
else if (isPointer(value))
return [typeReference(context, value.t), '*'];
else if (isArray(value))
return [typeReference(context, value.t), '[', array(value.d).map(x => arrayDimension(x)).join(','), ']'];
else if (isGenericInstance(value))
return join(value.t.map(x => concreteTypeReference(context, x)), '.');
else if (isSimpleOrOpenGeneric(value))
return [
value.t ? [typeReference(context, value.t), '.'] : null,
location(context, value.l, value.n),
value.a ? ['<', new Array(value.a).join(','), '>'] : null | return null;
} | ]; | random_line_split |
typeReference.tsx | import { ITypeReference, isDynamic, isKeyword, isGenericParameter, isRequiredModifier, isPointer, isArray, IArrayDimension, isGenericInstance, isSimpleOrOpenGeneric } from "../../structure";
import { ReactFragment, FormatContext, locationDnaid, join, array, keyword, location, concreteTypeReference } from ".";
function arrayDimension(dim: IArrayDimension): string {
return dim.u ? dim.u.toString() : '';
}
export function typeReference(context: FormatContext, value: ITypeReference): ReactFragment | {
if (isDynamic(value))
return keyword('dynamic');
else if (isKeyword(value))
return location(context, value.l, keyword(value.n));
else if (isGenericParameter(value))
return value.n; // TODO: syntax highlighting for this?
else if (isRequiredModifier(value))
return locationDnaid(value.l) === 'System.Runtime.CompilerServices.IsVolatile' ?
[location(context, value.l, keyword('volatile')), ' ', typeReference(context, value.t)] :
typeReference(context, value.t);
else if (isPointer(value))
return [typeReference(context, value.t), '*'];
else if (isArray(value))
return [typeReference(context, value.t), '[', array(value.d).map(x => arrayDimension(x)).join(','), ']'];
else if (isGenericInstance(value))
return join(value.t.map(x => concreteTypeReference(context, x)), '.');
else if (isSimpleOrOpenGeneric(value))
return [
value.t ? [typeReference(context, value.t), '.'] : null,
location(context, value.l, value.n),
value.a ? ['<', new Array(value.a).join(','), '>'] : null
];
return null;
} | identifier_body |
|
localfileview.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009-2010 Zuza Software Foundation
#
# This file is part of Virtaal.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import os.path
import gtk
import logging
import pango
from gtk import gdk
from locale import strcoll
from translate.lang import factory as lang_factory
from translate.storage import factory as store_factory
from virtaal.common.pan_app import ui_language
from virtaal.views.baseview import BaseView
from virtaal.views import rendering
from virtaal.views.theme import current_theme
class LocalFileView:
"""
Class that manages the localfile terminology plug-in's GUI presense and interaction.
"""
# INITIALIZERS #
def __init__(self, model):
self.term_model = model
self.controller = model.controller
self.mainview = model.controller.main_controller.view
self._signal_ids = []
self._setup_menus()
self.addterm = TermAddDialog(model=model)
self.fileselect = FileSelectDialog(model=model)
# METHODS #
def _setup_menus(self):
mnu_transfer = self.mainview.gui.get_widget('mnu_placnext')
self.mnui_edit = self.mainview.gui.get_widget('menuitem_edit')
self.menu = self.mnui_edit.get_submenu()
self.mnu_select_files, _menu = self.mainview.find_menu_item(_('Terminology _Files...'), self.mnui_edit)
if not self.mnu_select_files:
self.mnu_select_files = self.mainview.append_menu_item(_('Terminology _Files...'), self.mnui_edit, after=mnu_transfer)
self._signal_ids.append((
self.mnu_select_files,
self.mnu_select_files.connect('activate', self._on_select_term_files)
))
self.mnu_add_term, _menu = self.mainview.find_menu_item(_('Add _Term...'), self.mnui_edit)
if not self.mnu_add_term:
self.mnu_add_term = self.mainview.append_menu_item(_('Add _Term...'), self.mnui_edit, after=mnu_transfer)
self._signal_ids.append((
self.mnu_add_term,
self.mnu_add_term.connect('activate', self._on_add_term)
))
gtk.accel_map_add_entry("<Virtaal>/Terminology/Add Term", gtk.keysyms.t, gdk.CONTROL_MASK)
accel_group = self.menu.get_accel_group()
if accel_group is None:
accel_group = gtk.AccelGroup()
self.menu.set_accel_group(accel_group)
self.mnu_add_term.set_accel_path("<Virtaal>/Terminology/Add Term")
self.menu.set_accel_group(accel_group)
def destroy(self):
for gobj, signal_id in self._signal_ids:
gobj.disconnect(signal_id)
self.menu.remove(self.mnu_select_files)
self.menu.remove(self.mnu_add_term)
# EVENT HANDLERS #
def _on_add_term(self, menuitem):
self.addterm.run(parent=self.mainview.main_window)
def _on_select_term_files(self, menuitem):
self.fileselect.run(parent=self.mainview.main_window)
class FileSelectDialog:
"""
Wrapper for the selection dialog, created in Glade, to manage the list of
files used by this plug-in.
"""
COL_FILE, COL_EXTEND = range(2)
# INITIALIZERS #
def __init__(self, model):
self.controller = model.controller
self.term_model = model
self.gladefilename, self.gui = BaseView.load_glade_file(
["virtaal", "virtaal.glade"],
root='TermFilesDlg',
domain='virtaal'
)
self._get_widgets()
self._init_treeview()
self._init_add_chooser()
def _get_widgets(self):
widget_names = ('btn_add_file', 'btn_remove_file', 'btn_open_termfile', 'tvw_termfiles')
for name in widget_names:
setattr(self, name, self.gui.get_widget(name))
self.dialog = self.gui.get_widget('TermFilesDlg')
self.btn_add_file.connect('clicked', self._on_add_file_clicked)
self.btn_remove_file.connect('clicked', self._on_remove_file_clicked)
self.btn_open_termfile.connect('clicked', self._on_open_termfile_clicked)
self.tvw_termfiles.get_selection().connect('changed', self._on_selection_changed)
def _init_treeview(self):
self.lst_files = gtk.ListStore(str, bool)
self.tvw_termfiles.set_model(self.lst_files)
cell = gtk.CellRendererText()
cell.props.ellipsize = pango.ELLIPSIZE_MIDDLE
col = gtk.TreeViewColumn(_('File'))
col.pack_start(cell)
col.add_attribute(cell, 'text', self.COL_FILE)
col.set_expand(True)
col.set_sort_column_id(0)
self.tvw_termfiles.append_column(col)
cell = gtk.CellRendererToggle()
cell.set_radio(True)
cell.connect('toggled', self._on_toggle)
col = gtk.TreeViewColumn(_('Extendable'))
col.pack_start(cell)
col.add_attribute(cell, 'active', self.COL_EXTEND)
col.set_expand(False)
self.tvw_termfiles.append_column(col)
extend_file = self.term_model.config.get('extendfile', '')
files = self.term_model.config['files']
for f in files:
self.lst_files.append([f, f == extend_file])
# If there was no extend file, select the first one
for row in self.lst_files:
if row[self.COL_EXTEND]:
break
else:
itr = self.lst_files.get_iter_first()
if itr and self.lst_files.iter_is_valid(itr):
self.lst_files.set_value(itr, self.COL_EXTEND, True)
self.term_model.config['extendfile'] = self.lst_files.get_value(itr, self.COL_FILE)
self.term_model.save_config()
def _init_add_chooser(self):
# The following code was mostly copied from virtaal.views.MainView._create_dialogs()
dlg = gtk.FileChooserDialog(
_('Add Files'),
self.controller.main_controller.view.main_window,
gtk.FILE_CHOOSER_ACTION_OPEN,
(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK)
)
dlg.set_default_response(gtk.RESPONSE_OK)
all_supported_filter = gtk.FileFilter()
all_supported_filter.set_name(_("All Supported Files"))
dlg.add_filter(all_supported_filter)
supported_files_dict = dict([ (_(name), (extension, mimetype)) for name, extension, mimetype in store_factory.supported_files() ])
supported_file_names = supported_files_dict.keys()
supported_file_names.sort(cmp=strcoll)
for name in supported_file_names:
extensions, mimetypes = supported_files_dict[name]
#XXX: we can't open generic .csv formats, so listing it is probably
# more harmful than good.
if "csv" in extensions:
continue
new_filter = gtk.FileFilter()
new_filter.set_name(name)
if extensions:
for extension in extensions:
new_filter.add_pattern("*." + extension)
all_supported_filter.add_pattern("*." + extension)
for compress_extension in store_factory.decompressclass.keys():
new_filter.add_pattern("*.%s.%s" % (extension, compress_extension))
all_supported_filter.add_pattern("*.%s.%s" % (extension, compress_extension))
if mimetypes:
for mimetype in mimetypes:
new_filter.add_mime_type(mimetype)
all_supported_filter.add_mime_type(mimetype)
dlg.add_filter(new_filter)
all_filter = gtk.FileFilter()
all_filter.set_name(_("All Files"))
all_filter.add_pattern("*")
dlg.add_filter(all_filter)
dlg.set_select_multiple(True)
self.add_chooser = dlg
# METHODS #
def clear_selection(self):
self.tvw_termfiles.get_selection().unselect_all()
def run(self, parent=None):
if isinstance(parent, gtk.Widget):
self.dialog.set_transient_for(parent)
self.clear_selection()
self.dialog.show_all()
self.dialog.run()
self.dialog.hide()
# EVENT HANDLERS #
def _on_add_file_clicked(self, button):
self.add_chooser.show_all()
response = self.add_chooser.run()
self.add_chooser.hide()
if response != gtk.RESPONSE_OK:
return
mainview = self.term_model.controller.main_controller.view
currfiles = [row[self.COL_FILE] for row in self.lst_files]
for filename in self.add_chooser.get_filenames():
if filename in currfiles:
continue
# Try and open filename as a translation store
try:
if not os.path.isfile(filename):
raise IOError(_('"%s" is not a usable file.') % filename)
store = store_factory.getobject(filename)
currfiles.append(filename)
self.lst_files.append([filename, False])
except Exception, exc:
message = _('Unable to load %(filename)s:\n\n%(errormsg)s') % {'filename': filename, 'errormsg': str(exc)}
mainview.show_error_dialog(title=_('Error opening file'), message=message)
self.term_model.config['files'] = currfiles
self.term_model.save_config()
self.term_model.load_files() # FIXME: This could be optimized to only load and add the new selected files.
def _on_remove_file_clicked(self, button):
model, selected = self.tvw_termfiles.get_selection().get_selected()
if not selected:
return
remfile = model.get_value(selected, self.COL_FILE)
extend = model.get_value(selected, self.COL_EXTEND)
self.term_model.config['files'].remove(remfile)
if extend:
self.term_model.config['extendfile'] = ''
itr = model.get_iter_first()
if itr and model.iter_is_valid(itr):
model.set_value(itr, self.COL_EXTEND, True)
self.term_model.config['extendfile'] = model.get_value(itr, self.COL_FILE)
self.term_model.save_config()
self.term_model.load_files() # FIXME: This could be optimized to only remove the selected file from the terminology matcher.
model.remove(selected)
def _on_open_termfile_clicked(self, button):
    """Open the currently selected terminology file in the main editor."""
    selection = self.tvw_termfiles.get_selection()
    model, itr = selection.get_selected()
    if itr is None:
        return
    selected_file = model.get_value(itr, self.COL_FILE)
    self.term_model.controller.main_controller.open_file(selected_file)
def _on_selection_changed(self, treesel):
    """Enable the per-file buttons only while a row is selected."""
    _model, selected_iter = treesel.get_selected()
    has_selection = selected_iter is not None
    for button in (self.btn_open_termfile, self.btn_remove_file):
        button.set_sensitive(has_selection)
def _on_toggle(self, renderer, path):
    """Radio-toggle handler: mark exactly one file as 'extendable'."""
    toggled_file = self.lst_files.get_value(self.lst_files.get_iter(path), self.COL_FILE)
    # Set COL_EXTEND True for the toggled row and False for every other row.
    itr = self.lst_files.get_iter_first()
    while itr is not None and self.lst_files.iter_is_valid(itr):
        self.lst_files.set_value(itr, self.COL_EXTEND, self.lst_files.get_value(itr, self.COL_FILE) == toggled_file)
        itr = self.lst_files.iter_next(itr)
    self.term_model.config['extendfile'] = toggled_file
    self.term_model.save_config()
class TermAddDialog:
"""
Wrapper for the dialog used to add a new term to the terminology file.
"""
# INITIALIZERS #
def __init__(self, model):
    """Build the 'add term' dialog from the Glade file.

    :param model: the terminology plug-in's model; used to reach the
        main controller's language and unit controllers.
    """
    self.term_model = model
    self.lang_controller = model.controller.main_controller.lang_controller
    self.unit_controller = model.controller.main_controller.unit_controller
    self.gladefilename, self.gui = BaseView.load_glade_file(
        ["virtaal", "virtaal.glade"],
        root='TermAddDlg',
        domain='virtaal'
    )
    self._get_widgets()
def _get_widgets(self):
    """Fetch the dialog's widgets from the Glade tree and set them up."""
    widget_names = (
        'btn_add_term', 'cmb_termfile', 'eb_add_term_errors', 'ent_source',
        'ent_target', 'lbl_add_term_errors', 'lbl_srclang', 'lbl_tgtlang',
        'txt_comment'
    )
    # Expose each named widget as an attribute of the same name.
    for name in widget_names:
        setattr(self, name, self.gui.get_widget(name))
    self.dialog = self.gui.get_widget('TermAddDlg')
    cellr = gtk.CellRendererText()
    cellr.props.ellipsize = pango.ELLIPSIZE_MIDDLE
    # Combo box listing the terminology files a new term may be added to.
    self.lst_termfiles = gtk.ListStore(str)
    self.cmb_termfile.set_model(self.lst_termfiles)
    self.cmb_termfile.pack_start(cellr)
    self.cmb_termfile.add_attribute(cellr, 'text', 0)
    # Re-validate the term pair whenever source or target text changes.
    self.ent_source.connect('changed', self._on_entry_changed)
    self.ent_target.connect('changed', self._on_entry_changed)
# METHODS #
def add_term_unit(self, source, target):
    """Append a new source/target term (plus optional comment) to the
    currently selected terminology store and save it.

    :param source: source-language term text.
    :param target: target-language term text.
    """
    filename = self.cmb_termfile.get_active_text()
    store = self.term_model.get_store_for_filename(filename)
    if store is None:
        logging.debug('No terminology store to extend :(')
        return
    unit = store.addsourceunit(source)
    unit.target = target
    buff = self.txt_comment.get_buffer()
    comments = buff.get_text(buff.get_start_iter(), buff.get_end_iter())
    if comments:
        unit.addnote(comments)
    store.save()
    # Make the new term available to the matcher immediately.
    self.term_model.matcher.extendtm(unit)
    #logging.debug('Added new term: [%s] => [%s], file=%s' % (source, target, store.filename))
def reset(self):
    """Reset the dialog: prefill source/target from the current unit's
    text selections and rebuild the terminology-file combo box."""
    unitview = self.unit_controller.view
    # Use the first text selection found in any source view as the term.
    source_text = u''
    for src in unitview.sources:
        selection = src.buffer.get_selection_bounds()
        if selection:
            source_text = src.get_text(*selection)
            break
    self.ent_source.modify_font(rendering.get_source_font_description())
    self.ent_source.set_text(source_text.strip())
    # Likewise for the target term.
    target_text = u''
    for tgt in unitview.targets:
        selection = tgt.buffer.get_selection_bounds()
        if selection:
            target_text = tgt.get_text(*selection)
            break
    self.ent_target.modify_font(rendering.get_target_font_description())
    self.ent_target.set_text(target_text.strip())
    self.txt_comment.get_buffer().set_text('')
    self.eb_add_term_errors.hide()
    self.btn_add_term.props.sensitive = True
    self.lbl_srclang.set_text_with_mnemonic(_(u'_Source term — %(langname)s') % {'langname': self.lang_controller.source_lang.name})
    self.lbl_tgtlang.set_text_with_mnemonic(_(u'_Target term — %(langname)s') % {'langname': self.lang_controller.target_lang.name})
    # Repopulate the file combo, preselecting the configured extend file.
    self.lst_termfiles.clear()
    extendfile = self.term_model.config.get('extendfile', None)
    select_index = -1
    i = 0
    for f in self.term_model.config['files']:
        if f == extendfile:
            select_index = i
        self.lst_termfiles.append([f])
        i += 1
    if select_index >= 0:
        self.cmb_termfile.set_active(select_index)
def run(self, parent=None):
    """Show the dialog modally and, on OK, add the entered term.

    :param parent: optional gtk.Widget the dialog is made transient for.
    """
    self.reset()
    if isinstance(parent, gtk.Widget):
        self.dialog.set_transient_for(parent)
    self.dialog.show()
    # Run validation once so the Add button starts in the right state.
    self._on_entry_changed(None)
    self.ent_source.grab_focus()
    response = self.dialog.run()
    self.dialog.hide()
    if response != gtk.RESPONSE_OK:
        return
    self.add_term_unit(self.ent_source.get_text(), self.ent_target.get_text())
# EVENT HANDLERS #
def _on_entry_changed(self, entry):
self.btn_add_term.props.sensitive = True
self.eb_add_term_errors.hide()
src_text = self.ent_source.get_text()
tgt_text = self.ent_target.get_text()
dup = self.term_model.get_duplicates(src_text, tgt_text)
if dup:
self | same_src_units = self.term_model.get_units_with_source(src_text)
if src_text and same_src_units:
# We want to separate multiple terms with the correct list
# separator for the UI language:
separator = lang_factory.getlanguage(ui_language).listseperator
#l10n: The variable is an existing term formatted for emphasis. The default is bold formatting, but you can remove/change the markup if needed. Leave it unchanged if you are unsure.
translations = separator.join([_('<b>%s</b>') % (u.target) for u in same_src_units])
errormsg = _('Existing translations: %(translations)s') % {
'translations': translations
}
self.lbl_add_term_errors.set_markup(errormsg)
self.eb_add_term_errors.modify_bg(gtk.STATE_NORMAL, gdk.color_parse(current_theme['warning_bg']))
self.eb_add_term_errors.show_all()
return
| .lbl_add_term_errors.set_text(_('Identical entry already exists.'))
self.eb_add_term_errors.modify_bg(gtk.STATE_NORMAL, gdk.color_parse(current_theme['warning_bg']))
self.eb_add_term_errors.show_all()
self.btn_add_term.props.sensitive = False
return
| conditional_block |
localfileview.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009-2010 Zuza Software Foundation
#
# This file is part of Virtaal.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import os.path
import gtk
import logging
import pango
from gtk import gdk
from locale import strcoll
from translate.lang import factory as lang_factory
from translate.storage import factory as store_factory
from virtaal.common.pan_app import ui_language
from virtaal.views.baseview import BaseView
from virtaal.views import rendering
from virtaal.views.theme import current_theme
class | :
"""
Class that manages the localfile terminology plug-in's GUI presense and interaction.
"""
# INITIALIZERS #
def __init__(self, model):
    """Set up the plug-in's menu items and child dialogs.

    :param model: the terminology plug-in's model.
    """
    self.term_model = model
    self.controller = model.controller
    self.mainview = model.controller.main_controller.view
    # (widget, handler_id) pairs, kept so destroy() can disconnect them.
    self._signal_ids = []
    self._setup_menus()
    self.addterm = TermAddDialog(model=model)
    self.fileselect = FileSelectDialog(model=model)
# METHODS #
def _setup_menus(self):
    """Add 'Terminology Files...' and 'Add Term...' items to the Edit
    menu (reusing existing items if already present) and bind Ctrl+T
    to 'Add Term'."""
    mnu_transfer = self.mainview.gui.get_widget('mnu_placnext')
    self.mnui_edit = self.mainview.gui.get_widget('menuitem_edit')
    self.menu = self.mnui_edit.get_submenu()
    self.mnu_select_files, _menu = self.mainview.find_menu_item(_('Terminology _Files...'), self.mnui_edit)
    if not self.mnu_select_files:
        self.mnu_select_files = self.mainview.append_menu_item(_('Terminology _Files...'), self.mnui_edit, after=mnu_transfer)
    self._signal_ids.append((
        self.mnu_select_files,
        self.mnu_select_files.connect('activate', self._on_select_term_files)
    ))
    self.mnu_add_term, _menu = self.mainview.find_menu_item(_('Add _Term...'), self.mnui_edit)
    if not self.mnu_add_term:
        self.mnu_add_term = self.mainview.append_menu_item(_('Add _Term...'), self.mnui_edit, after=mnu_transfer)
    self._signal_ids.append((
        self.mnu_add_term,
        self.mnu_add_term.connect('activate', self._on_add_term)
    ))
    # Register the Ctrl+T accelerator for the 'Add Term' action.
    gtk.accel_map_add_entry("<Virtaal>/Terminology/Add Term", gtk.keysyms.t, gdk.CONTROL_MASK)
    accel_group = self.menu.get_accel_group()
    if accel_group is None:
        accel_group = gtk.AccelGroup()
        self.menu.set_accel_group(accel_group)
    self.mnu_add_term.set_accel_path("<Virtaal>/Terminology/Add Term")
    self.menu.set_accel_group(accel_group)
def destroy(self):
    """Disconnect signal handlers and remove the plug-in's menu items."""
    for gobj, signal_id in self._signal_ids:
        gobj.disconnect(signal_id)
    self.menu.remove(self.mnu_select_files)
    self.menu.remove(self.mnu_add_term)
# EVENT HANDLERS #
def _on_add_term(self, menuitem):
    # Show the 'add term' dialog, transient for the main window.
    self.addterm.run(parent=self.mainview.main_window)
def _on_select_term_files(self, menuitem):
    # Show the terminology file-selection dialog, transient for the main window.
    self.fileselect.run(parent=self.mainview.main_window)
class FileSelectDialog:
    """
    Wrapper for the selection dialog, created in Glade, to manage the list of
    files used by this plug-in.
    """
    # Columns of the file list model: the file path and whether new terms
    # are appended to that file (the 'Extendable' radio column).
    COL_FILE, COL_EXTEND = range(2)

    # INITIALIZERS #
    def __init__(self, model):
        """Load the dialog from the Glade file and initialise its widgets.

        :param model: the terminology plug-in's model; provides ``config``,
            ``save_config()`` and ``load_files()``.
        """
        self.controller = model.controller
        self.term_model = model
        self.gladefilename, self.gui = BaseView.load_glade_file(
            ["virtaal", "virtaal.glade"],
            root='TermFilesDlg',
            domain='virtaal'
        )
        self._get_widgets()
        self._init_treeview()
        self._init_add_chooser()

    def _get_widgets(self):
        """Fetch widgets from the Glade tree and connect button handlers."""
        widget_names = ('btn_add_file', 'btn_remove_file', 'btn_open_termfile', 'tvw_termfiles')
        for name in widget_names:
            setattr(self, name, self.gui.get_widget(name))
        self.dialog = self.gui.get_widget('TermFilesDlg')
        self.btn_add_file.connect('clicked', self._on_add_file_clicked)
        self.btn_remove_file.connect('clicked', self._on_remove_file_clicked)
        self.btn_open_termfile.connect('clicked', self._on_open_termfile_clicked)
        self.tvw_termfiles.get_selection().connect('changed', self._on_selection_changed)

    def _init_treeview(self):
        """Build the two-column (file, extendable) tree view from config."""
        self.lst_files = gtk.ListStore(str, bool)
        self.tvw_termfiles.set_model(self.lst_files)
        cell = gtk.CellRendererText()
        cell.props.ellipsize = pango.ELLIPSIZE_MIDDLE
        col = gtk.TreeViewColumn(_('File'))
        col.pack_start(cell)
        col.add_attribute(cell, 'text', self.COL_FILE)
        col.set_expand(True)
        col.set_sort_column_id(0)
        self.tvw_termfiles.append_column(col)
        cell = gtk.CellRendererToggle()
        cell.set_radio(True)
        cell.connect('toggled', self._on_toggle)
        col = gtk.TreeViewColumn(_('Extendable'))
        col.pack_start(cell)
        col.add_attribute(cell, 'active', self.COL_EXTEND)
        col.set_expand(False)
        self.tvw_termfiles.append_column(col)
        extend_file = self.term_model.config.get('extendfile', '')
        files = self.term_model.config['files']
        for f in files:
            self.lst_files.append([f, f == extend_file])
        # If there was no extend file, select the first one
        for row in self.lst_files:
            if row[self.COL_EXTEND]:
                break
        else:
            itr = self.lst_files.get_iter_first()
            if itr and self.lst_files.iter_is_valid(itr):
                self.lst_files.set_value(itr, self.COL_EXTEND, True)
                self.term_model.config['extendfile'] = self.lst_files.get_value(itr, self.COL_FILE)
                self.term_model.save_config()

    def _init_add_chooser(self):
        """Create the multi-select 'Add Files' chooser with file filters
        for every supported translation store format."""
        # The following code was mostly copied from virtaal.views.MainView._create_dialogs()
        dlg = gtk.FileChooserDialog(
            _('Add Files'),
            self.controller.main_controller.view.main_window,
            gtk.FILE_CHOOSER_ACTION_OPEN,
            (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK)
        )
        dlg.set_default_response(gtk.RESPONSE_OK)
        all_supported_filter = gtk.FileFilter()
        all_supported_filter.set_name(_("All Supported Files"))
        dlg.add_filter(all_supported_filter)
        supported_files_dict = dict([ (_(name), (extension, mimetype)) for name, extension, mimetype in store_factory.supported_files() ])
        supported_file_names = supported_files_dict.keys()
        # NOTE: Python 2 list.sort(cmp=...) — sorts names per the locale.
        supported_file_names.sort(cmp=strcoll)
        for name in supported_file_names:
            extensions, mimetypes = supported_files_dict[name]
            #XXX: we can't open generic .csv formats, so listing it is probably
            # more harmful than good.
            if "csv" in extensions:
                continue
            new_filter = gtk.FileFilter()
            new_filter.set_name(name)
            if extensions:
                for extension in extensions:
                    new_filter.add_pattern("*." + extension)
                    all_supported_filter.add_pattern("*." + extension)
                    # Also match compressed variants (e.g. *.po.gz).
                    for compress_extension in store_factory.decompressclass.keys():
                        new_filter.add_pattern("*.%s.%s" % (extension, compress_extension))
                        all_supported_filter.add_pattern("*.%s.%s" % (extension, compress_extension))
            if mimetypes:
                for mimetype in mimetypes:
                    new_filter.add_mime_type(mimetype)
                    all_supported_filter.add_mime_type(mimetype)
            dlg.add_filter(new_filter)
        all_filter = gtk.FileFilter()
        all_filter.set_name(_("All Files"))
        all_filter.add_pattern("*")
        dlg.add_filter(all_filter)
        dlg.set_select_multiple(True)
        self.add_chooser = dlg

    # METHODS #
    def clear_selection(self):
        # Deselect any row so the dialog opens without a stale selection.
        self.tvw_termfiles.get_selection().unselect_all()

    def run(self, parent=None):
        """Show the file-selection dialog modally.

        :param parent: optional gtk.Widget the dialog is made transient for.
        """
        if isinstance(parent, gtk.Widget):
            self.dialog.set_transient_for(parent)
        self.clear_selection()
        self.dialog.show_all()
        self.dialog.run()
        self.dialog.hide()

    # EVENT HANDLERS #
    def _on_add_file_clicked(self, button):
        """Let the user pick terminology files and add the loadable ones.

        Files already listed are skipped; files that cannot be opened as a
        translation store are reported via an error dialog.
        """
        self.add_chooser.show_all()
        response = self.add_chooser.run()
        self.add_chooser.hide()
        if response != gtk.RESPONSE_OK:
            return
        mainview = self.term_model.controller.main_controller.view
        currfiles = [row[self.COL_FILE] for row in self.lst_files]
        for filename in self.add_chooser.get_filenames():
            if filename in currfiles:
                continue  # already listed
            try:
                # Validate that the file exists and parses as a translation
                # store before accepting it.
                if not os.path.isfile(filename):
                    raise IOError(_('"%s" is not a usable file.') % filename)
                store_factory.getobject(filename)
                currfiles.append(filename)
                self.lst_files.append([filename, False])
            except Exception as exc:
                # 'except X as e' is valid on Python 2.6+ and Python 3,
                # unlike the old 'except X, e' form used before.
                message = _('Unable to load %(filename)s:\n\n%(errormsg)s') % {'filename': filename, 'errormsg': str(exc)}
                mainview.show_error_dialog(title=_('Error opening file'), message=message)
        self.term_model.config['files'] = currfiles
        self.term_model.save_config()
        self.term_model.load_files() # FIXME: This could be optimized to only load and add the new selected files.

    def _on_remove_file_clicked(self, button):
        """Remove the selected file from the list and the configuration.

        If the removed file was the 'extendable' one, the first remaining
        file (if any) becomes extendable instead.
        """
        model, selected = self.tvw_termfiles.get_selection().get_selected()
        if not selected:
            return
        remfile = model.get_value(selected, self.COL_FILE)
        extend = model.get_value(selected, self.COL_EXTEND)
        self.term_model.config['files'].remove(remfile)
        if extend:
            self.term_model.config['extendfile'] = ''
            itr = model.get_iter_first()
            if itr and model.iter_is_valid(itr):
                model.set_value(itr, self.COL_EXTEND, True)
                self.term_model.config['extendfile'] = model.get_value(itr, self.COL_FILE)
        self.term_model.save_config()
        self.term_model.load_files() # FIXME: This could be optimized to only remove the selected file from the terminology matcher.
        model.remove(selected)

    def _on_open_termfile_clicked(self, button):
        """Open the currently selected terminology file in the main editor."""
        selection = self.tvw_termfiles.get_selection()
        model, itr = selection.get_selected()
        if itr is None:
            return
        selected_file = model.get_value(itr, self.COL_FILE)
        self.term_model.controller.main_controller.open_file(selected_file)

    def _on_selection_changed(self, treesel):
        """Enable the per-file buttons only while a row is selected."""
        model, itr = treesel.get_selected()
        enabled = itr is not None
        self.btn_open_termfile.set_sensitive(enabled)
        self.btn_remove_file.set_sensitive(enabled)

    def _on_toggle(self, renderer, path):
        """Radio-toggle handler: mark exactly one file as 'extendable'."""
        toggled_file = self.lst_files.get_value(self.lst_files.get_iter(path), self.COL_FILE)
        # Set COL_EXTEND True for the toggled row, False for every other row.
        itr = self.lst_files.get_iter_first()
        while itr is not None and self.lst_files.iter_is_valid(itr):
            self.lst_files.set_value(itr, self.COL_EXTEND, self.lst_files.get_value(itr, self.COL_FILE) == toggled_file)
            itr = self.lst_files.iter_next(itr)
        self.term_model.config['extendfile'] = toggled_file
        self.term_model.save_config()
class TermAddDialog:
    """
    Wrapper for the dialog used to add a new term to the terminology file.
    """
    # INITIALIZERS #
    def __init__(self, model):
        """Build the 'add term' dialog from the Glade file.

        :param model: the terminology plug-in's model; used to reach the
            main controller's language and unit controllers.
        """
        self.term_model = model
        self.lang_controller = model.controller.main_controller.lang_controller
        self.unit_controller = model.controller.main_controller.unit_controller
        self.gladefilename, self.gui = BaseView.load_glade_file(
            ["virtaal", "virtaal.glade"],
            root='TermAddDlg',
            domain='virtaal'
        )
        self._get_widgets()
    def _get_widgets(self):
        """Fetch the dialog's widgets from the Glade tree and set them up."""
        widget_names = (
            'btn_add_term', 'cmb_termfile', 'eb_add_term_errors', 'ent_source',
            'ent_target', 'lbl_add_term_errors', 'lbl_srclang', 'lbl_tgtlang',
            'txt_comment'
        )
        # Expose each named widget as an attribute of the same name.
        for name in widget_names:
            setattr(self, name, self.gui.get_widget(name))
        self.dialog = self.gui.get_widget('TermAddDlg')
        cellr = gtk.CellRendererText()
        cellr.props.ellipsize = pango.ELLIPSIZE_MIDDLE
        # Combo box listing the terminology files a new term may be added to.
        self.lst_termfiles = gtk.ListStore(str)
        self.cmb_termfile.set_model(self.lst_termfiles)
        self.cmb_termfile.pack_start(cellr)
        self.cmb_termfile.add_attribute(cellr, 'text', 0)
        # Re-validate the term pair whenever source or target text changes.
        self.ent_source.connect('changed', self._on_entry_changed)
        self.ent_target.connect('changed', self._on_entry_changed)
    # METHODS #
    def add_term_unit(self, source, target):
        """Append a new source/target term (plus optional comment) to the
        currently selected terminology store and save it."""
        filename = self.cmb_termfile.get_active_text()
        store = self.term_model.get_store_for_filename(filename)
        if store is None:
            logging.debug('No terminology store to extend :(')
            return
        unit = store.addsourceunit(source)
        unit.target = target
        buff = self.txt_comment.get_buffer()
        comments = buff.get_text(buff.get_start_iter(), buff.get_end_iter())
        if comments:
            unit.addnote(comments)
        store.save()
        # Make the new term available to the matcher immediately.
        self.term_model.matcher.extendtm(unit)
        #logging.debug('Added new term: [%s] => [%s], file=%s' % (source, target, store.filename))
    def reset(self):
        """Reset the dialog: prefill source/target from the current unit's
        text selections and rebuild the terminology-file combo box."""
        unitview = self.unit_controller.view
        # Use the first text selection found in any source view as the term.
        source_text = u''
        for src in unitview.sources:
            selection = src.buffer.get_selection_bounds()
            if selection:
                source_text = src.get_text(*selection)
                break
        self.ent_source.modify_font(rendering.get_source_font_description())
        self.ent_source.set_text(source_text.strip())
        # Likewise for the target term.
        target_text = u''
        for tgt in unitview.targets:
            selection = tgt.buffer.get_selection_bounds()
            if selection:
                target_text = tgt.get_text(*selection)
                break
        self.ent_target.modify_font(rendering.get_target_font_description())
        self.ent_target.set_text(target_text.strip())
        self.txt_comment.get_buffer().set_text('')
        self.eb_add_term_errors.hide()
        self.btn_add_term.props.sensitive = True
        self.lbl_srclang.set_text_with_mnemonic(_(u'_Source term — %(langname)s') % {'langname': self.lang_controller.source_lang.name})
        self.lbl_tgtlang.set_text_with_mnemonic(_(u'_Target term — %(langname)s') % {'langname': self.lang_controller.target_lang.name})
        # Repopulate the file combo, preselecting the configured extend file.
        self.lst_termfiles.clear()
        extendfile = self.term_model.config.get('extendfile', None)
        select_index = -1
        i = 0
        for f in self.term_model.config['files']:
            if f == extendfile:
                select_index = i
            self.lst_termfiles.append([f])
            i += 1
        if select_index >= 0:
            self.cmb_termfile.set_active(select_index)
    def run(self, parent=None):
        """Show the dialog modally and, on OK, add the entered term."""
        self.reset()
        if isinstance(parent, gtk.Widget):
            self.dialog.set_transient_for(parent)
        self.dialog.show()
        # Run validation once so the Add button starts in the right state.
        self._on_entry_changed(None)
        self.ent_source.grab_focus()
        response = self.dialog.run()
        self.dialog.hide()
        if response != gtk.RESPONSE_OK:
            return
        self.add_term_unit(self.ent_source.get_text(), self.ent_target.get_text())
    # EVENT HANDLERS #
    def _on_entry_changed(self, entry):
        """Validate the entered term pair and show inline warnings.

        Disables the Add button for exact duplicates, and lists existing
        translations when the source term is already known.
        """
        self.btn_add_term.props.sensitive = True
        self.eb_add_term_errors.hide()
        src_text = self.ent_source.get_text()
        tgt_text = self.ent_target.get_text()
        dup = self.term_model.get_duplicates(src_text, tgt_text)
        if dup:
            self.lbl_add_term_errors.set_text(_('Identical entry already exists.'))
            self.eb_add_term_errors.modify_bg(gtk.STATE_NORMAL, gdk.color_parse(current_theme['warning_bg']))
            self.eb_add_term_errors.show_all()
            self.btn_add_term.props.sensitive = False
            return
        same_src_units = self.term_model.get_units_with_source(src_text)
        if src_text and same_src_units:
            # We want to separate multiple terms with the correct list
            # separator for the UI language:
            separator = lang_factory.getlanguage(ui_language).listseperator
            #l10n: The variable is an existing term formatted for emphasis. The default is bold formatting, but you can remove/change the markup if needed. Leave it unchanged if you are unsure.
            translations = separator.join([_('<b>%s</b>') % (u.target) for u in same_src_units])
            errormsg = _('Existing translations: %(translations)s') % {
                'translations': translations
            }
            self.lbl_add_term_errors.set_markup(errormsg)
            self.eb_add_term_errors.modify_bg(gtk.STATE_NORMAL, gdk.color_parse(current_theme['warning_bg']))
            self.eb_add_term_errors.show_all()
            return
| LocalFileView | identifier_name |
localfileview.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009-2010 Zuza Software Foundation
#
# This file is part of Virtaal.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import os.path
import gtk
import logging
import pango
from gtk import gdk
from locale import strcoll
from translate.lang import factory as lang_factory
from translate.storage import factory as store_factory
from virtaal.common.pan_app import ui_language
from virtaal.views.baseview import BaseView
from virtaal.views import rendering
from virtaal.views.theme import current_theme
class LocalFileView:
    """
    Class that manages the localfile terminology plug-in's GUI presence and interaction.
    """
    # INITIALIZERS #
    def __init__(self, model):
        """Set up the plug-in's menu items and child dialogs.

        :param model: the terminology plug-in's model.
        """
        self.term_model = model
        self.controller = model.controller
        self.mainview = model.controller.main_controller.view
        # (widget, handler_id) pairs, kept so destroy() can disconnect them.
        self._signal_ids = []
        self._setup_menus()
        self.addterm = TermAddDialog(model=model)
        self.fileselect = FileSelectDialog(model=model)
    # METHODS #
    def _setup_menus(self):
        """Add 'Terminology Files...' and 'Add Term...' items to the Edit
        menu (reusing existing items if already present) and bind Ctrl+T
        to 'Add Term'."""
        mnu_transfer = self.mainview.gui.get_widget('mnu_placnext')
        self.mnui_edit = self.mainview.gui.get_widget('menuitem_edit')
        self.menu = self.mnui_edit.get_submenu()
        self.mnu_select_files, _menu = self.mainview.find_menu_item(_('Terminology _Files...'), self.mnui_edit)
        if not self.mnu_select_files:
            self.mnu_select_files = self.mainview.append_menu_item(_('Terminology _Files...'), self.mnui_edit, after=mnu_transfer)
        self._signal_ids.append((
            self.mnu_select_files,
            self.mnu_select_files.connect('activate', self._on_select_term_files)
        ))
        self.mnu_add_term, _menu = self.mainview.find_menu_item(_('Add _Term...'), self.mnui_edit)
        if not self.mnu_add_term:
            self.mnu_add_term = self.mainview.append_menu_item(_('Add _Term...'), self.mnui_edit, after=mnu_transfer)
        self._signal_ids.append((
            self.mnu_add_term,
            self.mnu_add_term.connect('activate', self._on_add_term)
        ))
        # Register the Ctrl+T accelerator for the 'Add Term' action.
        gtk.accel_map_add_entry("<Virtaal>/Terminology/Add Term", gtk.keysyms.t, gdk.CONTROL_MASK)
        accel_group = self.menu.get_accel_group()
        if accel_group is None:
            accel_group = gtk.AccelGroup()
            self.menu.set_accel_group(accel_group)
        self.mnu_add_term.set_accel_path("<Virtaal>/Terminology/Add Term")
        self.menu.set_accel_group(accel_group)
    def destroy(self):
        """Disconnect signal handlers and remove the plug-in's menu items."""
        for gobj, signal_id in self._signal_ids:
            gobj.disconnect(signal_id)
        self.menu.remove(self.mnu_select_files)
        self.menu.remove(self.mnu_add_term)
    # EVENT HANDLERS #
    def _on_add_term(self, menuitem):
        # Show the 'add term' dialog, transient for the main window.
        self.addterm.run(parent=self.mainview.main_window)
    def _on_select_term_files(self, menuitem):
        # Show the terminology file-selection dialog, transient for the main window.
        self.fileselect.run(parent=self.mainview.main_window)
class FileSelectDialog:
"""
Wrapper for the selection dialog, created in Glade, to manage the list of
files used by this plug-in.
"""
COL_FILE, COL_EXTEND = range(2)
# INITIALIZERS #
def __init__(self, model):
    """Load the file-selection dialog from the Glade file and initialise
    its widgets.

    :param model: the terminology plug-in's model; provides ``config``,
        ``save_config()`` and ``load_files()``.
    """
    self.controller = model.controller
    self.term_model = model
    self.gladefilename, self.gui = BaseView.load_glade_file(
        ["virtaal", "virtaal.glade"],
        root='TermFilesDlg',
        domain='virtaal'
    )
    self._get_widgets()
    self._init_treeview()
    self._init_add_chooser()
def _get_widgets(self):
    """Fetch widgets from the Glade tree and connect button handlers."""
    widget_names = ('btn_add_file', 'btn_remove_file', 'btn_open_termfile', 'tvw_termfiles')
    # Expose each named widget as an attribute of the same name.
    for name in widget_names:
        setattr(self, name, self.gui.get_widget(name))
    self.dialog = self.gui.get_widget('TermFilesDlg')
    self.btn_add_file.connect('clicked', self._on_add_file_clicked)
    self.btn_remove_file.connect('clicked', self._on_remove_file_clicked)
    self.btn_open_termfile.connect('clicked', self._on_open_termfile_clicked)
    self.tvw_termfiles.get_selection().connect('changed', self._on_selection_changed)
def _init_treeview(self):
|
def _init_add_chooser(self):
    """Create the multi-select 'Add Files' chooser with file filters
    for every supported translation store format."""
    # The following code was mostly copied from virtaal.views.MainView._create_dialogs()
    dlg = gtk.FileChooserDialog(
        _('Add Files'),
        self.controller.main_controller.view.main_window,
        gtk.FILE_CHOOSER_ACTION_OPEN,
        (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK)
    )
    dlg.set_default_response(gtk.RESPONSE_OK)
    all_supported_filter = gtk.FileFilter()
    all_supported_filter.set_name(_("All Supported Files"))
    dlg.add_filter(all_supported_filter)
    supported_files_dict = dict([ (_(name), (extension, mimetype)) for name, extension, mimetype in store_factory.supported_files() ])
    supported_file_names = supported_files_dict.keys()
    # NOTE: Python 2 list.sort(cmp=...) — sorts names per the locale.
    supported_file_names.sort(cmp=strcoll)
    for name in supported_file_names:
        extensions, mimetypes = supported_files_dict[name]
        #XXX: we can't open generic .csv formats, so listing it is probably
        # more harmful than good.
        if "csv" in extensions:
            continue
        new_filter = gtk.FileFilter()
        new_filter.set_name(name)
        if extensions:
            for extension in extensions:
                new_filter.add_pattern("*." + extension)
                all_supported_filter.add_pattern("*." + extension)
                # Also match compressed variants (e.g. *.po.gz).
                for compress_extension in store_factory.decompressclass.keys():
                    new_filter.add_pattern("*.%s.%s" % (extension, compress_extension))
                    all_supported_filter.add_pattern("*.%s.%s" % (extension, compress_extension))
        if mimetypes:
            for mimetype in mimetypes:
                new_filter.add_mime_type(mimetype)
                all_supported_filter.add_mime_type(mimetype)
        dlg.add_filter(new_filter)
    all_filter = gtk.FileFilter()
    all_filter.set_name(_("All Files"))
    all_filter.add_pattern("*")
    dlg.add_filter(all_filter)
    dlg.set_select_multiple(True)
    self.add_chooser = dlg
# METHODS #
def clear_selection(self):
    # Deselect any row so the dialog opens without a stale selection.
    self.tvw_termfiles.get_selection().unselect_all()
def run(self, parent=None):
    """Show the file-selection dialog modally.

    :param parent: optional gtk.Widget the dialog is made transient for.
    """
    if isinstance(parent, gtk.Widget):
        self.dialog.set_transient_for(parent)
    self.clear_selection()
    self.dialog.show_all()
    self.dialog.run()
    self.dialog.hide()
# EVENT HANDLERS #
def _on_add_file_clicked(self, button):
    """Let the user pick terminology files and add the loadable ones.

    Files already in the list are skipped; files that cannot be opened
    as a translation store are reported via an error dialog.
    """
    self.add_chooser.show_all()
    response = self.add_chooser.run()
    self.add_chooser.hide()
    if response != gtk.RESPONSE_OK:
        return
    mainview = self.term_model.controller.main_controller.view
    currfiles = [row[self.COL_FILE] for row in self.lst_files]
    for filename in self.add_chooser.get_filenames():
        if filename in currfiles:
            continue  # already listed
        try:
            # Validate that the file exists and parses as a translation
            # store before accepting it.
            if not os.path.isfile(filename):
                raise IOError(_('"%s" is not a usable file.') % filename)
            store_factory.getobject(filename)
            currfiles.append(filename)
            self.lst_files.append([filename, False])
        except Exception as exc:
            # 'except X as e' is valid on Python 2.6+ and Python 3,
            # unlike the old 'except X, e' form used before.
            message = _('Unable to load %(filename)s:\n\n%(errormsg)s') % {'filename': filename, 'errormsg': str(exc)}
            mainview.show_error_dialog(title=_('Error opening file'), message=message)
    self.term_model.config['files'] = currfiles
    self.term_model.save_config()
    self.term_model.load_files() # FIXME: This could be optimized to only load and add the new selected files.
def _on_remove_file_clicked(self, button):
    """Remove the selected file from the list and the configuration.

    If the removed file was the 'extendable' one, the first remaining
    file (if any) becomes extendable instead.
    """
    model, selected = self.tvw_termfiles.get_selection().get_selected()
    if not selected:
        return
    remfile = model.get_value(selected, self.COL_FILE)
    extend = model.get_value(selected, self.COL_EXTEND)
    self.term_model.config['files'].remove(remfile)
    if extend:
        self.term_model.config['extendfile'] = ''
        itr = model.get_iter_first()
        if itr and model.iter_is_valid(itr):
            model.set_value(itr, self.COL_EXTEND, True)
            self.term_model.config['extendfile'] = model.get_value(itr, self.COL_FILE)
    self.term_model.save_config()
    self.term_model.load_files() # FIXME: This could be optimized to only remove the selected file from the terminology matcher.
    model.remove(selected)
def _on_open_termfile_clicked(self, button):
    """Open the currently selected terminology file in the main editor."""
    selection = self.tvw_termfiles.get_selection()
    model, itr = selection.get_selected()
    if itr is None:
        return
    selected_file = model.get_value(itr, self.COL_FILE)
    self.term_model.controller.main_controller.open_file(selected_file)
def _on_selection_changed(self, treesel):
    """Enable the per-file buttons only while a row is selected."""
    _model, selected_iter = treesel.get_selected()
    has_selection = selected_iter is not None
    for button in (self.btn_open_termfile, self.btn_remove_file):
        button.set_sensitive(has_selection)
def _on_toggle(self, renderer, path):
    """Radio-toggle handler: mark exactly one file as 'extendable'."""
    toggled_file = self.lst_files.get_value(self.lst_files.get_iter(path), self.COL_FILE)
    # Set COL_EXTEND True for the toggled row and False for every other row.
    itr = self.lst_files.get_iter_first()
    while itr is not None and self.lst_files.iter_is_valid(itr):
        self.lst_files.set_value(itr, self.COL_EXTEND, self.lst_files.get_value(itr, self.COL_FILE) == toggled_file)
        itr = self.lst_files.iter_next(itr)
    self.term_model.config['extendfile'] = toggled_file
    self.term_model.save_config()
class TermAddDialog:
    """
    Wrapper for the dialog used to add a new term to the terminology file.
    """
    # INITIALIZERS #
    def __init__(self, model):
        """Build the 'add term' dialog from the Glade file.

        :param model: the terminology plug-in's model; used to reach the
            main controller's language and unit controllers.
        """
        self.term_model = model
        self.lang_controller = model.controller.main_controller.lang_controller
        self.unit_controller = model.controller.main_controller.unit_controller
        self.gladefilename, self.gui = BaseView.load_glade_file(
            ["virtaal", "virtaal.glade"],
            root='TermAddDlg',
            domain='virtaal'
        )
        self._get_widgets()
    def _get_widgets(self):
        """Fetch the dialog's widgets from the Glade tree and set them up."""
        widget_names = (
            'btn_add_term', 'cmb_termfile', 'eb_add_term_errors', 'ent_source',
            'ent_target', 'lbl_add_term_errors', 'lbl_srclang', 'lbl_tgtlang',
            'txt_comment'
        )
        # Expose each named widget as an attribute of the same name.
        for name in widget_names:
            setattr(self, name, self.gui.get_widget(name))
        self.dialog = self.gui.get_widget('TermAddDlg')
        cellr = gtk.CellRendererText()
        cellr.props.ellipsize = pango.ELLIPSIZE_MIDDLE
        # Combo box listing the terminology files a new term may be added to.
        self.lst_termfiles = gtk.ListStore(str)
        self.cmb_termfile.set_model(self.lst_termfiles)
        self.cmb_termfile.pack_start(cellr)
        self.cmb_termfile.add_attribute(cellr, 'text', 0)
        # Re-validate the term pair whenever source or target text changes.
        self.ent_source.connect('changed', self._on_entry_changed)
        self.ent_target.connect('changed', self._on_entry_changed)
    # METHODS #
    def add_term_unit(self, source, target):
        """Append a new source/target term (plus optional comment) to the
        currently selected terminology store and save it."""
        filename = self.cmb_termfile.get_active_text()
        store = self.term_model.get_store_for_filename(filename)
        if store is None:
            logging.debug('No terminology store to extend :(')
            return
        unit = store.addsourceunit(source)
        unit.target = target
        buff = self.txt_comment.get_buffer()
        comments = buff.get_text(buff.get_start_iter(), buff.get_end_iter())
        if comments:
            unit.addnote(comments)
        store.save()
        # Make the new term available to the matcher immediately.
        self.term_model.matcher.extendtm(unit)
        #logging.debug('Added new term: [%s] => [%s], file=%s' % (source, target, store.filename))
    def reset(self):
        """Reset the dialog: prefill source/target from the current unit's
        text selections and rebuild the terminology-file combo box."""
        unitview = self.unit_controller.view
        # Use the first text selection found in any source view as the term.
        source_text = u''
        for src in unitview.sources:
            selection = src.buffer.get_selection_bounds()
            if selection:
                source_text = src.get_text(*selection)
                break
        self.ent_source.modify_font(rendering.get_source_font_description())
        self.ent_source.set_text(source_text.strip())
        # Likewise for the target term.
        target_text = u''
        for tgt in unitview.targets:
            selection = tgt.buffer.get_selection_bounds()
            if selection:
                target_text = tgt.get_text(*selection)
                break
        self.ent_target.modify_font(rendering.get_target_font_description())
        self.ent_target.set_text(target_text.strip())
        self.txt_comment.get_buffer().set_text('')
        self.eb_add_term_errors.hide()
        self.btn_add_term.props.sensitive = True
        self.lbl_srclang.set_text_with_mnemonic(_(u'_Source term — %(langname)s') % {'langname': self.lang_controller.source_lang.name})
        self.lbl_tgtlang.set_text_with_mnemonic(_(u'_Target term — %(langname)s') % {'langname': self.lang_controller.target_lang.name})
        # Repopulate the file combo, preselecting the configured extend file.
        self.lst_termfiles.clear()
        extendfile = self.term_model.config.get('extendfile', None)
        select_index = -1
        i = 0
        for f in self.term_model.config['files']:
            if f == extendfile:
                select_index = i
            self.lst_termfiles.append([f])
            i += 1
        if select_index >= 0:
            self.cmb_termfile.set_active(select_index)
    def run(self, parent=None):
        """Show the dialog modally and, on OK, add the entered term."""
        self.reset()
        if isinstance(parent, gtk.Widget):
            self.dialog.set_transient_for(parent)
        self.dialog.show()
        # Run validation once so the Add button starts in the right state.
        self._on_entry_changed(None)
        self.ent_source.grab_focus()
        response = self.dialog.run()
        self.dialog.hide()
        if response != gtk.RESPONSE_OK:
            return
        self.add_term_unit(self.ent_source.get_text(), self.ent_target.get_text())
    # EVENT HANDLERS #
    def _on_entry_changed(self, entry):
        """Validate the entered term pair and show inline warnings.

        Disables the Add button for exact duplicates, and lists existing
        translations when the source term is already known.
        """
        self.btn_add_term.props.sensitive = True
        self.eb_add_term_errors.hide()
        src_text = self.ent_source.get_text()
        tgt_text = self.ent_target.get_text()
        dup = self.term_model.get_duplicates(src_text, tgt_text)
        if dup:
            self.lbl_add_term_errors.set_text(_('Identical entry already exists.'))
            self.eb_add_term_errors.modify_bg(gtk.STATE_NORMAL, gdk.color_parse(current_theme['warning_bg']))
            self.eb_add_term_errors.show_all()
            self.btn_add_term.props.sensitive = False
            return
        same_src_units = self.term_model.get_units_with_source(src_text)
        if src_text and same_src_units:
            # We want to separate multiple terms with the correct list
            # separator for the UI language:
            separator = lang_factory.getlanguage(ui_language).listseperator
            #l10n: The variable is an existing term formatted for emphasis. The default is bold formatting, but you can remove/change the markup if needed. Leave it unchanged if you are unsure.
            translations = separator.join([_('<b>%s</b>') % (u.target) for u in same_src_units])
            errormsg = _('Existing translations: %(translations)s') % {
                'translations': translations
            }
            self.lbl_add_term_errors.set_markup(errormsg)
            self.eb_add_term_errors.modify_bg(gtk.STATE_NORMAL, gdk.color_parse(current_theme['warning_bg']))
            self.eb_add_term_errors.show_all()
            return
| self.lst_files = gtk.ListStore(str, bool)
self.tvw_termfiles.set_model(self.lst_files)
cell = gtk.CellRendererText()
cell.props.ellipsize = pango.ELLIPSIZE_MIDDLE
col = gtk.TreeViewColumn(_('File'))
col.pack_start(cell)
col.add_attribute(cell, 'text', self.COL_FILE)
col.set_expand(True)
col.set_sort_column_id(0)
self.tvw_termfiles.append_column(col)
cell = gtk.CellRendererToggle()
cell.set_radio(True)
cell.connect('toggled', self._on_toggle)
col = gtk.TreeViewColumn(_('Extendable'))
col.pack_start(cell)
col.add_attribute(cell, 'active', self.COL_EXTEND)
col.set_expand(False)
self.tvw_termfiles.append_column(col)
extend_file = self.term_model.config.get('extendfile', '')
files = self.term_model.config['files']
for f in files:
self.lst_files.append([f, f == extend_file])
# If there was no extend file, select the first one
for row in self.lst_files:
if row[self.COL_EXTEND]:
break
else:
itr = self.lst_files.get_iter_first()
if itr and self.lst_files.iter_is_valid(itr):
self.lst_files.set_value(itr, self.COL_EXTEND, True)
self.term_model.config['extendfile'] = self.lst_files.get_value(itr, self.COL_FILE)
self.term_model.save_config() | identifier_body |
localfileview.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009-2010 Zuza Software Foundation
#
# This file is part of Virtaal.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License | import pango
from gtk import gdk
from locale import strcoll
from translate.lang import factory as lang_factory
from translate.storage import factory as store_factory
from virtaal.common.pan_app import ui_language
from virtaal.views.baseview import BaseView
from virtaal.views import rendering
from virtaal.views.theme import current_theme
class LocalFileView:
"""
Class that manages the localfile terminology plug-in's GUI presense and interaction.
"""
# INITIALIZERS #
def __init__(self, model):
self.term_model = model
self.controller = model.controller
self.mainview = model.controller.main_controller.view
self._signal_ids = []
self._setup_menus()
self.addterm = TermAddDialog(model=model)
self.fileselect = FileSelectDialog(model=model)
# METHODS #
def _setup_menus(self):
mnu_transfer = self.mainview.gui.get_widget('mnu_placnext')
self.mnui_edit = self.mainview.gui.get_widget('menuitem_edit')
self.menu = self.mnui_edit.get_submenu()
self.mnu_select_files, _menu = self.mainview.find_menu_item(_('Terminology _Files...'), self.mnui_edit)
if not self.mnu_select_files:
self.mnu_select_files = self.mainview.append_menu_item(_('Terminology _Files...'), self.mnui_edit, after=mnu_transfer)
self._signal_ids.append((
self.mnu_select_files,
self.mnu_select_files.connect('activate', self._on_select_term_files)
))
self.mnu_add_term, _menu = self.mainview.find_menu_item(_('Add _Term...'), self.mnui_edit)
if not self.mnu_add_term:
self.mnu_add_term = self.mainview.append_menu_item(_('Add _Term...'), self.mnui_edit, after=mnu_transfer)
self._signal_ids.append((
self.mnu_add_term,
self.mnu_add_term.connect('activate', self._on_add_term)
))
gtk.accel_map_add_entry("<Virtaal>/Terminology/Add Term", gtk.keysyms.t, gdk.CONTROL_MASK)
accel_group = self.menu.get_accel_group()
if accel_group is None:
accel_group = gtk.AccelGroup()
self.menu.set_accel_group(accel_group)
self.mnu_add_term.set_accel_path("<Virtaal>/Terminology/Add Term")
self.menu.set_accel_group(accel_group)
def destroy(self):
for gobj, signal_id in self._signal_ids:
gobj.disconnect(signal_id)
self.menu.remove(self.mnu_select_files)
self.menu.remove(self.mnu_add_term)
# EVENT HANDLERS #
def _on_add_term(self, menuitem):
self.addterm.run(parent=self.mainview.main_window)
def _on_select_term_files(self, menuitem):
self.fileselect.run(parent=self.mainview.main_window)
class FileSelectDialog:
"""
Wrapper for the selection dialog, created in Glade, to manage the list of
files used by this plug-in.
"""
COL_FILE, COL_EXTEND = range(2)
# INITIALIZERS #
def __init__(self, model):
self.controller = model.controller
self.term_model = model
self.gladefilename, self.gui = BaseView.load_glade_file(
["virtaal", "virtaal.glade"],
root='TermFilesDlg',
domain='virtaal'
)
self._get_widgets()
self._init_treeview()
self._init_add_chooser()
def _get_widgets(self):
widget_names = ('btn_add_file', 'btn_remove_file', 'btn_open_termfile', 'tvw_termfiles')
for name in widget_names:
setattr(self, name, self.gui.get_widget(name))
self.dialog = self.gui.get_widget('TermFilesDlg')
self.btn_add_file.connect('clicked', self._on_add_file_clicked)
self.btn_remove_file.connect('clicked', self._on_remove_file_clicked)
self.btn_open_termfile.connect('clicked', self._on_open_termfile_clicked)
self.tvw_termfiles.get_selection().connect('changed', self._on_selection_changed)
def _init_treeview(self):
self.lst_files = gtk.ListStore(str, bool)
self.tvw_termfiles.set_model(self.lst_files)
cell = gtk.CellRendererText()
cell.props.ellipsize = pango.ELLIPSIZE_MIDDLE
col = gtk.TreeViewColumn(_('File'))
col.pack_start(cell)
col.add_attribute(cell, 'text', self.COL_FILE)
col.set_expand(True)
col.set_sort_column_id(0)
self.tvw_termfiles.append_column(col)
cell = gtk.CellRendererToggle()
cell.set_radio(True)
cell.connect('toggled', self._on_toggle)
col = gtk.TreeViewColumn(_('Extendable'))
col.pack_start(cell)
col.add_attribute(cell, 'active', self.COL_EXTEND)
col.set_expand(False)
self.tvw_termfiles.append_column(col)
extend_file = self.term_model.config.get('extendfile', '')
files = self.term_model.config['files']
for f in files:
self.lst_files.append([f, f == extend_file])
# If there was no extend file, select the first one
for row in self.lst_files:
if row[self.COL_EXTEND]:
break
else:
itr = self.lst_files.get_iter_first()
if itr and self.lst_files.iter_is_valid(itr):
self.lst_files.set_value(itr, self.COL_EXTEND, True)
self.term_model.config['extendfile'] = self.lst_files.get_value(itr, self.COL_FILE)
self.term_model.save_config()
def _init_add_chooser(self):
# The following code was mostly copied from virtaal.views.MainView._create_dialogs()
dlg = gtk.FileChooserDialog(
_('Add Files'),
self.controller.main_controller.view.main_window,
gtk.FILE_CHOOSER_ACTION_OPEN,
(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK)
)
dlg.set_default_response(gtk.RESPONSE_OK)
all_supported_filter = gtk.FileFilter()
all_supported_filter.set_name(_("All Supported Files"))
dlg.add_filter(all_supported_filter)
supported_files_dict = dict([ (_(name), (extension, mimetype)) for name, extension, mimetype in store_factory.supported_files() ])
supported_file_names = supported_files_dict.keys()
supported_file_names.sort(cmp=strcoll)
for name in supported_file_names:
extensions, mimetypes = supported_files_dict[name]
#XXX: we can't open generic .csv formats, so listing it is probably
# more harmful than good.
if "csv" in extensions:
continue
new_filter = gtk.FileFilter()
new_filter.set_name(name)
if extensions:
for extension in extensions:
new_filter.add_pattern("*." + extension)
all_supported_filter.add_pattern("*." + extension)
for compress_extension in store_factory.decompressclass.keys():
new_filter.add_pattern("*.%s.%s" % (extension, compress_extension))
all_supported_filter.add_pattern("*.%s.%s" % (extension, compress_extension))
if mimetypes:
for mimetype in mimetypes:
new_filter.add_mime_type(mimetype)
all_supported_filter.add_mime_type(mimetype)
dlg.add_filter(new_filter)
all_filter = gtk.FileFilter()
all_filter.set_name(_("All Files"))
all_filter.add_pattern("*")
dlg.add_filter(all_filter)
dlg.set_select_multiple(True)
self.add_chooser = dlg
# METHODS #
def clear_selection(self):
self.tvw_termfiles.get_selection().unselect_all()
def run(self, parent=None):
if isinstance(parent, gtk.Widget):
self.dialog.set_transient_for(parent)
self.clear_selection()
self.dialog.show_all()
self.dialog.run()
self.dialog.hide()
# EVENT HANDLERS #
def _on_add_file_clicked(self, button):
self.add_chooser.show_all()
response = self.add_chooser.run()
self.add_chooser.hide()
if response != gtk.RESPONSE_OK:
return
mainview = self.term_model.controller.main_controller.view
currfiles = [row[self.COL_FILE] for row in self.lst_files]
for filename in self.add_chooser.get_filenames():
if filename in currfiles:
continue
# Try and open filename as a translation store
try:
if not os.path.isfile(filename):
raise IOError(_('"%s" is not a usable file.') % filename)
store = store_factory.getobject(filename)
currfiles.append(filename)
self.lst_files.append([filename, False])
except Exception, exc:
message = _('Unable to load %(filename)s:\n\n%(errormsg)s') % {'filename': filename, 'errormsg': str(exc)}
mainview.show_error_dialog(title=_('Error opening file'), message=message)
self.term_model.config['files'] = currfiles
self.term_model.save_config()
self.term_model.load_files() # FIXME: This could be optimized to only load and add the new selected files.
def _on_remove_file_clicked(self, button):
model, selected = self.tvw_termfiles.get_selection().get_selected()
if not selected:
return
remfile = model.get_value(selected, self.COL_FILE)
extend = model.get_value(selected, self.COL_EXTEND)
self.term_model.config['files'].remove(remfile)
if extend:
self.term_model.config['extendfile'] = ''
itr = model.get_iter_first()
if itr and model.iter_is_valid(itr):
model.set_value(itr, self.COL_EXTEND, True)
self.term_model.config['extendfile'] = model.get_value(itr, self.COL_FILE)
self.term_model.save_config()
self.term_model.load_files() # FIXME: This could be optimized to only remove the selected file from the terminology matcher.
model.remove(selected)
def _on_open_termfile_clicked(self, button):
selection = self.tvw_termfiles.get_selection()
model, itr = selection.get_selected()
if itr is None:
return
selected_file = model.get_value(itr, self.COL_FILE)
self.term_model.controller.main_controller.open_file(selected_file)
def _on_selection_changed(self, treesel):
model, itr = treesel.get_selected()
enabled = itr is not None
self.btn_open_termfile.set_sensitive(enabled)
self.btn_remove_file.set_sensitive(enabled)
def _on_toggle(self, renderer, path):
toggled_file = self.lst_files.get_value(self.lst_files.get_iter(path), self.COL_FILE)
itr = self.lst_files.get_iter_first()
while itr is not None and self.lst_files.iter_is_valid(itr):
self.lst_files.set_value(itr, self.COL_EXTEND, self.lst_files.get_value(itr, self.COL_FILE) == toggled_file)
itr = self.lst_files.iter_next(itr)
self.term_model.config['extendfile'] = toggled_file
self.term_model.save_config()
class TermAddDialog:
"""
Wrapper for the dialog used to add a new term to the terminology file.
"""
# INITIALIZERS #
def __init__(self, model):
self.term_model = model
self.lang_controller = model.controller.main_controller.lang_controller
self.unit_controller = model.controller.main_controller.unit_controller
self.gladefilename, self.gui = BaseView.load_glade_file(
["virtaal", "virtaal.glade"],
root='TermAddDlg',
domain='virtaal'
)
self._get_widgets()
def _get_widgets(self):
widget_names = (
'btn_add_term', 'cmb_termfile', 'eb_add_term_errors', 'ent_source',
'ent_target', 'lbl_add_term_errors', 'lbl_srclang', 'lbl_tgtlang',
'txt_comment'
)
for name in widget_names:
setattr(self, name, self.gui.get_widget(name))
self.dialog = self.gui.get_widget('TermAddDlg')
cellr = gtk.CellRendererText()
cellr.props.ellipsize = pango.ELLIPSIZE_MIDDLE
self.lst_termfiles = gtk.ListStore(str)
self.cmb_termfile.set_model(self.lst_termfiles)
self.cmb_termfile.pack_start(cellr)
self.cmb_termfile.add_attribute(cellr, 'text', 0)
self.ent_source.connect('changed', self._on_entry_changed)
self.ent_target.connect('changed', self._on_entry_changed)
# METHODS #
def add_term_unit(self, source, target):
filename = self.cmb_termfile.get_active_text()
store = self.term_model.get_store_for_filename(filename)
if store is None:
logging.debug('No terminology store to extend :(')
return
unit = store.addsourceunit(source)
unit.target = target
buff = self.txt_comment.get_buffer()
comments = buff.get_text(buff.get_start_iter(), buff.get_end_iter())
if comments:
unit.addnote(comments)
store.save()
self.term_model.matcher.extendtm(unit)
#logging.debug('Added new term: [%s] => [%s], file=%s' % (source, target, store.filename))
def reset(self):
unitview = self.unit_controller.view
source_text = u''
for src in unitview.sources:
selection = src.buffer.get_selection_bounds()
if selection:
source_text = src.get_text(*selection)
break
self.ent_source.modify_font(rendering.get_source_font_description())
self.ent_source.set_text(source_text.strip())
target_text = u''
for tgt in unitview.targets:
selection = tgt.buffer.get_selection_bounds()
if selection:
target_text = tgt.get_text(*selection)
break
self.ent_target.modify_font(rendering.get_target_font_description())
self.ent_target.set_text(target_text.strip())
self.txt_comment.get_buffer().set_text('')
self.eb_add_term_errors.hide()
self.btn_add_term.props.sensitive = True
self.lbl_srclang.set_text_with_mnemonic(_(u'_Source term — %(langname)s') % {'langname': self.lang_controller.source_lang.name})
self.lbl_tgtlang.set_text_with_mnemonic(_(u'_Target term — %(langname)s') % {'langname': self.lang_controller.target_lang.name})
self.lst_termfiles.clear()
extendfile = self.term_model.config.get('extendfile', None)
select_index = -1
i = 0
for f in self.term_model.config['files']:
if f == extendfile:
select_index = i
self.lst_termfiles.append([f])
i += 1
if select_index >= 0:
self.cmb_termfile.set_active(select_index)
def run(self, parent=None):
self.reset()
if isinstance(parent, gtk.Widget):
self.dialog.set_transient_for(parent)
self.dialog.show()
self._on_entry_changed(None)
self.ent_source.grab_focus()
response = self.dialog.run()
self.dialog.hide()
if response != gtk.RESPONSE_OK:
return
self.add_term_unit(self.ent_source.get_text(), self.ent_target.get_text())
# EVENT HANDLERS #
def _on_entry_changed(self, entry):
self.btn_add_term.props.sensitive = True
self.eb_add_term_errors.hide()
src_text = self.ent_source.get_text()
tgt_text = self.ent_target.get_text()
dup = self.term_model.get_duplicates(src_text, tgt_text)
if dup:
self.lbl_add_term_errors.set_text(_('Identical entry already exists.'))
self.eb_add_term_errors.modify_bg(gtk.STATE_NORMAL, gdk.color_parse(current_theme['warning_bg']))
self.eb_add_term_errors.show_all()
self.btn_add_term.props.sensitive = False
return
same_src_units = self.term_model.get_units_with_source(src_text)
if src_text and same_src_units:
# We want to separate multiple terms with the correct list
# separator for the UI language:
separator = lang_factory.getlanguage(ui_language).listseperator
#l10n: The variable is an existing term formatted for emphasis. The default is bold formatting, but you can remove/change the markup if needed. Leave it unchanged if you are unsure.
translations = separator.join([_('<b>%s</b>') % (u.target) for u in same_src_units])
errormsg = _('Existing translations: %(translations)s') % {
'translations': translations
}
self.lbl_add_term_errors.set_markup(errormsg)
self.eb_add_term_errors.modify_bg(gtk.STATE_NORMAL, gdk.color_parse(current_theme['warning_bg']))
self.eb_add_term_errors.show_all()
return | # along with this program; if not, see <http://www.gnu.org/licenses/>.
import os.path
import gtk
import logging | random_line_split |
mod.rs | //! Simple Rust AST. This is what the various code generators create,
//! which then gets serialized.
use grammar::repr::Grammar;
use grammar::parse_tree::Visibility;
use tls::Tls;
use std::fmt;
use std::io::{self, Write};
macro_rules! rust {
($w:expr, $($args:tt)*) => {
try!(($w).writeln(&::std::fmt::format(format_args!($($args)*))))
}
}
/// A wrapper around a Write instance that handles indentation for
/// Rust code. It expects Rust code to be written in a stylized way,
/// with lots of braces and newlines (example shown here with no
/// indentation). Over time maybe we can extend this to make things
/// look prettier, but seems like...meh, just run it through some
/// rustfmt tool.
///
/// ```ignore
/// fn foo(
/// arg1: Type1,
/// arg2: Type2,
/// arg3: Type3)
/// -> ReturnType
/// {
/// match foo {
/// Variant => {
/// }
/// }
/// }
/// ```
pub struct RustWrite<W: Write> {
write: W,
indent: usize,
}
const TAB: usize = 4;
impl<W: Write> RustWrite<W> {
pub fn new(w: W) -> RustWrite<W> {
RustWrite {
write: w,
indent: 0,
}
}
pub fn into_inner(self) -> W {
self.write
}
fn write_indentation(&mut self) -> io::Result<()> {
write!(self.write, "{0:1$}", "", self.indent)
}
fn write_indented(&mut self, out: &str) -> io::Result<()> {
writeln!(self.write, "{0:1$}{2}", "", self.indent, out)
}
pub fn write_table_row<I, C>(&mut self, iterable: I) -> io::Result<()>
where
I: IntoIterator<Item = (i32, C)>,
C: fmt::Display,
{
if Tls::session().emit_comments {
for (i, comment) in iterable {
try!(self.write_indentation());
try!(writeln!(self.write, "{}, {}", i, comment));
}
} else {
try!(self.write_indentation());
let mut first = true; | try!(write!(self.write, " "));
}
try!(write!(self.write, "{},", i));
first = false;
}
}
writeln!(self.write, "")
}
pub fn writeln(&mut self, out: &str) -> io::Result<()> {
let buf = out.as_bytes();
// pass empty lines through with no indentation
if buf.is_empty() {
return self.write.write_all("\n".as_bytes());
}
let n = buf.len() - 1;
// If the line begins with a `}`, `]`, or `)`, first decrement the indentation.
if buf[0] == ('}' as u8) || buf[0] == (']' as u8) || buf[0] == (')' as u8) {
self.indent -= TAB;
}
try!(self.write_indented(out));
// Detect a line that ends in a `{` or `(` and increase indentation for future lines.
if buf[n] == ('{' as u8) || buf[n] == ('[' as u8) || buf[n] == ('(' as u8) {
self.indent += TAB;
}
Ok(())
}
pub fn write_fn_header(
&mut self,
grammar: &Grammar,
visibility: &Visibility,
name: String,
type_parameters: Vec<String>,
first_parameter: Option<String>,
parameters: Vec<String>,
return_type: String,
where_clauses: Vec<String>,
) -> io::Result<()> {
rust!(self, "{}fn {}<", visibility, name);
for type_parameter in &grammar.type_parameters {
rust!(self, "{0:1$}{2},", "", TAB, type_parameter);
}
for type_parameter in type_parameters {
rust!(self, "{0:1$}{2},", "", TAB, type_parameter);
}
rust!(self, ">(");
if let Some(param) = first_parameter {
rust!(self, "{},", param);
}
for parameter in &grammar.parameters {
rust!(self, "{}: {},", parameter.name, parameter.ty);
}
for parameter in ¶meters {
rust!(self, "{},", parameter);
}
if !grammar.where_clauses.is_empty() || !where_clauses.is_empty() {
rust!(self, ") -> {} where", return_type);
for where_clause in &grammar.where_clauses {
rust!(self, " {},", where_clause);
}
for where_clause in &where_clauses {
rust!(self, " {},", where_clause);
}
} else {
rust!(self, ") -> {}", return_type);
}
Ok(())
}
pub fn write_module_attributes(&mut self, grammar: &Grammar) -> io::Result<()> {
for attribute in grammar.module_attributes.iter() {
rust!(self, "{}", attribute);
}
Ok(())
}
pub fn write_uses(&mut self, super_prefix: &str, grammar: &Grammar) -> io::Result<()> {
// things the user wrote
for u in &grammar.uses {
if u.starts_with("super::") {
rust!(self, "use {}{};", super_prefix, u);
} else {
rust!(self, "use {};", u);
}
}
self.write_standard_uses(&grammar.prefix)
}
pub fn write_standard_uses(&mut self, prefix: &str) -> io::Result<()> {
// Stuff that we plan to use.
// Occasionally we happen to not use it after all, hence the allow.
rust!(self, "#[allow(unused_extern_crates)]");
rust!(self, "extern crate lalrpop_util as {}lalrpop_util;", prefix);
Ok(())
}
} | for (i, _comment) in iterable {
if !first { | random_line_split |
mod.rs | //! Simple Rust AST. This is what the various code generators create,
//! which then gets serialized.
use grammar::repr::Grammar;
use grammar::parse_tree::Visibility;
use tls::Tls;
use std::fmt;
use std::io::{self, Write};
macro_rules! rust {
($w:expr, $($args:tt)*) => {
try!(($w).writeln(&::std::fmt::format(format_args!($($args)*))))
}
}
/// A wrapper around a Write instance that handles indentation for
/// Rust code. It expects Rust code to be written in a stylized way,
/// with lots of braces and newlines (example shown here with no
/// indentation). Over time maybe we can extend this to make things
/// look prettier, but seems like...meh, just run it through some
/// rustfmt tool.
///
/// ```ignore
/// fn foo(
/// arg1: Type1,
/// arg2: Type2,
/// arg3: Type3)
/// -> ReturnType
/// {
/// match foo {
/// Variant => {
/// }
/// }
/// }
/// ```
pub struct RustWrite<W: Write> {
write: W,
indent: usize,
}
const TAB: usize = 4;
impl<W: Write> RustWrite<W> {
pub fn new(w: W) -> RustWrite<W> {
RustWrite {
write: w,
indent: 0,
}
}
pub fn into_inner(self) -> W {
self.write
}
fn write_indentation(&mut self) -> io::Result<()> {
write!(self.write, "{0:1$}", "", self.indent)
}
fn write_indented(&mut self, out: &str) -> io::Result<()> {
writeln!(self.write, "{0:1$}{2}", "", self.indent, out)
}
pub fn write_table_row<I, C>(&mut self, iterable: I) -> io::Result<()>
where
I: IntoIterator<Item = (i32, C)>,
C: fmt::Display,
{
if Tls::session().emit_comments {
for (i, comment) in iterable {
try!(self.write_indentation());
try!(writeln!(self.write, "{}, {}", i, comment));
}
} else {
try!(self.write_indentation());
let mut first = true;
for (i, _comment) in iterable {
if !first {
try!(write!(self.write, " "));
}
try!(write!(self.write, "{},", i));
first = false;
}
}
writeln!(self.write, "")
}
pub fn writeln(&mut self, out: &str) -> io::Result<()> {
let buf = out.as_bytes();
// pass empty lines through with no indentation
if buf.is_empty() {
return self.write.write_all("\n".as_bytes());
}
let n = buf.len() - 1;
// If the line begins with a `}`, `]`, or `)`, first decrement the indentation.
if buf[0] == ('}' as u8) || buf[0] == (']' as u8) || buf[0] == (')' as u8) {
self.indent -= TAB;
}
try!(self.write_indented(out));
// Detect a line that ends in a `{` or `(` and increase indentation for future lines.
if buf[n] == ('{' as u8) || buf[n] == ('[' as u8) || buf[n] == ('(' as u8) {
self.indent += TAB;
}
Ok(())
}
pub fn write_fn_header(
&mut self,
grammar: &Grammar,
visibility: &Visibility,
name: String,
type_parameters: Vec<String>,
first_parameter: Option<String>,
parameters: Vec<String>,
return_type: String,
where_clauses: Vec<String>,
) -> io::Result<()> {
rust!(self, "{}fn {}<", visibility, name);
for type_parameter in &grammar.type_parameters {
rust!(self, "{0:1$}{2},", "", TAB, type_parameter);
}
for type_parameter in type_parameters {
rust!(self, "{0:1$}{2},", "", TAB, type_parameter);
}
rust!(self, ">(");
if let Some(param) = first_parameter {
rust!(self, "{},", param);
}
for parameter in &grammar.parameters {
rust!(self, "{}: {},", parameter.name, parameter.ty);
}
for parameter in ¶meters {
rust!(self, "{},", parameter);
}
if !grammar.where_clauses.is_empty() || !where_clauses.is_empty() {
rust!(self, ") -> {} where", return_type);
for where_clause in &grammar.where_clauses {
rust!(self, " {},", where_clause);
}
for where_clause in &where_clauses {
rust!(self, " {},", where_clause);
}
} else {
rust!(self, ") -> {}", return_type);
}
Ok(())
}
pub fn write_module_attributes(&mut self, grammar: &Grammar) -> io::Result<()> {
for attribute in grammar.module_attributes.iter() {
rust!(self, "{}", attribute);
}
Ok(())
}
pub fn write_uses(&mut self, super_prefix: &str, grammar: &Grammar) -> io::Result<()> {
// things the user wrote
for u in &grammar.uses {
if u.starts_with("super::") {
rust!(self, "use {}{};", super_prefix, u);
} else {
rust!(self, "use {};", u);
}
}
self.write_standard_uses(&grammar.prefix)
}
pub fn write_standard_uses(&mut self, prefix: &str) -> io::Result<()> |
}
| {
// Stuff that we plan to use.
// Occasionally we happen to not use it after all, hence the allow.
rust!(self, "#[allow(unused_extern_crates)]");
rust!(self, "extern crate lalrpop_util as {}lalrpop_util;", prefix);
Ok(())
} | identifier_body |
mod.rs | //! Simple Rust AST. This is what the various code generators create,
//! which then gets serialized.
use grammar::repr::Grammar;
use grammar::parse_tree::Visibility;
use tls::Tls;
use std::fmt;
use std::io::{self, Write};
macro_rules! rust {
($w:expr, $($args:tt)*) => {
try!(($w).writeln(&::std::fmt::format(format_args!($($args)*))))
}
}
/// A wrapper around a Write instance that handles indentation for
/// Rust code. It expects Rust code to be written in a stylized way,
/// with lots of braces and newlines (example shown here with no
/// indentation). Over time maybe we can extend this to make things
/// look prettier, but seems like...meh, just run it through some
/// rustfmt tool.
///
/// ```ignore
/// fn foo(
/// arg1: Type1,
/// arg2: Type2,
/// arg3: Type3)
/// -> ReturnType
/// {
/// match foo {
/// Variant => {
/// }
/// }
/// }
/// ```
pub struct RustWrite<W: Write> {
write: W,
indent: usize,
}
const TAB: usize = 4;
impl<W: Write> RustWrite<W> {
pub fn new(w: W) -> RustWrite<W> {
RustWrite {
write: w,
indent: 0,
}
}
pub fn into_inner(self) -> W {
self.write
}
fn write_indentation(&mut self) -> io::Result<()> {
write!(self.write, "{0:1$}", "", self.indent)
}
fn write_indented(&mut self, out: &str) -> io::Result<()> {
writeln!(self.write, "{0:1$}{2}", "", self.indent, out)
}
pub fn write_table_row<I, C>(&mut self, iterable: I) -> io::Result<()>
where
I: IntoIterator<Item = (i32, C)>,
C: fmt::Display,
{
if Tls::session().emit_comments {
for (i, comment) in iterable {
try!(self.write_indentation());
try!(writeln!(self.write, "{}, {}", i, comment));
}
} else {
try!(self.write_indentation());
let mut first = true;
for (i, _comment) in iterable {
if !first {
try!(write!(self.write, " "));
}
try!(write!(self.write, "{},", i));
first = false;
}
}
writeln!(self.write, "")
}
pub fn writeln(&mut self, out: &str) -> io::Result<()> {
let buf = out.as_bytes();
// pass empty lines through with no indentation
if buf.is_empty() {
return self.write.write_all("\n".as_bytes());
}
let n = buf.len() - 1;
// If the line begins with a `}`, `]`, or `)`, first decrement the indentation.
if buf[0] == ('}' as u8) || buf[0] == (']' as u8) || buf[0] == (')' as u8) {
self.indent -= TAB;
}
try!(self.write_indented(out));
// Detect a line that ends in a `{` or `(` and increase indentation for future lines.
if buf[n] == ('{' as u8) || buf[n] == ('[' as u8) || buf[n] == ('(' as u8) {
self.indent += TAB;
}
Ok(())
}
pub fn write_fn_header(
&mut self,
grammar: &Grammar,
visibility: &Visibility,
name: String,
type_parameters: Vec<String>,
first_parameter: Option<String>,
parameters: Vec<String>,
return_type: String,
where_clauses: Vec<String>,
) -> io::Result<()> {
rust!(self, "{}fn {}<", visibility, name);
for type_parameter in &grammar.type_parameters {
rust!(self, "{0:1$}{2},", "", TAB, type_parameter);
}
for type_parameter in type_parameters {
rust!(self, "{0:1$}{2},", "", TAB, type_parameter);
}
rust!(self, ">(");
if let Some(param) = first_parameter {
rust!(self, "{},", param);
}
for parameter in &grammar.parameters {
rust!(self, "{}: {},", parameter.name, parameter.ty);
}
for parameter in ¶meters {
rust!(self, "{},", parameter);
}
if !grammar.where_clauses.is_empty() || !where_clauses.is_empty() {
rust!(self, ") -> {} where", return_type);
for where_clause in &grammar.where_clauses {
rust!(self, " {},", where_clause);
}
for where_clause in &where_clauses {
rust!(self, " {},", where_clause);
}
} else |
Ok(())
}
pub fn write_module_attributes(&mut self, grammar: &Grammar) -> io::Result<()> {
for attribute in grammar.module_attributes.iter() {
rust!(self, "{}", attribute);
}
Ok(())
}
pub fn write_uses(&mut self, super_prefix: &str, grammar: &Grammar) -> io::Result<()> {
// things the user wrote
for u in &grammar.uses {
if u.starts_with("super::") {
rust!(self, "use {}{};", super_prefix, u);
} else {
rust!(self, "use {};", u);
}
}
self.write_standard_uses(&grammar.prefix)
}
pub fn write_standard_uses(&mut self, prefix: &str) -> io::Result<()> {
// Stuff that we plan to use.
// Occasionally we happen to not use it after all, hence the allow.
rust!(self, "#[allow(unused_extern_crates)]");
rust!(self, "extern crate lalrpop_util as {}lalrpop_util;", prefix);
Ok(())
}
}
| {
rust!(self, ") -> {}", return_type);
} | conditional_block |
mod.rs | //! Simple Rust AST. This is what the various code generators create,
//! which then gets serialized.
use grammar::repr::Grammar;
use grammar::parse_tree::Visibility;
use tls::Tls;
use std::fmt;
use std::io::{self, Write};
macro_rules! rust {
($w:expr, $($args:tt)*) => {
try!(($w).writeln(&::std::fmt::format(format_args!($($args)*))))
}
}
/// A wrapper around a Write instance that handles indentation for
/// Rust code. It expects Rust code to be written in a stylized way,
/// with lots of braces and newlines (example shown here with no
/// indentation). Over time maybe we can extend this to make things
/// look prettier, but seems like...meh, just run it through some
/// rustfmt tool.
///
/// ```ignore
/// fn foo(
/// arg1: Type1,
/// arg2: Type2,
/// arg3: Type3)
/// -> ReturnType
/// {
/// match foo {
/// Variant => {
/// }
/// }
/// }
/// ```
pub struct RustWrite<W: Write> {
write: W,
indent: usize,
}
const TAB: usize = 4;
impl<W: Write> RustWrite<W> {
pub fn new(w: W) -> RustWrite<W> {
RustWrite {
write: w,
indent: 0,
}
}
pub fn into_inner(self) -> W {
self.write
}
fn write_indentation(&mut self) -> io::Result<()> {
write!(self.write, "{0:1$}", "", self.indent)
}
fn write_indented(&mut self, out: &str) -> io::Result<()> {
writeln!(self.write, "{0:1$}{2}", "", self.indent, out)
}
pub fn write_table_row<I, C>(&mut self, iterable: I) -> io::Result<()>
where
I: IntoIterator<Item = (i32, C)>,
C: fmt::Display,
{
if Tls::session().emit_comments {
for (i, comment) in iterable {
try!(self.write_indentation());
try!(writeln!(self.write, "{}, {}", i, comment));
}
} else {
try!(self.write_indentation());
let mut first = true;
for (i, _comment) in iterable {
if !first {
try!(write!(self.write, " "));
}
try!(write!(self.write, "{},", i));
first = false;
}
}
writeln!(self.write, "")
}
pub fn writeln(&mut self, out: &str) -> io::Result<()> {
let buf = out.as_bytes();
// pass empty lines through with no indentation
if buf.is_empty() {
return self.write.write_all("\n".as_bytes());
}
let n = buf.len() - 1;
// If the line begins with a `}`, `]`, or `)`, first decrement the indentation.
if buf[0] == ('}' as u8) || buf[0] == (']' as u8) || buf[0] == (')' as u8) {
self.indent -= TAB;
}
try!(self.write_indented(out));
// Detect a line that ends in a `{` or `(` and increase indentation for future lines.
if buf[n] == ('{' as u8) || buf[n] == ('[' as u8) || buf[n] == ('(' as u8) {
self.indent += TAB;
}
Ok(())
}
pub fn write_fn_header(
&mut self,
grammar: &Grammar,
visibility: &Visibility,
name: String,
type_parameters: Vec<String>,
first_parameter: Option<String>,
parameters: Vec<String>,
return_type: String,
where_clauses: Vec<String>,
) -> io::Result<()> {
rust!(self, "{}fn {}<", visibility, name);
for type_parameter in &grammar.type_parameters {
rust!(self, "{0:1$}{2},", "", TAB, type_parameter);
}
for type_parameter in type_parameters {
rust!(self, "{0:1$}{2},", "", TAB, type_parameter);
}
rust!(self, ">(");
if let Some(param) = first_parameter {
rust!(self, "{},", param);
}
for parameter in &grammar.parameters {
rust!(self, "{}: {},", parameter.name, parameter.ty);
}
for parameter in ¶meters {
rust!(self, "{},", parameter);
}
if !grammar.where_clauses.is_empty() || !where_clauses.is_empty() {
rust!(self, ") -> {} where", return_type);
for where_clause in &grammar.where_clauses {
rust!(self, " {},", where_clause);
}
for where_clause in &where_clauses {
rust!(self, " {},", where_clause);
}
} else {
rust!(self, ") -> {}", return_type);
}
Ok(())
}
pub fn | (&mut self, grammar: &Grammar) -> io::Result<()> {
for attribute in grammar.module_attributes.iter() {
rust!(self, "{}", attribute);
}
Ok(())
}
pub fn write_uses(&mut self, super_prefix: &str, grammar: &Grammar) -> io::Result<()> {
// things the user wrote
for u in &grammar.uses {
if u.starts_with("super::") {
rust!(self, "use {}{};", super_prefix, u);
} else {
rust!(self, "use {};", u);
}
}
self.write_standard_uses(&grammar.prefix)
}
pub fn write_standard_uses(&mut self, prefix: &str) -> io::Result<()> {
// Stuff that we plan to use.
// Occasionally we happen to not use it after all, hence the allow.
rust!(self, "#[allow(unused_extern_crates)]");
rust!(self, "extern crate lalrpop_util as {}lalrpop_util;", prefix);
Ok(())
}
}
| write_module_attributes | identifier_name |
create.rs | static LOREM_IPSUM: &'static str =
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
";
use std::error::Error;
use std::io::prelude::*;
use std::fs::File;
use std::path::Path;
fn | () {
let path = Path::new("out/lorem_ipsum.txt");
let display = path.display();
// 以只写模式打开文件,返回 `io::Result<File>`
let mut file = match File::create(&path) {
Err(why) => panic!("couldn't create {}: {}",
display,
why.description()),
Ok(file) => file,
};
// 将 `LOREM_IPSUM` 字符串写进 `file`,返回 `io::Result<()>`
match file.write_all(LOREM_IPSUM.as_bytes()) {
Err(why) => {
panic!("couldn't write to {}: {}", display,
why.description())
},
Ok(_) => println!("successfully wrote to {}", display),
}
}
| main | identifier_name |
create.rs | static LOREM_IPSUM: &'static str =
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
";
use std::error::Error;
use std::io::prelude::*;
use std::fs::File;
use std::path::Path;
fn main() | {
let path = Path::new("out/lorem_ipsum.txt");
let display = path.display();
// 以只写模式打开文件,返回 `io::Result<File>`
let mut file = match File::create(&path) {
Err(why) => panic!("couldn't create {}: {}",
display,
why.description()),
Ok(file) => file,
};
// 将 `LOREM_IPSUM` 字符串写进 `file`,返回 `io::Result<()>`
match file.write_all(LOREM_IPSUM.as_bytes()) {
Err(why) => {
panic!("couldn't write to {}: {}", display,
why.description())
},
Ok(_) => println!("successfully wrote to {}", display),
}
}
| identifier_body |
|
create.rs | static LOREM_IPSUM: &'static str =
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
";
use std::error::Error;
use std::io::prelude::*;
use std::fs::File;
use std::path::Path;
fn main() {
let path = Path::new("out/lorem_ipsum.txt");
let display = path.display();
// 以只写模式打开文件,返回 `io::Result<File>`
let mut file = match File::create(&path) {
Err(why) => panic!("couldn't create {}: {}",
display,
why.description()),
Ok(file) => file,
};
// 将 `LOREM_IPSUM` 字符串写进 `file`,返回 `io::Result<()>`
match file.write_all(LOREM_IPSUM.as_bytes()) {
Err(why) => {
panic!("couldn't write to {}: {}", display,
why.description())
},
Ok(_) => println!("successfully wrote to {}", display),
} | } | random_line_split |
|
hasIn.ts | import { baseHasIn } from './_baseHasIn';
import { hasPath } from './_hasPath';
/**
* Checks if `path` is a direct or inherited property of `_object`.
*
* @param object The _object to query.
* @param path The path to check.
* Returns `true` if `path` exists, else `false`.
*
* var _object = _.create({ 'a': _.create({ 'b': 2 }) });
*
* _.hasIn(_object, 'a');
* // => true
*
* _.hasIn(_object, 'a.b');
* // => true
*
* _.hasIn(_object, ['a', 'b']);
* // => true
*
* _.hasIn(_object, 'b');
* // => false
*/
export function hasIn(object: any, path: string[] | string): boolean | {
return object != null && hasPath(object, path, baseHasIn);
} | identifier_body |
|
hasIn.ts | import { baseHasIn } from './_baseHasIn';
import { hasPath } from './_hasPath';
/**
* Checks if `path` is a direct or inherited property of `_object`.
*
* @param object The _object to query.
* @param path The path to check.
* Returns `true` if `path` exists, else `false`.
*
* var _object = _.create({ 'a': _.create({ 'b': 2 }) });
*
* _.hasIn(_object, 'a');
* // => true
*
* _.hasIn(_object, 'a.b');
* // => true
*
* _.hasIn(_object, ['a', 'b']);
* // => true
*
* _.hasIn(_object, 'b');
* // => false
*/
export function | (object: any, path: string[] | string): boolean {
return object != null && hasPath(object, path, baseHasIn);
}
| hasIn | identifier_name |
hasIn.ts | import { baseHasIn } from './_baseHasIn';
import { hasPath } from './_hasPath';
/**
* Checks if `path` is a direct or inherited property of `_object`. | *
* var _object = _.create({ 'a': _.create({ 'b': 2 }) });
*
* _.hasIn(_object, 'a');
* // => true
*
* _.hasIn(_object, 'a.b');
* // => true
*
* _.hasIn(_object, ['a', 'b']);
* // => true
*
* _.hasIn(_object, 'b');
* // => false
*/
export function hasIn(object: any, path: string[] | string): boolean {
return object != null && hasPath(object, path, baseHasIn);
} | *
* @param object The _object to query.
* @param path The path to check.
* Returns `true` if `path` exists, else `false`. | random_line_split |
Player.js | ~function () {
/**
* Create a new player.
*/
function Player(params) {
params || (params = {});
for (var key in params)
this[key] = params[key];
_createAnimation(this);
_createMesh(this);
_setThirdPersonCamera(this);
this.score = 0;
this.target = this.mesh.position.clone();
}
Player.prototype.run = function () {
this.anim.play("run");
}
/**
* Update the player at each frame.
* @param delta The delta time between this frame and
* the previous one.
*/
Player.prototype.update = function (delta) {
this.anim.update(delta * 1000);
if (THREE.Input.isKeyDown('leftArrow'))
_moveLeft(this);
if (THREE.Input.isKeyDown('rightArrow'))
_moveRight(this); |
if (this.isInvinsible()) {
this.invinsibleDelay -= delta;
this.mesh.visible = ~~(this.invinsibleDelay * 10) % 2;
} else
this.mesh.visible = true;
}
/**
* Check collision between the player and another entity.
*/
Player.prototype.hit = function (entity) {
var d = entity.mesh.position.z + 35 - this.mesh.position.z;
return d >= 0 && d <= 50 &&
entity.mesh.position.x === this.target.x;
}
/**
* Return true if the player is in invisible mode.
*/
Player.prototype.isInvinsible = function () {
return this.invinsibleDelay > 0;
}
/**
* Increase player score.
*/
Player.prototype.increaseScore = function () {
this.score++;
}
/**
* Decrease player lifes.
*/
Player.prototype.decreaseLifes = function () {
if (this.isInvinsible())
return ;
this.lifes--;
this.invinsibleDelay = 2;
}
/**
* Translate the player to the left.
*/
var _moveLeft = function (self) {
if (self.target.x === -50)
return ;
self.target.x -= 50;
}
/**
* Translate the player to the right.
*/
var _moveRight = function (self) {
if (self.target.x === 50)
return ;
self.target.x += 50;
}
/**
* Create the player sprite animation.
*/
var _createAnimation = function (self) {
var texture = new THREE.ImageUtils.loadTexture("resources/mario.png");
self.anim = new THREE.SpriteAnimation({
texture: texture,
tilesHorizontal: 6,
tilesVertical: 4,
numberOfTiles: 24,
delay: 42
});
self.anim.add("idle", {from: 22, to: 22});
self.anim.add("run", {from: 18, to: 23});
self.anim.play("idle");
}
/**
* Create the player mesh.
*/
var _createMesh = function (self) {
var material = new THREE.MeshBasicMaterial({
map: self.anim.texture
});
material.transparent = true;
self.mesh = new THREE.Mesh(
new THREE.PlaneGeometry(50, 50),
material
);
self.mesh.position.y += 25;
self.mesh.position.z += 25;
self.scene.add(self.mesh);
}
/**
* Attach a third person camera to the player.
*/
var _setThirdPersonCamera = function (self) {
self.controls = new THREE.ThirdPersonControls({
camera: self.camera,
target: self.mesh,
lerp: 0.05,
offset: new THREE.Vector3(0, 90, 200),
moveSpeed: 0, // on block les mouvements
contraints: new THREE.Vector2(1, 1) // et les rotations
});
self.camera.position.set(5000, 4000, 5000);
}
window.Player = Player;
}(); |
this.mesh.position.lerp(this.target, delta * 10); | random_line_split |
Player.js |
~function () {
/**
* Create a new player.
*/
function Player(params) |
Player.prototype.run = function () {
this.anim.play("run");
}
/**
* Update the player at each frame.
* @param delta The delta time between this frame and
* the previous one.
*/
Player.prototype.update = function (delta) {
this.anim.update(delta * 1000);
if (THREE.Input.isKeyDown('leftArrow'))
_moveLeft(this);
if (THREE.Input.isKeyDown('rightArrow'))
_moveRight(this);
this.mesh.position.lerp(this.target, delta * 10);
if (this.isInvinsible()) {
this.invinsibleDelay -= delta;
this.mesh.visible = ~~(this.invinsibleDelay * 10) % 2;
} else
this.mesh.visible = true;
}
/**
* Check collision between the player and another entity.
*/
Player.prototype.hit = function (entity) {
var d = entity.mesh.position.z + 35 - this.mesh.position.z;
return d >= 0 && d <= 50 &&
entity.mesh.position.x === this.target.x;
}
/**
* Return true if the player is in invisible mode.
*/
Player.prototype.isInvinsible = function () {
return this.invinsibleDelay > 0;
}
/**
* Increase player score.
*/
Player.prototype.increaseScore = function () {
this.score++;
}
/**
* Decrease player lifes.
*/
Player.prototype.decreaseLifes = function () {
if (this.isInvinsible())
return ;
this.lifes--;
this.invinsibleDelay = 2;
}
/**
* Translate the player to the left.
*/
var _moveLeft = function (self) {
if (self.target.x === -50)
return ;
self.target.x -= 50;
}
/**
* Translate the player to the right.
*/
var _moveRight = function (self) {
if (self.target.x === 50)
return ;
self.target.x += 50;
}
/**
* Create the player sprite animation.
*/
var _createAnimation = function (self) {
var texture = new THREE.ImageUtils.loadTexture("resources/mario.png");
self.anim = new THREE.SpriteAnimation({
texture: texture,
tilesHorizontal: 6,
tilesVertical: 4,
numberOfTiles: 24,
delay: 42
});
self.anim.add("idle", {from: 22, to: 22});
self.anim.add("run", {from: 18, to: 23});
self.anim.play("idle");
}
/**
* Create the player mesh.
*/
var _createMesh = function (self) {
var material = new THREE.MeshBasicMaterial({
map: self.anim.texture
});
material.transparent = true;
self.mesh = new THREE.Mesh(
new THREE.PlaneGeometry(50, 50),
material
);
self.mesh.position.y += 25;
self.mesh.position.z += 25;
self.scene.add(self.mesh);
}
/**
* Attach a third person camera to the player.
*/
var _setThirdPersonCamera = function (self) {
self.controls = new THREE.ThirdPersonControls({
camera: self.camera,
target: self.mesh,
lerp: 0.05,
offset: new THREE.Vector3(0, 90, 200),
moveSpeed: 0, // on block les mouvements
contraints: new THREE.Vector2(1, 1) // et les rotations
});
self.camera.position.set(5000, 4000, 5000);
}
window.Player = Player;
}(); | {
params || (params = {});
for (var key in params)
this[key] = params[key];
_createAnimation(this);
_createMesh(this);
_setThirdPersonCamera(this);
this.score = 0;
this.target = this.mesh.position.clone();
} | identifier_body |
Player.js |
~function () {
/**
* Create a new player.
*/
function | (params) {
params || (params = {});
for (var key in params)
this[key] = params[key];
_createAnimation(this);
_createMesh(this);
_setThirdPersonCamera(this);
this.score = 0;
this.target = this.mesh.position.clone();
}
Player.prototype.run = function () {
this.anim.play("run");
}
/**
* Update the player at each frame.
* @param delta The delta time between this frame and
* the previous one.
*/
Player.prototype.update = function (delta) {
this.anim.update(delta * 1000);
if (THREE.Input.isKeyDown('leftArrow'))
_moveLeft(this);
if (THREE.Input.isKeyDown('rightArrow'))
_moveRight(this);
this.mesh.position.lerp(this.target, delta * 10);
if (this.isInvinsible()) {
this.invinsibleDelay -= delta;
this.mesh.visible = ~~(this.invinsibleDelay * 10) % 2;
} else
this.mesh.visible = true;
}
/**
* Check collision between the player and another entity.
*/
Player.prototype.hit = function (entity) {
var d = entity.mesh.position.z + 35 - this.mesh.position.z;
return d >= 0 && d <= 50 &&
entity.mesh.position.x === this.target.x;
}
/**
* Return true if the player is in invisible mode.
*/
Player.prototype.isInvinsible = function () {
return this.invinsibleDelay > 0;
}
/**
* Increase player score.
*/
Player.prototype.increaseScore = function () {
this.score++;
}
/**
* Decrease player lifes.
*/
Player.prototype.decreaseLifes = function () {
if (this.isInvinsible())
return ;
this.lifes--;
this.invinsibleDelay = 2;
}
/**
* Translate the player to the left.
*/
var _moveLeft = function (self) {
if (self.target.x === -50)
return ;
self.target.x -= 50;
}
/**
* Translate the player to the right.
*/
var _moveRight = function (self) {
if (self.target.x === 50)
return ;
self.target.x += 50;
}
/**
* Create the player sprite animation.
*/
var _createAnimation = function (self) {
var texture = new THREE.ImageUtils.loadTexture("resources/mario.png");
self.anim = new THREE.SpriteAnimation({
texture: texture,
tilesHorizontal: 6,
tilesVertical: 4,
numberOfTiles: 24,
delay: 42
});
self.anim.add("idle", {from: 22, to: 22});
self.anim.add("run", {from: 18, to: 23});
self.anim.play("idle");
}
/**
* Create the player mesh.
*/
var _createMesh = function (self) {
var material = new THREE.MeshBasicMaterial({
map: self.anim.texture
});
material.transparent = true;
self.mesh = new THREE.Mesh(
new THREE.PlaneGeometry(50, 50),
material
);
self.mesh.position.y += 25;
self.mesh.position.z += 25;
self.scene.add(self.mesh);
}
/**
* Attach a third person camera to the player.
*/
var _setThirdPersonCamera = function (self) {
self.controls = new THREE.ThirdPersonControls({
camera: self.camera,
target: self.mesh,
lerp: 0.05,
offset: new THREE.Vector3(0, 90, 200),
moveSpeed: 0, // on block les mouvements
contraints: new THREE.Vector2(1, 1) // et les rotations
});
self.camera.position.set(5000, 4000, 5000);
}
window.Player = Player;
}(); | Player | identifier_name |
engine.js | // Enable source map support
// import 'source-map-support/register'
// Module Dependencies
import path from 'path'
import async from 'async'
import { Utils as UtilsClass } from './satellites/utils'
// FIXME: this is a temporary workaround, we must make this more professional
const Utils = new UtilsClass()
// This stores the number of times that Stellar was started. Every time that
// Stellar restarts this is incremented by 1
let startCount = 0
/**
* Main Stellar entry point class.
*
* This makes the system bootstrap, loading and execution all satellites. Each
* initializer load new features to the engine instance or perform a set of
* instruction to accomplish a certain goal.
*/
export default class Engine {
// --------------------------------------------------------------------------- [STATIC]
/**
* Default proprieties for the satellites.
*
* @type {{load: number, start: number, stop: number}}
*/
static defaultPriorities = {
load: 100,
start: 100,
stop: 100
}
/**
* Normalize satellite priorities.
*
* @param satellite Satellite instance to be normalized.
*/
static normalizeInitializerPriority (satellite) {
satellite.loadPriority = satellite.loadPriority || Engine.defaultPriorities.load
satellite.startPriority = satellite.startPriority || Engine.defaultPriorities.start
satellite.stopPriority = satellite.stopPriority || Engine.defaultPriorities.stop
}
/**
* Order satellites array by their priority.
*
* @param collection Satellites array to be ordered.
* @returns {Array} New ordered array.
*/
static flattenOrderedInitializer (collection) {
let keys = []
let output = []
// get keys from the collection
for (var key in collection) { keys.push(parseInt(key)) }
// sort the keys in ascendant way
keys.sort((a, b) => a - b)
// iterate the ordered keys and create the new ordered object to be
// outputted
keys.forEach(key => collection[ key ].forEach(d => output.push(d)))
// return the new ordered object
return output
}
/**
* Print fatal error on the console and exit from the engine execution.
*
* @private
* @param api API instance.
* @param errors String or array with the fatal error(s).
* @param type String with the error type.
*/
static fatalError (api, errors, type) {
// if errors variables if not defined return
if (!errors) { return }
// ensure the errors variable is an Array
if (!Array.isArray(errors)) { errors = [ errors ] }
// log an emergency message
console.log()
api.log(`Error with satellite step: ${type}`, 'emerg')
// log all the errors
errors.forEach(err => api.log(err, 'emerg'))
// finish the process execution
api.commands.stop.call(api, () => { process.exit(1) })
}
// --------------------------------------------------------------------------- [Class]
/**
* API object.
*
* This object will be shared across all the platform, it's here the
* satellites will load logic and the developers access the functions.
*
* @type {{}}
*/
api = {
bootTime: null,
status: 'stopped',
commands: {
initialize: null,
start: null,
stop: null,
restart: null
},
_self: null,
log: null,
scope: {}
}
/**
* List with all satellites.
*
* @type {{}}
*/
satellites = {}
/**
* Array with the initial satellites.
*
* @type {Array}
*/
initialSatellites = []
/**
* Array with the load satellites.
*
* This array contains all the satellites who has a load method.
*
* @type {Array}
*/
loadSatellites = []
/**
* Array with the start satellites.
*
* This array contains all the satellites who has a start method.
*
* @type {Array}
*/
startSatellites = []
/**
* Array with the stop satellites.
*
* This array contains all the satellites who has a stop method.
*
* @type {Array}
*/
stopSatellites = []
/**
* Create a new instance of the Engine.
*
* @param scope Initial scope
*/
constructor (scope) {
let self = this
// save current execution scope
self.api.scope = scope
// define args if them are not already defined
if (!self.api.scope.args) { self.api.scope.args = {} }
// save the engine reference for external calls
self.api._self = self
// define a dummy logger
//
// this only should print error, emergency levels
self.api.log = (msg, level = 'info') => {
// if we are on test environment don't use the console
if (process.env.NODE_ENV === 'test') { return }
if (level === 'emergency' || level === 'error') {
return console.error(`\x1b[31m[-] ${msg}\x1b[37m`)
} else if (level === 'info') {
return console.info(`[!] ${msg}`)
} else if (level !== 'debug') {
console.log(`[d] ${msg}`)
}
}
// define the available engine commands
self.api.commands = {
initialize: self.initialize,
start: self.start,
stop: self.stop,
restart: self.restart
}
}
// --------------------------------------------------------------------------- [State Manager Functions]
initialize (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
self.api._self = self
// print current execution path
self.api.log(`Current universe "${self.api.scope.rootPath}"`, 'info')
// execute the stage 0
this.stage0(callback)
}
/**
* Start engine execution.
*
* @param callback This function is called when the Engine finish their startup.
*/
start (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
self.api._self = self
// reset start counter
startCount = 0
// check if the engine was already initialized
if (self.api.status !== 'init_stage0') {
return self.initialize((error, api) => {
// if an error occurs we stop here
if (error) { return callback(error, api) }
// start stage1 loading method
self.stage1(callback)
})
}
// start stage1 loading method
return self.stage1(callback)
}
/**
* Stop the Engine execution.
*
* This method try shutdown the engine in a non violent way, this
* starts to execute all the stop method on the supported satellites.
*
* @param callback Callback function to be executed at the stop end execution.
*/
stop (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
if (self.api.status === 'running') {
// stop Engine
self.api.status = 'shutting_down'
// log a shutting down message
self.api.log('Shutting down open servers and stopping task processing', 'alert')
// if this is the second shutdown we need remove the `finalStopInitializer` callback
if (self.stopSatellites[ (self.stopSatellites.length - 1) ].name === 'finalStopInitializer') {
self.stopSatellites.pop()
}
// add the final callback
self.stopSatellites.push(function finalStopInitializer (next) {
// stop watch for file changes
self.api.configs.unwatchAllFiles()
// clear cluster PIDs
self.api.pids.clearPidFile()
// log a shutdown message
self.api.log('Stellar has been stopped', 'alert')
self.api.log('***', 'debug')
// mark server as stopped
self.api.status = 'stopped'
// execute the callback on the next tick
process.nextTick(() => {
if (callback !== null) { callback(null, self.api) }
})
// async callback
next()
})
// iterate all satellites and stop them
async.series(self.stopSatellites, errors => Engine.fatalError(self.api, errors, 'stop'))
} else if (self.api.status === 'shutting_down') {
// double sigterm; ignore it
} else {
// we can shutdown the Engine if it is not running
self.api.log('Cannot shutdown Stellar, not running', 'error')
// exists a callback?
if (callback !== null) { callback(null, self.api) }
}
}
/**
* Restart the Stellar Engine.
*
* This execute a stop action and execute the stage2 load actions.
*
* @param callback Callback function to be executed at the restart end.s
*/
restart (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
if (self.api.status === 'running') {
// stop the engine
self.stop(err => {
// log error if present
if (err) { self.api.log(err, 'error') }
// start the engine again
self.stage2(function (err) {
if (err) { self.api.log(err, 'error') }
// log a restart message
self.api.log('*** Stellar Restarted ***', 'info')
// exists a callback
if (callback !== null) { callback(null, self.api) }
})
})
} else {
self.stage2(err => {
// log any encountered error
if (err) { self.api.log(err, 'error') }
// log a restart message
self.api.log('*** Stellar Restarted ***', 'info')
// exists a callback
if (callback !== null) { callback(null, self.api) }
})
}
}
// --------------------------------------------------------------------------- [States Functions]
/**
* First startup stage.
*
* Steps:
* - executes the initial satellites;
* - call stage1
*
* @param callback This callback only are executed at the end of stage2.
*/
stage0 (callback = null) {
// set the state
this.api.status = 'init_stage0'
// we need to load the config first
let initialSatellites = [
path.resolve(`${__dirname}/satellites/utils.js`),
path.resolve(`${__dirname}/satellites/config.js`)
]
initialSatellites.forEach(file => {
// get full file name
let filename = file.replace(/^.*[\\\/]/, '')
// get the first part of the file name
let initializer = filename.split('.')[ 0 ]
// get the initializer
const Satellite = require(file).default
this.satellites[ initializer ] = new Satellite()
// add it to array
this.initialSatellites.push(next => this.satellites[ initializer ].load(this.api, next))
})
// execute stage0 satellites in series
async.series(this.initialSatellites, error => {
// execute the callback
callback(error, this.api)
// if an error occurs show
if (error) { Engine.fatalError(this.api, error, 'stage0') }
})
}
/**
* Second startup stage.
*
* Steps:
* - load all satellites into memory;
* - load satellites;
* - mark Engine like initialized;
* - call stage2.
*
* @param callback This callback only is executed at the stage2 end.
*/
stage1 (callback = null) {
// put the status in the next stage
this.api.status = 'init_stage1'
// ranked object for all stages
let loadSatellitesRankings = {}
let startSatellitesRankings = {}
let stopSatellitesRankings = {}
// reset satellites arrays
this.satellites = {}
// function to load the satellites in the right place
let loadSatellitesInPlace = satellitesFiles => {
// iterate all files
for (let key in satellitesFiles) {
let f = satellitesFiles[ key ]
// get satellite normalized file name and
let file = path.normalize(f)
let initializer = path.basename(f).split('.')[ 0 ]
let ext = file.split('.').pop()
// only load files with the `.js` extension
if (ext !== 'js') { continue }
// get initializer module and instantiate it
const Satellite = require(file).default
this.satellites[ initializer ] = new Satellite()
// initializer load function
let loadFunction = next => {
// check if the initializer have a load function
if (typeof this.satellites[ initializer ].load === 'function') {
this.api.log(` > load: ${initializer}`, 'debug')
// call `load` property
this.satellites[ initializer ].load(this.api, err => {
if (!err) { this.api.log(` loaded: ${initializer}`, 'debug') }
next(err)
})
} else {
next()
}
}
// initializer start function
let startFunction = next => {
// check if the initializer have a start function
if (typeof this.satellites[ initializer ].start === 'function') {
this.api.log(` > start: ${initializer}`, 'debug')
// execute start routine
this.satellites[ initializer ].start(this.api, err => {
if (!err) { this.api.log(` started: ${initializer}`, 'debug') }
next(err)
})
} else |
}
// initializer stop function
let stopFunction = next => {
if (typeof this.satellites[ initializer ].stop === 'function') {
this.api.log(` > stop: ${initializer}`, 'debug')
this.satellites[ initializer ].stop(this.api, err => {
if (!err) { this.api.log(` stopped: ${initializer}`, 'debug') }
next(err)
})
} else {
next()
}
}
// normalize satellite priorities
Engine.normalizeInitializerPriority(this.satellites[ initializer ])
loadSatellitesRankings[ this.satellites[ initializer ].loadPriority ] = loadSatellitesRankings[ this.satellites[ initializer ].loadPriority ] || []
startSatellitesRankings[ this.satellites[ initializer ].startPriority ] = startSatellitesRankings[ this.satellites[ initializer ].startPriority ] || []
stopSatellitesRankings[ this.satellites[ initializer ].stopPriority ] = stopSatellitesRankings[ this.satellites[ initializer ].stopPriority ] || []
// push loader state function to ranked arrays
loadSatellitesRankings[ this.satellites[ initializer ].loadPriority ].push(loadFunction)
startSatellitesRankings[ this.satellites[ initializer ].startPriority ].push(startFunction)
stopSatellitesRankings[ this.satellites[ initializer ].stopPriority ].push(stopFunction)
}
}
// get an array with all satellites
loadSatellitesInPlace(Utils.getFiles(`${__dirname}/satellites`))
// load satellites from all the active modules
this.api.config.modules.forEach(moduleName => {
// build the full path to the satellites folder
let moduleSatellitePaths = `${this.api.scope.rootPath}/modules/${moduleName}/satellites`
// check if the folder exists
if (Utils.directoryExists(moduleSatellitePaths)) { loadSatellitesInPlace(Utils.getFiles(moduleSatellitePaths)) }
})
// organize final array to match the satellites priorities
this.loadSatellites = Engine.flattenOrderedInitializer(loadSatellitesRankings)
this.startSatellites = Engine.flattenOrderedInitializer(startSatellitesRankings)
this.stopSatellites = Engine.flattenOrderedInitializer(stopSatellitesRankings)
// on the end of loading all satellites set engine like initialized
this.loadSatellites.push(() => { this.stage2(callback) })
// start initialization process
async.series(this.loadSatellites, errors => Engine.fatalError(this.api, errors, 'stage1'))
}
/**
* Third startup stage.
*
* Steps:
* - start satellites;
* - mark Engine as running.
*
* @param callback
*/
stage2 (callback = null) {
// put the engine in the stage2 state
this.api.status = 'init_stage2'
if (startCount === 0) {
this.startSatellites.push(next => {
// set the state
this.api.status = 'running'
this.api.bootTime = new Date().getTime()
if (startCount === 0) {
this.api.log('** Server Started @ ' + new Date() + ' ***', 'alert')
} else {
this.api.log('** Server Restarted @ ' + new Date() + ' ***', 'alert')
}
// increment the number of starts
startCount++
// call the callback if it's present
if (callback !== null) { callback(null, this.api) }
next()
})
}
// start all initializers
async.series(this.startSatellites, err => Engine.fatalError(this.api, err, 'stage2'))
}
}
| {
next()
} | conditional_block |
engine.js | // Enable source map support
// import 'source-map-support/register'
// Module Dependencies
import path from 'path'
import async from 'async'
import { Utils as UtilsClass } from './satellites/utils'
// FIXME: this is a temporary workaround, we must make this more professional
const Utils = new UtilsClass()
// This stores the number of times that Stellar was started. Every time that
// Stellar restarts this is incremented by 1
let startCount = 0
/**
* Main Stellar entry point class.
*
* This makes the system bootstrap, loading and execution all satellites. Each
* initializer load new features to the engine instance or perform a set of
* instruction to accomplish a certain goal.
*/
export default class Engine {
// --------------------------------------------------------------------------- [STATIC]
/**
* Default proprieties for the satellites.
*
* @type {{load: number, start: number, stop: number}}
*/
static defaultPriorities = {
load: 100,
start: 100,
stop: 100
}
/**
* Normalize satellite priorities.
*
* @param satellite Satellite instance to be normalized.
*/
static normalizeInitializerPriority (satellite) {
satellite.loadPriority = satellite.loadPriority || Engine.defaultPriorities.load
satellite.startPriority = satellite.startPriority || Engine.defaultPriorities.start
satellite.stopPriority = satellite.stopPriority || Engine.defaultPriorities.stop
}
/**
* Order satellites array by their priority.
*
* @param collection Satellites array to be ordered.
* @returns {Array} New ordered array.
*/
static flattenOrderedInitializer (collection) {
let keys = []
let output = []
// get keys from the collection
for (var key in collection) { keys.push(parseInt(key)) }
// sort the keys in ascendant way
keys.sort((a, b) => a - b)
// iterate the ordered keys and create the new ordered object to be
// outputted
keys.forEach(key => collection[ key ].forEach(d => output.push(d)))
// return the new ordered object
return output
}
/**
* Print fatal error on the console and exit from the engine execution.
*
* @private
* @param api API instance.
* @param errors String or array with the fatal error(s).
* @param type String with the error type.
*/
static fatalError (api, errors, type) {
// if errors variables if not defined return
if (!errors) { return }
// ensure the errors variable is an Array
if (!Array.isArray(errors)) { errors = [ errors ] }
// log an emergency message
console.log()
api.log(`Error with satellite step: ${type}`, 'emerg')
// log all the errors
errors.forEach(err => api.log(err, 'emerg'))
// finish the process execution
api.commands.stop.call(api, () => { process.exit(1) })
}
// --------------------------------------------------------------------------- [Class]
/**
* API object.
*
* This object will be shared across all the platform, it's here the
* satellites will load logic and the developers access the functions.
*
* @type {{}}
*/
api = {
bootTime: null,
status: 'stopped',
commands: {
initialize: null,
start: null,
stop: null,
restart: null
},
_self: null,
log: null,
scope: {}
}
/**
* List with all satellites.
*
* @type {{}}
*/
satellites = {}
/**
* Array with the initial satellites.
*
* @type {Array}
*/
initialSatellites = []
/**
* Array with the load satellites.
*
* This array contains all the satellites who has a load method.
*
* @type {Array}
*/
loadSatellites = []
/**
* Array with the start satellites.
*
* This array contains all the satellites who has a start method.
*
* @type {Array}
*/
startSatellites = []
/**
* Array with the stop satellites.
*
* This array contains all the satellites who has a stop method.
*
* @type {Array}
*/
stopSatellites = []
/**
* Create a new instance of the Engine.
*
* @param scope Initial scope
*/
constructor (scope) {
let self = this
// save current execution scope
self.api.scope = scope
// define args if them are not already defined
if (!self.api.scope.args) { self.api.scope.args = {} }
// save the engine reference for external calls
self.api._self = self
// define a dummy logger
//
// this only should print error, emergency levels
self.api.log = (msg, level = 'info') => {
// if we are on test environment don't use the console
if (process.env.NODE_ENV === 'test') { return }
if (level === 'emergency' || level === 'error') {
return console.error(`\x1b[31m[-] ${msg}\x1b[37m`)
} else if (level === 'info') {
return console.info(`[!] ${msg}`)
} else if (level !== 'debug') {
console.log(`[d] ${msg}`)
}
}
// define the available engine commands
self.api.commands = {
initialize: self.initialize,
start: self.start,
stop: self.stop,
restart: self.restart
}
}
// --------------------------------------------------------------------------- [State Manager Functions]
initialize (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
self.api._self = self
// print current execution path
self.api.log(`Current universe "${self.api.scope.rootPath}"`, 'info')
// execute the stage 0
this.stage0(callback)
}
/**
* Start engine execution.
*
* @param callback This function is called when the Engine finish their startup.
*/
start (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
self.api._self = self
// reset start counter
startCount = 0
// check if the engine was already initialized
if (self.api.status !== 'init_stage0') {
return self.initialize((error, api) => {
// if an error occurs we stop here
if (error) { return callback(error, api) }
// start stage1 loading method
self.stage1(callback)
})
}
// start stage1 loading method
return self.stage1(callback)
}
/**
* Stop the Engine execution.
*
* This method try shutdown the engine in a non violent way, this
* starts to execute all the stop method on the supported satellites.
*
* @param callback Callback function to be executed at the stop end execution.
*/
stop (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
if (self.api.status === 'running') {
// stop Engine
self.api.status = 'shutting_down'
// log a shutting down message
self.api.log('Shutting down open servers and stopping task processing', 'alert')
// if this is the second shutdown we need remove the `finalStopInitializer` callback
if (self.stopSatellites[ (self.stopSatellites.length - 1) ].name === 'finalStopInitializer') {
self.stopSatellites.pop()
}
// add the final callback
self.stopSatellites.push(function finalStopInitializer (next) {
// stop watch for file changes
self.api.configs.unwatchAllFiles()
// clear cluster PIDs
self.api.pids.clearPidFile()
// log a shutdown message
self.api.log('Stellar has been stopped', 'alert')
self.api.log('***', 'debug')
// mark server as stopped
self.api.status = 'stopped'
// execute the callback on the next tick
process.nextTick(() => {
if (callback !== null) { callback(null, self.api) }
})
// async callback
next()
})
// iterate all satellites and stop them
async.series(self.stopSatellites, errors => Engine.fatalError(self.api, errors, 'stop'))
} else if (self.api.status === 'shutting_down') {
// double sigterm; ignore it
} else {
// we can shutdown the Engine if it is not running
self.api.log('Cannot shutdown Stellar, not running', 'error')
// exists a callback?
if (callback !== null) { callback(null, self.api) }
}
}
/**
* Restart the Stellar Engine.
*
* This execute a stop action and execute the stage2 load actions.
*
* @param callback Callback function to be executed at the restart end.s
*/
| (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
if (self.api.status === 'running') {
// stop the engine
self.stop(err => {
// log error if present
if (err) { self.api.log(err, 'error') }
// start the engine again
self.stage2(function (err) {
if (err) { self.api.log(err, 'error') }
// log a restart message
self.api.log('*** Stellar Restarted ***', 'info')
// exists a callback
if (callback !== null) { callback(null, self.api) }
})
})
} else {
self.stage2(err => {
// log any encountered error
if (err) { self.api.log(err, 'error') }
// log a restart message
self.api.log('*** Stellar Restarted ***', 'info')
// exists a callback
if (callback !== null) { callback(null, self.api) }
})
}
}
// --------------------------------------------------------------------------- [States Functions]
/**
* First startup stage.
*
* Steps:
* - executes the initial satellites;
* - call stage1
*
* @param callback This callback only are executed at the end of stage2.
*/
stage0 (callback = null) {
// set the state
this.api.status = 'init_stage0'
// we need to load the config first
let initialSatellites = [
path.resolve(`${__dirname}/satellites/utils.js`),
path.resolve(`${__dirname}/satellites/config.js`)
]
initialSatellites.forEach(file => {
// get full file name
let filename = file.replace(/^.*[\\\/]/, '')
// get the first part of the file name
let initializer = filename.split('.')[ 0 ]
// get the initializer
const Satellite = require(file).default
this.satellites[ initializer ] = new Satellite()
// add it to array
this.initialSatellites.push(next => this.satellites[ initializer ].load(this.api, next))
})
// execute stage0 satellites in series
async.series(this.initialSatellites, error => {
// execute the callback
callback(error, this.api)
// if an error occurs show
if (error) { Engine.fatalError(this.api, error, 'stage0') }
})
}
/**
* Second startup stage.
*
* Steps:
* - load all satellites into memory;
* - load satellites;
* - mark Engine like initialized;
* - call stage2.
*
* @param callback This callback only is executed at the stage2 end.
*/
stage1 (callback = null) {
// put the status in the next stage
this.api.status = 'init_stage1'
// ranked object for all stages
let loadSatellitesRankings = {}
let startSatellitesRankings = {}
let stopSatellitesRankings = {}
// reset satellites arrays
this.satellites = {}
// function to load the satellites in the right place
let loadSatellitesInPlace = satellitesFiles => {
// iterate all files
for (let key in satellitesFiles) {
let f = satellitesFiles[ key ]
// get satellite normalized file name and
let file = path.normalize(f)
let initializer = path.basename(f).split('.')[ 0 ]
let ext = file.split('.').pop()
// only load files with the `.js` extension
if (ext !== 'js') { continue }
// get initializer module and instantiate it
const Satellite = require(file).default
this.satellites[ initializer ] = new Satellite()
// initializer load function
let loadFunction = next => {
// check if the initializer have a load function
if (typeof this.satellites[ initializer ].load === 'function') {
this.api.log(` > load: ${initializer}`, 'debug')
// call `load` property
this.satellites[ initializer ].load(this.api, err => {
if (!err) { this.api.log(` loaded: ${initializer}`, 'debug') }
next(err)
})
} else {
next()
}
}
// initializer start function
let startFunction = next => {
// check if the initializer have a start function
if (typeof this.satellites[ initializer ].start === 'function') {
this.api.log(` > start: ${initializer}`, 'debug')
// execute start routine
this.satellites[ initializer ].start(this.api, err => {
if (!err) { this.api.log(` started: ${initializer}`, 'debug') }
next(err)
})
} else {
next()
}
}
// initializer stop function
let stopFunction = next => {
if (typeof this.satellites[ initializer ].stop === 'function') {
this.api.log(` > stop: ${initializer}`, 'debug')
this.satellites[ initializer ].stop(this.api, err => {
if (!err) { this.api.log(` stopped: ${initializer}`, 'debug') }
next(err)
})
} else {
next()
}
}
// normalize satellite priorities
Engine.normalizeInitializerPriority(this.satellites[ initializer ])
loadSatellitesRankings[ this.satellites[ initializer ].loadPriority ] = loadSatellitesRankings[ this.satellites[ initializer ].loadPriority ] || []
startSatellitesRankings[ this.satellites[ initializer ].startPriority ] = startSatellitesRankings[ this.satellites[ initializer ].startPriority ] || []
stopSatellitesRankings[ this.satellites[ initializer ].stopPriority ] = stopSatellitesRankings[ this.satellites[ initializer ].stopPriority ] || []
// push loader state function to ranked arrays
loadSatellitesRankings[ this.satellites[ initializer ].loadPriority ].push(loadFunction)
startSatellitesRankings[ this.satellites[ initializer ].startPriority ].push(startFunction)
stopSatellitesRankings[ this.satellites[ initializer ].stopPriority ].push(stopFunction)
}
}
// get an array with all satellites
loadSatellitesInPlace(Utils.getFiles(`${__dirname}/satellites`))
// load satellites from all the active modules
this.api.config.modules.forEach(moduleName => {
// build the full path to the satellites folder
let moduleSatellitePaths = `${this.api.scope.rootPath}/modules/${moduleName}/satellites`
// check if the folder exists
if (Utils.directoryExists(moduleSatellitePaths)) { loadSatellitesInPlace(Utils.getFiles(moduleSatellitePaths)) }
})
// organize final array to match the satellites priorities
this.loadSatellites = Engine.flattenOrderedInitializer(loadSatellitesRankings)
this.startSatellites = Engine.flattenOrderedInitializer(startSatellitesRankings)
this.stopSatellites = Engine.flattenOrderedInitializer(stopSatellitesRankings)
// on the end of loading all satellites set engine like initialized
this.loadSatellites.push(() => { this.stage2(callback) })
// start initialization process
async.series(this.loadSatellites, errors => Engine.fatalError(this.api, errors, 'stage1'))
}
/**
* Third startup stage.
*
* Steps:
* - start satellites;
* - mark Engine as running.
*
* @param callback
*/
stage2 (callback = null) {
// put the engine in the stage2 state
this.api.status = 'init_stage2'
if (startCount === 0) {
this.startSatellites.push(next => {
// set the state
this.api.status = 'running'
this.api.bootTime = new Date().getTime()
if (startCount === 0) {
this.api.log('** Server Started @ ' + new Date() + ' ***', 'alert')
} else {
this.api.log('** Server Restarted @ ' + new Date() + ' ***', 'alert')
}
// increment the number of starts
startCount++
// call the callback if it's present
if (callback !== null) { callback(null, this.api) }
next()
})
}
// start all initializers
async.series(this.startSatellites, err => Engine.fatalError(this.api, err, 'stage2'))
}
}
| restart | identifier_name |
engine.js | // Enable source map support
// import 'source-map-support/register'
// Module Dependencies
import path from 'path'
import async from 'async'
import { Utils as UtilsClass } from './satellites/utils'
// FIXME: this is a temporary workaround, we must make this more professional
const Utils = new UtilsClass()
// This stores the number of times that Stellar was started. Every time that
// Stellar restarts this is incremented by 1
let startCount = 0
/**
* Main Stellar entry point class.
*
* This makes the system bootstrap, loading and execution all satellites. Each
* initializer load new features to the engine instance or perform a set of
* instruction to accomplish a certain goal.
*/
export default class Engine {
// --------------------------------------------------------------------------- [STATIC]
/**
* Default proprieties for the satellites.
*
* @type {{load: number, start: number, stop: number}}
*/
static defaultPriorities = {
load: 100,
start: 100,
stop: 100
}
/**
* Normalize satellite priorities.
*
* @param satellite Satellite instance to be normalized.
*/
static normalizeInitializerPriority (satellite) {
satellite.loadPriority = satellite.loadPriority || Engine.defaultPriorities.load
satellite.startPriority = satellite.startPriority || Engine.defaultPriorities.start
satellite.stopPriority = satellite.stopPriority || Engine.defaultPriorities.stop
}
/**
* Order satellites array by their priority.
*
* @param collection Satellites array to be ordered.
* @returns {Array} New ordered array.
*/
static flattenOrderedInitializer (collection) {
let keys = []
let output = []
// get keys from the collection
for (var key in collection) { keys.push(parseInt(key)) }
// sort the keys in ascendant way
keys.sort((a, b) => a - b)
// iterate the ordered keys and create the new ordered object to be
// outputted
keys.forEach(key => collection[ key ].forEach(d => output.push(d)))
// return the new ordered object
return output
}
/**
* Print fatal error on the console and exit from the engine execution.
*
* @private
* @param api API instance.
* @param errors String or array with the fatal error(s).
* @param type String with the error type.
*/
static fatalError (api, errors, type) {
// if errors variables if not defined return
if (!errors) { return }
// ensure the errors variable is an Array
if (!Array.isArray(errors)) { errors = [ errors ] }
// log an emergency message
console.log()
api.log(`Error with satellite step: ${type}`, 'emerg')
// log all the errors
errors.forEach(err => api.log(err, 'emerg'))
// finish the process execution
api.commands.stop.call(api, () => { process.exit(1) })
}
// --------------------------------------------------------------------------- [Class]
/**
* API object.
*
* This object will be shared across all the platform, it's here the
* satellites will load logic and the developers access the functions.
*
* @type {{}}
*/
api = {
bootTime: null,
status: 'stopped',
commands: {
initialize: null,
start: null,
stop: null,
restart: null
},
_self: null,
log: null,
scope: {}
}
/**
* List with all satellites.
*
* @type {{}}
*/
satellites = {}
/**
* Array with the initial satellites.
*
* @type {Array}
*/
initialSatellites = []
/**
* Array with the load satellites.
*
* This array contains all the satellites who has a load method.
*
* @type {Array}
*/
loadSatellites = []
/**
* Array with the start satellites.
*
* This array contains all the satellites who has a start method.
*
* @type {Array}
*/
startSatellites = []
/**
* Array with the stop satellites.
*
* This array contains all the satellites who has a stop method.
*
* @type {Array}
*/
stopSatellites = []
/**
* Create a new instance of the Engine.
*
* @param scope Initial scope
*/
constructor (scope) {
let self = this
// save current execution scope
self.api.scope = scope
// define args if them are not already defined
if (!self.api.scope.args) { self.api.scope.args = {} }
// save the engine reference for external calls
self.api._self = self
// define a dummy logger
//
// this only should print error, emergency levels
self.api.log = (msg, level = 'info') => {
// if we are on test environment don't use the console
if (process.env.NODE_ENV === 'test') { return }
if (level === 'emergency' || level === 'error') {
return console.error(`\x1b[31m[-] ${msg}\x1b[37m`)
} else if (level === 'info') {
return console.info(`[!] ${msg}`)
} else if (level !== 'debug') {
console.log(`[d] ${msg}`)
}
}
// define the available engine commands
self.api.commands = {
initialize: self.initialize,
start: self.start,
stop: self.stop,
restart: self.restart
}
}
// --------------------------------------------------------------------------- [State Manager Functions]
initialize (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
self.api._self = self
// print current execution path
self.api.log(`Current universe "${self.api.scope.rootPath}"`, 'info')
// execute the stage 0
this.stage0(callback)
}
/**
* Start engine execution.
*
* @param callback This function is called when the Engine finish their startup.
*/
start (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
self.api._self = self
// reset start counter
startCount = 0
// check if the engine was already initialized
if (self.api.status !== 'init_stage0') {
return self.initialize((error, api) => {
// if an error occurs we stop here
if (error) { return callback(error, api) }
// start stage1 loading method
self.stage1(callback)
})
}
// start stage1 loading method
return self.stage1(callback)
}
/**
* Stop the Engine execution.
*
* This method try shutdown the engine in a non violent way, this
* starts to execute all the stop method on the supported satellites.
*
* @param callback Callback function to be executed at the stop end execution.
*/
stop (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
if (self.api.status === 'running') {
// stop Engine
self.api.status = 'shutting_down'
// log a shutting down message
self.api.log('Shutting down open servers and stopping task processing', 'alert')
// if this is the second shutdown we need remove the `finalStopInitializer` callback
if (self.stopSatellites[ (self.stopSatellites.length - 1) ].name === 'finalStopInitializer') {
self.stopSatellites.pop()
}
// add the final callback
self.stopSatellites.push(function finalStopInitializer (next) {
// stop watch for file changes
self.api.configs.unwatchAllFiles()
// clear cluster PIDs
self.api.pids.clearPidFile()
// log a shutdown message
self.api.log('Stellar has been stopped', 'alert')
self.api.log('***', 'debug')
// mark server as stopped
self.api.status = 'stopped'
// execute the callback on the next tick
process.nextTick(() => {
if (callback !== null) { callback(null, self.api) }
})
// async callback
next()
})
// iterate all satellites and stop them
async.series(self.stopSatellites, errors => Engine.fatalError(self.api, errors, 'stop'))
} else if (self.api.status === 'shutting_down') {
// double sigterm; ignore it
} else {
// we can shutdown the Engine if it is not running
self.api.log('Cannot shutdown Stellar, not running', 'error')
// exists a callback?
if (callback !== null) { callback(null, self.api) }
}
}
/**
* Restart the Stellar Engine.
*
* This execute a stop action and execute the stage2 load actions.
*
* @param callback Callback function to be executed at the restart end.s
*/
restart (callback = null) {
let self = this
// if this function has called outside of the Engine the 'this'
// variable has an invalid reference
if (this._self) { self = this._self }
if (self.api.status === 'running') {
// stop the engine
self.stop(err => {
// log error if present
if (err) { self.api.log(err, 'error') }
// start the engine again
self.stage2(function (err) {
if (err) { self.api.log(err, 'error') }
// log a restart message
self.api.log('*** Stellar Restarted ***', 'info')
// exists a callback
if (callback !== null) { callback(null, self.api) }
})
})
} else {
self.stage2(err => {
// log any encountered error
if (err) { self.api.log(err, 'error') }
// log a restart message
self.api.log('*** Stellar Restarted ***', 'info')
// exists a callback
if (callback !== null) { callback(null, self.api) }
})
}
}
// --------------------------------------------------------------------------- [States Functions]
/**
* First startup stage.
*
* Steps:
* - executes the initial satellites;
* - call stage1
*
* @param callback This callback only are executed at the end of stage2.
*/
stage0 (callback = null) {
// set the state
this.api.status = 'init_stage0'
// we need to load the config first
let initialSatellites = [
path.resolve(`${__dirname}/satellites/utils.js`),
path.resolve(`${__dirname}/satellites/config.js`)
]
initialSatellites.forEach(file => {
// get full file name
let filename = file.replace(/^.*[\\\/]/, '')
// get the first part of the file name
let initializer = filename.split('.')[ 0 ]
// get the initializer
const Satellite = require(file).default
this.satellites[ initializer ] = new Satellite()
// add it to array
this.initialSatellites.push(next => this.satellites[ initializer ].load(this.api, next))
})
// execute stage0 satellites in series
async.series(this.initialSatellites, error => {
// execute the callback
callback(error, this.api)
// if an error occurs show
if (error) { Engine.fatalError(this.api, error, 'stage0') }
})
}
/**
* Second startup stage.
*
* Steps:
* - load all satellites into memory;
* - load satellites;
* - mark Engine like initialized;
* - call stage2.
*
* @param callback This callback only is executed at the stage2 end.
*/
stage1 (callback = null) {
// put the status in the next stage
this.api.status = 'init_stage1'
// ranked object for all stages
let loadSatellitesRankings = {}
let startSatellitesRankings = {}
let stopSatellitesRankings = {}
// reset satellites arrays
this.satellites = {}
// function to load the satellites in the right place
let loadSatellitesInPlace = satellitesFiles => {
// iterate all files
for (let key in satellitesFiles) {
let f = satellitesFiles[ key ]
// get satellite normalized file name and
let file = path.normalize(f)
let initializer = path.basename(f).split('.')[ 0 ]
let ext = file.split('.').pop()
// only load files with the `.js` extension
if (ext !== 'js') { continue }
// get initializer module and instantiate it
const Satellite = require(file).default
this.satellites[ initializer ] = new Satellite()
// initializer load function
let loadFunction = next => {
// check if the initializer have a load function
if (typeof this.satellites[ initializer ].load === 'function') {
this.api.log(` > load: ${initializer}`, 'debug')
// call `load` property
this.satellites[ initializer ].load(this.api, err => {
if (!err) { this.api.log(` loaded: ${initializer}`, 'debug') }
next(err)
})
} else {
next()
}
}
// initializer start function
let startFunction = next => {
// check if the initializer have a start function
if (typeof this.satellites[ initializer ].start === 'function') {
this.api.log(` > start: ${initializer}`, 'debug')
// execute start routine
this.satellites[ initializer ].start(this.api, err => {
if (!err) { this.api.log(` started: ${initializer}`, 'debug') }
next(err)
})
} else {
next()
}
}
// initializer stop function
let stopFunction = next => {
if (typeof this.satellites[ initializer ].stop === 'function') {
this.api.log(` > stop: ${initializer}`, 'debug')
this.satellites[ initializer ].stop(this.api, err => {
if (!err) { this.api.log(` stopped: ${initializer}`, 'debug') }
next(err)
})
} else {
next()
}
}
// normalize satellite priorities
Engine.normalizeInitializerPriority(this.satellites[ initializer ])
loadSatellitesRankings[ this.satellites[ initializer ].loadPriority ] = loadSatellitesRankings[ this.satellites[ initializer ].loadPriority ] || []
startSatellitesRankings[ this.satellites[ initializer ].startPriority ] = startSatellitesRankings[ this.satellites[ initializer ].startPriority ] || []
stopSatellitesRankings[ this.satellites[ initializer ].stopPriority ] = stopSatellitesRankings[ this.satellites[ initializer ].stopPriority ] || []
// push loader state function to ranked arrays
loadSatellitesRankings[ this.satellites[ initializer ].loadPriority ].push(loadFunction) | startSatellitesRankings[ this.satellites[ initializer ].startPriority ].push(startFunction)
stopSatellitesRankings[ this.satellites[ initializer ].stopPriority ].push(stopFunction)
}
}
// get an array with all satellites
loadSatellitesInPlace(Utils.getFiles(`${__dirname}/satellites`))
// load satellites from all the active modules
this.api.config.modules.forEach(moduleName => {
// build the full path to the satellites folder
let moduleSatellitePaths = `${this.api.scope.rootPath}/modules/${moduleName}/satellites`
// check if the folder exists
if (Utils.directoryExists(moduleSatellitePaths)) { loadSatellitesInPlace(Utils.getFiles(moduleSatellitePaths)) }
})
// organize final array to match the satellites priorities
this.loadSatellites = Engine.flattenOrderedInitializer(loadSatellitesRankings)
this.startSatellites = Engine.flattenOrderedInitializer(startSatellitesRankings)
this.stopSatellites = Engine.flattenOrderedInitializer(stopSatellitesRankings)
// on the end of loading all satellites set engine like initialized
this.loadSatellites.push(() => { this.stage2(callback) })
// start initialization process
async.series(this.loadSatellites, errors => Engine.fatalError(this.api, errors, 'stage1'))
}
/**
* Third startup stage.
*
* Steps:
* - start satellites;
* - mark Engine as running.
*
* @param callback
*/
stage2 (callback = null) {
// put the engine in the stage2 state
this.api.status = 'init_stage2'
if (startCount === 0) {
this.startSatellites.push(next => {
// set the state
this.api.status = 'running'
this.api.bootTime = new Date().getTime()
if (startCount === 0) {
this.api.log('** Server Started @ ' + new Date() + ' ***', 'alert')
} else {
this.api.log('** Server Restarted @ ' + new Date() + ' ***', 'alert')
}
// increment the number of starts
startCount++
// call the callback if it's present
if (callback !== null) { callback(null, this.api) }
next()
})
}
// start all initializers
async.series(this.startSatellites, err => Engine.fatalError(this.api, err, 'stage2'))
}
} | random_line_split |
|
app.js | 'use-strict';
var hours = ['6:00am', '7:00am', '8:00am', '9:00am', '10:00am', '11:00am', '12:00pm', '1:00pm', '2:00pm', '3:00pm', '4:00pm', '5:00pm', '6:00pm', '7:00pm'];
var allLocations = [];
var theTable = document.getElementById('pike');
var el = document.getElementById('moreStores');
// var hourlyTotals = [];
// contructor for the Cookie Stores
function CookieStore(locationName, minCustomersPerHour, maxCustomersPerHour, avgCookiesPerCustomer) {
this.locationName = locationName;
this.minCustomersPerHour = minCustomersPerHour;
this.maxCustomersPerHour = maxCustomersPerHour;
this.avgCookiesPerCustomer = avgCookiesPerCustomer;
this.customersEachHour = [];
this.cookiesEachHour = [];
this.totalDaily = 0;
this.calcCustomersThisHour();
this.calcCookiesThisHour();
allLocations.push(this);
}
// creates total for customers each hour
CookieStore.prototype.calcCustomersThisHour = function() {
var reference = [];
for (var i = 0; i < hours.length; i++) {
var numberCustomersPerHour = Math.floor(Math.random() * (this.maxCustomersPerHour - this.minCustomersPerHour + 1)) + this.minCustomersPerHour;
reference.push(numberCustomersPerHour);
}
this.customersEachHour = reference;
return numberCustomersPerHour;
};
// Creates total for daily cookie sales
CookieStore.prototype.calcCookiesThisHour = function() {
for (var j = 0; j < hours.length; j++) {
var totalCookieSales = Math.ceil(this.customersEachHour[j] * this.avgCookiesPerCustomer);
this.cookiesEachHour.push(totalCookieSales);
this.totalDaily += this.cookiesEachHour[j];
}
this.cookiesEachHour.push(this.totalDaily);
};
// creates table elements
function makeElement(type, content, parent){
// create
var newEl = document.createElement(type);
// content
newEl.textContent = content;
// append
parent.appendChild(newEl);
}
// Push hours to table header
var renderHeader = function() {
var trEL = document.createElement('tr');
var thEL = document.createElement('th');
thEL.textContent = 'Locations';
trEL.appendChild(thEL);
for (var i = 0; i < hours.length; i++) {
var thEL = document.createElement('th');
thEL.textContent = hours[i];
trEL.appendChild(thEL);
}
thEL = document.createElement('th');
thEL.textContent = 'Daily';
trEL.appendChild(thEL);
theTable.appendChild(trEL);
};
// Push totals to TD's in DOM
CookieStore.prototype.render = function() {
var trEL = document.createElement('tr');
var tdEL = document.createElement('td');
tdEL.textContent = this.locationName;
trEL.appendChild(tdEL);
for (var i = 0; i < hours.length + 1; i++) {
var tdEL = document.createElement('td');
tdEL.textContent = this.cookiesEachHour[i];
trEL.appendChild(tdEL);
}
theTable.appendChild(trEL);
};
// Footer TOTALLLLL
function renderFooter() {
var trEL = document.createElement('tr');
var thEL = document.createElement('th');
thEL.textContent = 'Total';
trEL.appendChild(thEL);
var totalOfTotals = 0;
var hourlyTotal = 0;
for (var i = 0; i < hours.length; i++) {
hourlyTotal = 0;
for (var j = 0; j < allLocations.length; j++) {
hourlyTotal += allLocations[j].cookiesEachHour[i];
totalOfTotals += allLocations[j].cookiesEachHour[i];
}
thEL = document.createElement('th');
thEL.textContent = hourlyTotal;
trEL.appendChild(thEL);
}
thEL = document.createElement('th');
thEL.textContent = totalOfTotals;
trEL.appendChild(thEL);
theTable.appendChild(trEL);
};
// passing new stores to the cookie store constructor
var pikePlace = new CookieStore('Pike Place Market', 23, 65, 6.3);
var seaTac = new CookieStore('Seatac', 3, 24, 1.2);
var seattleCenter = new CookieStore('Seattle Center', 11, 38, 3.7);
var capitolHill = new CookieStore('Capitol Hill', 20, 38, 2.3);
var alki = new CookieStore('Alki', 2, 16, 4.6);
// Renders the table
function renderTable(){
theTable.innerHTML = '';
renderHeader();
for (i = 0; i < allLocations.length; i++) {
allLocations[i].render();
}
renderFooter();
}
renderTable();
// Handler for listener
function handleStoreSubmit(event) {
event.preventDefault();
var newStoreLocation = event.target.storeLocation.value;
var minCustomers = parseInt(event.target.minCustomers.value);
var maxCustomers = parseInt(event.target.maxCustomers.value);
var avgCookie = parseFloat(event.target.avgCookiesSold.value);
console.log('go here');
// prevent empty
if(!newStoreLocation || !minCustomers || !maxCustomers || !avgCookie) |
//validate by type
if (typeof minCustomers !== 'number') {
return alert('Min customers must be a number');
}
// ignore case on store names
for(var i = 0; i < allLocations.length; i++){
if(newStoreLocation === allLocations[i].locationName) {
allLocations[i].minCustomersPerHour = minCustomers;
allLocations[i].maxCustomersPerHour = maxCustomers;
allLocations[i].avgCookiesPerCustomer = avgCookie;
clearForm();
allLocations[i].totalDaily = 0;
allLocations[i].customersEachHour = [];
allLocations[i].cookiesEachHour = [];
allLocations[i].calcCustomersThisHour();
allLocations[i].calcCookiesThisHour();
console.log('A match was found at index', allLocations[i]);
renderTable();
return;
}
}
new CookieStore(newStoreLocation, minCustomers, maxCustomers, avgCookie);
function clearForm(){
event.target.storeLocation.value = null;
event.target.minCustomers.value = null;
event.target.maxCustomers.value = null;
event.target.avgCookiesSold.value = null;
}
clearForm();
// for(var i = allLocations.length - 1; i < allLocations.length; i++){
// allLocations[i].render();
// }
renderTable();
};
// Listener code
el.addEventListener('submit', handleStoreSubmit);
| {
return alert('All fields must have a value');
} | conditional_block |
app.js | 'use-strict';
var hours = ['6:00am', '7:00am', '8:00am', '9:00am', '10:00am', '11:00am', '12:00pm', '1:00pm', '2:00pm', '3:00pm', '4:00pm', '5:00pm', '6:00pm', '7:00pm'];
var allLocations = [];
var theTable = document.getElementById('pike');
var el = document.getElementById('moreStores');
// var hourlyTotals = [];
// contructor for the Cookie Stores
function | (locationName, minCustomersPerHour, maxCustomersPerHour, avgCookiesPerCustomer) {
this.locationName = locationName;
this.minCustomersPerHour = minCustomersPerHour;
this.maxCustomersPerHour = maxCustomersPerHour;
this.avgCookiesPerCustomer = avgCookiesPerCustomer;
this.customersEachHour = [];
this.cookiesEachHour = [];
this.totalDaily = 0;
this.calcCustomersThisHour();
this.calcCookiesThisHour();
allLocations.push(this);
}
// creates total for customers each hour
CookieStore.prototype.calcCustomersThisHour = function() {
var reference = [];
for (var i = 0; i < hours.length; i++) {
var numberCustomersPerHour = Math.floor(Math.random() * (this.maxCustomersPerHour - this.minCustomersPerHour + 1)) + this.minCustomersPerHour;
reference.push(numberCustomersPerHour);
}
this.customersEachHour = reference;
return numberCustomersPerHour;
};
// Creates total for daily cookie sales
CookieStore.prototype.calcCookiesThisHour = function() {
for (var j = 0; j < hours.length; j++) {
var totalCookieSales = Math.ceil(this.customersEachHour[j] * this.avgCookiesPerCustomer);
this.cookiesEachHour.push(totalCookieSales);
this.totalDaily += this.cookiesEachHour[j];
}
this.cookiesEachHour.push(this.totalDaily);
};
// creates table elements
function makeElement(type, content, parent){
// create
var newEl = document.createElement(type);
// content
newEl.textContent = content;
// append
parent.appendChild(newEl);
}
// Push hours to table header
var renderHeader = function() {
var trEL = document.createElement('tr');
var thEL = document.createElement('th');
thEL.textContent = 'Locations';
trEL.appendChild(thEL);
for (var i = 0; i < hours.length; i++) {
var thEL = document.createElement('th');
thEL.textContent = hours[i];
trEL.appendChild(thEL);
}
thEL = document.createElement('th');
thEL.textContent = 'Daily';
trEL.appendChild(thEL);
theTable.appendChild(trEL);
};
// Push totals to TD's in DOM
CookieStore.prototype.render = function() {
var trEL = document.createElement('tr');
var tdEL = document.createElement('td');
tdEL.textContent = this.locationName;
trEL.appendChild(tdEL);
for (var i = 0; i < hours.length + 1; i++) {
var tdEL = document.createElement('td');
tdEL.textContent = this.cookiesEachHour[i];
trEL.appendChild(tdEL);
}
theTable.appendChild(trEL);
};
// Footer TOTALLLLL
function renderFooter() {
var trEL = document.createElement('tr');
var thEL = document.createElement('th');
thEL.textContent = 'Total';
trEL.appendChild(thEL);
var totalOfTotals = 0;
var hourlyTotal = 0;
for (var i = 0; i < hours.length; i++) {
hourlyTotal = 0;
for (var j = 0; j < allLocations.length; j++) {
hourlyTotal += allLocations[j].cookiesEachHour[i];
totalOfTotals += allLocations[j].cookiesEachHour[i];
}
thEL = document.createElement('th');
thEL.textContent = hourlyTotal;
trEL.appendChild(thEL);
}
thEL = document.createElement('th');
thEL.textContent = totalOfTotals;
trEL.appendChild(thEL);
theTable.appendChild(trEL);
};
// passing new stores to the cookie store constructor
var pikePlace = new CookieStore('Pike Place Market', 23, 65, 6.3);
var seaTac = new CookieStore('Seatac', 3, 24, 1.2);
var seattleCenter = new CookieStore('Seattle Center', 11, 38, 3.7);
var capitolHill = new CookieStore('Capitol Hill', 20, 38, 2.3);
var alki = new CookieStore('Alki', 2, 16, 4.6);
// Renders the table
function renderTable(){
theTable.innerHTML = '';
renderHeader();
for (i = 0; i < allLocations.length; i++) {
allLocations[i].render();
}
renderFooter();
}
renderTable();
// Handler for listener
function handleStoreSubmit(event) {
event.preventDefault();
var newStoreLocation = event.target.storeLocation.value;
var minCustomers = parseInt(event.target.minCustomers.value);
var maxCustomers = parseInt(event.target.maxCustomers.value);
var avgCookie = parseFloat(event.target.avgCookiesSold.value);
console.log('go here');
// prevent empty
if(!newStoreLocation || !minCustomers || !maxCustomers || !avgCookie){
return alert('All fields must have a value');
}
//validate by type
if (typeof minCustomers !== 'number') {
return alert('Min customers must be a number');
}
// ignore case on store names
for(var i = 0; i < allLocations.length; i++){
if(newStoreLocation === allLocations[i].locationName) {
allLocations[i].minCustomersPerHour = minCustomers;
allLocations[i].maxCustomersPerHour = maxCustomers;
allLocations[i].avgCookiesPerCustomer = avgCookie;
clearForm();
allLocations[i].totalDaily = 0;
allLocations[i].customersEachHour = [];
allLocations[i].cookiesEachHour = [];
allLocations[i].calcCustomersThisHour();
allLocations[i].calcCookiesThisHour();
console.log('A match was found at index', allLocations[i]);
renderTable();
return;
}
}
new CookieStore(newStoreLocation, minCustomers, maxCustomers, avgCookie);
function clearForm(){
event.target.storeLocation.value = null;
event.target.minCustomers.value = null;
event.target.maxCustomers.value = null;
event.target.avgCookiesSold.value = null;
}
clearForm();
// for(var i = allLocations.length - 1; i < allLocations.length; i++){
// allLocations[i].render();
// }
renderTable();
};
// Listener code
el.addEventListener('submit', handleStoreSubmit);
| CookieStore | identifier_name |
app.js | 'use-strict';
var hours = ['6:00am', '7:00am', '8:00am', '9:00am', '10:00am', '11:00am', '12:00pm', '1:00pm', '2:00pm', '3:00pm', '4:00pm', '5:00pm', '6:00pm', '7:00pm'];
var allLocations = [];
var theTable = document.getElementById('pike');
var el = document.getElementById('moreStores');
// var hourlyTotals = [];
// contructor for the Cookie Stores
function CookieStore(locationName, minCustomersPerHour, maxCustomersPerHour, avgCookiesPerCustomer) {
this.locationName = locationName;
this.minCustomersPerHour = minCustomersPerHour;
this.maxCustomersPerHour = maxCustomersPerHour;
this.avgCookiesPerCustomer = avgCookiesPerCustomer;
this.customersEachHour = [];
this.cookiesEachHour = [];
this.totalDaily = 0;
this.calcCustomersThisHour();
this.calcCookiesThisHour();
allLocations.push(this);
}
// creates total for customers each hour
CookieStore.prototype.calcCustomersThisHour = function() {
var reference = [];
for (var i = 0; i < hours.length; i++) {
var numberCustomersPerHour = Math.floor(Math.random() * (this.maxCustomersPerHour - this.minCustomersPerHour + 1)) + this.minCustomersPerHour;
reference.push(numberCustomersPerHour);
}
this.customersEachHour = reference;
return numberCustomersPerHour;
};
// Creates total for daily cookie sales
CookieStore.prototype.calcCookiesThisHour = function() {
for (var j = 0; j < hours.length; j++) {
var totalCookieSales = Math.ceil(this.customersEachHour[j] * this.avgCookiesPerCustomer);
this.cookiesEachHour.push(totalCookieSales);
this.totalDaily += this.cookiesEachHour[j];
}
this.cookiesEachHour.push(this.totalDaily);
};
// creates table elements
function makeElement(type, content, parent){
// create
var newEl = document.createElement(type);
// content
newEl.textContent = content;
// append
parent.appendChild(newEl);
}
// Push hours to table header
var renderHeader = function() {
var trEL = document.createElement('tr');
var thEL = document.createElement('th');
thEL.textContent = 'Locations';
trEL.appendChild(thEL);
for (var i = 0; i < hours.length; i++) {
var thEL = document.createElement('th');
thEL.textContent = hours[i];
trEL.appendChild(thEL);
}
thEL = document.createElement('th');
thEL.textContent = 'Daily';
trEL.appendChild(thEL);
theTable.appendChild(trEL);
};
// Push totals to TD's in DOM
CookieStore.prototype.render = function() {
var trEL = document.createElement('tr');
var tdEL = document.createElement('td');
tdEL.textContent = this.locationName;
trEL.appendChild(tdEL);
for (var i = 0; i < hours.length + 1; i++) {
var tdEL = document.createElement('td');
tdEL.textContent = this.cookiesEachHour[i];
trEL.appendChild(tdEL);
}
theTable.appendChild(trEL);
};
// Footer TOTALLLLL
function renderFooter() {
var trEL = document.createElement('tr');
var thEL = document.createElement('th');
thEL.textContent = 'Total';
trEL.appendChild(thEL);
var totalOfTotals = 0;
var hourlyTotal = 0;
for (var i = 0; i < hours.length; i++) {
hourlyTotal = 0;
for (var j = 0; j < allLocations.length; j++) {
hourlyTotal += allLocations[j].cookiesEachHour[i];
totalOfTotals += allLocations[j].cookiesEachHour[i];
}
thEL = document.createElement('th');
thEL.textContent = hourlyTotal;
trEL.appendChild(thEL);
}
thEL = document.createElement('th');
thEL.textContent = totalOfTotals;
trEL.appendChild(thEL);
theTable.appendChild(trEL);
};
// passing new stores to the cookie store constructor
var pikePlace = new CookieStore('Pike Place Market', 23, 65, 6.3);
var seaTac = new CookieStore('Seatac', 3, 24, 1.2);
var seattleCenter = new CookieStore('Seattle Center', 11, 38, 3.7);
var capitolHill = new CookieStore('Capitol Hill', 20, 38, 2.3);
var alki = new CookieStore('Alki', 2, 16, 4.6);
// Renders the table
function renderTable(){
theTable.innerHTML = '';
renderHeader();
for (i = 0; i < allLocations.length; i++) {
allLocations[i].render();
}
renderFooter();
}
renderTable();
// Handler for listener
function handleStoreSubmit(event) {
event.preventDefault();
var newStoreLocation = event.target.storeLocation.value;
var minCustomers = parseInt(event.target.minCustomers.value);
var maxCustomers = parseInt(event.target.maxCustomers.value);
var avgCookie = parseFloat(event.target.avgCookiesSold.value);
console.log('go here');
// prevent empty
if(!newStoreLocation || !minCustomers || !maxCustomers || !avgCookie){
return alert('All fields must have a value');
}
//validate by type
if (typeof minCustomers !== 'number') {
return alert('Min customers must be a number');
}
// ignore case on store names
for(var i = 0; i < allLocations.length; i++){
if(newStoreLocation === allLocations[i].locationName) {
allLocations[i].minCustomersPerHour = minCustomers;
allLocations[i].maxCustomersPerHour = maxCustomers;
allLocations[i].avgCookiesPerCustomer = avgCookie;
clearForm();
allLocations[i].totalDaily = 0;
allLocations[i].customersEachHour = [];
allLocations[i].cookiesEachHour = [];
allLocations[i].calcCustomersThisHour();
allLocations[i].calcCookiesThisHour();
console.log('A match was found at index', allLocations[i]);
renderTable();
return;
}
}
new CookieStore(newStoreLocation, minCustomers, maxCustomers, avgCookie);
function clearForm() |
clearForm();
// for(var i = allLocations.length - 1; i < allLocations.length; i++){
// allLocations[i].render();
// }
renderTable();
};
// Listener code
el.addEventListener('submit', handleStoreSubmit);
| {
event.target.storeLocation.value = null;
event.target.minCustomers.value = null;
event.target.maxCustomers.value = null;
event.target.avgCookiesSold.value = null;
} | identifier_body |
app.js | 'use-strict';
var hours = ['6:00am', '7:00am', '8:00am', '9:00am', '10:00am', '11:00am', '12:00pm', '1:00pm', '2:00pm', '3:00pm', '4:00pm', '5:00pm', '6:00pm', '7:00pm'];
var allLocations = [];
var theTable = document.getElementById('pike');
var el = document.getElementById('moreStores');
// var hourlyTotals = [];
// contructor for the Cookie Stores
function CookieStore(locationName, minCustomersPerHour, maxCustomersPerHour, avgCookiesPerCustomer) {
this.locationName = locationName;
this.minCustomersPerHour = minCustomersPerHour;
this.maxCustomersPerHour = maxCustomersPerHour;
this.avgCookiesPerCustomer = avgCookiesPerCustomer;
this.customersEachHour = [];
this.cookiesEachHour = [];
this.totalDaily = 0;
this.calcCustomersThisHour();
this.calcCookiesThisHour();
allLocations.push(this);
}
// creates total for customers each hour
CookieStore.prototype.calcCustomersThisHour = function() {
var reference = [];
for (var i = 0; i < hours.length; i++) {
var numberCustomersPerHour = Math.floor(Math.random() * (this.maxCustomersPerHour - this.minCustomersPerHour + 1)) + this.minCustomersPerHour;
reference.push(numberCustomersPerHour);
}
this.customersEachHour = reference;
return numberCustomersPerHour;
};
// Creates total for daily cookie sales
CookieStore.prototype.calcCookiesThisHour = function() {
for (var j = 0; j < hours.length; j++) {
var totalCookieSales = Math.ceil(this.customersEachHour[j] * this.avgCookiesPerCustomer);
this.cookiesEachHour.push(totalCookieSales);
this.totalDaily += this.cookiesEachHour[j];
}
this.cookiesEachHour.push(this.totalDaily);
};
// creates table elements
function makeElement(type, content, parent){
// create
var newEl = document.createElement(type);
// content
newEl.textContent = content;
// append
parent.appendChild(newEl);
}
// Push hours to table header
var renderHeader = function() {
var trEL = document.createElement('tr');
var thEL = document.createElement('th');
thEL.textContent = 'Locations';
trEL.appendChild(thEL);
for (var i = 0; i < hours.length; i++) {
var thEL = document.createElement('th');
thEL.textContent = hours[i];
trEL.appendChild(thEL);
}
thEL = document.createElement('th');
thEL.textContent = 'Daily';
trEL.appendChild(thEL);
theTable.appendChild(trEL);
};
// Push totals to TD's in DOM
CookieStore.prototype.render = function() {
var trEL = document.createElement('tr');
var tdEL = document.createElement('td');
tdEL.textContent = this.locationName;
trEL.appendChild(tdEL);
for (var i = 0; i < hours.length + 1; i++) {
var tdEL = document.createElement('td');
tdEL.textContent = this.cookiesEachHour[i];
trEL.appendChild(tdEL);
}
theTable.appendChild(trEL);
};
// Footer TOTALLLLL
function renderFooter() {
var trEL = document.createElement('tr');
var thEL = document.createElement('th');
thEL.textContent = 'Total';
trEL.appendChild(thEL);
var totalOfTotals = 0;
var hourlyTotal = 0;
for (var i = 0; i < hours.length; i++) { | thEL = document.createElement('th');
thEL.textContent = hourlyTotal;
trEL.appendChild(thEL);
}
thEL = document.createElement('th');
thEL.textContent = totalOfTotals;
trEL.appendChild(thEL);
theTable.appendChild(trEL);
};
// passing new stores to the cookie store constructor
var pikePlace = new CookieStore('Pike Place Market', 23, 65, 6.3);
var seaTac = new CookieStore('Seatac', 3, 24, 1.2);
var seattleCenter = new CookieStore('Seattle Center', 11, 38, 3.7);
var capitolHill = new CookieStore('Capitol Hill', 20, 38, 2.3);
var alki = new CookieStore('Alki', 2, 16, 4.6);
// Renders the table
function renderTable(){
theTable.innerHTML = '';
renderHeader();
for (i = 0; i < allLocations.length; i++) {
allLocations[i].render();
}
renderFooter();
}
renderTable();
// Handler for listener
function handleStoreSubmit(event) {
event.preventDefault();
var newStoreLocation = event.target.storeLocation.value;
var minCustomers = parseInt(event.target.minCustomers.value);
var maxCustomers = parseInt(event.target.maxCustomers.value);
var avgCookie = parseFloat(event.target.avgCookiesSold.value);
console.log('go here');
// prevent empty
if(!newStoreLocation || !minCustomers || !maxCustomers || !avgCookie){
return alert('All fields must have a value');
}
//validate by type
if (typeof minCustomers !== 'number') {
return alert('Min customers must be a number');
}
// ignore case on store names
for(var i = 0; i < allLocations.length; i++){
if(newStoreLocation === allLocations[i].locationName) {
allLocations[i].minCustomersPerHour = minCustomers;
allLocations[i].maxCustomersPerHour = maxCustomers;
allLocations[i].avgCookiesPerCustomer = avgCookie;
clearForm();
allLocations[i].totalDaily = 0;
allLocations[i].customersEachHour = [];
allLocations[i].cookiesEachHour = [];
allLocations[i].calcCustomersThisHour();
allLocations[i].calcCookiesThisHour();
console.log('A match was found at index', allLocations[i]);
renderTable();
return;
}
}
new CookieStore(newStoreLocation, minCustomers, maxCustomers, avgCookie);
function clearForm(){
event.target.storeLocation.value = null;
event.target.minCustomers.value = null;
event.target.maxCustomers.value = null;
event.target.avgCookiesSold.value = null;
}
clearForm();
// for(var i = allLocations.length - 1; i < allLocations.length; i++){
// allLocations[i].render();
// }
renderTable();
};
// Listener code
el.addEventListener('submit', handleStoreSubmit); | hourlyTotal = 0;
for (var j = 0; j < allLocations.length; j++) {
hourlyTotal += allLocations[j].cookiesEachHour[i];
totalOfTotals += allLocations[j].cookiesEachHour[i];
} | random_line_split |
Confirm.spec.tsx | /*
MIT License
Copyright (c) 2022 Looker Data Sciences, Inc. | in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
import {
fireEvent,
screen,
waitForElementToBeRemoved,
} from '@testing-library/react'
import React from 'react'
import { renderWithTheme } from '@looker/components-test-utils'
import { theme } from '@looker/design-tokens'
import { Button } from '../../Button'
import { Confirm } from './Confirm'
const requiredProps = {
message: 'Foo',
onConfirm: jest.fn(),
title: 'Bar',
}
const optionalProps = {
cancelLabel: "Don't Delete",
confirmLabel: 'Delete',
message: 'This is permanent',
onCancel: jest.fn(),
title: 'Delete the thing?',
}
afterEach(() => {
requiredProps.onConfirm.mockClear()
optionalProps.onCancel.mockClear()
})
test('<Confirm/> with defaults', async () => {
renderWithTheme(
<Confirm {...requiredProps}>
{open => <Button onClick={open}>Do Something</Button>}
</Confirm>
)
const opener = screen.getByText('Do Something')
fireEvent.click(opener)
const button = screen.getByText('Confirm')
expect(screen.getByText(requiredProps.title)).toBeVisible()
expect(screen.getByText(requiredProps.message)).toBeVisible()
expect(button).toHaveStyleRule(`background: ${theme.colors.key}`)
fireEvent.click(button)
expect(requiredProps.onConfirm).toHaveBeenCalledTimes(1)
fireEvent.click(screen.getByText('Cancel'))
await waitForElementToBeRemoved(() => screen.queryByText(requiredProps.title))
expect(screen.queryByText(requiredProps.title)).not.toBeInTheDocument()
})
test('<Confirm/> with custom props', () => {
renderWithTheme(
<Confirm {...requiredProps} {...optionalProps} buttonColor="critical">
{open => <Button onClick={open}>Do Something</Button>}
</Confirm>
)
const opener = screen.getByText('Do Something')
fireEvent.click(opener)
const button = screen.getByText(optionalProps.confirmLabel || '')
expect(button).toHaveStyleRule(`background: ${theme.colors.critical}`)
fireEvent.click(screen.getByText(optionalProps.cancelLabel || ''))
fireEvent.click(button)
expect(requiredProps.onConfirm).toHaveBeenCalledTimes(1)
expect(optionalProps.onCancel).toHaveBeenCalledTimes(1)
}) |
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal | random_line_split |
urls.py | # -*- coding: utf-8 -*-
"""
------
Urls
------
Arquivo de configuração das urls da aplicação blog
Autores:
* Alisson Barbosa Ferreira <[email protected]>
| ============== ==================
Criação Atualização
============== ==================
29/11/2014 29/11/2014
============== ==================
"""
from django.conf.urls import patterns, url
urlpatterns = patterns('blog.views',
url(r'^cadastro-usuario/$', 'usuario', name='usuario'),
url(r'^cadastro-post/$', 'post', name='post'),
url(r'^api-all-posts', 'all_posts', name='all_posts'),
url(r'^api-get-post/(?P<pk>[0-9]+)/$', 'get_post', name='get_post'),
url(r'^api-auth', 'api_auth', name='api_auth'),
url(r'^api-token', 'api_token', name='api_token'),
url(r'^api-login', 'api_login', name='api_login'),
url(r'^enviar-email/$', 'enviar_email', name='enviar_email'),
url(r'^autorelacionamento/$', 'autorelacionamento', name='autorelacionamento'),
) | Data:
| random_line_split |
linear-for-loop.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn | () {
let x = vec!(1, 2, 3);
let mut y = 0;
for i in x.iter() { println!("{:?}", *i); y += *i; }
println!("{:?}", y);
assert_eq!(y, 6);
let s = "hello there".to_owned();
let mut i: int = 0;
for c in s.bytes() {
if i == 0 { assert!((c == 'h' as u8)); }
if i == 1 { assert!((c == 'e' as u8)); }
if i == 2 { assert!((c == 'l' as u8)); }
if i == 3 { assert!((c == 'l' as u8)); }
if i == 4 { assert!((c == 'o' as u8)); }
// ...
i += 1;
println!("{:?}", i);
println!("{:?}", c);
}
assert_eq!(i, 11);
}
| main | identifier_name |
linear-for-loop.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
let x = vec!(1, 2, 3);
let mut y = 0;
for i in x.iter() { println!("{:?}", *i); y += *i; }
println!("{:?}", y);
assert_eq!(y, 6);
let s = "hello there".to_owned();
let mut i: int = 0;
for c in s.bytes() {
if i == 0 { assert!((c == 'h' as u8)); }
if i == 1 |
if i == 2 { assert!((c == 'l' as u8)); }
if i == 3 { assert!((c == 'l' as u8)); }
if i == 4 { assert!((c == 'o' as u8)); }
// ...
i += 1;
println!("{:?}", i);
println!("{:?}", c);
}
assert_eq!(i, 11);
}
| { assert!((c == 'e' as u8)); } | conditional_block |
linear-for-loop.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() | {
let x = vec!(1, 2, 3);
let mut y = 0;
for i in x.iter() { println!("{:?}", *i); y += *i; }
println!("{:?}", y);
assert_eq!(y, 6);
let s = "hello there".to_owned();
let mut i: int = 0;
for c in s.bytes() {
if i == 0 { assert!((c == 'h' as u8)); }
if i == 1 { assert!((c == 'e' as u8)); }
if i == 2 { assert!((c == 'l' as u8)); }
if i == 3 { assert!((c == 'l' as u8)); }
if i == 4 { assert!((c == 'o' as u8)); }
// ...
i += 1;
println!("{:?}", i);
println!("{:?}", c);
}
assert_eq!(i, 11);
} | identifier_body |
|
linear-for-loop.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license | let x = vec!(1, 2, 3);
let mut y = 0;
for i in x.iter() { println!("{:?}", *i); y += *i; }
println!("{:?}", y);
assert_eq!(y, 6);
let s = "hello there".to_owned();
let mut i: int = 0;
for c in s.bytes() {
if i == 0 { assert!((c == 'h' as u8)); }
if i == 1 { assert!((c == 'e' as u8)); }
if i == 2 { assert!((c == 'l' as u8)); }
if i == 3 { assert!((c == 'l' as u8)); }
if i == 4 { assert!((c == 'o' as u8)); }
// ...
i += 1;
println!("{:?}", i);
println!("{:?}", c);
}
assert_eq!(i, 11);
} | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() { | random_line_split |
karma.conf.js | module.exports = function (config) {
var configuration = { |
// frameworks to use
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
frameworks: ['jasmine'],
// list of files / patterns to load in the browser
files: [
'node_modules/jquery/dist/jquery.js',
'node_modules/jasmine-jquery/lib/jasmine-jquery.js',
'node_modules/jasmine2-custom-message/jasmine2-custom-message.js',
{ pattern: 'Tests/*.html', included: true },
{ pattern: 'Tests/*.js', included: true },
'Scripts/moment.min.js',
'Scripts/bootstrap-sortable.js'
],
// list of files to exclude
exclude: [
],
// preprocess matching files before serving them to the browser
// available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
preprocessors: {
},
// test results reporter to use
// possible values: 'dots', 'progress'
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
reporters: ['mocha'],
// enable / disable colors in the output (reporters and logs)
colors: true,
// level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
logLevel: config.LOG_INFO,
// enable / disable watching file and executing tests whenever any file changes
autoWatch: false,
// start these browsers
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
browsers: ['PhantomJS'],
// Continuous Integration mode
// if true, Karma captures browsers, runs the tests and exits
singleRun: true
};
config.set(configuration);
} |
// base path that will be used to resolve all patterns (eg. files, exclude)
basePath: '',
| random_line_split |
iterable_differs.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Optional, Provider, SkipSelf} from '../../di';
import {ListWrapper} from '../../facade/collection';
import {getTypeNameForDebugging, isPresent} from '../../facade/lang';
import {ChangeDetectorRef} from '../change_detector_ref';
/**
* A strategy for tracking changes over time to an iterable. Used for {@link NgFor} to
* respond to changes in an iterable by effecting equivalent changes in the DOM.
*
* @stable
*/
export interface IterableDiffer {
diff(object: any): any;
onDestroy(): any /** TODO #9100 */;
}
/**
* An optional function passed into {@link NgFor} that defines how to track
* items in an iterable (e.g. by index or id)
*
* @stable
*/
export interface TrackByFn { (index: number, item: any): any; }
/**
* Provides a factory for {@link IterableDiffer}.
*
* @stable
*/
export interface IterableDifferFactory {
supports(objects: any): boolean;
create(cdRef: ChangeDetectorRef, trackByFn?: TrackByFn): IterableDiffer;
}
/**
* A repository of different iterable diffing strategies used by NgFor, NgClass, and others.
* @stable
*/
export class IterableDiffers {
constructor(public factories: IterableDifferFactory[]) {}
static create(factories: IterableDifferFactory[], parent?: IterableDiffers): IterableDiffers {
if (isPresent(parent)) {
var copied = ListWrapper.clone(parent.factories);
factories = factories.concat(copied);
return new IterableDiffers(factories);
} else {
return new IterableDiffers(factories);
}
}
/**
* Takes an array of {@link IterableDifferFactory} and returns a provider used to extend the
* inherited {@link IterableDiffers} instance with the provided factories and return a new
* {@link IterableDiffers} instance.
*
* The following example shows how to extend an existing list of factories,
* which will only be applied to the injector for this component and its children.
* This step is all that's required to make a new {@link IterableDiffer} available.
*
* ### Example
*
* ```
* @Component({
* viewProviders: [
* IterableDiffers.extend([new ImmutableListDiffer()])
* ]
* })
* ```
*/
static extend(factories: IterableDifferFactory[]): Provider {
return {
provide: IterableDiffers,
useFactory: (parent: IterableDiffers) => {
if (!parent) { | throw new Error('Cannot extend IterableDiffers without a parent injector');
}
return IterableDiffers.create(factories, parent);
},
// Dependency technically isn't optional, but we can provide a better error message this way.
deps: [[IterableDiffers, new SkipSelf(), new Optional()]]
};
}
find(iterable: any): IterableDifferFactory {
var factory = this.factories.find(f => f.supports(iterable));
if (isPresent(factory)) {
return factory;
} else {
throw new Error(
`Cannot find a differ supporting object '${iterable}' of type '${getTypeNameForDebugging(iterable)}'`);
}
}
} | // Typically would occur when calling IterableDiffers.extend inside of dependencies passed
// to
// bootstrap(), which would override default pipes instead of extending them. | random_line_split |
iterable_differs.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Optional, Provider, SkipSelf} from '../../di';
import {ListWrapper} from '../../facade/collection';
import {getTypeNameForDebugging, isPresent} from '../../facade/lang';
import {ChangeDetectorRef} from '../change_detector_ref';
/**
* A strategy for tracking changes over time to an iterable. Used for {@link NgFor} to
* respond to changes in an iterable by effecting equivalent changes in the DOM.
*
* @stable
*/
export interface IterableDiffer {
diff(object: any): any;
onDestroy(): any /** TODO #9100 */;
}
/**
* An optional function passed into {@link NgFor} that defines how to track
* items in an iterable (e.g. by index or id)
*
* @stable
*/
export interface TrackByFn { (index: number, item: any): any; }
/**
* Provides a factory for {@link IterableDiffer}.
*
* @stable
*/
export interface IterableDifferFactory {
supports(objects: any): boolean;
create(cdRef: ChangeDetectorRef, trackByFn?: TrackByFn): IterableDiffer;
}
/**
* A repository of different iterable diffing strategies used by NgFor, NgClass, and others.
* @stable
*/
export class IterableDiffers {
| (public factories: IterableDifferFactory[]) {}
static create(factories: IterableDifferFactory[], parent?: IterableDiffers): IterableDiffers {
if (isPresent(parent)) {
var copied = ListWrapper.clone(parent.factories);
factories = factories.concat(copied);
return new IterableDiffers(factories);
} else {
return new IterableDiffers(factories);
}
}
/**
* Takes an array of {@link IterableDifferFactory} and returns a provider used to extend the
* inherited {@link IterableDiffers} instance with the provided factories and return a new
* {@link IterableDiffers} instance.
*
* The following example shows how to extend an existing list of factories,
* which will only be applied to the injector for this component and its children.
* This step is all that's required to make a new {@link IterableDiffer} available.
*
* ### Example
*
* ```
* @Component({
* viewProviders: [
* IterableDiffers.extend([new ImmutableListDiffer()])
* ]
* })
* ```
*/
static extend(factories: IterableDifferFactory[]): Provider {
return {
provide: IterableDiffers,
useFactory: (parent: IterableDiffers) => {
if (!parent) {
// Typically would occur when calling IterableDiffers.extend inside of dependencies passed
// to
// bootstrap(), which would override default pipes instead of extending them.
throw new Error('Cannot extend IterableDiffers without a parent injector');
}
return IterableDiffers.create(factories, parent);
},
// Dependency technically isn't optional, but we can provide a better error message this way.
deps: [[IterableDiffers, new SkipSelf(), new Optional()]]
};
}
find(iterable: any): IterableDifferFactory {
var factory = this.factories.find(f => f.supports(iterable));
if (isPresent(factory)) {
return factory;
} else {
throw new Error(
`Cannot find a differ supporting object '${iterable}' of type '${getTypeNameForDebugging(iterable)}'`);
}
}
}
| constructor | identifier_name |
iterable_differs.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Optional, Provider, SkipSelf} from '../../di';
import {ListWrapper} from '../../facade/collection';
import {getTypeNameForDebugging, isPresent} from '../../facade/lang';
import {ChangeDetectorRef} from '../change_detector_ref';
/**
* A strategy for tracking changes over time to an iterable. Used for {@link NgFor} to
* respond to changes in an iterable by effecting equivalent changes in the DOM.
*
* @stable
*/
export interface IterableDiffer {
diff(object: any): any;
onDestroy(): any /** TODO #9100 */;
}
/**
* An optional function passed into {@link NgFor} that defines how to track
* items in an iterable (e.g. by index or id)
*
* @stable
*/
export interface TrackByFn { (index: number, item: any): any; }
/**
* Provides a factory for {@link IterableDiffer}.
*
* @stable
*/
export interface IterableDifferFactory {
supports(objects: any): boolean;
create(cdRef: ChangeDetectorRef, trackByFn?: TrackByFn): IterableDiffer;
}
/**
* A repository of different iterable diffing strategies used by NgFor, NgClass, and others.
* @stable
*/
export class IterableDiffers {
constructor(public factories: IterableDifferFactory[]) {}
static create(factories: IterableDifferFactory[], parent?: IterableDiffers): IterableDiffers {
if (isPresent(parent)) | else {
return new IterableDiffers(factories);
}
}
/**
* Takes an array of {@link IterableDifferFactory} and returns a provider used to extend the
* inherited {@link IterableDiffers} instance with the provided factories and return a new
* {@link IterableDiffers} instance.
*
* The following example shows how to extend an existing list of factories,
* which will only be applied to the injector for this component and its children.
* This step is all that's required to make a new {@link IterableDiffer} available.
*
* ### Example
*
* ```
* @Component({
* viewProviders: [
* IterableDiffers.extend([new ImmutableListDiffer()])
* ]
* })
* ```
*/
static extend(factories: IterableDifferFactory[]): Provider {
return {
provide: IterableDiffers,
useFactory: (parent: IterableDiffers) => {
if (!parent) {
// Typically would occur when calling IterableDiffers.extend inside of dependencies passed
// to
// bootstrap(), which would override default pipes instead of extending them.
throw new Error('Cannot extend IterableDiffers without a parent injector');
}
return IterableDiffers.create(factories, parent);
},
// Dependency technically isn't optional, but we can provide a better error message this way.
deps: [[IterableDiffers, new SkipSelf(), new Optional()]]
};
}
find(iterable: any): IterableDifferFactory {
var factory = this.factories.find(f => f.supports(iterable));
if (isPresent(factory)) {
return factory;
} else {
throw new Error(
`Cannot find a differ supporting object '${iterable}' of type '${getTypeNameForDebugging(iterable)}'`);
}
}
}
| {
var copied = ListWrapper.clone(parent.factories);
factories = factories.concat(copied);
return new IterableDiffers(factories);
} | conditional_block |
jakarta.js | "use strict";
var helpers = require("../../helpers/helpers");
exports["Asia/Jakarta"] = {
"guess" : helpers.makeTestGuess("Asia/Jakarta", { offset: true, abbr: true }),
"1923" : helpers.makeTestYear("Asia/Jakarta", [
["1923-12-31T16:39:59+00:00", "23:47:11", "BMT", -25632 / 60],
["1923-12-31T16:40:00+00:00", "00:00:00", "JAVT", -440]
]),
"1932" : helpers.makeTestYear("Asia/Jakarta", [
["1932-10-31T16:39:59+00:00", "23:59:59", "JAVT", -440],
["1932-10-31T16:40:00+00:00", "00:10:00", "WIB", -450]
]), | "1942" : helpers.makeTestYear("Asia/Jakarta", [
["1942-03-22T16:29:59+00:00", "23:59:59", "WIB", -450],
["1942-03-22T16:30:00+00:00", "01:30:00", "JST", -540]
]),
"1945" : helpers.makeTestYear("Asia/Jakarta", [
["1945-09-22T14:59:59+00:00", "23:59:59", "JST", -540],
["1945-09-22T15:00:00+00:00", "22:30:00", "WIB", -450]
]),
"1948" : helpers.makeTestYear("Asia/Jakarta", [
["1948-04-30T16:29:59+00:00", "23:59:59", "WIB", -450],
["1948-04-30T16:30:00+00:00", "00:30:00", "WIB", -480]
]),
"1950" : helpers.makeTestYear("Asia/Jakarta", [
["1950-04-30T15:59:59+00:00", "23:59:59", "WIB", -480],
["1950-04-30T16:00:00+00:00", "23:30:00", "WIB", -450]
]),
"1963" : helpers.makeTestYear("Asia/Jakarta", [
["1963-12-31T16:29:59+00:00", "23:59:59", "WIB", -450],
["1963-12-31T16:30:00+00:00", "23:30:00", "WIB", -420]
])
}; | random_line_split |
|
expressionengine.py | """
This signature containts test to see if the site is running on ExpressionEngine.
"""
__author__ = "Seth Gottlieb"
__copyright__ = "CM Fieldguide"
__credits__ = ["Seth Gottlieb",]
__license__ = "Unlicense"
__version__ = "0.1"
__maintainer__ = "Seth Gottlieb"
__email__ = "[email protected]"
__status__ = "Experimental"
from cmfieldguide.cmsdetector.signatures import BaseSignature
class Signature(BaseSignature):
| NAME = 'ExpressionEngine'
WEBSITE = 'http://expressionengine.com/'
KNOWN_POSITIVE = 'http://expressionengine.com/'
TECHNOLOGY = 'PHP'
def test_has_ee_login(self, site):
"""
By default, Expression Engine ships with a login page at /admin.php
"""
if site.page_cache[site.url_stem + '/admin.php'].contains_pattern('http://expressionengine.com'):
return 1
else:
return 0
def test_has_css_loader_script(self, site):
"""
ExpressionEngine loads CSS files with a query string off the root of the site like
?css=something.css
"""
if site.home_page.has_matching_tag('link', {'rel':'stylesheet','href': '/\?css=\w+[\.|/]'}):
return 1
else:
return 0 | identifier_body |
|
expressionengine.py | """
This signature containts test to see if the site is running on ExpressionEngine.
"""
__author__ = "Seth Gottlieb"
__copyright__ = "CM Fieldguide"
__credits__ = ["Seth Gottlieb",]
__license__ = "Unlicense"
__version__ = "0.1"
__maintainer__ = "Seth Gottlieb"
__email__ = "[email protected]"
__status__ = "Experimental"
from cmfieldguide.cmsdetector.signatures import BaseSignature
class Signature(BaseSignature):
NAME = 'ExpressionEngine'
WEBSITE = 'http://expressionengine.com/'
KNOWN_POSITIVE = 'http://expressionengine.com/'
TECHNOLOGY = 'PHP'
def test_has_ee_login(self, site):
"""
By default, Expression Engine ships with a login page at /admin.php
"""
if site.page_cache[site.url_stem + '/admin.php'].contains_pattern('http://expressionengine.com'):
return 1
else: | ?css=something.css
"""
if site.home_page.has_matching_tag('link', {'rel':'stylesheet','href': '/\?css=\w+[\.|/]'}):
return 1
else:
return 0 | return 0
def test_has_css_loader_script(self, site):
"""
ExpressionEngine loads CSS files with a query string off the root of the site like | random_line_split |
expressionengine.py | """
This signature containts test to see if the site is running on ExpressionEngine.
"""
__author__ = "Seth Gottlieb"
__copyright__ = "CM Fieldguide"
__credits__ = ["Seth Gottlieb",]
__license__ = "Unlicense"
__version__ = "0.1"
__maintainer__ = "Seth Gottlieb"
__email__ = "[email protected]"
__status__ = "Experimental"
from cmfieldguide.cmsdetector.signatures import BaseSignature
class | (BaseSignature):
NAME = 'ExpressionEngine'
WEBSITE = 'http://expressionengine.com/'
KNOWN_POSITIVE = 'http://expressionengine.com/'
TECHNOLOGY = 'PHP'
def test_has_ee_login(self, site):
"""
By default, Expression Engine ships with a login page at /admin.php
"""
if site.page_cache[site.url_stem + '/admin.php'].contains_pattern('http://expressionengine.com'):
return 1
else:
return 0
def test_has_css_loader_script(self, site):
"""
ExpressionEngine loads CSS files with a query string off the root of the site like
?css=something.css
"""
if site.home_page.has_matching_tag('link', {'rel':'stylesheet','href': '/\?css=\w+[\.|/]'}):
return 1
else:
return 0
| Signature | identifier_name |
expressionengine.py | """
This signature containts test to see if the site is running on ExpressionEngine.
"""
__author__ = "Seth Gottlieb"
__copyright__ = "CM Fieldguide"
__credits__ = ["Seth Gottlieb",]
__license__ = "Unlicense"
__version__ = "0.1"
__maintainer__ = "Seth Gottlieb"
__email__ = "[email protected]"
__status__ = "Experimental"
from cmfieldguide.cmsdetector.signatures import BaseSignature
class Signature(BaseSignature):
NAME = 'ExpressionEngine'
WEBSITE = 'http://expressionengine.com/'
KNOWN_POSITIVE = 'http://expressionengine.com/'
TECHNOLOGY = 'PHP'
def test_has_ee_login(self, site):
"""
By default, Expression Engine ships with a login page at /admin.php
"""
if site.page_cache[site.url_stem + '/admin.php'].contains_pattern('http://expressionengine.com'):
return 1
else:
return 0
def test_has_css_loader_script(self, site):
"""
ExpressionEngine loads CSS files with a query string off the root of the site like
?css=something.css
"""
if site.home_page.has_matching_tag('link', {'rel':'stylesheet','href': '/\?css=\w+[\.|/]'}):
|
else:
return 0
| return 1 | conditional_block |
serv.rs | // Copyright 2021 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fs::File;
use std::io;
use std::net::{IpAddr, Shutdown, SocketAddr, SocketAddrV4, TcpListener, TcpStream};
use std::path::PathBuf;
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use crate::chain;
use crate::chain::txhashset::BitmapChunk;
use crate::core::core;
use crate::core::core::hash::Hash;
use crate::core::core::{OutputIdentifier, Segment, SegmentIdentifier, TxKernel};
use crate::core::global;
use crate::core::pow::Difficulty;
use crate::handshake::Handshake;
use crate::peer::Peer;
use crate::peers::Peers;
use crate::store::PeerStore;
use crate::types::{
Capabilities, ChainAdapter, Error, NetAdapter, P2PConfig, PeerAddr, PeerInfo, ReasonForBan,
TxHashSetRead,
};
use crate::util::secp::pedersen::RangeProof;
use crate::util::StopState;
use chrono::prelude::{DateTime, Utc};
/// P2P server implementation, handling bootstrapping to find and connect to
/// peers, receiving connections from other peers and keep track of all of them.
pub struct Server {
pub config: P2PConfig,
capabilities: Capabilities,
handshake: Arc<Handshake>,
pub peers: Arc<Peers>,
stop_state: Arc<StopState>,
}
// TODO TLS
impl Server {
/// Creates a new idle p2p server with no peers
pub fn new(
db_root: &str,
capabilities: Capabilities,
config: P2PConfig,
adapter: Arc<dyn ChainAdapter>,
genesis: Hash,
stop_state: Arc<StopState>,
) -> Result<Server, Error> {
Ok(Server {
config: config.clone(),
capabilities,
handshake: Arc::new(Handshake::new(genesis, config.clone())),
peers: Arc::new(Peers::new(PeerStore::new(db_root)?, adapter, config)),
stop_state,
})
}
/// Starts a new TCP server and listen to incoming connections. This is a
/// blocking call until the TCP server stops.
pub fn listen(&self) -> Result<(), Error> {
// start TCP listener and handle incoming connections
let addr = SocketAddr::new(self.config.host, self.config.port);
let listener = TcpListener::bind(addr)?;
listener.set_nonblocking(true)?;
let sleep_time = Duration::from_millis(5);
loop {
// Pause peer ingress connection request. Only for tests.
if self.stop_state.is_paused() {
thread::sleep(Duration::from_secs(1));
continue;
}
match listener.accept() {
Ok((stream, peer_addr)) => {
// We want out TCP stream to be in blocking mode.
// The TCP listener is in nonblocking mode so we *must* explicitly
// move the accepted TCP stream into blocking mode (or all kinds of
// bad things can and will happen).
// A nonblocking TCP listener will accept nonblocking TCP streams which
// we do not want.
stream.set_nonblocking(false)?;
let mut peer_addr = PeerAddr(peer_addr);
// attempt to see if it an ipv4-mapped ipv6
// if yes convert to ipv4
if peer_addr.0.is_ipv6() {
if let IpAddr::V6(ipv6) = peer_addr.0.ip() {
if let Some(ipv4) = ipv6.to_ipv4() {
peer_addr = PeerAddr(SocketAddr::V4(SocketAddrV4::new(
ipv4,
peer_addr.0.port(),
)))
}
}
}
if self.check_undesirable(&stream) {
// Shutdown the incoming TCP connection if it is not desired
if let Err(e) = stream.shutdown(Shutdown::Both) {
debug!("Error shutting down conn: {:?}", e);
}
continue;
}
match self.handle_new_peer(stream) {
Err(Error::ConnectionClose) => debug!("shutting down, ignoring a new peer"),
Err(e) => {
debug!("Error accepting peer {}: {:?}", peer_addr.to_string(), e);
let _ = self.peers.add_banned(peer_addr, ReasonForBan::BadHandshake);
}
Ok(_) => {}
}
}
Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
// nothing to do, will retry in next iteration
}
Err(e) => {
debug!("Couldn't establish new client connection: {:?}", e);
}
}
if self.stop_state.is_stopped() {
break;
}
thread::sleep(sleep_time);
}
Ok(())
}
/// Asks the server to connect to a new peer. Directly returns the peer if
/// we're already connected to the provided address.
pub fn connect(&self, addr: PeerAddr) -> Result<Arc<Peer>, Error> {
if self.stop_state.is_stopped() {
return Err(Error::ConnectionClose);
}
if Peer::is_denied(&self.config, addr) {
debug!("connect_peer: peer {} denied, not connecting.", addr);
return Err(Error::ConnectionClose);
}
if global::is_production_mode() {
let hs = self.handshake.clone();
let addrs = hs.addrs.read();
if addrs.contains(&addr) {
debug!("connect: ignore connecting to PeerWithSelf, addr: {}", addr);
return Err(Error::PeerWithSelf);
}
}
if let Some(p) = self.peers.get_connected_peer(addr) {
// if we're already connected to the addr, just return the peer
trace!("connect_peer: already connected {}", addr);
return Ok(p);
}
trace!(
"connect_peer: on {}:{}. connecting to {}",
self.config.host,
self.config.port,
addr
);
match TcpStream::connect_timeout(&addr.0, Duration::from_secs(10)) {
Ok(stream) => {
let addr = SocketAddr::new(self.config.host, self.config.port);
let total_diff = self.peers.total_difficulty()?;
let peer = Peer::connect(
stream,
self.capabilities,
total_diff,
PeerAddr(addr),
&self.handshake,
self.peers.clone(),
)?;
let peer = Arc::new(peer);
self.peers.add_connected(peer.clone())?;
Ok(peer)
}
Err(e) => {
trace!(
"connect_peer: on {}:{}. Could not connect to {}: {:?}",
self.config.host,
self.config.port,
addr,
e
);
Err(Error::Connection(e))
}
}
}
fn handle_new_peer(&self, stream: TcpStream) -> Result<(), Error> {
if self.stop_state.is_stopped() {
return Err(Error::ConnectionClose);
}
let total_diff = self.peers.total_difficulty()?;
// accept the peer and add it to the server map
let peer = Peer::accept(
stream,
self.capabilities,
total_diff,
&self.handshake,
self.peers.clone(),
)?;
self.peers.add_connected(Arc::new(peer))?;
Ok(())
}
/// Checks whether there's any reason we don't want to accept an incoming peer
/// connection. There can be a few of them:
/// 1. Accepting the peer connection would exceed the configured maximum allowed
/// inbound peer count. Note that seed nodes may wish to increase the default
/// value for PEER_LISTENER_BUFFER_COUNT to help with network bootstrapping.
/// A default buffer of 8 peers is allowed to help with network growth.
/// 2. The peer has been previously banned and the ban period hasn't
/// expired yet.
/// 3. We're already connected to a peer at the same IP. While there are
/// many reasons multiple peers can legitimately share identical IP
/// addresses (NAT), network distribution is improved if they choose
/// different sets of peers themselves. In addition, it prevent potential
/// duplicate connections, malicious or not.
fn check_undesirable(&self, stream: &TcpStream) -> bool {
if self.peers.iter().inbound().connected().count() as u32
>= self.config.peer_max_inbound_count() + self.config.peer_listener_buffer_count()
{
debug!("Accepting new connection will exceed peer limit, refusing connection.");
return true;
}
if let Ok(peer_addr) = stream.peer_addr() {
let peer_addr = PeerAddr(peer_addr);
if self.peers.is_banned(peer_addr) {
debug!("Peer {} banned, refusing connection.", peer_addr);
return true;
}
// The call to is_known() can fail due to contention on the peers map.
// If it fails we want to default to refusing the connection.
match self.peers.is_known(peer_addr) {
Ok(true) => {
debug!("Peer {} already known, refusing connection.", peer_addr);
return true;
}
Err(_) => {
error!(
"Peer {} is_known check failed, refusing connection.",
peer_addr
);
return true;
}
_ => (),
}
}
false
}
pub fn stop(&self) {
self.stop_state.stop();
self.peers.stop();
}
/// Pause means: stop all the current peers connection, only for tests.
/// Note:
/// 1. must pause the 'seed' thread also, to avoid the new egress peer connection
/// 2. must pause the 'p2p-server' thread also, to avoid the new ingress peer connection.
pub fn pause(&self) {
self.peers.stop();
}
}
/// A no-op network adapter used for testing.
pub struct DummyAdapter {}
impl ChainAdapter for DummyAdapter {
fn total_difficulty(&self) -> Result<Difficulty, chain::Error> {
Ok(Difficulty::min_dma())
}
fn total_height(&self) -> Result<u64, chain::Error> {
Ok(0)
}
fn | (&self, _h: Hash) -> Option<core::Transaction> {
None
}
fn tx_kernel_received(&self, _h: Hash, _peer_info: &PeerInfo) -> Result<bool, chain::Error> {
Ok(true)
}
fn transaction_received(
&self,
_: core::Transaction,
_stem: bool,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn compact_block_received(
&self,
_cb: core::CompactBlock,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn header_received(
&self,
_bh: core::BlockHeader,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn block_received(
&self,
_: core::Block,
_: &PeerInfo,
_: chain::Options,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn headers_received(
&self,
_: &[core::BlockHeader],
_: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn locate_headers(&self, _: &[Hash]) -> Result<Vec<core::BlockHeader>, chain::Error> {
Ok(vec![])
}
fn get_block(&self, _: Hash, _: &PeerInfo) -> Option<core::Block> {
None
}
fn txhashset_read(&self, _h: Hash) -> Option<TxHashSetRead> {
unimplemented!()
}
fn txhashset_archive_header(&self) -> Result<core::BlockHeader, chain::Error> {
unimplemented!()
}
fn txhashset_receive_ready(&self) -> bool {
false
}
fn txhashset_write(
&self,
_h: Hash,
_txhashset_data: File,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(false)
}
fn txhashset_download_update(
&self,
_start_time: DateTime<Utc>,
_downloaded_size: u64,
_total_size: u64,
) -> bool {
false
}
fn get_tmp_dir(&self) -> PathBuf {
unimplemented!()
}
fn get_tmpfile_pathname(&self, _tmpfile_name: String) -> PathBuf {
unimplemented!()
}
fn get_kernel_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<Segment<TxKernel>, chain::Error> {
unimplemented!()
}
fn get_bitmap_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<(Segment<BitmapChunk>, Hash), chain::Error> {
unimplemented!()
}
fn get_output_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<(Segment<OutputIdentifier>, Hash), chain::Error> {
unimplemented!()
}
fn get_rangeproof_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<Segment<RangeProof>, chain::Error> {
unimplemented!()
}
}
impl NetAdapter for DummyAdapter {
fn find_peer_addrs(&self, _: Capabilities) -> Vec<PeerAddr> {
vec![]
}
fn peer_addrs_received(&self, _: Vec<PeerAddr>) {}
fn peer_difficulty(&self, _: PeerAddr, _: Difficulty, _: u64) {}
fn is_banned(&self, _: PeerAddr) -> bool {
false
}
}
| get_transaction | identifier_name |
serv.rs | // Copyright 2021 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fs::File;
use std::io;
use std::net::{IpAddr, Shutdown, SocketAddr, SocketAddrV4, TcpListener, TcpStream};
use std::path::PathBuf;
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use crate::chain;
use crate::chain::txhashset::BitmapChunk;
use crate::core::core;
use crate::core::core::hash::Hash;
use crate::core::core::{OutputIdentifier, Segment, SegmentIdentifier, TxKernel};
use crate::core::global;
use crate::core::pow::Difficulty;
use crate::handshake::Handshake;
use crate::peer::Peer;
use crate::peers::Peers;
use crate::store::PeerStore;
use crate::types::{
Capabilities, ChainAdapter, Error, NetAdapter, P2PConfig, PeerAddr, PeerInfo, ReasonForBan,
TxHashSetRead,
};
use crate::util::secp::pedersen::RangeProof;
use crate::util::StopState;
use chrono::prelude::{DateTime, Utc};
/// P2P server implementation, handling bootstrapping to find and connect to
/// peers, receiving connections from other peers and keep track of all of them.
pub struct Server {
pub config: P2PConfig,
capabilities: Capabilities,
handshake: Arc<Handshake>,
pub peers: Arc<Peers>,
stop_state: Arc<StopState>,
}
// TODO TLS
impl Server {
/// Creates a new idle p2p server with no peers
pub fn new(
db_root: &str,
capabilities: Capabilities,
config: P2PConfig,
adapter: Arc<dyn ChainAdapter>,
genesis: Hash,
stop_state: Arc<StopState>,
) -> Result<Server, Error> {
Ok(Server {
config: config.clone(),
capabilities,
handshake: Arc::new(Handshake::new(genesis, config.clone())),
peers: Arc::new(Peers::new(PeerStore::new(db_root)?, adapter, config)),
stop_state,
})
}
/// Starts a new TCP server and listen to incoming connections. This is a
/// blocking call until the TCP server stops.
pub fn listen(&self) -> Result<(), Error> {
// start TCP listener and handle incoming connections
let addr = SocketAddr::new(self.config.host, self.config.port);
let listener = TcpListener::bind(addr)?;
listener.set_nonblocking(true)?;
let sleep_time = Duration::from_millis(5);
loop {
// Pause peer ingress connection request. Only for tests.
if self.stop_state.is_paused() {
thread::sleep(Duration::from_secs(1));
continue;
}
match listener.accept() {
Ok((stream, peer_addr)) => {
// We want out TCP stream to be in blocking mode.
// The TCP listener is in nonblocking mode so we *must* explicitly
// move the accepted TCP stream into blocking mode (or all kinds of
// bad things can and will happen).
// A nonblocking TCP listener will accept nonblocking TCP streams which
// we do not want.
stream.set_nonblocking(false)?;
let mut peer_addr = PeerAddr(peer_addr);
// attempt to see if it an ipv4-mapped ipv6
// if yes convert to ipv4
if peer_addr.0.is_ipv6() {
if let IpAddr::V6(ipv6) = peer_addr.0.ip() {
if let Some(ipv4) = ipv6.to_ipv4() {
peer_addr = PeerAddr(SocketAddr::V4(SocketAddrV4::new(
ipv4,
peer_addr.0.port(),
)))
}
}
}
if self.check_undesirable(&stream) {
// Shutdown the incoming TCP connection if it is not desired
if let Err(e) = stream.shutdown(Shutdown::Both) {
debug!("Error shutting down conn: {:?}", e);
}
continue;
}
match self.handle_new_peer(stream) {
Err(Error::ConnectionClose) => debug!("shutting down, ignoring a new peer"),
Err(e) => {
debug!("Error accepting peer {}: {:?}", peer_addr.to_string(), e);
let _ = self.peers.add_banned(peer_addr, ReasonForBan::BadHandshake);
}
Ok(_) => {}
}
}
Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
// nothing to do, will retry in next iteration
}
Err(e) => {
debug!("Couldn't establish new client connection: {:?}", e);
}
}
if self.stop_state.is_stopped() {
break;
}
thread::sleep(sleep_time);
}
Ok(())
}
/// Asks the server to connect to a new peer. Directly returns the peer if
/// we're already connected to the provided address.
pub fn connect(&self, addr: PeerAddr) -> Result<Arc<Peer>, Error> {
if self.stop_state.is_stopped() {
return Err(Error::ConnectionClose);
}
if Peer::is_denied(&self.config, addr) {
debug!("connect_peer: peer {} denied, not connecting.", addr);
return Err(Error::ConnectionClose);
}
if global::is_production_mode() {
let hs = self.handshake.clone();
let addrs = hs.addrs.read();
if addrs.contains(&addr) {
debug!("connect: ignore connecting to PeerWithSelf, addr: {}", addr);
return Err(Error::PeerWithSelf);
}
}
if let Some(p) = self.peers.get_connected_peer(addr) {
// if we're already connected to the addr, just return the peer
trace!("connect_peer: already connected {}", addr);
return Ok(p);
}
trace!(
"connect_peer: on {}:{}. connecting to {}",
self.config.host,
self.config.port,
addr
);
match TcpStream::connect_timeout(&addr.0, Duration::from_secs(10)) {
Ok(stream) => {
let addr = SocketAddr::new(self.config.host, self.config.port);
let total_diff = self.peers.total_difficulty()?;
let peer = Peer::connect(
stream,
self.capabilities,
total_diff,
PeerAddr(addr),
&self.handshake,
self.peers.clone(),
)?;
let peer = Arc::new(peer);
self.peers.add_connected(peer.clone())?;
Ok(peer)
}
Err(e) => {
trace!(
"connect_peer: on {}:{}. Could not connect to {}: {:?}",
self.config.host,
self.config.port,
addr,
e
);
Err(Error::Connection(e))
}
}
}
fn handle_new_peer(&self, stream: TcpStream) -> Result<(), Error> {
if self.stop_state.is_stopped() {
return Err(Error::ConnectionClose);
}
let total_diff = self.peers.total_difficulty()?;
// accept the peer and add it to the server map
let peer = Peer::accept(
stream,
self.capabilities,
total_diff,
&self.handshake,
self.peers.clone(),
)?;
self.peers.add_connected(Arc::new(peer))?;
Ok(())
}
/// Checks whether there's any reason we don't want to accept an incoming peer
/// connection. There can be a few of them:
/// 1. Accepting the peer connection would exceed the configured maximum allowed
/// inbound peer count. Note that seed nodes may wish to increase the default
/// value for PEER_LISTENER_BUFFER_COUNT to help with network bootstrapping.
/// A default buffer of 8 peers is allowed to help with network growth.
/// 2. The peer has been previously banned and the ban period hasn't
/// expired yet.
/// 3. We're already connected to a peer at the same IP. While there are
/// many reasons multiple peers can legitimately share identical IP
/// addresses (NAT), network distribution is improved if they choose
/// different sets of peers themselves. In addition, it prevent potential
/// duplicate connections, malicious or not.
fn check_undesirable(&self, stream: &TcpStream) -> bool {
if self.peers.iter().inbound().connected().count() as u32
>= self.config.peer_max_inbound_count() + self.config.peer_listener_buffer_count()
{
debug!("Accepting new connection will exceed peer limit, refusing connection.");
return true;
}
if let Ok(peer_addr) = stream.peer_addr() {
let peer_addr = PeerAddr(peer_addr);
if self.peers.is_banned(peer_addr) {
debug!("Peer {} banned, refusing connection.", peer_addr);
return true;
}
// The call to is_known() can fail due to contention on the peers map.
// If it fails we want to default to refusing the connection.
match self.peers.is_known(peer_addr) {
Ok(true) => {
debug!("Peer {} already known, refusing connection.", peer_addr);
return true;
}
Err(_) => {
error!(
"Peer {} is_known check failed, refusing connection.",
peer_addr
);
return true;
}
_ => (),
}
}
false
}
pub fn stop(&self) {
self.stop_state.stop();
self.peers.stop();
}
/// Pause means: stop all the current peers connection, only for tests.
/// Note:
/// 1. must pause the 'seed' thread also, to avoid the new egress peer connection
/// 2. must pause the 'p2p-server' thread also, to avoid the new ingress peer connection.
pub fn pause(&self) {
self.peers.stop();
}
}
/// A no-op network adapter used for testing.
pub struct DummyAdapter {}
impl ChainAdapter for DummyAdapter {
fn total_difficulty(&self) -> Result<Difficulty, chain::Error> {
Ok(Difficulty::min_dma())
}
fn total_height(&self) -> Result<u64, chain::Error> {
Ok(0)
}
fn get_transaction(&self, _h: Hash) -> Option<core::Transaction> {
None
}
fn tx_kernel_received(&self, _h: Hash, _peer_info: &PeerInfo) -> Result<bool, chain::Error> {
Ok(true)
}
fn transaction_received(
&self,
_: core::Transaction,
_stem: bool,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn compact_block_received(
&self,
_cb: core::CompactBlock,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn header_received(
&self,
_bh: core::BlockHeader,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn block_received(
&self,
_: core::Block,
_: &PeerInfo,
_: chain::Options,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn headers_received(
&self,
_: &[core::BlockHeader],
_: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn locate_headers(&self, _: &[Hash]) -> Result<Vec<core::BlockHeader>, chain::Error> {
Ok(vec![])
}
fn get_block(&self, _: Hash, _: &PeerInfo) -> Option<core::Block> {
None
}
fn txhashset_read(&self, _h: Hash) -> Option<TxHashSetRead> {
unimplemented!()
}
fn txhashset_archive_header(&self) -> Result<core::BlockHeader, chain::Error> {
unimplemented!()
}
fn txhashset_receive_ready(&self) -> bool {
false
}
fn txhashset_write(
&self,
_h: Hash,
_txhashset_data: File,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(false)
}
fn txhashset_download_update(
&self,
_start_time: DateTime<Utc>,
_downloaded_size: u64,
_total_size: u64,
) -> bool {
false
}
fn get_tmp_dir(&self) -> PathBuf {
unimplemented!()
}
fn get_tmpfile_pathname(&self, _tmpfile_name: String) -> PathBuf {
unimplemented!()
}
fn get_kernel_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<Segment<TxKernel>, chain::Error> {
unimplemented!()
}
fn get_bitmap_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<(Segment<BitmapChunk>, Hash), chain::Error> {
unimplemented!()
}
fn get_output_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<(Segment<OutputIdentifier>, Hash), chain::Error> {
unimplemented!()
}
fn get_rangeproof_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<Segment<RangeProof>, chain::Error> {
unimplemented!()
}
}
impl NetAdapter for DummyAdapter {
fn find_peer_addrs(&self, _: Capabilities) -> Vec<PeerAddr> {
vec![]
}
fn peer_addrs_received(&self, _: Vec<PeerAddr>) {}
fn peer_difficulty(&self, _: PeerAddr, _: Difficulty, _: u64) {}
fn is_banned(&self, _: PeerAddr) -> bool |
}
| {
false
} | identifier_body |
serv.rs | // Copyright 2021 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fs::File;
use std::io;
use std::net::{IpAddr, Shutdown, SocketAddr, SocketAddrV4, TcpListener, TcpStream};
use std::path::PathBuf;
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use crate::chain;
use crate::chain::txhashset::BitmapChunk;
use crate::core::core;
use crate::core::core::hash::Hash;
use crate::core::core::{OutputIdentifier, Segment, SegmentIdentifier, TxKernel};
use crate::core::global;
use crate::core::pow::Difficulty;
use crate::handshake::Handshake;
use crate::peer::Peer;
use crate::peers::Peers;
use crate::store::PeerStore;
use crate::types::{
Capabilities, ChainAdapter, Error, NetAdapter, P2PConfig, PeerAddr, PeerInfo, ReasonForBan,
TxHashSetRead,
};
use crate::util::secp::pedersen::RangeProof;
use crate::util::StopState;
use chrono::prelude::{DateTime, Utc};
/// P2P server implementation, handling bootstrapping to find and connect to
/// peers, receiving connections from other peers and keep track of all of them.
pub struct Server {
pub config: P2PConfig,
capabilities: Capabilities,
handshake: Arc<Handshake>,
pub peers: Arc<Peers>,
stop_state: Arc<StopState>,
}
// TODO TLS
impl Server {
/// Creates a new idle p2p server with no peers
pub fn new(
db_root: &str,
capabilities: Capabilities,
config: P2PConfig,
adapter: Arc<dyn ChainAdapter>,
genesis: Hash,
stop_state: Arc<StopState>, | peers: Arc::new(Peers::new(PeerStore::new(db_root)?, adapter, config)),
stop_state,
})
}
/// Starts a new TCP server and listen to incoming connections. This is a
/// blocking call until the TCP server stops.
pub fn listen(&self) -> Result<(), Error> {
// start TCP listener and handle incoming connections
let addr = SocketAddr::new(self.config.host, self.config.port);
let listener = TcpListener::bind(addr)?;
listener.set_nonblocking(true)?;
let sleep_time = Duration::from_millis(5);
loop {
// Pause peer ingress connection request. Only for tests.
if self.stop_state.is_paused() {
thread::sleep(Duration::from_secs(1));
continue;
}
match listener.accept() {
Ok((stream, peer_addr)) => {
// We want out TCP stream to be in blocking mode.
// The TCP listener is in nonblocking mode so we *must* explicitly
// move the accepted TCP stream into blocking mode (or all kinds of
// bad things can and will happen).
// A nonblocking TCP listener will accept nonblocking TCP streams which
// we do not want.
stream.set_nonblocking(false)?;
let mut peer_addr = PeerAddr(peer_addr);
// attempt to see if it an ipv4-mapped ipv6
// if yes convert to ipv4
if peer_addr.0.is_ipv6() {
if let IpAddr::V6(ipv6) = peer_addr.0.ip() {
if let Some(ipv4) = ipv6.to_ipv4() {
peer_addr = PeerAddr(SocketAddr::V4(SocketAddrV4::new(
ipv4,
peer_addr.0.port(),
)))
}
}
}
if self.check_undesirable(&stream) {
// Shutdown the incoming TCP connection if it is not desired
if let Err(e) = stream.shutdown(Shutdown::Both) {
debug!("Error shutting down conn: {:?}", e);
}
continue;
}
match self.handle_new_peer(stream) {
Err(Error::ConnectionClose) => debug!("shutting down, ignoring a new peer"),
Err(e) => {
debug!("Error accepting peer {}: {:?}", peer_addr.to_string(), e);
let _ = self.peers.add_banned(peer_addr, ReasonForBan::BadHandshake);
}
Ok(_) => {}
}
}
Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
// nothing to do, will retry in next iteration
}
Err(e) => {
debug!("Couldn't establish new client connection: {:?}", e);
}
}
if self.stop_state.is_stopped() {
break;
}
thread::sleep(sleep_time);
}
Ok(())
}
/// Asks the server to connect to a new peer. Directly returns the peer if
/// we're already connected to the provided address.
pub fn connect(&self, addr: PeerAddr) -> Result<Arc<Peer>, Error> {
if self.stop_state.is_stopped() {
return Err(Error::ConnectionClose);
}
if Peer::is_denied(&self.config, addr) {
debug!("connect_peer: peer {} denied, not connecting.", addr);
return Err(Error::ConnectionClose);
}
if global::is_production_mode() {
let hs = self.handshake.clone();
let addrs = hs.addrs.read();
if addrs.contains(&addr) {
debug!("connect: ignore connecting to PeerWithSelf, addr: {}", addr);
return Err(Error::PeerWithSelf);
}
}
if let Some(p) = self.peers.get_connected_peer(addr) {
// if we're already connected to the addr, just return the peer
trace!("connect_peer: already connected {}", addr);
return Ok(p);
}
trace!(
"connect_peer: on {}:{}. connecting to {}",
self.config.host,
self.config.port,
addr
);
match TcpStream::connect_timeout(&addr.0, Duration::from_secs(10)) {
Ok(stream) => {
let addr = SocketAddr::new(self.config.host, self.config.port);
let total_diff = self.peers.total_difficulty()?;
let peer = Peer::connect(
stream,
self.capabilities,
total_diff,
PeerAddr(addr),
&self.handshake,
self.peers.clone(),
)?;
let peer = Arc::new(peer);
self.peers.add_connected(peer.clone())?;
Ok(peer)
}
Err(e) => {
trace!(
"connect_peer: on {}:{}. Could not connect to {}: {:?}",
self.config.host,
self.config.port,
addr,
e
);
Err(Error::Connection(e))
}
}
}
fn handle_new_peer(&self, stream: TcpStream) -> Result<(), Error> {
if self.stop_state.is_stopped() {
return Err(Error::ConnectionClose);
}
let total_diff = self.peers.total_difficulty()?;
// accept the peer and add it to the server map
let peer = Peer::accept(
stream,
self.capabilities,
total_diff,
&self.handshake,
self.peers.clone(),
)?;
self.peers.add_connected(Arc::new(peer))?;
Ok(())
}
/// Checks whether there's any reason we don't want to accept an incoming peer
/// connection. There can be a few of them:
/// 1. Accepting the peer connection would exceed the configured maximum allowed
/// inbound peer count. Note that seed nodes may wish to increase the default
/// value for PEER_LISTENER_BUFFER_COUNT to help with network bootstrapping.
/// A default buffer of 8 peers is allowed to help with network growth.
/// 2. The peer has been previously banned and the ban period hasn't
/// expired yet.
/// 3. We're already connected to a peer at the same IP. While there are
/// many reasons multiple peers can legitimately share identical IP
/// addresses (NAT), network distribution is improved if they choose
/// different sets of peers themselves. In addition, it prevent potential
/// duplicate connections, malicious or not.
fn check_undesirable(&self, stream: &TcpStream) -> bool {
if self.peers.iter().inbound().connected().count() as u32
>= self.config.peer_max_inbound_count() + self.config.peer_listener_buffer_count()
{
debug!("Accepting new connection will exceed peer limit, refusing connection.");
return true;
}
if let Ok(peer_addr) = stream.peer_addr() {
let peer_addr = PeerAddr(peer_addr);
if self.peers.is_banned(peer_addr) {
debug!("Peer {} banned, refusing connection.", peer_addr);
return true;
}
// The call to is_known() can fail due to contention on the peers map.
// If it fails we want to default to refusing the connection.
match self.peers.is_known(peer_addr) {
Ok(true) => {
debug!("Peer {} already known, refusing connection.", peer_addr);
return true;
}
Err(_) => {
error!(
"Peer {} is_known check failed, refusing connection.",
peer_addr
);
return true;
}
_ => (),
}
}
false
}
pub fn stop(&self) {
self.stop_state.stop();
self.peers.stop();
}
/// Pause means: stop all the current peers connection, only for tests.
/// Note:
/// 1. must pause the 'seed' thread also, to avoid the new egress peer connection
/// 2. must pause the 'p2p-server' thread also, to avoid the new ingress peer connection.
pub fn pause(&self) {
self.peers.stop();
}
}
/// A no-op network adapter used for testing.
pub struct DummyAdapter {}
impl ChainAdapter for DummyAdapter {
fn total_difficulty(&self) -> Result<Difficulty, chain::Error> {
Ok(Difficulty::min_dma())
}
fn total_height(&self) -> Result<u64, chain::Error> {
Ok(0)
}
fn get_transaction(&self, _h: Hash) -> Option<core::Transaction> {
None
}
fn tx_kernel_received(&self, _h: Hash, _peer_info: &PeerInfo) -> Result<bool, chain::Error> {
Ok(true)
}
fn transaction_received(
&self,
_: core::Transaction,
_stem: bool,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn compact_block_received(
&self,
_cb: core::CompactBlock,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn header_received(
&self,
_bh: core::BlockHeader,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn block_received(
&self,
_: core::Block,
_: &PeerInfo,
_: chain::Options,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn headers_received(
&self,
_: &[core::BlockHeader],
_: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn locate_headers(&self, _: &[Hash]) -> Result<Vec<core::BlockHeader>, chain::Error> {
Ok(vec![])
}
fn get_block(&self, _: Hash, _: &PeerInfo) -> Option<core::Block> {
None
}
fn txhashset_read(&self, _h: Hash) -> Option<TxHashSetRead> {
unimplemented!()
}
fn txhashset_archive_header(&self) -> Result<core::BlockHeader, chain::Error> {
unimplemented!()
}
fn txhashset_receive_ready(&self) -> bool {
false
}
fn txhashset_write(
&self,
_h: Hash,
_txhashset_data: File,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(false)
}
fn txhashset_download_update(
&self,
_start_time: DateTime<Utc>,
_downloaded_size: u64,
_total_size: u64,
) -> bool {
false
}
fn get_tmp_dir(&self) -> PathBuf {
unimplemented!()
}
fn get_tmpfile_pathname(&self, _tmpfile_name: String) -> PathBuf {
unimplemented!()
}
fn get_kernel_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<Segment<TxKernel>, chain::Error> {
unimplemented!()
}
fn get_bitmap_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<(Segment<BitmapChunk>, Hash), chain::Error> {
unimplemented!()
}
fn get_output_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<(Segment<OutputIdentifier>, Hash), chain::Error> {
unimplemented!()
}
fn get_rangeproof_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<Segment<RangeProof>, chain::Error> {
unimplemented!()
}
}
impl NetAdapter for DummyAdapter {
fn find_peer_addrs(&self, _: Capabilities) -> Vec<PeerAddr> {
vec![]
}
fn peer_addrs_received(&self, _: Vec<PeerAddr>) {}
fn peer_difficulty(&self, _: PeerAddr, _: Difficulty, _: u64) {}
fn is_banned(&self, _: PeerAddr) -> bool {
false
}
} | ) -> Result<Server, Error> {
Ok(Server {
config: config.clone(),
capabilities,
handshake: Arc::new(Handshake::new(genesis, config.clone())), | random_line_split |
serv.rs | // Copyright 2021 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fs::File;
use std::io;
use std::net::{IpAddr, Shutdown, SocketAddr, SocketAddrV4, TcpListener, TcpStream};
use std::path::PathBuf;
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use crate::chain;
use crate::chain::txhashset::BitmapChunk;
use crate::core::core;
use crate::core::core::hash::Hash;
use crate::core::core::{OutputIdentifier, Segment, SegmentIdentifier, TxKernel};
use crate::core::global;
use crate::core::pow::Difficulty;
use crate::handshake::Handshake;
use crate::peer::Peer;
use crate::peers::Peers;
use crate::store::PeerStore;
use crate::types::{
Capabilities, ChainAdapter, Error, NetAdapter, P2PConfig, PeerAddr, PeerInfo, ReasonForBan,
TxHashSetRead,
};
use crate::util::secp::pedersen::RangeProof;
use crate::util::StopState;
use chrono::prelude::{DateTime, Utc};
/// P2P server implementation, handling bootstrapping to find and connect to
/// peers, receiving connections from other peers and keep track of all of them.
pub struct Server {
pub config: P2PConfig,
capabilities: Capabilities,
handshake: Arc<Handshake>,
pub peers: Arc<Peers>,
stop_state: Arc<StopState>,
}
// TODO TLS
impl Server {
/// Creates a new idle p2p server with no peers
pub fn new(
db_root: &str,
capabilities: Capabilities,
config: P2PConfig,
adapter: Arc<dyn ChainAdapter>,
genesis: Hash,
stop_state: Arc<StopState>,
) -> Result<Server, Error> {
Ok(Server {
config: config.clone(),
capabilities,
handshake: Arc::new(Handshake::new(genesis, config.clone())),
peers: Arc::new(Peers::new(PeerStore::new(db_root)?, adapter, config)),
stop_state,
})
}
/// Starts a new TCP server and listen to incoming connections. This is a
/// blocking call until the TCP server stops.
pub fn listen(&self) -> Result<(), Error> {
// start TCP listener and handle incoming connections
let addr = SocketAddr::new(self.config.host, self.config.port);
let listener = TcpListener::bind(addr)?;
listener.set_nonblocking(true)?;
let sleep_time = Duration::from_millis(5);
loop {
// Pause peer ingress connection request. Only for tests.
if self.stop_state.is_paused() {
thread::sleep(Duration::from_secs(1));
continue;
}
match listener.accept() {
Ok((stream, peer_addr)) => {
// We want out TCP stream to be in blocking mode.
// The TCP listener is in nonblocking mode so we *must* explicitly
// move the accepted TCP stream into blocking mode (or all kinds of
// bad things can and will happen).
// A nonblocking TCP listener will accept nonblocking TCP streams which
// we do not want.
stream.set_nonblocking(false)?;
let mut peer_addr = PeerAddr(peer_addr);
// attempt to see if it an ipv4-mapped ipv6
// if yes convert to ipv4
if peer_addr.0.is_ipv6() {
if let IpAddr::V6(ipv6) = peer_addr.0.ip() {
if let Some(ipv4) = ipv6.to_ipv4() {
peer_addr = PeerAddr(SocketAddr::V4(SocketAddrV4::new(
ipv4,
peer_addr.0.port(),
)))
}
}
}
if self.check_undesirable(&stream) {
// Shutdown the incoming TCP connection if it is not desired
if let Err(e) = stream.shutdown(Shutdown::Both) {
debug!("Error shutting down conn: {:?}", e);
}
continue;
}
match self.handle_new_peer(stream) {
Err(Error::ConnectionClose) => debug!("shutting down, ignoring a new peer"),
Err(e) => {
debug!("Error accepting peer {}: {:?}", peer_addr.to_string(), e);
let _ = self.peers.add_banned(peer_addr, ReasonForBan::BadHandshake);
}
Ok(_) => {}
}
}
Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
// nothing to do, will retry in next iteration
}
Err(e) => {
debug!("Couldn't establish new client connection: {:?}", e);
}
}
if self.stop_state.is_stopped() {
break;
}
thread::sleep(sleep_time);
}
Ok(())
}
/// Asks the server to connect to a new peer. Directly returns the peer if
/// we're already connected to the provided address.
pub fn connect(&self, addr: PeerAddr) -> Result<Arc<Peer>, Error> {
if self.stop_state.is_stopped() {
return Err(Error::ConnectionClose);
}
if Peer::is_denied(&self.config, addr) {
debug!("connect_peer: peer {} denied, not connecting.", addr);
return Err(Error::ConnectionClose);
}
if global::is_production_mode() |
if let Some(p) = self.peers.get_connected_peer(addr) {
// if we're already connected to the addr, just return the peer
trace!("connect_peer: already connected {}", addr);
return Ok(p);
}
trace!(
"connect_peer: on {}:{}. connecting to {}",
self.config.host,
self.config.port,
addr
);
match TcpStream::connect_timeout(&addr.0, Duration::from_secs(10)) {
Ok(stream) => {
let addr = SocketAddr::new(self.config.host, self.config.port);
let total_diff = self.peers.total_difficulty()?;
let peer = Peer::connect(
stream,
self.capabilities,
total_diff,
PeerAddr(addr),
&self.handshake,
self.peers.clone(),
)?;
let peer = Arc::new(peer);
self.peers.add_connected(peer.clone())?;
Ok(peer)
}
Err(e) => {
trace!(
"connect_peer: on {}:{}. Could not connect to {}: {:?}",
self.config.host,
self.config.port,
addr,
e
);
Err(Error::Connection(e))
}
}
}
fn handle_new_peer(&self, stream: TcpStream) -> Result<(), Error> {
if self.stop_state.is_stopped() {
return Err(Error::ConnectionClose);
}
let total_diff = self.peers.total_difficulty()?;
// accept the peer and add it to the server map
let peer = Peer::accept(
stream,
self.capabilities,
total_diff,
&self.handshake,
self.peers.clone(),
)?;
self.peers.add_connected(Arc::new(peer))?;
Ok(())
}
/// Checks whether there's any reason we don't want to accept an incoming peer
/// connection. There can be a few of them:
/// 1. Accepting the peer connection would exceed the configured maximum allowed
/// inbound peer count. Note that seed nodes may wish to increase the default
/// value for PEER_LISTENER_BUFFER_COUNT to help with network bootstrapping.
/// A default buffer of 8 peers is allowed to help with network growth.
/// 2. The peer has been previously banned and the ban period hasn't
/// expired yet.
/// 3. We're already connected to a peer at the same IP. While there are
/// many reasons multiple peers can legitimately share identical IP
/// addresses (NAT), network distribution is improved if they choose
/// different sets of peers themselves. In addition, it prevent potential
/// duplicate connections, malicious or not.
fn check_undesirable(&self, stream: &TcpStream) -> bool {
if self.peers.iter().inbound().connected().count() as u32
>= self.config.peer_max_inbound_count() + self.config.peer_listener_buffer_count()
{
debug!("Accepting new connection will exceed peer limit, refusing connection.");
return true;
}
if let Ok(peer_addr) = stream.peer_addr() {
let peer_addr = PeerAddr(peer_addr);
if self.peers.is_banned(peer_addr) {
debug!("Peer {} banned, refusing connection.", peer_addr);
return true;
}
// The call to is_known() can fail due to contention on the peers map.
// If it fails we want to default to refusing the connection.
match self.peers.is_known(peer_addr) {
Ok(true) => {
debug!("Peer {} already known, refusing connection.", peer_addr);
return true;
}
Err(_) => {
error!(
"Peer {} is_known check failed, refusing connection.",
peer_addr
);
return true;
}
_ => (),
}
}
false
}
pub fn stop(&self) {
self.stop_state.stop();
self.peers.stop();
}
/// Pause means: stop all the current peers connection, only for tests.
/// Note:
/// 1. must pause the 'seed' thread also, to avoid the new egress peer connection
/// 2. must pause the 'p2p-server' thread also, to avoid the new ingress peer connection.
pub fn pause(&self) {
self.peers.stop();
}
}
/// A no-op network adapter used for testing.
pub struct DummyAdapter {}
impl ChainAdapter for DummyAdapter {
fn total_difficulty(&self) -> Result<Difficulty, chain::Error> {
Ok(Difficulty::min_dma())
}
fn total_height(&self) -> Result<u64, chain::Error> {
Ok(0)
}
fn get_transaction(&self, _h: Hash) -> Option<core::Transaction> {
None
}
fn tx_kernel_received(&self, _h: Hash, _peer_info: &PeerInfo) -> Result<bool, chain::Error> {
Ok(true)
}
fn transaction_received(
&self,
_: core::Transaction,
_stem: bool,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn compact_block_received(
&self,
_cb: core::CompactBlock,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn header_received(
&self,
_bh: core::BlockHeader,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn block_received(
&self,
_: core::Block,
_: &PeerInfo,
_: chain::Options,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn headers_received(
&self,
_: &[core::BlockHeader],
_: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(true)
}
fn locate_headers(&self, _: &[Hash]) -> Result<Vec<core::BlockHeader>, chain::Error> {
Ok(vec![])
}
fn get_block(&self, _: Hash, _: &PeerInfo) -> Option<core::Block> {
None
}
fn txhashset_read(&self, _h: Hash) -> Option<TxHashSetRead> {
unimplemented!()
}
fn txhashset_archive_header(&self) -> Result<core::BlockHeader, chain::Error> {
unimplemented!()
}
fn txhashset_receive_ready(&self) -> bool {
false
}
fn txhashset_write(
&self,
_h: Hash,
_txhashset_data: File,
_peer_info: &PeerInfo,
) -> Result<bool, chain::Error> {
Ok(false)
}
fn txhashset_download_update(
&self,
_start_time: DateTime<Utc>,
_downloaded_size: u64,
_total_size: u64,
) -> bool {
false
}
fn get_tmp_dir(&self) -> PathBuf {
unimplemented!()
}
fn get_tmpfile_pathname(&self, _tmpfile_name: String) -> PathBuf {
unimplemented!()
}
fn get_kernel_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<Segment<TxKernel>, chain::Error> {
unimplemented!()
}
fn get_bitmap_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<(Segment<BitmapChunk>, Hash), chain::Error> {
unimplemented!()
}
fn get_output_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<(Segment<OutputIdentifier>, Hash), chain::Error> {
unimplemented!()
}
fn get_rangeproof_segment(
&self,
_hash: Hash,
_id: SegmentIdentifier,
) -> Result<Segment<RangeProof>, chain::Error> {
unimplemented!()
}
}
impl NetAdapter for DummyAdapter {
fn find_peer_addrs(&self, _: Capabilities) -> Vec<PeerAddr> {
vec![]
}
fn peer_addrs_received(&self, _: Vec<PeerAddr>) {}
fn peer_difficulty(&self, _: PeerAddr, _: Difficulty, _: u64) {}
fn is_banned(&self, _: PeerAddr) -> bool {
false
}
}
| {
let hs = self.handshake.clone();
let addrs = hs.addrs.read();
if addrs.contains(&addr) {
debug!("connect: ignore connecting to PeerWithSelf, addr: {}", addr);
return Err(Error::PeerWithSelf);
}
} | conditional_block |
DefaultProjectKey.tsx | /*
* SonarQube
* Copyright (C) 2009-2022 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of | *
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
import * as React from 'react';
import { Component } from '../../../types/types';
import CodeSnippet from '../../common/CodeSnippet';
import SentenceWithFilename from './SentenceWithFilename';
export interface DefaultProjectKeyProps {
component: Component;
}
const sonarProjectSnippet = (key: string) => `sonar.projectKey=${key}`;
export default function DefaultProjectKey(props: DefaultProjectKeyProps) {
const { component } = props;
return (
<li className="abs-width-600">
<SentenceWithFilename
filename="sonar-project.properties"
translationKey="onboarding.tutorial.other.project_key"
/>
<CodeSnippet snippet={sonarProjectSnippet(component.key)} />
</li>
);
} | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details. | random_line_split |
DefaultProjectKey.tsx | /*
* SonarQube
* Copyright (C) 2009-2022 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
import * as React from 'react';
import { Component } from '../../../types/types';
import CodeSnippet from '../../common/CodeSnippet';
import SentenceWithFilename from './SentenceWithFilename';
export interface DefaultProjectKeyProps {
component: Component;
}
const sonarProjectSnippet = (key: string) => `sonar.projectKey=${key}`;
export default function DefaultProjectKey(props: DefaultProjectKeyProps) | {
const { component } = props;
return (
<li className="abs-width-600">
<SentenceWithFilename
filename="sonar-project.properties"
translationKey="onboarding.tutorial.other.project_key"
/>
<CodeSnippet snippet={sonarProjectSnippet(component.key)} />
</li>
);
} | identifier_body |
|
DefaultProjectKey.tsx | /*
* SonarQube
* Copyright (C) 2009-2022 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
import * as React from 'react';
import { Component } from '../../../types/types';
import CodeSnippet from '../../common/CodeSnippet';
import SentenceWithFilename from './SentenceWithFilename';
export interface DefaultProjectKeyProps {
component: Component;
}
const sonarProjectSnippet = (key: string) => `sonar.projectKey=${key}`;
export default function | (props: DefaultProjectKeyProps) {
const { component } = props;
return (
<li className="abs-width-600">
<SentenceWithFilename
filename="sonar-project.properties"
translationKey="onboarding.tutorial.other.project_key"
/>
<CodeSnippet snippet={sonarProjectSnippet(component.key)} />
</li>
);
}
| DefaultProjectKey | identifier_name |
lib.rs | extern crate regex;
macro_rules! regex(
($s:expr) => (regex::Regex::new($s).unwrap());
);
pub fn is_valid(kennitala : &str) -> bool {
let constants = [3, 2, 7, 6, 5, 4, 3, 2];
let re = regex!(r"^\d{6}-?\d{4}$");
if re.is_match(kennitala) |
false
}
| {
let sanitized = kennitala.replace("-","");
let check_digit = (sanitized.as_bytes()[8] as char).to_digit(10).unwrap();
let c = constants
.iter()
.zip(sanitized.bytes())
.fold(0,
|sum : u32, (x, y) : (&u32, u8)|
sum + x * (y as char).to_digit(10).unwrap()
);
println!("check digit: {0}", check_digit);
if 11 - (c % 11) == check_digit {
return true
}
} | conditional_block |
lib.rs | extern crate regex;
macro_rules! regex(
($s:expr) => (regex::Regex::new($s).unwrap());
);
pub fn is_valid(kennitala : &str) -> bool {
let constants = [3, 2, 7, 6, 5, 4, 3, 2];
let re = regex!(r"^\d{6}-?\d{4}$");
if re.is_match(kennitala) {
let sanitized = kennitala.replace("-","");
let check_digit = (sanitized.as_bytes()[8] as char).to_digit(10).unwrap();
let c = constants
.iter()
.zip(sanitized.bytes())
.fold(0,
|sum : u32, (x, y) : (&u32, u8)|
sum + x * (y as char).to_digit(10).unwrap()
);
println!("check digit: {0}", check_digit);
if 11 - (c % 11) == check_digit {
return true | } | }
}
false | random_line_split |
lib.rs | extern crate regex;
macro_rules! regex(
($s:expr) => (regex::Regex::new($s).unwrap());
);
pub fn is_valid(kennitala : &str) -> bool | {
let constants = [3, 2, 7, 6, 5, 4, 3, 2];
let re = regex!(r"^\d{6}-?\d{4}$");
if re.is_match(kennitala) {
let sanitized = kennitala.replace("-","");
let check_digit = (sanitized.as_bytes()[8] as char).to_digit(10).unwrap();
let c = constants
.iter()
.zip(sanitized.bytes())
.fold(0,
|sum : u32, (x, y) : (&u32, u8)|
sum + x * (y as char).to_digit(10).unwrap()
);
println!("check digit: {0}", check_digit);
if 11 - (c % 11) == check_digit {
return true
}
}
false
} | identifier_body |
|
lib.rs | extern crate regex;
macro_rules! regex(
($s:expr) => (regex::Regex::new($s).unwrap());
);
pub fn | (kennitala : &str) -> bool {
let constants = [3, 2, 7, 6, 5, 4, 3, 2];
let re = regex!(r"^\d{6}-?\d{4}$");
if re.is_match(kennitala) {
let sanitized = kennitala.replace("-","");
let check_digit = (sanitized.as_bytes()[8] as char).to_digit(10).unwrap();
let c = constants
.iter()
.zip(sanitized.bytes())
.fold(0,
|sum : u32, (x, y) : (&u32, u8)|
sum + x * (y as char).to_digit(10).unwrap()
);
println!("check digit: {0}", check_digit);
if 11 - (c % 11) == check_digit {
return true
}
}
false
}
| is_valid | identifier_name |
cpp.rs | //! Enables the generation of header and source files for using intercom
//! libraries from C++ projects.
extern crate std;
use std::borrow::Cow;
use std::io::Write;
use super::GeneratorError;
use super::{pascal_case, LibraryContext, ModelOptions, TypeSystemOptions};
use intercom::typelib::{
Arg, CoClass, Direction, Interface, InterfaceVariant, Method, TypeInfo, TypeLib,
};
use handlebars::Handlebars;
use serde_derive::Serialize;
#[derive(PartialEq, Serialize, Debug)]
pub struct CppLibrary
{
pub lib_name: String,
pub interfaces: Vec<CppInterface>,
pub coclass_count: usize,
pub coclasses: Vec<CppClass>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppInterface
{
pub name: String,
pub iid_struct: String,
pub base: Option<String>,
pub methods: Vec<CppMethod>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppMethod
{
pub name: String,
pub ret_type: String,
pub args: Vec<CppArg>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppArg
{
pub name: String,
pub arg_type: String,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppClass
{
pub name: String,
pub clsid_struct: String,
pub interface_count: usize,
pub interfaces: Vec<String>,
}
impl CppLibrary
{
fn | (lib: TypeLib, opts: &ModelOptions) -> Result<Self, GeneratorError>
{
let ctx = LibraryContext::try_from(&lib)?;
let mut interfaces = vec![];
let mut coclasses = vec![];
for t in &lib.types {
match t {
TypeInfo::Class(cls) => {
coclasses.push(CppClass::try_from(cls.as_ref(), opts, &ctx)?)
}
TypeInfo::Interface(itf) => {
interfaces.push(CppInterface::gather(itf.as_ref(), opts, &ctx)?)
}
}
}
let interfaces = interfaces
.into_iter()
.flatten()
.collect::<Vec<CppInterface>>();
Ok(Self {
lib_name: lib.name.to_string(),
interfaces,
coclass_count: coclasses.len(),
coclasses,
})
}
}
impl CppInterface
{
fn gather(
itf: &Interface,
opts: &ModelOptions,
ctx: &LibraryContext,
) -> Result<Vec<Self>, GeneratorError>
{
Ok(opts
.type_systems
.iter()
.map(
|ts_opts| match itf.variants.iter().find(|v| v.as_ref().ts == ts_opts.ts) {
Some(v) => Some(CppInterface::try_from(&itf, v.as_ref(), ts_opts, ctx)),
None => None,
},
)
.filter_map(|i| i)
.collect::<Result<Vec<_>, _>>()?)
}
fn try_from(
itf: &Interface,
itf_variant: &InterfaceVariant,
ts_opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
Ok(Self {
name: Self::final_name(&itf, ts_opts),
iid_struct: guid_as_struct(&itf_variant.iid),
base: Some("IUnknown".to_string()),
methods: itf_variant
.methods
.iter()
.map(|m| CppMethod::try_from(m.as_ref(), ts_opts, ctx))
.collect::<Result<Vec<_>, _>>()?,
})
}
pub fn final_name(itf: &Interface, opts: &TypeSystemOptions) -> String
{
let base_name = if itf.options.class_impl_interface {
Cow::from(format!("I{}", itf.name))
} else {
itf.name.clone()
};
match opts.use_full_name {
true => format!("{}_{:?}", base_name, opts.ts),
false => base_name.to_string(),
}
}
}
impl CppMethod
{
fn try_from(
method: &Method,
opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
Ok(Self {
name: pascal_case(&method.name),
ret_type: CppArg::cpp_type(&method.return_type, opts, ctx),
args: method
.parameters
.iter()
.map(|arg| CppArg::try_from(arg, opts, ctx))
.collect::<Result<Vec<_>, _>>()?,
})
}
}
impl CppArg
{
fn try_from(
arg: &Arg,
opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
let mut attrs = vec![];
match arg.direction {
Direction::In => attrs.push("in"),
Direction::Out => attrs.push("out"),
Direction::Retval => {
attrs.push("out");
attrs.push("retval");
}
Direction::Return => {
return Err("Direction::Return is invalid direction for arguments"
.to_string()
.into());
}
}
Ok(Self {
name: arg.name.to_string(),
arg_type: Self::cpp_type(arg, opts, ctx),
})
}
fn cpp_type(arg: &Arg, opts: &TypeSystemOptions, ctx: &LibraryContext) -> String
{
let base_name = ctx
.itfs_by_name
.get(arg.ty.as_ref())
.map(|itf| CppInterface::final_name(itf, opts))
.unwrap_or_else(|| arg.ty.to_string());
let indirection = match arg.direction {
Direction::In | Direction::Return => arg.indirection_level,
Direction::Out | Direction::Retval => arg.indirection_level + 1,
};
let base_name = match base_name.as_ref() {
"std::ffi::c_void" => "void".to_string(),
"HRESULT" => "intercom::HRESULT".to_string(),
other => other.to_string(),
};
format!("{}{}", base_name, "*".repeat(indirection as usize))
}
}
impl CppClass
{
fn try_from(
cls: &CoClass,
opts: &ModelOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
let interfaces = cls
.interfaces
.iter()
.flat_map(|itf_ref| {
opts.type_systems
.iter()
.map(|opt| {
let itf = ctx.itfs_by_ref[itf_ref.name.as_ref()];
CppInterface::final_name(itf, opt)
})
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
Ok(CppClass {
name: cls.name.to_string(),
clsid_struct: guid_as_struct(&cls.clsid),
interface_count: interfaces.len(),
interfaces,
})
}
}
/// Generates the manifest content.
///
/// - `out` - The writer to use for output.
pub fn write(
lib: intercom::typelib::TypeLib,
opts: ModelOptions,
out_header: Option<&mut dyn Write>,
out_source: Option<&mut dyn Write>,
) -> Result<(), GeneratorError>
{
let mut reg = Handlebars::new();
reg.register_template_string("cpp_header", include_str!("cpp_header.hbs"))
.expect("Error in the built-in C++ template.");
reg.register_template_string("cpp_source", include_str!("cpp_source.hbs"))
.expect("Error in the built-in C++ template.");
let cpp_model = CppLibrary::try_from(lib, &opts)?;
if let Some(out_header) = out_header {
let rendered = reg
.render("cpp_header", &cpp_model)
.expect("Rendering a valid ComCrate to C++ failed");
write!(out_header, "{}", rendered)?;
}
if let Some(out_source) = out_source {
let rendered = reg
.render("cpp_source", &cpp_model)
.expect("Rendering a valid ComCrate to C++ failed");
write!(out_source, "{}", rendered)?;
}
Ok(())
}
/// Converts a guid to binarys representation.
pub fn guid_as_struct(g: &intercom::GUID) -> String
{
format!( "{{0x{:08x},0x{:04x},0x{:04x},{{0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x}}}}}",
g.data1, g.data2, g.data3,
g.data4[0], g.data4[1], g.data4[2], g.data4[3],
g.data4[4], g.data4[5], g.data4[6], g.data4[7] )
}
| try_from | identifier_name |
cpp.rs | //! Enables the generation of header and source files for using intercom
//! libraries from C++ projects.
extern crate std;
use std::borrow::Cow;
use std::io::Write;
use super::GeneratorError;
use super::{pascal_case, LibraryContext, ModelOptions, TypeSystemOptions};
use intercom::typelib::{
Arg, CoClass, Direction, Interface, InterfaceVariant, Method, TypeInfo, TypeLib,
};
use handlebars::Handlebars;
use serde_derive::Serialize;
#[derive(PartialEq, Serialize, Debug)]
pub struct CppLibrary
{
pub lib_name: String,
pub interfaces: Vec<CppInterface>,
pub coclass_count: usize,
pub coclasses: Vec<CppClass>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppInterface
{
pub name: String,
pub iid_struct: String,
pub base: Option<String>,
pub methods: Vec<CppMethod>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppMethod
{
pub name: String,
pub ret_type: String,
pub args: Vec<CppArg>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppArg
{
pub name: String,
pub arg_type: String,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppClass
{
pub name: String,
pub clsid_struct: String,
pub interface_count: usize,
pub interfaces: Vec<String>,
}
impl CppLibrary
{
fn try_from(lib: TypeLib, opts: &ModelOptions) -> Result<Self, GeneratorError>
{
let ctx = LibraryContext::try_from(&lib)?;
let mut interfaces = vec![];
let mut coclasses = vec![];
for t in &lib.types {
match t {
TypeInfo::Class(cls) => {
coclasses.push(CppClass::try_from(cls.as_ref(), opts, &ctx)?)
}
TypeInfo::Interface(itf) => {
interfaces.push(CppInterface::gather(itf.as_ref(), opts, &ctx)?)
}
}
}
let interfaces = interfaces
.into_iter()
.flatten()
.collect::<Vec<CppInterface>>();
Ok(Self {
lib_name: lib.name.to_string(),
interfaces,
coclass_count: coclasses.len(),
coclasses,
})
}
}
impl CppInterface
{
fn gather(
itf: &Interface,
opts: &ModelOptions,
ctx: &LibraryContext,
) -> Result<Vec<Self>, GeneratorError>
{
Ok(opts
.type_systems
.iter()
.map(
|ts_opts| match itf.variants.iter().find(|v| v.as_ref().ts == ts_opts.ts) {
Some(v) => Some(CppInterface::try_from(&itf, v.as_ref(), ts_opts, ctx)),
None => None,
},
)
.filter_map(|i| i)
.collect::<Result<Vec<_>, _>>()?)
}
fn try_from(
itf: &Interface,
itf_variant: &InterfaceVariant,
ts_opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
Ok(Self {
name: Self::final_name(&itf, ts_opts),
iid_struct: guid_as_struct(&itf_variant.iid),
base: Some("IUnknown".to_string()),
methods: itf_variant
.methods
.iter()
.map(|m| CppMethod::try_from(m.as_ref(), ts_opts, ctx))
.collect::<Result<Vec<_>, _>>()?,
})
}
pub fn final_name(itf: &Interface, opts: &TypeSystemOptions) -> String
{
let base_name = if itf.options.class_impl_interface {
Cow::from(format!("I{}", itf.name))
} else {
itf.name.clone()
};
match opts.use_full_name {
true => format!("{}_{:?}", base_name, opts.ts),
false => base_name.to_string(),
}
}
}
impl CppMethod
{
fn try_from(
method: &Method,
opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
Ok(Self {
name: pascal_case(&method.name),
ret_type: CppArg::cpp_type(&method.return_type, opts, ctx),
args: method
.parameters
.iter()
.map(|arg| CppArg::try_from(arg, opts, ctx))
.collect::<Result<Vec<_>, _>>()?,
})
}
}
impl CppArg
{
fn try_from(
arg: &Arg,
opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
let mut attrs = vec![];
match arg.direction {
Direction::In => attrs.push("in"),
Direction::Out => attrs.push("out"),
Direction::Retval => {
attrs.push("out");
attrs.push("retval");
}
Direction::Return => {
return Err("Direction::Return is invalid direction for arguments"
.to_string()
.into());
}
}
Ok(Self {
name: arg.name.to_string(),
arg_type: Self::cpp_type(arg, opts, ctx),
})
}
fn cpp_type(arg: &Arg, opts: &TypeSystemOptions, ctx: &LibraryContext) -> String
{
let base_name = ctx
.itfs_by_name
.get(arg.ty.as_ref())
.map(|itf| CppInterface::final_name(itf, opts))
.unwrap_or_else(|| arg.ty.to_string());
let indirection = match arg.direction {
Direction::In | Direction::Return => arg.indirection_level,
Direction::Out | Direction::Retval => arg.indirection_level + 1,
};
let base_name = match base_name.as_ref() {
"std::ffi::c_void" => "void".to_string(),
"HRESULT" => "intercom::HRESULT".to_string(),
other => other.to_string(),
};
format!("{}{}", base_name, "*".repeat(indirection as usize))
}
}
impl CppClass
{
fn try_from(
cls: &CoClass,
opts: &ModelOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
|
}
/// Generates the manifest content.
///
/// - `out` - The writer to use for output.
pub fn write(
lib: intercom::typelib::TypeLib,
opts: ModelOptions,
out_header: Option<&mut dyn Write>,
out_source: Option<&mut dyn Write>,
) -> Result<(), GeneratorError>
{
let mut reg = Handlebars::new();
reg.register_template_string("cpp_header", include_str!("cpp_header.hbs"))
.expect("Error in the built-in C++ template.");
reg.register_template_string("cpp_source", include_str!("cpp_source.hbs"))
.expect("Error in the built-in C++ template.");
let cpp_model = CppLibrary::try_from(lib, &opts)?;
if let Some(out_header) = out_header {
let rendered = reg
.render("cpp_header", &cpp_model)
.expect("Rendering a valid ComCrate to C++ failed");
write!(out_header, "{}", rendered)?;
}
if let Some(out_source) = out_source {
let rendered = reg
.render("cpp_source", &cpp_model)
.expect("Rendering a valid ComCrate to C++ failed");
write!(out_source, "{}", rendered)?;
}
Ok(())
}
/// Converts a guid to binarys representation.
pub fn guid_as_struct(g: &intercom::GUID) -> String
{
format!( "{{0x{:08x},0x{:04x},0x{:04x},{{0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x}}}}}",
g.data1, g.data2, g.data3,
g.data4[0], g.data4[1], g.data4[2], g.data4[3],
g.data4[4], g.data4[5], g.data4[6], g.data4[7] )
}
| {
let interfaces = cls
.interfaces
.iter()
.flat_map(|itf_ref| {
opts.type_systems
.iter()
.map(|opt| {
let itf = ctx.itfs_by_ref[itf_ref.name.as_ref()];
CppInterface::final_name(itf, opt)
})
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
Ok(CppClass {
name: cls.name.to_string(),
clsid_struct: guid_as_struct(&cls.clsid),
interface_count: interfaces.len(),
interfaces,
})
} | identifier_body |
cpp.rs | //! Enables the generation of header and source files for using intercom
//! libraries from C++ projects.
extern crate std;
use std::borrow::Cow;
use std::io::Write;
use super::GeneratorError;
use super::{pascal_case, LibraryContext, ModelOptions, TypeSystemOptions};
use intercom::typelib::{
Arg, CoClass, Direction, Interface, InterfaceVariant, Method, TypeInfo, TypeLib,
};
use handlebars::Handlebars;
use serde_derive::Serialize;
#[derive(PartialEq, Serialize, Debug)]
pub struct CppLibrary
{
pub lib_name: String,
pub interfaces: Vec<CppInterface>,
pub coclass_count: usize,
pub coclasses: Vec<CppClass>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppInterface
{
pub name: String,
pub iid_struct: String,
pub base: Option<String>,
pub methods: Vec<CppMethod>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppMethod
{
pub name: String,
pub ret_type: String,
pub args: Vec<CppArg>,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppArg
{
pub name: String,
pub arg_type: String,
}
#[derive(PartialEq, Serialize, Debug)]
pub struct CppClass
{
pub name: String,
pub clsid_struct: String,
pub interface_count: usize,
pub interfaces: Vec<String>,
}
impl CppLibrary
{
fn try_from(lib: TypeLib, opts: &ModelOptions) -> Result<Self, GeneratorError>
{
let ctx = LibraryContext::try_from(&lib)?;
let mut interfaces = vec![];
let mut coclasses = vec![];
for t in &lib.types {
match t {
TypeInfo::Class(cls) => {
coclasses.push(CppClass::try_from(cls.as_ref(), opts, &ctx)?)
}
TypeInfo::Interface(itf) => {
interfaces.push(CppInterface::gather(itf.as_ref(), opts, &ctx)?)
}
}
}
let interfaces = interfaces
.into_iter()
.flatten()
.collect::<Vec<CppInterface>>();
Ok(Self {
lib_name: lib.name.to_string(),
interfaces,
coclass_count: coclasses.len(),
coclasses,
})
}
}
impl CppInterface
{
fn gather(
itf: &Interface,
opts: &ModelOptions,
ctx: &LibraryContext,
) -> Result<Vec<Self>, GeneratorError>
{
Ok(opts
.type_systems
.iter()
.map(
|ts_opts| match itf.variants.iter().find(|v| v.as_ref().ts == ts_opts.ts) {
Some(v) => Some(CppInterface::try_from(&itf, v.as_ref(), ts_opts, ctx)),
None => None,
},
)
.filter_map(|i| i)
.collect::<Result<Vec<_>, _>>()?)
}
fn try_from(
itf: &Interface,
itf_variant: &InterfaceVariant,
ts_opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
Ok(Self {
name: Self::final_name(&itf, ts_opts),
iid_struct: guid_as_struct(&itf_variant.iid),
base: Some("IUnknown".to_string()),
methods: itf_variant
.methods
.iter()
.map(|m| CppMethod::try_from(m.as_ref(), ts_opts, ctx))
.collect::<Result<Vec<_>, _>>()?,
})
}
pub fn final_name(itf: &Interface, opts: &TypeSystemOptions) -> String
{
let base_name = if itf.options.class_impl_interface {
Cow::from(format!("I{}", itf.name))
} else {
itf.name.clone()
};
match opts.use_full_name {
true => format!("{}_{:?}", base_name, opts.ts),
false => base_name.to_string(),
}
}
}
impl CppMethod
{
fn try_from(
method: &Method,
opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
Ok(Self {
name: pascal_case(&method.name),
ret_type: CppArg::cpp_type(&method.return_type, opts, ctx),
args: method
.parameters
.iter()
.map(|arg| CppArg::try_from(arg, opts, ctx))
.collect::<Result<Vec<_>, _>>()?,
})
}
}
impl CppArg
{
fn try_from(
arg: &Arg,
opts: &TypeSystemOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
let mut attrs = vec![];
match arg.direction {
Direction::In => attrs.push("in"),
Direction::Out => attrs.push("out"),
Direction::Retval => {
attrs.push("out");
attrs.push("retval");
}
Direction::Return => {
return Err("Direction::Return is invalid direction for arguments"
.to_string()
.into());
}
}
Ok(Self {
name: arg.name.to_string(),
arg_type: Self::cpp_type(arg, opts, ctx),
})
}
fn cpp_type(arg: &Arg, opts: &TypeSystemOptions, ctx: &LibraryContext) -> String
{
let base_name = ctx
.itfs_by_name
.get(arg.ty.as_ref())
.map(|itf| CppInterface::final_name(itf, opts))
.unwrap_or_else(|| arg.ty.to_string());
let indirection = match arg.direction {
Direction::In | Direction::Return => arg.indirection_level,
Direction::Out | Direction::Retval => arg.indirection_level + 1,
};
let base_name = match base_name.as_ref() {
"std::ffi::c_void" => "void".to_string(),
"HRESULT" => "intercom::HRESULT".to_string(), | other => other.to_string(),
};
format!("{}{}", base_name, "*".repeat(indirection as usize))
}
}
impl CppClass
{
fn try_from(
cls: &CoClass,
opts: &ModelOptions,
ctx: &LibraryContext,
) -> Result<Self, GeneratorError>
{
let interfaces = cls
.interfaces
.iter()
.flat_map(|itf_ref| {
opts.type_systems
.iter()
.map(|opt| {
let itf = ctx.itfs_by_ref[itf_ref.name.as_ref()];
CppInterface::final_name(itf, opt)
})
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
Ok(CppClass {
name: cls.name.to_string(),
clsid_struct: guid_as_struct(&cls.clsid),
interface_count: interfaces.len(),
interfaces,
})
}
}
/// Generates the manifest content.
///
/// - `out` - The writer to use for output.
pub fn write(
lib: intercom::typelib::TypeLib,
opts: ModelOptions,
out_header: Option<&mut dyn Write>,
out_source: Option<&mut dyn Write>,
) -> Result<(), GeneratorError>
{
let mut reg = Handlebars::new();
reg.register_template_string("cpp_header", include_str!("cpp_header.hbs"))
.expect("Error in the built-in C++ template.");
reg.register_template_string("cpp_source", include_str!("cpp_source.hbs"))
.expect("Error in the built-in C++ template.");
let cpp_model = CppLibrary::try_from(lib, &opts)?;
if let Some(out_header) = out_header {
let rendered = reg
.render("cpp_header", &cpp_model)
.expect("Rendering a valid ComCrate to C++ failed");
write!(out_header, "{}", rendered)?;
}
if let Some(out_source) = out_source {
let rendered = reg
.render("cpp_source", &cpp_model)
.expect("Rendering a valid ComCrate to C++ failed");
write!(out_source, "{}", rendered)?;
}
Ok(())
}
/// Converts a guid to binarys representation.
pub fn guid_as_struct(g: &intercom::GUID) -> String
{
format!( "{{0x{:08x},0x{:04x},0x{:04x},{{0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x},0x{:02x}}}}}",
g.data1, g.data2, g.data3,
g.data4[0], g.data4[1], g.data4[2], g.data4[3],
g.data4[4], g.data4[5], g.data4[6], g.data4[7] )
} | random_line_split |
|
sequences.state.ts | /*
* Lumeer: Modern Data Definition and Processing Platform
*
* Copyright (C) since 2017 Lumeer.io, s.r.o. and/or its affiliates.
*
* This program is free software: you can redistribute it and/or modify | * This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import {createEntityAdapter, EntityState} from '@ngrx/entity';
import {AppState} from '../app.state';
import {createSelector} from '@ngrx/store';
import {Sequence} from '../../model/sequence';
export interface SequencesState extends EntityState<Sequence> {
loaded: boolean;
}
export const sequencesAdapter = createEntityAdapter<Sequence>({
selectId: sequence => sequence.id,
});
export const initialSequencesState: SequencesState = sequencesAdapter.getInitialState({
loaded: false,
});
export const selectSequencesState = (state: AppState) => state.sequences;
export const selectAllSequences = createSelector(selectSequencesState, sequencesAdapter.getSelectors().selectAll);
export const selectAllSequencesSorted = createSelector(selectAllSequences, sequences =>
sequences.sort((a, b) => a.name.localeCompare(b.name))
);
export const selectSequencesLoaded = createSelector(selectSequencesState, state => state.loaded); | * it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* | random_line_split |
sprk-alert.stories.ts | import { SprkAlertModule } from './sprk-alert.module';
import { SprkAlertComponent } from './sprk-alert.component';
import { markdownDocumentationLinkBuilder } from '../../../../../../../storybook-utilities/markdownDocumentationLinkBuilder';
// prettier-ignore
// @ts-ignore
import { moduleMetadata, Meta } from '@storybook/angular';
export default {
title: 'Components/Alert',
component: SprkAlertComponent,
decorators: [
moduleMetadata({
imports: [SprkAlertModule],
}),
],
parameters: {
docs: {
source: {
type: 'code',
},
iframeHeight: 120,
description: {
component: `${markdownDocumentationLinkBuilder('alert')}
- \`role=”alert”\` is required so that | `,
},
},
},
} as Meta;
export const info = () => ({
template: `
<sprk-alert
variant="info"
idString="alert-info-1"
analyticsString="test"
>
This is important information.
</sprk-alert>`,
});
info.parameters = {
jest: ['sprk-alert.component'],
};
export const success = () => ({
template: `
<sprk-alert
variant="success"
idString="alert-success-1"
analyticsString="test"
>
This is a success message.
</sprk-alert>
`,
});
success.parameters = {
jest: ['sprk-alert.component'],
};
export const fail = () => ({
template: `
<sprk-alert
variant="fail"
idString="alert-fail-1"
analyticsString="test"
>
This is a failure message to alert
that something was not successful.
</sprk-alert>
`,
});
fail.parameters = {
docs: { iframeHeight: 235 },
jest: ['sprk-alert.component'],
};
export const noDismissButton = () => ({
template: `
<sprk-alert
variant="success"
idString="no-dismiss"
analyticsString="test"
[isDismissible]="false"
>
This Success Alert has no dismiss button.
</sprk-alert>
`,
});
noDismissButton.parameters = {
jest: ['sprk-alert.component'],
}; | assistive technologies can inform
the user that their attention is needed. | random_line_split |
flex-offset.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {
Directive,
ElementRef,
Input,
OnInit,
OnChanges,
OnDestroy,
Optional,
SimpleChanges,
SkipSelf,
} from '@angular/core';
import {Directionality} from '@angular/cdk/bidi';
import {
BaseDirective,
MediaChange,
MediaMonitor,
StyleDefinition,
StyleUtils,
} from '@angular/flex-layout/core';
import {Subscription} from 'rxjs';
import {Layout, LayoutDirective} from '../layout/layout';
import {isFlowHorizontal} from '../../utils/layout-validator';
/**
* 'flex-offset' flexbox styling directive
* Configures the 'margin-left' of the element in a layout container
*/
@Directive({selector: `
[fxFlexOffset],
[fxFlexOffset.xs], [fxFlexOffset.sm], [fxFlexOffset.md], [fxFlexOffset.lg], [fxFlexOffset.xl],
[fxFlexOffset.lt-sm], [fxFlexOffset.lt-md], [fxFlexOffset.lt-lg], [fxFlexOffset.lt-xl],
[fxFlexOffset.gt-xs], [fxFlexOffset.gt-sm], [fxFlexOffset.gt-md], [fxFlexOffset.gt-lg]
`})
export class FlexOffsetDirective extends BaseDirective implements OnInit, OnChanges, OnDestroy {
private _directionWatcher: Subscription;
/* tslint:disable */
@Input('fxFlexOffset') set offset(val) { this._cacheInput('offset', val); }
@Input('fxFlexOffset.xs') set offsetXs(val) { this._cacheInput('offsetXs', val); }
@Input('fxFlexOffset.sm') set offsetSm(val) { this._cacheInput('offsetSm', val); };
@Input('fxFlexOffset.md') set offsetMd(val) { this._cacheInput('offsetMd', val); };
@Input('fxFlexOffset.lg') set offsetLg(val) { this._cacheInput('offsetLg', val); };
@Input('fxFlexOffset.xl') set offsetXl(val) { this._cacheInput('offsetXl', val); };
@Input('fxFlexOffset.lt-sm') set offsetLtSm(val) { this._cacheInput('offsetLtSm', val); };
@Input('fxFlexOffset.lt-md') set offsetLtMd(val) { this._cacheInput('offsetLtMd', val); };
@Input('fxFlexOffset.lt-lg') set offsetLtLg(val) { this._cacheInput('offsetLtLg', val); };
@Input('fxFlexOffset.lt-xl') set offsetLtXl(val) { this._cacheInput('offsetLtXl', val); };
@Input('fxFlexOffset.gt-xs') set offsetGtXs(val) { this._cacheInput('offsetGtXs', val); };
@Input('fxFlexOffset.gt-sm') set offsetGtSm(val) { this._cacheInput('offsetGtSm', val); };
@Input('fxFlexOffset.gt-md') set | (val) { this._cacheInput('offsetGtMd', val); };
@Input('fxFlexOffset.gt-lg') set offsetGtLg(val) { this._cacheInput('offsetGtLg', val); };
/* tslint:enable */
constructor(monitor: MediaMonitor,
elRef: ElementRef,
@Optional() @SkipSelf() protected _container: LayoutDirective,
private _directionality: Directionality,
styleUtils: StyleUtils) {
super(monitor, elRef, styleUtils);
this._directionWatcher =
this._directionality.change.subscribe(this._updateWithValue.bind(this));
this.watchParentFlow();
}
// *********************************************
// Lifecycle Methods
// *********************************************
/**
* For @Input changes on the current mq activation property, see onMediaQueryChanges()
*/
ngOnChanges(changes: SimpleChanges) {
if (changes['offset'] != null || this._mqActivation) {
this._updateWithValue();
}
}
/**
* Cleanup
*/
ngOnDestroy() {
super.ngOnDestroy();
if (this._layoutWatcher) {
this._layoutWatcher.unsubscribe();
}
if (this._directionWatcher) {
this._directionWatcher.unsubscribe();
}
}
/**
* After the initial onChanges, build an mqActivation object that bridges
* mql change events to onMediaQueryChange handlers
*/
ngOnInit() {
super.ngOnInit();
this._listenForMediaQueryChanges('offset', 0 , (changes: MediaChange) => {
this._updateWithValue(changes.value);
});
}
// *********************************************
// Protected methods
// *********************************************
/** The flex-direction of this element's host container. Defaults to 'row'. */
protected _layout = {direction: 'row', wrap: false};
/**
* Subscription to the parent flex container's layout changes.
* Stored so we can unsubscribe when this directive is destroyed.
*/
protected _layoutWatcher: Subscription;
/**
* If parent flow-direction changes, then update the margin property
* used to offset
*/
protected watchParentFlow() {
if (this._container) {
// Subscribe to layout immediate parent direction changes (if any)
this._layoutWatcher = this._container.layout$.subscribe((layout) => {
// `direction` === null if parent container does not have a `fxLayout`
this._onLayoutChange(layout);
});
}
}
/**
* Caches the parent container's 'flex-direction' and updates the element's style.
* Used as a handler for layout change events from the parent flex container.
*/
protected _onLayoutChange(layout?: Layout) {
this._layout = layout || this._layout || {direction: 'row', wrap: false};
this._updateWithValue();
}
/**
* Using the current fxFlexOffset value, update the inline CSS
* NOTE: this will assign `margin-left` if the parent flex-direction == 'row',
* otherwise `margin-top` is used for the offset.
*/
protected _updateWithValue(value?: string|number) {
value = value || this._queryInput('offset') || 0;
if (this._mqActivation) {
value = this._mqActivation.activatedInput;
}
this._applyStyleToElement(this._buildCSS(value));
}
protected _buildCSS(offset): StyleDefinition {
let isPercent = String(offset).indexOf('%') > -1;
let isPx = String(offset).indexOf('px') > -1;
if (!isPx && !isPercent && !isNaN(offset)) {
offset = offset + '%';
}
// The flex-direction of this element's flex container. Defaults to 'row'.
const isRtl = this._directionality.value === 'rtl';
const layout = this._getFlexFlowDirection(this.parentElement, true);
const horizontalLayoutKey = isRtl ? 'margin-right' : 'margin-left';
return isFlowHorizontal(layout) ? {[horizontalLayoutKey]: `${offset}`} :
{'margin-top': `${offset}`};
}
}
| offsetGtMd | identifier_name |
flex-offset.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {
Directive,
ElementRef,
Input,
OnInit,
OnChanges,
OnDestroy,
Optional,
SimpleChanges,
SkipSelf,
} from '@angular/core';
import {Directionality} from '@angular/cdk/bidi';
import {
BaseDirective,
MediaChange,
MediaMonitor,
StyleDefinition,
StyleUtils,
} from '@angular/flex-layout/core';
import {Subscription} from 'rxjs';
import {Layout, LayoutDirective} from '../layout/layout';
import {isFlowHorizontal} from '../../utils/layout-validator';
/**
* 'flex-offset' flexbox styling directive
* Configures the 'margin-left' of the element in a layout container
*/
@Directive({selector: `
[fxFlexOffset],
[fxFlexOffset.xs], [fxFlexOffset.sm], [fxFlexOffset.md], [fxFlexOffset.lg], [fxFlexOffset.xl],
[fxFlexOffset.lt-sm], [fxFlexOffset.lt-md], [fxFlexOffset.lt-lg], [fxFlexOffset.lt-xl],
[fxFlexOffset.gt-xs], [fxFlexOffset.gt-sm], [fxFlexOffset.gt-md], [fxFlexOffset.gt-lg]
`})
export class FlexOffsetDirective extends BaseDirective implements OnInit, OnChanges, OnDestroy {
private _directionWatcher: Subscription;
/* tslint:disable */
@Input('fxFlexOffset') set offset(val) { this._cacheInput('offset', val); }
@Input('fxFlexOffset.xs') set offsetXs(val) { this._cacheInput('offsetXs', val); }
@Input('fxFlexOffset.sm') set offsetSm(val) { this._cacheInput('offsetSm', val); };
@Input('fxFlexOffset.md') set offsetMd(val) { this._cacheInput('offsetMd', val); };
@Input('fxFlexOffset.lg') set offsetLg(val) { this._cacheInput('offsetLg', val); };
@Input('fxFlexOffset.xl') set offsetXl(val) | ;
@Input('fxFlexOffset.lt-sm') set offsetLtSm(val) { this._cacheInput('offsetLtSm', val); };
@Input('fxFlexOffset.lt-md') set offsetLtMd(val) { this._cacheInput('offsetLtMd', val); };
@Input('fxFlexOffset.lt-lg') set offsetLtLg(val) { this._cacheInput('offsetLtLg', val); };
@Input('fxFlexOffset.lt-xl') set offsetLtXl(val) { this._cacheInput('offsetLtXl', val); };
@Input('fxFlexOffset.gt-xs') set offsetGtXs(val) { this._cacheInput('offsetGtXs', val); };
@Input('fxFlexOffset.gt-sm') set offsetGtSm(val) { this._cacheInput('offsetGtSm', val); };
@Input('fxFlexOffset.gt-md') set offsetGtMd(val) { this._cacheInput('offsetGtMd', val); };
@Input('fxFlexOffset.gt-lg') set offsetGtLg(val) { this._cacheInput('offsetGtLg', val); };
/* tslint:enable */
constructor(monitor: MediaMonitor,
elRef: ElementRef,
@Optional() @SkipSelf() protected _container: LayoutDirective,
private _directionality: Directionality,
styleUtils: StyleUtils) {
super(monitor, elRef, styleUtils);
this._directionWatcher =
this._directionality.change.subscribe(this._updateWithValue.bind(this));
this.watchParentFlow();
}
// *********************************************
// Lifecycle Methods
// *********************************************
/**
* For @Input changes on the current mq activation property, see onMediaQueryChanges()
*/
ngOnChanges(changes: SimpleChanges) {
if (changes['offset'] != null || this._mqActivation) {
this._updateWithValue();
}
}
/**
* Cleanup
*/
ngOnDestroy() {
super.ngOnDestroy();
if (this._layoutWatcher) {
this._layoutWatcher.unsubscribe();
}
if (this._directionWatcher) {
this._directionWatcher.unsubscribe();
}
}
/**
* After the initial onChanges, build an mqActivation object that bridges
* mql change events to onMediaQueryChange handlers
*/
ngOnInit() {
super.ngOnInit();
this._listenForMediaQueryChanges('offset', 0 , (changes: MediaChange) => {
this._updateWithValue(changes.value);
});
}
// *********************************************
// Protected methods
// *********************************************
/** The flex-direction of this element's host container. Defaults to 'row'. */
protected _layout = {direction: 'row', wrap: false};
/**
* Subscription to the parent flex container's layout changes.
* Stored so we can unsubscribe when this directive is destroyed.
*/
protected _layoutWatcher: Subscription;
/**
* If parent flow-direction changes, then update the margin property
* used to offset
*/
protected watchParentFlow() {
if (this._container) {
// Subscribe to layout immediate parent direction changes (if any)
this._layoutWatcher = this._container.layout$.subscribe((layout) => {
// `direction` === null if parent container does not have a `fxLayout`
this._onLayoutChange(layout);
});
}
}
/**
* Caches the parent container's 'flex-direction' and updates the element's style.
* Used as a handler for layout change events from the parent flex container.
*/
protected _onLayoutChange(layout?: Layout) {
this._layout = layout || this._layout || {direction: 'row', wrap: false};
this._updateWithValue();
}
/**
* Using the current fxFlexOffset value, update the inline CSS
* NOTE: this will assign `margin-left` if the parent flex-direction == 'row',
* otherwise `margin-top` is used for the offset.
*/
protected _updateWithValue(value?: string|number) {
value = value || this._queryInput('offset') || 0;
if (this._mqActivation) {
value = this._mqActivation.activatedInput;
}
this._applyStyleToElement(this._buildCSS(value));
}
protected _buildCSS(offset): StyleDefinition {
let isPercent = String(offset).indexOf('%') > -1;
let isPx = String(offset).indexOf('px') > -1;
if (!isPx && !isPercent && !isNaN(offset)) {
offset = offset + '%';
}
// The flex-direction of this element's flex container. Defaults to 'row'.
const isRtl = this._directionality.value === 'rtl';
const layout = this._getFlexFlowDirection(this.parentElement, true);
const horizontalLayoutKey = isRtl ? 'margin-right' : 'margin-left';
return isFlowHorizontal(layout) ? {[horizontalLayoutKey]: `${offset}`} :
{'margin-top': `${offset}`};
}
}
| { this._cacheInput('offsetXl', val); } | identifier_body |
flex-offset.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {
Directive,
ElementRef,
Input,
OnInit,
OnChanges,
OnDestroy,
Optional,
SimpleChanges,
SkipSelf,
} from '@angular/core';
import {Directionality} from '@angular/cdk/bidi';
import {
BaseDirective,
MediaChange,
MediaMonitor,
StyleDefinition,
StyleUtils,
} from '@angular/flex-layout/core';
import {Subscription} from 'rxjs';
import {Layout, LayoutDirective} from '../layout/layout';
import {isFlowHorizontal} from '../../utils/layout-validator';
/**
* 'flex-offset' flexbox styling directive
* Configures the 'margin-left' of the element in a layout container
*/
@Directive({selector: `
[fxFlexOffset],
[fxFlexOffset.xs], [fxFlexOffset.sm], [fxFlexOffset.md], [fxFlexOffset.lg], [fxFlexOffset.xl],
[fxFlexOffset.lt-sm], [fxFlexOffset.lt-md], [fxFlexOffset.lt-lg], [fxFlexOffset.lt-xl],
[fxFlexOffset.gt-xs], [fxFlexOffset.gt-sm], [fxFlexOffset.gt-md], [fxFlexOffset.gt-lg]
`})
export class FlexOffsetDirective extends BaseDirective implements OnInit, OnChanges, OnDestroy {
private _directionWatcher: Subscription;
/* tslint:disable */
@Input('fxFlexOffset') set offset(val) { this._cacheInput('offset', val); }
@Input('fxFlexOffset.xs') set offsetXs(val) { this._cacheInput('offsetXs', val); }
@Input('fxFlexOffset.sm') set offsetSm(val) { this._cacheInput('offsetSm', val); };
@Input('fxFlexOffset.md') set offsetMd(val) { this._cacheInput('offsetMd', val); };
@Input('fxFlexOffset.lg') set offsetLg(val) { this._cacheInput('offsetLg', val); };
@Input('fxFlexOffset.xl') set offsetXl(val) { this._cacheInput('offsetXl', val); };
@Input('fxFlexOffset.lt-sm') set offsetLtSm(val) { this._cacheInput('offsetLtSm', val); };
@Input('fxFlexOffset.lt-md') set offsetLtMd(val) { this._cacheInput('offsetLtMd', val); };
@Input('fxFlexOffset.lt-lg') set offsetLtLg(val) { this._cacheInput('offsetLtLg', val); };
@Input('fxFlexOffset.lt-xl') set offsetLtXl(val) { this._cacheInput('offsetLtXl', val); };
@Input('fxFlexOffset.gt-xs') set offsetGtXs(val) { this._cacheInput('offsetGtXs', val); };
@Input('fxFlexOffset.gt-sm') set offsetGtSm(val) { this._cacheInput('offsetGtSm', val); };
@Input('fxFlexOffset.gt-md') set offsetGtMd(val) { this._cacheInput('offsetGtMd', val); };
@Input('fxFlexOffset.gt-lg') set offsetGtLg(val) { this._cacheInput('offsetGtLg', val); };
/* tslint:enable */
constructor(monitor: MediaMonitor,
elRef: ElementRef,
@Optional() @SkipSelf() protected _container: LayoutDirective,
private _directionality: Directionality,
styleUtils: StyleUtils) {
super(monitor, elRef, styleUtils);
this._directionWatcher =
this._directionality.change.subscribe(this._updateWithValue.bind(this));
this.watchParentFlow();
}
// *********************************************
// Lifecycle Methods
// *********************************************
/**
* For @Input changes on the current mq activation property, see onMediaQueryChanges()
*/
ngOnChanges(changes: SimpleChanges) {
if (changes['offset'] != null || this._mqActivation) {
this._updateWithValue();
}
}
/**
* Cleanup
*/
ngOnDestroy() {
super.ngOnDestroy();
if (this._layoutWatcher) {
this._layoutWatcher.unsubscribe();
}
if (this._directionWatcher) {
this._directionWatcher.unsubscribe();
}
}
/**
* After the initial onChanges, build an mqActivation object that bridges
* mql change events to onMediaQueryChange handlers
*/
ngOnInit() {
super.ngOnInit();
this._listenForMediaQueryChanges('offset', 0 , (changes: MediaChange) => {
this._updateWithValue(changes.value);
});
}
// *********************************************
// Protected methods
// *********************************************
/** The flex-direction of this element's host container. Defaults to 'row'. */
protected _layout = {direction: 'row', wrap: false};
/**
* Subscription to the parent flex container's layout changes.
* Stored so we can unsubscribe when this directive is destroyed.
*/
protected _layoutWatcher: Subscription;
/**
* If parent flow-direction changes, then update the margin property
* used to offset
*/
protected watchParentFlow() {
if (this._container) {
// Subscribe to layout immediate parent direction changes (if any)
this._layoutWatcher = this._container.layout$.subscribe((layout) => {
// `direction` === null if parent container does not have a `fxLayout`
this._onLayoutChange(layout);
});
}
}
/**
* Caches the parent container's 'flex-direction' and updates the element's style.
* Used as a handler for layout change events from the parent flex container.
*/
protected _onLayoutChange(layout?: Layout) {
this._layout = layout || this._layout || {direction: 'row', wrap: false};
this._updateWithValue();
}
/**
* Using the current fxFlexOffset value, update the inline CSS
* NOTE: this will assign `margin-left` if the parent flex-direction == 'row',
* otherwise `margin-top` is used for the offset.
*/
protected _updateWithValue(value?: string|number) {
value = value || this._queryInput('offset') || 0;
if (this._mqActivation) {
value = this._mqActivation.activatedInput;
}
this._applyStyleToElement(this._buildCSS(value));
}
protected _buildCSS(offset): StyleDefinition {
let isPercent = String(offset).indexOf('%') > -1;
let isPx = String(offset).indexOf('px') > -1;
if (!isPx && !isPercent && !isNaN(offset)) |
// The flex-direction of this element's flex container. Defaults to 'row'.
const isRtl = this._directionality.value === 'rtl';
const layout = this._getFlexFlowDirection(this.parentElement, true);
const horizontalLayoutKey = isRtl ? 'margin-right' : 'margin-left';
return isFlowHorizontal(layout) ? {[horizontalLayoutKey]: `${offset}`} :
{'margin-top': `${offset}`};
}
}
| {
offset = offset + '%';
} | conditional_block |
flex-offset.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {
Directive,
ElementRef,
Input,
OnInit,
OnChanges,
OnDestroy,
Optional,
SimpleChanges,
SkipSelf,
} from '@angular/core';
import {Directionality} from '@angular/cdk/bidi';
import {
BaseDirective,
MediaChange,
MediaMonitor,
StyleDefinition,
StyleUtils,
} from '@angular/flex-layout/core';
import {Subscription} from 'rxjs';
import {Layout, LayoutDirective} from '../layout/layout';
import {isFlowHorizontal} from '../../utils/layout-validator';
/**
* 'flex-offset' flexbox styling directive
* Configures the 'margin-left' of the element in a layout container
*/
@Directive({selector: `
[fxFlexOffset],
[fxFlexOffset.xs], [fxFlexOffset.sm], [fxFlexOffset.md], [fxFlexOffset.lg], [fxFlexOffset.xl],
[fxFlexOffset.lt-sm], [fxFlexOffset.lt-md], [fxFlexOffset.lt-lg], [fxFlexOffset.lt-xl],
[fxFlexOffset.gt-xs], [fxFlexOffset.gt-sm], [fxFlexOffset.gt-md], [fxFlexOffset.gt-lg]
`})
export class FlexOffsetDirective extends BaseDirective implements OnInit, OnChanges, OnDestroy {
private _directionWatcher: Subscription;
/* tslint:disable */
@Input('fxFlexOffset') set offset(val) { this._cacheInput('offset', val); }
@Input('fxFlexOffset.xs') set offsetXs(val) { this._cacheInput('offsetXs', val); }
@Input('fxFlexOffset.sm') set offsetSm(val) { this._cacheInput('offsetSm', val); };
@Input('fxFlexOffset.md') set offsetMd(val) { this._cacheInput('offsetMd', val); };
@Input('fxFlexOffset.lg') set offsetLg(val) { this._cacheInput('offsetLg', val); };
@Input('fxFlexOffset.xl') set offsetXl(val) { this._cacheInput('offsetXl', val); };
@Input('fxFlexOffset.lt-sm') set offsetLtSm(val) { this._cacheInput('offsetLtSm', val); };
@Input('fxFlexOffset.lt-md') set offsetLtMd(val) { this._cacheInput('offsetLtMd', val); };
@Input('fxFlexOffset.lt-lg') set offsetLtLg(val) { this._cacheInput('offsetLtLg', val); };
@Input('fxFlexOffset.lt-xl') set offsetLtXl(val) { this._cacheInput('offsetLtXl', val); };
@Input('fxFlexOffset.gt-xs') set offsetGtXs(val) { this._cacheInput('offsetGtXs', val); };
@Input('fxFlexOffset.gt-sm') set offsetGtSm(val) { this._cacheInput('offsetGtSm', val); };
@Input('fxFlexOffset.gt-md') set offsetGtMd(val) { this._cacheInput('offsetGtMd', val); };
@Input('fxFlexOffset.gt-lg') set offsetGtLg(val) { this._cacheInput('offsetGtLg', val); };
/* tslint:enable */
constructor(monitor: MediaMonitor,
elRef: ElementRef,
@Optional() @SkipSelf() protected _container: LayoutDirective,
private _directionality: Directionality,
styleUtils: StyleUtils) {
super(monitor, elRef, styleUtils);
this._directionWatcher =
this._directionality.change.subscribe(this._updateWithValue.bind(this));
this.watchParentFlow();
} | // *********************************************
// Lifecycle Methods
// *********************************************
/**
* For @Input changes on the current mq activation property, see onMediaQueryChanges()
*/
ngOnChanges(changes: SimpleChanges) {
if (changes['offset'] != null || this._mqActivation) {
this._updateWithValue();
}
}
/**
* Cleanup
*/
ngOnDestroy() {
super.ngOnDestroy();
if (this._layoutWatcher) {
this._layoutWatcher.unsubscribe();
}
if (this._directionWatcher) {
this._directionWatcher.unsubscribe();
}
}
/**
* After the initial onChanges, build an mqActivation object that bridges
* mql change events to onMediaQueryChange handlers
*/
ngOnInit() {
super.ngOnInit();
this._listenForMediaQueryChanges('offset', 0 , (changes: MediaChange) => {
this._updateWithValue(changes.value);
});
}
// *********************************************
// Protected methods
// *********************************************
/** The flex-direction of this element's host container. Defaults to 'row'. */
protected _layout = {direction: 'row', wrap: false};
/**
* Subscription to the parent flex container's layout changes.
* Stored so we can unsubscribe when this directive is destroyed.
*/
protected _layoutWatcher: Subscription;
/**
* If parent flow-direction changes, then update the margin property
* used to offset
*/
protected watchParentFlow() {
if (this._container) {
// Subscribe to layout immediate parent direction changes (if any)
this._layoutWatcher = this._container.layout$.subscribe((layout) => {
// `direction` === null if parent container does not have a `fxLayout`
this._onLayoutChange(layout);
});
}
}
/**
* Caches the parent container's 'flex-direction' and updates the element's style.
* Used as a handler for layout change events from the parent flex container.
*/
protected _onLayoutChange(layout?: Layout) {
this._layout = layout || this._layout || {direction: 'row', wrap: false};
this._updateWithValue();
}
/**
* Using the current fxFlexOffset value, update the inline CSS
* NOTE: this will assign `margin-left` if the parent flex-direction == 'row',
* otherwise `margin-top` is used for the offset.
*/
protected _updateWithValue(value?: string|number) {
value = value || this._queryInput('offset') || 0;
if (this._mqActivation) {
value = this._mqActivation.activatedInput;
}
this._applyStyleToElement(this._buildCSS(value));
}
protected _buildCSS(offset): StyleDefinition {
let isPercent = String(offset).indexOf('%') > -1;
let isPx = String(offset).indexOf('px') > -1;
if (!isPx && !isPercent && !isNaN(offset)) {
offset = offset + '%';
}
// The flex-direction of this element's flex container. Defaults to 'row'.
const isRtl = this._directionality.value === 'rtl';
const layout = this._getFlexFlowDirection(this.parentElement, true);
const horizontalLayoutKey = isRtl ? 'margin-right' : 'margin-left';
return isFlowHorizontal(layout) ? {[horizontalLayoutKey]: `${offset}`} :
{'margin-top': `${offset}`};
}
} | random_line_split |
|
favoriteDAO.js | 'use strict'
var mongoose = require('mongoose');
let q = require('q');
let EventModel = require('../model/eventModel');
let FavoriteModel = require('../model/favoriteModel');
class FavoriteDAO {
findUser(req) {
var defer = q.defer();
let eventos = [];
FavoriteModel.find({ userId: req.decoded.id })
.then(result => {
result.map(event => {
EventModel
.findById({
_id: event._doc.eventId
}).then(event => {
return eventos.push(event);
}).then(() => {
defer.resolve(eventos);
})
})
})
return defer.promise;
}
findCompany(req) {
var defer = q.defer();
FavoriteModel.find({
companyId: req.decoded.id
}).then(favorite => {
defer.resolve(favorite._doc);
});
return defer.promise;
}
findPlace(req) {
var defer = q.defer();
FavoriteModel.find({
userId: req.decoded.id
}).then(favorite => {
defer.resolve(favorite);
});
return defer.promise;
}
persist(req) {
const { body } = req;
var defer = q.defer();
FavoriteModel.find({ userId: req.decoded.id })
.then((result) => {
console.log(result);
if (result[0].eventId === body.eventId) {
FavoriteModel
.update({
userId: req.decoded.id
},
{
$set: {
checkIn: body.check,
favorite: body.favorite,
}
}).then((result) => defer.resolve(result));
}
else {
let saveFavorite = new FavoriteModel({
companyId: body.companyId,
eventId: body.eventId,
favorite: body.favorite,
checkIn: body.check,
userId: req.decoded.id,
});
saveFavorite
.save()
.then((result) => {
defer.resolve(result);
})
.catch((err) => {
defer.reject(err);
});
} | return defer.promise;
}
}
module.exports = new FavoriteDAO(); | }); | random_line_split |
favoriteDAO.js | 'use strict'
var mongoose = require('mongoose');
let q = require('q');
let EventModel = require('../model/eventModel');
let FavoriteModel = require('../model/favoriteModel');
class FavoriteDAO {
| (req) {
var defer = q.defer();
let eventos = [];
FavoriteModel.find({ userId: req.decoded.id })
.then(result => {
result.map(event => {
EventModel
.findById({
_id: event._doc.eventId
}).then(event => {
return eventos.push(event);
}).then(() => {
defer.resolve(eventos);
})
})
})
return defer.promise;
}
findCompany(req) {
var defer = q.defer();
FavoriteModel.find({
companyId: req.decoded.id
}).then(favorite => {
defer.resolve(favorite._doc);
});
return defer.promise;
}
findPlace(req) {
var defer = q.defer();
FavoriteModel.find({
userId: req.decoded.id
}).then(favorite => {
defer.resolve(favorite);
});
return defer.promise;
}
persist(req) {
const { body } = req;
var defer = q.defer();
FavoriteModel.find({ userId: req.decoded.id })
.then((result) => {
console.log(result);
if (result[0].eventId === body.eventId) {
FavoriteModel
.update({
userId: req.decoded.id
},
{
$set: {
checkIn: body.check,
favorite: body.favorite,
}
}).then((result) => defer.resolve(result));
}
else {
let saveFavorite = new FavoriteModel({
companyId: body.companyId,
eventId: body.eventId,
favorite: body.favorite,
checkIn: body.check,
userId: req.decoded.id,
});
saveFavorite
.save()
.then((result) => {
defer.resolve(result);
})
.catch((err) => {
defer.reject(err);
});
}
});
return defer.promise;
}
}
module.exports = new FavoriteDAO(); | findUser | identifier_name |
favoriteDAO.js | 'use strict'
var mongoose = require('mongoose');
let q = require('q');
let EventModel = require('../model/eventModel');
let FavoriteModel = require('../model/favoriteModel');
class FavoriteDAO {
findUser(req) |
findCompany(req) {
var defer = q.defer();
FavoriteModel.find({
companyId: req.decoded.id
}).then(favorite => {
defer.resolve(favorite._doc);
});
return defer.promise;
}
findPlace(req) {
var defer = q.defer();
FavoriteModel.find({
userId: req.decoded.id
}).then(favorite => {
defer.resolve(favorite);
});
return defer.promise;
}
persist(req) {
const { body } = req;
var defer = q.defer();
FavoriteModel.find({ userId: req.decoded.id })
.then((result) => {
console.log(result);
if (result[0].eventId === body.eventId) {
FavoriteModel
.update({
userId: req.decoded.id
},
{
$set: {
checkIn: body.check,
favorite: body.favorite,
}
}).then((result) => defer.resolve(result));
}
else {
let saveFavorite = new FavoriteModel({
companyId: body.companyId,
eventId: body.eventId,
favorite: body.favorite,
checkIn: body.check,
userId: req.decoded.id,
});
saveFavorite
.save()
.then((result) => {
defer.resolve(result);
})
.catch((err) => {
defer.reject(err);
});
}
});
return defer.promise;
}
}
module.exports = new FavoriteDAO(); | {
var defer = q.defer();
let eventos = [];
FavoriteModel.find({ userId: req.decoded.id })
.then(result => {
result.map(event => {
EventModel
.findById({
_id: event._doc.eventId
}).then(event => {
return eventos.push(event);
}).then(() => {
defer.resolve(eventos);
})
})
})
return defer.promise;
} | identifier_body |
favoriteDAO.js | 'use strict'
var mongoose = require('mongoose');
let q = require('q');
let EventModel = require('../model/eventModel');
let FavoriteModel = require('../model/favoriteModel');
class FavoriteDAO {
findUser(req) {
var defer = q.defer();
let eventos = [];
FavoriteModel.find({ userId: req.decoded.id })
.then(result => {
result.map(event => {
EventModel
.findById({
_id: event._doc.eventId
}).then(event => {
return eventos.push(event);
}).then(() => {
defer.resolve(eventos);
})
})
})
return defer.promise;
}
findCompany(req) {
var defer = q.defer();
FavoriteModel.find({
companyId: req.decoded.id
}).then(favorite => {
defer.resolve(favorite._doc);
});
return defer.promise;
}
findPlace(req) {
var defer = q.defer();
FavoriteModel.find({
userId: req.decoded.id
}).then(favorite => {
defer.resolve(favorite);
});
return defer.promise;
}
persist(req) {
const { body } = req;
var defer = q.defer();
FavoriteModel.find({ userId: req.decoded.id })
.then((result) => {
console.log(result);
if (result[0].eventId === body.eventId) |
else {
let saveFavorite = new FavoriteModel({
companyId: body.companyId,
eventId: body.eventId,
favorite: body.favorite,
checkIn: body.check,
userId: req.decoded.id,
});
saveFavorite
.save()
.then((result) => {
defer.resolve(result);
})
.catch((err) => {
defer.reject(err);
});
}
});
return defer.promise;
}
}
module.exports = new FavoriteDAO(); | {
FavoriteModel
.update({
userId: req.decoded.id
},
{
$set: {
checkIn: body.check,
favorite: body.favorite,
}
}).then((result) => defer.resolve(result));
} | conditional_block |
boardSub.js | var app = angular.module('cc98.controllers')
app.controller('boardSubCtrl',
function ($scope, $http, $stateParams) {
$scope.title = $stateParams.title;
$scope.doRefresh = function () {
var boardRootId = $stateParams.id;
$http.get('http://api.cc98.org/board/' + boardRootId + '/subs')
.success(function (newItems) {
$scope.boardSubs = newItems;
})
.finally(function () {
$scope.$broadcast('scroll.refreshComplete');
});
};
//根据板块今日帖数的多少来确定label的颜色
$scope.labelColor = function (postCount) {
if (postCount != undefined) {
if (postCount == 0)
return "label-default";
else if (postCount < 10)
return "label-info";
else if (postCount < 50)
return "label-primary";
else if (postCount < 100)
return "label-success";
else if (postCount < 200)
return "label-energized";
else if (postCount < 500) | else if (postCount < 1000)
return "label-pink";
else
return "label-danger";
}
}
}); | return "label-royal"; | random_line_split |
app.js | 'use strict';
(function (angular,buildfire) {
angular.module('fixedTimerPluginContent', ['ngRoute', 'ui.bootstrap', 'ui.tinymce', 'timerModals', 'ui.sortable'])
//injected ngRoute for routing
.config(['$routeProvider', function ($routeProvider) {
$routeProvider
.when('/', { | })
.when('/item', {
templateUrl: 'templates/timerItem.html',
controllerAs: 'ContentItem',
controller: 'ContentItemCtrl'
})
.when('/item/:itemId', {
templateUrl: 'templates/timerItem.html',
controllerAs: 'ContentItem',
controller: 'ContentItemCtrl'
})
.otherwise('/');
}])
.run(['Location','Buildfire', function (Location,Buildfire) {
// Handler to receive message from widget
Buildfire.history.onPop(function(data, err){
if(data && data.label!='Item')
Location.goToHome();
console.log('Buildfire.history.onPop called', data, err);
});
}])
})(window.angular,window.buildfire); | templateUrl: 'templates/home.html',
controllerAs: 'ContentHome',
controller: 'ContentHomeCtrl' | random_line_split |
setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='pfile-tools',
version='0.5.0',
author='Nathan Vack',
author_email='[email protected]', | 'dump_pfile_header = pfile_tools.scripts.dump_pfile_header:main',
'anonymize_pfile = pfile_tools.scripts.anonymize_pfile:main'
]}
)
# setup(
# name='pfile-tools',
# version=pfile_tools.VERSION,
# packages=setuptools.find_packages(),
# data_files=[('', ['distribute_setup.py'])],
# license='BSD License',
# long_description=read('README'),
# url="https://github.com/njvack/pfile-tools",
# author="Nathan Vack",
# author_email="[email protected]",
# entry_points = {
# 'console_scripts': [
# 'dump_pfile_header = pfile_tools.scripts.dump_pfile_header:main',
# 'anonymize_pfile = pfile_tools.scripts.anonymize_pfile:main'
# ]
# }
# ) | license='BSD License',
url='https://github.com/njvack/pfile-tools',
packages=['pfile_tools'],
entry_points={
'console_scripts': [ | random_line_split |
MenuState.ts | /**
* @author Julien Midedji <[email protected]>
* @copyright 2017 Julien Midedji
* @license {@link https://github.com/ResamVi/spayle/blob/master/LICENSE MIT License}
*/
import Const from './Constants';
import Player from './Player';
/**
* The menu is displayed after loading and before playing.
* This is where options can be changed, credits viewed, name changed
* and the this.game started.
*
* @typedef {Phaser.State}
*/
export default
{
player: null, // TODO: Describe properties
planet: null,
title: null,
startButton: null,
optionButton: null,
backButton: null,
instructions: null,
menuMusic: null,
startMusic: null,
centerX: 0,
centerY: 0,
create: function()
{
// Center of screen (not the world!)
this.centerX = this.game.camera.width / 2;
this.centerY = this.game.camera.height / 2;
// Background
this.game.add.sprite(0, 0, 'background');
// Moon
this.planet = this.game.add.sprite(Const.PLAYER_START_X, Const.PLAYER_START_Y, 'moon');
this.planet.anchor.setTo(0.5, 0.5);
this.planet.scale.setTo(0.1, 0.1);
this.planet.pivot.set(Const.ORBIT_RADIUS, Const.ORBIT_RADIUS);
// Player (only used for displayal; not to actually control)
this.player = new Player(this.game);
// this.title
this.title = this.game.add.bitmapText(0, 0, 'menuFont', 'SPAYLE', 80);
this.title.updateTransform();
this.title.anchor.setTo(0.5, 0.5);
this.title.position.x = this.centerX + Const.TITLE_X_OFFSET;
this.title.position.y = this.centerY - Const.TITLE_Y_OFFSET;
this.game.add.tween(this.title.scale).to({x: 1.1, y: 1.1}, 2000, Phaser.Easing.Cubic.InOut, true, 10, -1, true);
// Buttons
this.startButton = this.createButton(-50, 1.5, this.play, 'buttonAtlas',
'yellow_button01.png',
'yellow_button02.png',
'yellow_button01.png');
this.optionButton = this.createButton(50, 1.5, this.moveDown, 'buttonAtlas',
'grey_button02.png',
'grey_button01.png',
'grey_button02.png');
this.backButton = this.createButton(850, 1.5, this.moveUp, 'buttonAtlas',
'grey_button02.png',
'grey_button01.png',
'grey_button02.png');
// Instructions
this.instructions = this.game.add.sprite(30, 870, 'instructions');
// Music
this.menuMusic = this.game.add.audio('menuMusic');
this.menuMusic.onDecoded.add(function () {
this.menuMusic.fadeIn(Const.AUDIO_FADE_DURATION, true);
}, this);
},
createButton: function(y, scale, func, atlas, onHover, onIdle, onClick)
{
let button = this.game.add.button(0, 0, atlas, func, this, onHover, onIdle, onClick, onIdle);
button.anchor.setTo(0.5, 0.5);
button.scale.setTo(scale, scale);
button.x = this.centerX + Const.BUTTON_X;
button.y = this.centerY + y;
return button;
},
play: function()
{
// Scale camera out for dramatic effect
this.game.add.tween(this.game.camera.scale).to({x: 0.5, y: 0.5}, 7000, Phaser.Easing.Cubic.InOut, true);
// Fade out all menu items
for (let sprite of [this.title, this.startButton, this.optionButton, this.backButton, this.instructions])
{
let t = this.game.add.tween(sprite).to({alpha: 0}, 1000, Phaser.Easing.Cubic.InOut, true, 0);
t.onComplete.add(function (invisibleSprite: Phaser.Sprite) {
invisibleSprite.destroy();
});
}
// Change music and start count down
this.menuMusic.fadeOut(1000);
// TODO: Can this be made local?
this.startMusic = this.game.add.audio('this.startMusic');
this.startMusic.onDecoded.add(function ()
{
this.startMusic.fadeIn(Const.AUDIO_FADE_DURATION);
}, this);
let countdown = this.game.add.audio('ignition');
countdown.onDecoded.add(function ()
{
countdown.play();
});
countdown.onStop.add(function ()
{
this.player.destroy(); |
moveUp: function()
{
this.game.add.tween(this.game.camera).to({y: 0}, 1500, Phaser.Easing.Cubic.Out, true);
},
moveDown: function()
{
this.game.add.tween(this.game.camera).to({y: 700}, 1500, Phaser.Easing.Cubic.Out, true);
},
update: function()
{
this.planet.rotation += Const.ORBIT_SPEED;
}
}; | this.game.state.start('play', false, false);
}, this);
}, | random_line_split |
stubconfiguration.py | # shtub - shell command stub
# Copyright (C) 2012-2013 Immobilien Scout GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. |
"""
this module provides the class Expectation which extends Execution with
a list of answers and the index of the current answer.
"""
__author__ = 'Alexander Metzner, Michael Gruber, Udo Juettner, Maximilien Riehl, Marcel Wolf'
from shtub.answer import Answer
from shtub.commandinput import CommandInput
class StubConfiguration(object):
"""
Represents the configuration of a command stub and contains the corresponding answers.
"""
def __init__(self, command, arguments=[], stdin=None, answers=[], initial_answer=0):
"""
will initialize a new object with the given properties.
answers and initial_answer are not mandatory.
"""
self.command_input = CommandInput(command, arguments, stdin)
self.answers = []
self.current_answer = initial_answer
for answer in answers:
self.answers.append(answer)
self.and_input = self.with_input
def as_dictionary(self):
"""
returns a dictionary representation of this stub configuration.
"""
answers_list = []
for answer in self.answers:
answer_dictionary = answer.as_dictionary()
answers_list.append(answer_dictionary)
result = {'command_input': self.command_input.as_dictionary(),
'answers': answers_list,
'current_answer': self.current_answer}
return result
def next_answer(self):
"""
returns the next answer in the list of answers or if the end of the
list is reached it will repeatedly return the last answer of the
list.
"""
if len(self.answers) == 0:
raise Exception('List of answers is empty!')
result = self.answers[self.current_answer]
if self.current_answer < len(self.answers) - 1:
self.current_answer += 1
return result
def then(self, answer):
"""
will append the given answer to the list of answers and return
the object itself for invocation chaining.
"""
self.answers.append(answer)
return self
def then_answer(self, stdout=None, stderr=None, return_code=0, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given properties.
"""
return self.then(Answer(stdout, stderr, return_code, milliseconds_to_wait))
def then_return(self, return_code, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given return_code.
"""
return self.then_answer(return_code=return_code, milliseconds_to_wait=milliseconds_to_wait)
def then_write(self, stdout=None, stderr=None, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given stdout and stderr output.
"""
return self.then_answer(stdout=stdout, stderr=stderr, milliseconds_to_wait=milliseconds_to_wait)
def at_least_with_arguments(self, *arguments):
"""
sets the given arguments and returns self for invocation chaining
"""
self.command_input.arguments = list(arguments)
return self
def with_input(self, stdin):
"""
sets the given arguments and returns self for invocation chaining
"""
self.command_input.stdin = stdin
return self
def __eq__(self, other):
return self.command_input == other.command_input \
and self.current_answer == other.current_answer \
and self.answers == other.answers
def __str__(self):
"""
returns a string representation of this stub configuration using the method "as_dictionary"
"""
return 'StubConfiguration %s' % (self.as_dictionary())
@staticmethod
def from_dictionary(dictionary):
"""
returns a new stub configuration object with the properties from the given dictionary
"""
answers = []
for answer_dictionary in dictionary['answers']:
answer = Answer.from_dictionary(answer_dictionary)
answers.append(answer)
command_input_dictionary = dictionary['command_input']
stub_configuration = StubConfiguration(
command_input_dictionary['command'],
command_input_dictionary[
'arguments'],
command_input_dictionary[
'stdin'],
answers,
dictionary['current_answer'])
return stub_configuration | random_line_split |
|
stubconfiguration.py | # shtub - shell command stub
# Copyright (C) 2012-2013 Immobilien Scout GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
this module provides the class Expectation which extends Execution with
a list of answers and the index of the current answer.
"""
__author__ = 'Alexander Metzner, Michael Gruber, Udo Juettner, Maximilien Riehl, Marcel Wolf'
from shtub.answer import Answer
from shtub.commandinput import CommandInput
class StubConfiguration(object):
"""
Represents the configuration of a command stub and contains the corresponding answers.
"""
def __init__(self, command, arguments=[], stdin=None, answers=[], initial_answer=0):
"""
will initialize a new object with the given properties.
answers and initial_answer are not mandatory.
"""
self.command_input = CommandInput(command, arguments, stdin)
self.answers = []
self.current_answer = initial_answer
for answer in answers:
self.answers.append(answer)
self.and_input = self.with_input
def as_dictionary(self):
"""
returns a dictionary representation of this stub configuration.
"""
answers_list = []
for answer in self.answers:
answer_dictionary = answer.as_dictionary()
answers_list.append(answer_dictionary)
result = {'command_input': self.command_input.as_dictionary(),
'answers': answers_list,
'current_answer': self.current_answer}
return result
def next_answer(self):
"""
returns the next answer in the list of answers or if the end of the
list is reached it will repeatedly return the last answer of the
list.
"""
if len(self.answers) == 0:
raise Exception('List of answers is empty!')
result = self.answers[self.current_answer]
if self.current_answer < len(self.answers) - 1:
|
return result
def then(self, answer):
"""
will append the given answer to the list of answers and return
the object itself for invocation chaining.
"""
self.answers.append(answer)
return self
def then_answer(self, stdout=None, stderr=None, return_code=0, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given properties.
"""
return self.then(Answer(stdout, stderr, return_code, milliseconds_to_wait))
def then_return(self, return_code, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given return_code.
"""
return self.then_answer(return_code=return_code, milliseconds_to_wait=milliseconds_to_wait)
def then_write(self, stdout=None, stderr=None, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given stdout and stderr output.
"""
return self.then_answer(stdout=stdout, stderr=stderr, milliseconds_to_wait=milliseconds_to_wait)
def at_least_with_arguments(self, *arguments):
"""
sets the given arguments and returns self for invocation chaining
"""
self.command_input.arguments = list(arguments)
return self
def with_input(self, stdin):
"""
sets the given arguments and returns self for invocation chaining
"""
self.command_input.stdin = stdin
return self
def __eq__(self, other):
return self.command_input == other.command_input \
and self.current_answer == other.current_answer \
and self.answers == other.answers
def __str__(self):
"""
returns a string representation of this stub configuration using the method "as_dictionary"
"""
return 'StubConfiguration %s' % (self.as_dictionary())
@staticmethod
def from_dictionary(dictionary):
"""
returns a new stub configuration object with the properties from the given dictionary
"""
answers = []
for answer_dictionary in dictionary['answers']:
answer = Answer.from_dictionary(answer_dictionary)
answers.append(answer)
command_input_dictionary = dictionary['command_input']
stub_configuration = StubConfiguration(
command_input_dictionary['command'],
command_input_dictionary[
'arguments'],
command_input_dictionary[
'stdin'],
answers,
dictionary['current_answer'])
return stub_configuration
| self.current_answer += 1 | conditional_block |
stubconfiguration.py | # shtub - shell command stub
# Copyright (C) 2012-2013 Immobilien Scout GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
this module provides the class Expectation which extends Execution with
a list of answers and the index of the current answer.
"""
__author__ = 'Alexander Metzner, Michael Gruber, Udo Juettner, Maximilien Riehl, Marcel Wolf'
from shtub.answer import Answer
from shtub.commandinput import CommandInput
class StubConfiguration(object):
"""
Represents the configuration of a command stub and contains the corresponding answers.
"""
def __init__(self, command, arguments=[], stdin=None, answers=[], initial_answer=0):
"""
will initialize a new object with the given properties.
answers and initial_answer are not mandatory.
"""
self.command_input = CommandInput(command, arguments, stdin)
self.answers = []
self.current_answer = initial_answer
for answer in answers:
self.answers.append(answer)
self.and_input = self.with_input
def as_dictionary(self):
"""
returns a dictionary representation of this stub configuration.
"""
answers_list = []
for answer in self.answers:
answer_dictionary = answer.as_dictionary()
answers_list.append(answer_dictionary)
result = {'command_input': self.command_input.as_dictionary(),
'answers': answers_list,
'current_answer': self.current_answer}
return result
def next_answer(self):
"""
returns the next answer in the list of answers or if the end of the
list is reached it will repeatedly return the last answer of the
list.
"""
if len(self.answers) == 0:
raise Exception('List of answers is empty!')
result = self.answers[self.current_answer]
if self.current_answer < len(self.answers) - 1:
self.current_answer += 1
return result
def then(self, answer):
"""
will append the given answer to the list of answers and return
the object itself for invocation chaining.
"""
self.answers.append(answer)
return self
def then_answer(self, stdout=None, stderr=None, return_code=0, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given properties.
"""
return self.then(Answer(stdout, stderr, return_code, milliseconds_to_wait))
def then_return(self, return_code, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given return_code.
"""
return self.then_answer(return_code=return_code, milliseconds_to_wait=milliseconds_to_wait)
def then_write(self, stdout=None, stderr=None, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given stdout and stderr output.
"""
return self.then_answer(stdout=stdout, stderr=stderr, milliseconds_to_wait=milliseconds_to_wait)
def at_least_with_arguments(self, *arguments):
"""
sets the given arguments and returns self for invocation chaining
"""
self.command_input.arguments = list(arguments)
return self
def with_input(self, stdin):
"""
sets the given arguments and returns self for invocation chaining
"""
self.command_input.stdin = stdin
return self
def __eq__(self, other):
return self.command_input == other.command_input \
and self.current_answer == other.current_answer \
and self.answers == other.answers
def | (self):
"""
returns a string representation of this stub configuration using the method "as_dictionary"
"""
return 'StubConfiguration %s' % (self.as_dictionary())
@staticmethod
def from_dictionary(dictionary):
"""
returns a new stub configuration object with the properties from the given dictionary
"""
answers = []
for answer_dictionary in dictionary['answers']:
answer = Answer.from_dictionary(answer_dictionary)
answers.append(answer)
command_input_dictionary = dictionary['command_input']
stub_configuration = StubConfiguration(
command_input_dictionary['command'],
command_input_dictionary[
'arguments'],
command_input_dictionary[
'stdin'],
answers,
dictionary['current_answer'])
return stub_configuration
| __str__ | identifier_name |
stubconfiguration.py | # shtub - shell command stub
# Copyright (C) 2012-2013 Immobilien Scout GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
this module provides the class Expectation which extends Execution with
a list of answers and the index of the current answer.
"""
__author__ = 'Alexander Metzner, Michael Gruber, Udo Juettner, Maximilien Riehl, Marcel Wolf'
from shtub.answer import Answer
from shtub.commandinput import CommandInput
class StubConfiguration(object):
"""
Represents the configuration of a command stub and contains the corresponding answers.
"""
def __init__(self, command, arguments=[], stdin=None, answers=[], initial_answer=0):
"""
will initialize a new object with the given properties.
answers and initial_answer are not mandatory.
"""
self.command_input = CommandInput(command, arguments, stdin)
self.answers = []
self.current_answer = initial_answer
for answer in answers:
self.answers.append(answer)
self.and_input = self.with_input
def as_dictionary(self):
"""
returns a dictionary representation of this stub configuration.
"""
answers_list = []
for answer in self.answers:
answer_dictionary = answer.as_dictionary()
answers_list.append(answer_dictionary)
result = {'command_input': self.command_input.as_dictionary(),
'answers': answers_list,
'current_answer': self.current_answer}
return result
def next_answer(self):
"""
returns the next answer in the list of answers or if the end of the
list is reached it will repeatedly return the last answer of the
list.
"""
if len(self.answers) == 0:
raise Exception('List of answers is empty!')
result = self.answers[self.current_answer]
if self.current_answer < len(self.answers) - 1:
self.current_answer += 1
return result
def then(self, answer):
"""
will append the given answer to the list of answers and return
the object itself for invocation chaining.
"""
self.answers.append(answer)
return self
def then_answer(self, stdout=None, stderr=None, return_code=0, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given properties.
"""
return self.then(Answer(stdout, stderr, return_code, milliseconds_to_wait))
def then_return(self, return_code, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given return_code.
"""
return self.then_answer(return_code=return_code, milliseconds_to_wait=milliseconds_to_wait)
def then_write(self, stdout=None, stderr=None, milliseconds_to_wait=None):
"""
a convenience method to "then" which will create a new answer
object with the given stdout and stderr output.
"""
return self.then_answer(stdout=stdout, stderr=stderr, milliseconds_to_wait=milliseconds_to_wait)
def at_least_with_arguments(self, *arguments):
"""
sets the given arguments and returns self for invocation chaining
"""
self.command_input.arguments = list(arguments)
return self
def with_input(self, stdin):
|
def __eq__(self, other):
return self.command_input == other.command_input \
and self.current_answer == other.current_answer \
and self.answers == other.answers
def __str__(self):
"""
returns a string representation of this stub configuration using the method "as_dictionary"
"""
return 'StubConfiguration %s' % (self.as_dictionary())
@staticmethod
def from_dictionary(dictionary):
"""
returns a new stub configuration object with the properties from the given dictionary
"""
answers = []
for answer_dictionary in dictionary['answers']:
answer = Answer.from_dictionary(answer_dictionary)
answers.append(answer)
command_input_dictionary = dictionary['command_input']
stub_configuration = StubConfiguration(
command_input_dictionary['command'],
command_input_dictionary[
'arguments'],
command_input_dictionary[
'stdin'],
answers,
dictionary['current_answer'])
return stub_configuration
| """
sets the given arguments and returns self for invocation chaining
"""
self.command_input.stdin = stdin
return self | identifier_body |
test_udpipe.py | #!/usr/bin/python
# vim:fileencoding=utf8
from __future__ import unicode_literals
import unittest
class TestUDPipe(unittest.TestCase):
def test_model(self):
import ufal.udpipe
model = ufal.udpipe.Model.load('test/data/test.model')
self.assertTrue(model)
tokenizer = model.newTokenizer(model.DEFAULT)
conlluOutput = ufal.udpipe.OutputFormat.newOutputFormat("conllu")
sentence = ufal.udpipe.Sentence()
error = ufal.udpipe.ProcessingError();
tokenizer.setText("Znamená to, že realitě nepodléhá. ");
self.assertTrue(tokenizer.nextSentence(sentence, error))
self.assertFalse(error.occurred())
self.assertTrue(model.tag(sentence, model.DEFAULT))
self.assertTrue(model.parse(sentence, model.DEFAULT))
self.assertEqual(conlluOutput.writeSentence(sentence), """\
# newdoc
# newpar
# sent_id = 1
# text = Znamená to, že realitě nepodléhá.
1 Znamená znamenat VERB VB-S---3P-AA--- Aspect=Imp|Mood=Ind|Negative=Pos|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 0 root _ _
2 to ten PRON PDNS1---------- Case=Nom|Gender=Neut|Number=Sing|PronType=Dem 1 nsubj _ SpaceAfter=No | 7 . . PUNCT Z:------------- _ 1 punct _ _
""")
self.assertFalse(tokenizer.nextSentence(sentence))
if __name__ == '__main__':
unittest.main() | 3 , , PUNCT Z:------------- _ 6 punct _ _
4 že že SCONJ J,------------- _ 6 mark _ _
5 realitě realita NOUN NNFS3-----A---- Case=Dat|Gender=Fem|Negative=Pos|Number=Sing 6 dobj _ _
6 nepodléhá podléhat VERB VB-S---3P-NA--- Aspect=Imp|Mood=Ind|Negative=Neg|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 1 ccomp _ SpaceAfter=No | random_line_split |
test_udpipe.py | #!/usr/bin/python
# vim:fileencoding=utf8
from __future__ import unicode_literals
import unittest
class TestUDPipe(unittest.TestCase):
def | (self):
import ufal.udpipe
model = ufal.udpipe.Model.load('test/data/test.model')
self.assertTrue(model)
tokenizer = model.newTokenizer(model.DEFAULT)
conlluOutput = ufal.udpipe.OutputFormat.newOutputFormat("conllu")
sentence = ufal.udpipe.Sentence()
error = ufal.udpipe.ProcessingError();
tokenizer.setText("Znamená to, že realitě nepodléhá. ");
self.assertTrue(tokenizer.nextSentence(sentence, error))
self.assertFalse(error.occurred())
self.assertTrue(model.tag(sentence, model.DEFAULT))
self.assertTrue(model.parse(sentence, model.DEFAULT))
self.assertEqual(conlluOutput.writeSentence(sentence), """\
# newdoc
# newpar
# sent_id = 1
# text = Znamená to, že realitě nepodléhá.
1 Znamená znamenat VERB VB-S---3P-AA--- Aspect=Imp|Mood=Ind|Negative=Pos|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 0 root _ _
2 to ten PRON PDNS1---------- Case=Nom|Gender=Neut|Number=Sing|PronType=Dem 1 nsubj _ SpaceAfter=No
3 , , PUNCT Z:------------- _ 6 punct _ _
4 že že SCONJ J,------------- _ 6 mark _ _
5 realitě realita NOUN NNFS3-----A---- Case=Dat|Gender=Fem|Negative=Pos|Number=Sing 6 dobj _ _
6 nepodléhá podléhat VERB VB-S---3P-NA--- Aspect=Imp|Mood=Ind|Negative=Neg|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 1 ccomp _ SpaceAfter=No
7 . . PUNCT Z:------------- _ 1 punct _ _
""")
self.assertFalse(tokenizer.nextSentence(sentence))
if __name__ == '__main__':
unittest.main()
| test_model | identifier_name |
test_udpipe.py | #!/usr/bin/python
# vim:fileencoding=utf8
from __future__ import unicode_literals
import unittest
class TestUDPipe(unittest.TestCase):
| '__main__':
unittest.main()
| def test_model(self):
import ufal.udpipe
model = ufal.udpipe.Model.load('test/data/test.model')
self.assertTrue(model)
tokenizer = model.newTokenizer(model.DEFAULT)
conlluOutput = ufal.udpipe.OutputFormat.newOutputFormat("conllu")
sentence = ufal.udpipe.Sentence()
error = ufal.udpipe.ProcessingError();
tokenizer.setText("Znamená to, že realitě nepodléhá. ");
self.assertTrue(tokenizer.nextSentence(sentence, error))
self.assertFalse(error.occurred())
self.assertTrue(model.tag(sentence, model.DEFAULT))
self.assertTrue(model.parse(sentence, model.DEFAULT))
self.assertEqual(conlluOutput.writeSentence(sentence), """\
# newdoc
# newpar
# sent_id = 1
# text = Znamená to, že realitě nepodléhá.
1 Znamená znamenat VERB VB-S---3P-AA--- Aspect=Imp|Mood=Ind|Negative=Pos|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 0 root _ _
2 to ten PRON PDNS1---------- Case=Nom|Gender=Neut|Number=Sing|PronType=Dem 1 nsubj _ SpaceAfter=No
3 , , PUNCT Z:------------- _ 6 punct _ _
4 že že SCONJ J,------------- _ 6 mark _ _
5 realitě realita NOUN NNFS3-----A---- Case=Dat|Gender=Fem|Negative=Pos|Number=Sing 6 dobj _ _
6 nepodléhá podléhat VERB VB-S---3P-NA--- Aspect=Imp|Mood=Ind|Negative=Neg|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 1 ccomp _ SpaceAfter=No
7 . . PUNCT Z:------------- _ 1 punct _ _
""")
self.assertFalse(tokenizer.nextSentence(sentence))
if __name__ == | identifier_body |
test_udpipe.py | #!/usr/bin/python
# vim:fileencoding=utf8
from __future__ import unicode_literals
import unittest
class TestUDPipe(unittest.TestCase):
def test_model(self):
import ufal.udpipe
model = ufal.udpipe.Model.load('test/data/test.model')
self.assertTrue(model)
tokenizer = model.newTokenizer(model.DEFAULT)
conlluOutput = ufal.udpipe.OutputFormat.newOutputFormat("conllu")
sentence = ufal.udpipe.Sentence()
error = ufal.udpipe.ProcessingError();
tokenizer.setText("Znamená to, že realitě nepodléhá. ");
self.assertTrue(tokenizer.nextSentence(sentence, error))
self.assertFalse(error.occurred())
self.assertTrue(model.tag(sentence, model.DEFAULT))
self.assertTrue(model.parse(sentence, model.DEFAULT))
self.assertEqual(conlluOutput.writeSentence(sentence), """\
# newdoc
# newpar
# sent_id = 1
# text = Znamená to, že realitě nepodléhá.
1 Znamená znamenat VERB VB-S---3P-AA--- Aspect=Imp|Mood=Ind|Negative=Pos|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 0 root _ _
2 to ten PRON PDNS1---------- Case=Nom|Gender=Neut|Number=Sing|PronType=Dem 1 nsubj _ SpaceAfter=No
3 , , PUNCT Z:------------- _ 6 punct _ _
4 že že SCONJ J,------------- _ 6 mark _ _
5 realitě realita NOUN NNFS3-----A---- Case=Dat|Gender=Fem|Negative=Pos|Number=Sing 6 dobj _ _
6 nepodléhá podléhat VERB VB-S---3P-NA--- Aspect=Imp|Mood=Ind|Negative=Neg|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 1 ccomp _ SpaceAfter=No
7 . . PUNCT Z:------------- _ 1 punct _ _
""")
self.assertFalse(tokenizer.nextSentence(sentence))
if __name__ == '__main__':
unittest.main()
| conditional_block |
||
index.js | /* eslint-env mocha */
import path from 'path';
import fs from 'fs';
import assert from 'assert';
import {transformFileSync} from 'babel-core';
function trim(str) |
describe('Transpile ES7 async/await to vanilla ES6 Promise chains -', function () {
// sometimes 2000 isn't enough when starting up in coverage mode.
this.timeout(5000);
const fixturesDir = path.join(__dirname, 'fixtures');
fs.readdirSync(fixturesDir).forEach(caseName => {
const fixtureDir = path.join(fixturesDir, caseName);
const actualPath = path.join(fixtureDir, 'actual.js');
if (!fs.statSync(fixtureDir).isDirectory()) {
return;
}
it(caseName.split('-').join(' '), () => {
const actual = transformFileSync(actualPath).code;
const expected = fs.readFileSync(
path.join(fixtureDir, 'expected.js')
).toString();
assert.equal(trim(actual), trim(expected));
});
});
});
| {
return str.replace(/^\s+|\s+$/, '');
} | identifier_body |
index.js | /* eslint-env mocha */
import path from 'path';
import fs from 'fs';
import assert from 'assert';
import {transformFileSync} from 'babel-core';
function | (str) {
return str.replace(/^\s+|\s+$/, '');
}
describe('Transpile ES7 async/await to vanilla ES6 Promise chains -', function () {
// sometimes 2000 isn't enough when starting up in coverage mode.
this.timeout(5000);
const fixturesDir = path.join(__dirname, 'fixtures');
fs.readdirSync(fixturesDir).forEach(caseName => {
const fixtureDir = path.join(fixturesDir, caseName);
const actualPath = path.join(fixtureDir, 'actual.js');
if (!fs.statSync(fixtureDir).isDirectory()) {
return;
}
it(caseName.split('-').join(' '), () => {
const actual = transformFileSync(actualPath).code;
const expected = fs.readFileSync(
path.join(fixtureDir, 'expected.js')
).toString();
assert.equal(trim(actual), trim(expected));
});
});
});
| trim | identifier_name |
index.js | /* eslint-env mocha */
import path from 'path';
import fs from 'fs';
import assert from 'assert';
import {transformFileSync} from 'babel-core';
function trim(str) {
return str.replace(/^\s+|\s+$/, '');
}
describe('Transpile ES7 async/await to vanilla ES6 Promise chains -', function () {
// sometimes 2000 isn't enough when starting up in coverage mode.
this.timeout(5000);
const fixturesDir = path.join(__dirname, 'fixtures');
fs.readdirSync(fixturesDir).forEach(caseName => {
const fixtureDir = path.join(fixturesDir, caseName);
const actualPath = path.join(fixtureDir, 'actual.js');
if (!fs.statSync(fixtureDir).isDirectory()) |
it(caseName.split('-').join(' '), () => {
const actual = transformFileSync(actualPath).code;
const expected = fs.readFileSync(
path.join(fixtureDir, 'expected.js')
).toString();
assert.equal(trim(actual), trim(expected));
});
});
});
| {
return;
} | conditional_block |
index.js | /* eslint-env mocha */
import path from 'path';
import fs from 'fs'; | function trim(str) {
return str.replace(/^\s+|\s+$/, '');
}
describe('Transpile ES7 async/await to vanilla ES6 Promise chains -', function () {
// sometimes 2000 isn't enough when starting up in coverage mode.
this.timeout(5000);
const fixturesDir = path.join(__dirname, 'fixtures');
fs.readdirSync(fixturesDir).forEach(caseName => {
const fixtureDir = path.join(fixturesDir, caseName);
const actualPath = path.join(fixtureDir, 'actual.js');
if (!fs.statSync(fixtureDir).isDirectory()) {
return;
}
it(caseName.split('-').join(' '), () => {
const actual = transformFileSync(actualPath).code;
const expected = fs.readFileSync(
path.join(fixtureDir, 'expected.js')
).toString();
assert.equal(trim(actual), trim(expected));
});
});
}); | import assert from 'assert';
import {transformFileSync} from 'babel-core';
| random_line_split |
bigint_extensions.rs | use ramp::int::Int;
use core::convert::From;
const DEFAULT_BUCKET_SIZE: usize = 5;
pub trait ModPow<T, K> {
fn mod_pow(&self, exp: &T, m: &K) -> Self;
fn mod_pow_k(&self, exp: &T, m: &K, k: usize) -> Self;
}
impl ModPow<Int, Int> for Int {
fn mod_pow(&self, exp: &Int, m: &Int) -> Int {
self.mod_pow_k(exp, m, DEFAULT_BUCKET_SIZE)
}
fn mod_pow_k(&self, exp: &Int, m: &Int, k: usize) -> Int {
let base = 2 << (k - 1);
let mut table = Vec::with_capacity(base);
table.push(Int::one());
for i in 1..base {
let last = table.get_mut(i-1).unwrap().clone();
table.push((last * self) % m);
}
let mut r = Int::one();
for i in digits_of_n(exp, base).iter().rev() {
for _ in 0..k {
r = &r * &r % m
}
if *i != 0 {
r = &r * table.get(*i).unwrap() % m;
}
}
r
}
}
fn digits_of_n(e: &Int, b: usize) -> Vec<usize> |
pub trait ModInverse<T> : Sized {
fn mod_inverse(&self, n: &T) -> Option<Self>;
}
impl ModInverse<Int> for Int {
fn mod_inverse(&self, n: &Int) -> Option<Int> {
let mut u1 = Int::one();
let mut u3 = (*self).clone();
let mut v1 = Int::zero();
let mut v3 = (*n).clone();
let mut iter = true;
while v3 != Int::zero()
{
let q = &u3 / &v3;
let t3 = u3 % &v3;
let t1 = u1 + &q * &v1;
u1 = v1.clone();
v1 = t1.clone();
u3 = v3.clone();
v3 = t3.clone();
iter = !iter;
}
if u3 != Int::one() {
return None;
}
let inv = if iter == false {
n - u1
} else {
u1
};
Some(inv)
}
}
#[cfg(test)]
mod tests {
use super::*;
use ramp::{ Int, RandomInt };
use rand;
use test::Bencher;
#[test]
fn test_bigint_mod_pow() {
fn check(b: &Int, e: &Int, m: &Int, r: &Int) {
assert_eq!(b.mod_pow(&e, &m), *r);
}
fn check_i64(b: i64, e: i64, m: i64, r: i64) {
let big_b = Int::from(b);
let big_e = Int::from(e);
let big_m = Int::from(m);
let big_r = Int::from(r);
check(&big_b, &big_e, &big_m, &big_r);
}
check_i64(-2, 5, 33, -32);
check_i64(-2, 5, 32, 0);
check_i64(-1, 3, 10, -1);
check_i64(-1, 4, 10, 1);
check_i64(0, 2352, 21, 0);
check_i64(1, 26, 21, 1);
check_i64(2, 5, 33, 32);
check_i64(2, 5, 32, 0);
check_i64(::std::i64::MAX, ::std::i64::MAX, 2, 1);
}
#[test]
fn test_bigint_mod_inverse() {
fn check(a: i64, b: i64, c: i64) {
let big_a = Int::from(a);
let big_b = Int::from(b);
let big_c = Int::from(c);
assert_eq!(big_a.mod_inverse(&big_b).unwrap(), big_c);
}
fn check_none(a: i64, b: i64) {
let big_a = Int::from(a);
let big_b = Int::from(b);
assert_eq!(big_a.mod_inverse(&big_b), None);
}
check(7, 26, 15);
check(37, 216, 181);
check(17, 3120, 2753);
check(7, -72, 31);
check_none(0, 21);
check_none(0, 198);
check_none(7, 21);
}
#[bench]
fn bench_mod_pow(b: &mut Bencher) {
let mut rng = rand::thread_rng();
let base = rng.gen_uint(265);
let m = rng.gen_uint(265);
b.iter(|| {
let exp = rng.gen_uint(265);
base.mod_pow(&exp, &m);
});
}
#[bench]
fn bench_mod_inverse(b: &mut Bencher) {
let mut rng = rand::thread_rng();
let m = rng.gen_uint(128);
b.iter(|| {
let a = rng.gen_uint(128);
a.mod_inverse(&m);
});
}
}
| {
let mut digits = Vec::new();
let mut n = (*e).clone();
let base = Int::from(b);
while n > Int::zero() {
digits.push(usize::from(&(&n % &base)));
n = &n / &base;
}
digits
} | identifier_body |
bigint_extensions.rs | use ramp::int::Int;
use core::convert::From;
const DEFAULT_BUCKET_SIZE: usize = 5;
pub trait ModPow<T, K> {
fn mod_pow(&self, exp: &T, m: &K) -> Self;
fn mod_pow_k(&self, exp: &T, m: &K, k: usize) -> Self;
}
impl ModPow<Int, Int> for Int {
fn mod_pow(&self, exp: &Int, m: &Int) -> Int {
self.mod_pow_k(exp, m, DEFAULT_BUCKET_SIZE)
}
fn mod_pow_k(&self, exp: &Int, m: &Int, k: usize) -> Int {
let base = 2 << (k - 1);
let mut table = Vec::with_capacity(base);
table.push(Int::one());
for i in 1..base {
let last = table.get_mut(i-1).unwrap().clone();
table.push((last * self) % m);
}
let mut r = Int::one();
for i in digits_of_n(exp, base).iter().rev() {
for _ in 0..k {
r = &r * &r % m
}
if *i != 0 {
r = &r * table.get(*i).unwrap() % m;
}
}
r
}
}
fn digits_of_n(e: &Int, b: usize) -> Vec<usize> {
let mut digits = Vec::new();
let mut n = (*e).clone();
let base = Int::from(b);
while n > Int::zero() {
digits.push(usize::from(&(&n % &base)));
n = &n / &base;
}
digits
}
pub trait ModInverse<T> : Sized {
fn mod_inverse(&self, n: &T) -> Option<Self>;
}
impl ModInverse<Int> for Int {
fn mod_inverse(&self, n: &Int) -> Option<Int> {
let mut u1 = Int::one();
let mut u3 = (*self).clone();
let mut v1 = Int::zero();
let mut v3 = (*n).clone();
let mut iter = true;
while v3 != Int::zero()
{
let q = &u3 / &v3;
let t3 = u3 % &v3;
let t1 = u1 + &q * &v1;
u1 = v1.clone();
v1 = t1.clone();
u3 = v3.clone();
v3 = t3.clone();
iter = !iter;
}
if u3 != Int::one() {
return None;
}
let inv = if iter == false {
n - u1
} else {
u1
};
Some(inv)
}
}
#[cfg(test)]
mod tests {
use super::*;
use ramp::{ Int, RandomInt };
use rand;
use test::Bencher;
#[test]
fn test_bigint_mod_pow() {
fn check(b: &Int, e: &Int, m: &Int, r: &Int) {
assert_eq!(b.mod_pow(&e, &m), *r);
}
fn check_i64(b: i64, e: i64, m: i64, r: i64) {
let big_b = Int::from(b);
let big_e = Int::from(e);
let big_m = Int::from(m);
let big_r = Int::from(r);
check(&big_b, &big_e, &big_m, &big_r);
}
check_i64(-2, 5, 33, -32);
check_i64(-2, 5, 32, 0);
check_i64(-1, 3, 10, -1);
check_i64(-1, 4, 10, 1);
check_i64(0, 2352, 21, 0);
check_i64(1, 26, 21, 1);
check_i64(2, 5, 33, 32);
check_i64(2, 5, 32, 0);
check_i64(::std::i64::MAX, ::std::i64::MAX, 2, 1);
}
#[test]
fn test_bigint_mod_inverse() {
fn check(a: i64, b: i64, c: i64) {
let big_a = Int::from(a);
let big_b = Int::from(b);
let big_c = Int::from(c);
assert_eq!(big_a.mod_inverse(&big_b).unwrap(), big_c);
}
fn | (a: i64, b: i64) {
let big_a = Int::from(a);
let big_b = Int::from(b);
assert_eq!(big_a.mod_inverse(&big_b), None);
}
check(7, 26, 15);
check(37, 216, 181);
check(17, 3120, 2753);
check(7, -72, 31);
check_none(0, 21);
check_none(0, 198);
check_none(7, 21);
}
#[bench]
fn bench_mod_pow(b: &mut Bencher) {
let mut rng = rand::thread_rng();
let base = rng.gen_uint(265);
let m = rng.gen_uint(265);
b.iter(|| {
let exp = rng.gen_uint(265);
base.mod_pow(&exp, &m);
});
}
#[bench]
fn bench_mod_inverse(b: &mut Bencher) {
let mut rng = rand::thread_rng();
let m = rng.gen_uint(128);
b.iter(|| {
let a = rng.gen_uint(128);
a.mod_inverse(&m);
});
}
}
| check_none | identifier_name |
bigint_extensions.rs | use ramp::int::Int;
use core::convert::From;
const DEFAULT_BUCKET_SIZE: usize = 5;
pub trait ModPow<T, K> {
fn mod_pow(&self, exp: &T, m: &K) -> Self;
fn mod_pow_k(&self, exp: &T, m: &K, k: usize) -> Self;
}
impl ModPow<Int, Int> for Int {
fn mod_pow(&self, exp: &Int, m: &Int) -> Int {
self.mod_pow_k(exp, m, DEFAULT_BUCKET_SIZE)
}
fn mod_pow_k(&self, exp: &Int, m: &Int, k: usize) -> Int {
let base = 2 << (k - 1);
let mut table = Vec::with_capacity(base);
table.push(Int::one());
for i in 1..base {
let last = table.get_mut(i-1).unwrap().clone();
table.push((last * self) % m);
}
let mut r = Int::one();
for i in digits_of_n(exp, base).iter().rev() {
for _ in 0..k {
r = &r * &r % m
}
if *i != 0 {
r = &r * table.get(*i).unwrap() % m;
}
}
r
}
}
fn digits_of_n(e: &Int, b: usize) -> Vec<usize> {
let mut digits = Vec::new();
let mut n = (*e).clone();
let base = Int::from(b);
while n > Int::zero() {
digits.push(usize::from(&(&n % &base)));
n = &n / &base;
}
digits
}
pub trait ModInverse<T> : Sized {
fn mod_inverse(&self, n: &T) -> Option<Self>;
}
impl ModInverse<Int> for Int {
fn mod_inverse(&self, n: &Int) -> Option<Int> {
let mut u1 = Int::one();
let mut u3 = (*self).clone();
let mut v1 = Int::zero();
let mut v3 = (*n).clone();
let mut iter = true;
while v3 != Int::zero()
{
let q = &u3 / &v3;
let t3 = u3 % &v3;
let t1 = u1 + &q * &v1;
u1 = v1.clone();
v1 = t1.clone();
u3 = v3.clone();
v3 = t3.clone();
iter = !iter;
}
if u3 != Int::one() {
return None;
}
let inv = if iter == false {
n - u1
} else {
u1
};
Some(inv)
}
}
#[cfg(test)]
mod tests {
use super::*;
use ramp::{ Int, RandomInt }; | #[test]
fn test_bigint_mod_pow() {
fn check(b: &Int, e: &Int, m: &Int, r: &Int) {
assert_eq!(b.mod_pow(&e, &m), *r);
}
fn check_i64(b: i64, e: i64, m: i64, r: i64) {
let big_b = Int::from(b);
let big_e = Int::from(e);
let big_m = Int::from(m);
let big_r = Int::from(r);
check(&big_b, &big_e, &big_m, &big_r);
}
check_i64(-2, 5, 33, -32);
check_i64(-2, 5, 32, 0);
check_i64(-1, 3, 10, -1);
check_i64(-1, 4, 10, 1);
check_i64(0, 2352, 21, 0);
check_i64(1, 26, 21, 1);
check_i64(2, 5, 33, 32);
check_i64(2, 5, 32, 0);
check_i64(::std::i64::MAX, ::std::i64::MAX, 2, 1);
}
#[test]
fn test_bigint_mod_inverse() {
fn check(a: i64, b: i64, c: i64) {
let big_a = Int::from(a);
let big_b = Int::from(b);
let big_c = Int::from(c);
assert_eq!(big_a.mod_inverse(&big_b).unwrap(), big_c);
}
fn check_none(a: i64, b: i64) {
let big_a = Int::from(a);
let big_b = Int::from(b);
assert_eq!(big_a.mod_inverse(&big_b), None);
}
check(7, 26, 15);
check(37, 216, 181);
check(17, 3120, 2753);
check(7, -72, 31);
check_none(0, 21);
check_none(0, 198);
check_none(7, 21);
}
#[bench]
fn bench_mod_pow(b: &mut Bencher) {
let mut rng = rand::thread_rng();
let base = rng.gen_uint(265);
let m = rng.gen_uint(265);
b.iter(|| {
let exp = rng.gen_uint(265);
base.mod_pow(&exp, &m);
});
}
#[bench]
fn bench_mod_inverse(b: &mut Bencher) {
let mut rng = rand::thread_rng();
let m = rng.gen_uint(128);
b.iter(|| {
let a = rng.gen_uint(128);
a.mod_inverse(&m);
});
}
} |
use rand;
use test::Bencher;
| random_line_split |
bigint_extensions.rs | use ramp::int::Int;
use core::convert::From;
const DEFAULT_BUCKET_SIZE: usize = 5;
pub trait ModPow<T, K> {
fn mod_pow(&self, exp: &T, m: &K) -> Self;
fn mod_pow_k(&self, exp: &T, m: &K, k: usize) -> Self;
}
impl ModPow<Int, Int> for Int {
fn mod_pow(&self, exp: &Int, m: &Int) -> Int {
self.mod_pow_k(exp, m, DEFAULT_BUCKET_SIZE)
}
fn mod_pow_k(&self, exp: &Int, m: &Int, k: usize) -> Int {
let base = 2 << (k - 1);
let mut table = Vec::with_capacity(base);
table.push(Int::one());
for i in 1..base {
let last = table.get_mut(i-1).unwrap().clone();
table.push((last * self) % m);
}
let mut r = Int::one();
for i in digits_of_n(exp, base).iter().rev() {
for _ in 0..k {
r = &r * &r % m
}
if *i != 0 {
r = &r * table.get(*i).unwrap() % m;
}
}
r
}
}
fn digits_of_n(e: &Int, b: usize) -> Vec<usize> {
let mut digits = Vec::new();
let mut n = (*e).clone();
let base = Int::from(b);
while n > Int::zero() {
digits.push(usize::from(&(&n % &base)));
n = &n / &base;
}
digits
}
pub trait ModInverse<T> : Sized {
fn mod_inverse(&self, n: &T) -> Option<Self>;
}
impl ModInverse<Int> for Int {
fn mod_inverse(&self, n: &Int) -> Option<Int> {
let mut u1 = Int::one();
let mut u3 = (*self).clone();
let mut v1 = Int::zero();
let mut v3 = (*n).clone();
let mut iter = true;
while v3 != Int::zero()
{
let q = &u3 / &v3;
let t3 = u3 % &v3;
let t1 = u1 + &q * &v1;
u1 = v1.clone();
v1 = t1.clone();
u3 = v3.clone();
v3 = t3.clone();
iter = !iter;
}
if u3 != Int::one() {
return None;
}
let inv = if iter == false | else {
u1
};
Some(inv)
}
}
#[cfg(test)]
mod tests {
use super::*;
use ramp::{ Int, RandomInt };
use rand;
use test::Bencher;
#[test]
fn test_bigint_mod_pow() {
fn check(b: &Int, e: &Int, m: &Int, r: &Int) {
assert_eq!(b.mod_pow(&e, &m), *r);
}
fn check_i64(b: i64, e: i64, m: i64, r: i64) {
let big_b = Int::from(b);
let big_e = Int::from(e);
let big_m = Int::from(m);
let big_r = Int::from(r);
check(&big_b, &big_e, &big_m, &big_r);
}
check_i64(-2, 5, 33, -32);
check_i64(-2, 5, 32, 0);
check_i64(-1, 3, 10, -1);
check_i64(-1, 4, 10, 1);
check_i64(0, 2352, 21, 0);
check_i64(1, 26, 21, 1);
check_i64(2, 5, 33, 32);
check_i64(2, 5, 32, 0);
check_i64(::std::i64::MAX, ::std::i64::MAX, 2, 1);
}
#[test]
fn test_bigint_mod_inverse() {
fn check(a: i64, b: i64, c: i64) {
let big_a = Int::from(a);
let big_b = Int::from(b);
let big_c = Int::from(c);
assert_eq!(big_a.mod_inverse(&big_b).unwrap(), big_c);
}
fn check_none(a: i64, b: i64) {
let big_a = Int::from(a);
let big_b = Int::from(b);
assert_eq!(big_a.mod_inverse(&big_b), None);
}
check(7, 26, 15);
check(37, 216, 181);
check(17, 3120, 2753);
check(7, -72, 31);
check_none(0, 21);
check_none(0, 198);
check_none(7, 21);
}
#[bench]
fn bench_mod_pow(b: &mut Bencher) {
let mut rng = rand::thread_rng();
let base = rng.gen_uint(265);
let m = rng.gen_uint(265);
b.iter(|| {
let exp = rng.gen_uint(265);
base.mod_pow(&exp, &m);
});
}
#[bench]
fn bench_mod_inverse(b: &mut Bencher) {
let mut rng = rand::thread_rng();
let m = rng.gen_uint(128);
b.iter(|| {
let a = rng.gen_uint(128);
a.mod_inverse(&m);
});
}
}
| {
n - u1
} | conditional_block |
file_info.rs | extern crate libc;
use std::mem;
use crate::libproc::helpers;
use crate::libproc::proc_pid::{ListPIDInfo, PidInfoFlavor};
#[cfg(target_os = "macos")]
use self::libc::{c_int, c_void};
// this extern block links to the libproc library
// Original signatures of functions can be found at http://opensource.apple.com/source/Libc/Libc-594.9.4/darwin/libproc.c
#[cfg(target_os = "macos")]
#[link(name = "proc", kind = "dylib")]
extern {
// This method is supported in the minimum version of Mac OS X which is 10.5
fn proc_pidfdinfo(pid: c_int, fd: c_int, flavor: c_int, buffer: *mut c_void, buffersize: c_int) -> c_int;
}
/// Flavor of Pid FileDescriptor info for different types of File Descriptors
pub enum PIDFDInfoFlavor {
/// VNode Info
VNodeInfo = 1,
/// VNode Path Info
VNodePathInfo = 2,
/// Socket info
SocketInfo = 3,
/// PSEM Info
PSEMInfo = 4,
/// PSHM Info
PSHMInfo = 5,
/// Pipe Info
PipeInfo = 6,
/// KQueue Info
KQueueInfo = 7,
/// Apple Talk Info
ATalkInfo = 8,
}
/// Struct for Listing File Descriptors
pub struct ListFDs;
impl ListPIDInfo for ListFDs {
type Item = ProcFDInfo;
fn flavor() -> PidInfoFlavor { PidInfoFlavor::ListFDs }
}
/// Struct to hold info about a Processes FileDescriptor Info
#[repr(C)]
pub struct ProcFDInfo {
/// File Descriptor
pub proc_fd: i32,
/// File Descriptor type
pub proc_fdtype: u32,
}
/// Enum for different File Descriptor types
#[derive(Copy, Clone, Debug)]
pub enum ProcFDType {
/// AppleTalk
ATalk = 0,
/// Vnode
VNode = 1,
/// Socket
Socket = 2,
/// POSIX shared memory
PSHM = 3,
/// POSIX semaphore
PSEM = 4,
/// Kqueue
KQueue = 5,
/// Pipe
Pipe = 6,
/// FSEvents
FSEvents = 7,
/// Unknown
Unknown,
}
impl From<u32> for ProcFDType {
fn from(value: u32) -> ProcFDType {
match value {
0 => ProcFDType::ATalk,
1 => ProcFDType::VNode,
2 => ProcFDType::Socket,
3 => ProcFDType::PSHM,
4 => ProcFDType::PSEM,
5 => ProcFDType::KQueue,
6 => ProcFDType::Pipe,
7 => ProcFDType::FSEvents,
_ => ProcFDType::Unknown,
}
}
}
/// The `PIDFDInfo` trait is needed for polymorphism on pidfdinfo types, also abstracting flavor in order to provide
/// type-guaranteed flavor correctness
pub trait PIDFDInfo: Default {
/// Return the Pid File Descriptor Info flavor of the implementing struct
fn flavor() -> PIDFDInfoFlavor;
}
/// Returns the information about file descriptors of the process that match pid passed in.
///
/// # Examples
///
/// ```
/// use std::io::Write;
/// use std::net::TcpListener;
/// use libproc::libproc::proc_pid::{listpidinfo, pidinfo, ListThreads};
/// use libproc::libproc::bsd_info::{BSDInfo};
/// use libproc::libproc::net_info::{SocketFDInfo, SocketInfoKind};
/// use libproc::libproc::file_info::{pidfdinfo, ListFDs, ProcFDType};
/// use std::process;
///
/// let pid = process::id() as i32;
///
/// // Open TCP port:8000 to test.
/// let _listener = TcpListener::bind("127.0.0.1:8000");
///
/// if let Ok(info) = pidinfo::<BSDInfo>(pid, 0) {
/// if let Ok(fds) = listpidinfo::<ListFDs>(pid, info.pbi_nfiles as usize) {
/// for fd in &fds {
/// match fd.proc_fdtype.into() {
/// ProcFDType::Socket => {
/// if let Ok(socket) = pidfdinfo::<SocketFDInfo>(pid, fd.proc_fd) {
/// match socket.psi.soi_kind.into() {
/// SocketInfoKind::Tcp => {
/// // access to the member of `soi_proto` is unsafe becasuse of union type.
/// let info = unsafe { socket.psi.soi_proto.pri_tcp };
///
/// // change endian and cut off because insi_lport is network endian and 16bit witdh.
/// let mut port = 0;
/// port |= info.tcpsi_ini.insi_lport >> 8 & 0x00ff;
/// port |= info.tcpsi_ini.insi_lport << 8 & 0xff00;
///
/// // access to the member of `insi_laddr` is unsafe becasuse of union type.
/// let s_addr = unsafe { info.tcpsi_ini.insi_laddr.ina_46.i46a_addr4.s_addr };
///
/// // change endian because insi_laddr is network endian.
/// let mut addr = 0;
/// addr |= s_addr >> 24 & 0x000000ff;
/// addr |= s_addr >> 8 & 0x0000ff00;
/// addr |= s_addr << 8 & 0x00ff0000;
/// addr |= s_addr << 24 & 0xff000000;
///
/// println!("{}.{}.{}.{}:{}", addr >> 24 & 0xff, addr >> 16 & 0xff, addr >> 8 & 0xff, addr & 0xff, port);
/// }
/// _ => (),
/// }
/// }
/// }
/// _ => (),
/// }
/// }
/// }
/// }
/// ```
///
#[cfg(target_os = "macos")]
pub fn | <T: PIDFDInfo>(pid: i32, fd: i32) -> Result<T, String> {
let flavor = T::flavor() as i32;
let buffer_size = mem::size_of::<T>() as i32;
let mut pidinfo = T::default();
let buffer_ptr = &mut pidinfo as *mut _ as *mut c_void;
let ret: i32;
unsafe {
ret = proc_pidfdinfo(pid, fd, flavor, buffer_ptr, buffer_size);
};
if ret <= 0 {
Err(helpers::get_errno_with_message(ret))
} else {
Ok(pidinfo)
}
}
#[cfg(not(target_os = "macos"))]
pub fn pidfdinfo<T: PIDFDInfo>(_pid: i32, _fd: i32) -> Result<T, String> {
unimplemented!()
}
#[cfg(all(test, target_os = "macos"))]
mod test {
use crate::libproc::bsd_info::BSDInfo;
use crate::libproc::file_info::{ListFDs, ProcFDType};
use crate::libproc::net_info::{SocketFDInfo, SocketInfoKind};
use crate::libproc::proc_pid::{listpidinfo, pidinfo};
use super::pidfdinfo;
#[test]
fn pidfdinfo_test() {
use std::process;
use std::net::TcpListener;
let pid = process::id() as i32;
let _listener = TcpListener::bind("127.0.0.1:65535");
let info = pidinfo::<BSDInfo>(pid, 0).expect("pidinfo() failed");
let fds = listpidinfo::<ListFDs>(pid, info.pbi_nfiles as usize).expect("listpidinfo() failed");
for fd in fds {
if let ProcFDType::Socket = fd.proc_fdtype.into() {
let socket = pidfdinfo::<SocketFDInfo>(pid, fd.proc_fd).expect("pidfdinfo() failed");
if let SocketInfoKind::Tcp = socket.psi.soi_kind.into() {
unsafe {
let info = socket.psi.soi_proto.pri_tcp;
assert_eq!(socket.psi.soi_protocol, libc::IPPROTO_TCP);
assert_eq!(info.tcpsi_ini.insi_lport as u32, 65535);
}
}
}
}
}
} | pidfdinfo | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.