file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
pyrarcr-0.2.py | #!/usr/bin/env python3
##### ##### ##### ##### #### ####
# # # # # # # # # # #### #### # # #
##### #### ##### ##### ##### # # # # ####
# # # # # # # # # # # # #
# # # # # # # # #### # #### # ####
#finds the password of a desired rar or zip file using a brute-force algorithm
##will fail to find the password if the password has a character that isnt in
##the english alphabet or isnt a number (you can change the char. list though)
#now using itertools!
#importing needed modules
import time,os,sys,shutil,itertools
#checking if the user has unrar/p7zip installed
for which in ["unrar","p7zip"]:
if not shutil.which(which):
print("ERROR:",which,"isn't installed.\nExiting...")
sys.exit(-1)
#defining the function
def | (rf):
alphabet="aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ1234567890"
start=time.time()
tryn=0
for a in range(1,len(alphabet)+1):
for b in itertools.product(alphabet,repeat=a):
k="".join(b)
if rf[-4:]==".rar":
print("Trying:",k)
kf=os.popen("unrar t -y -p%s %s 2>&1|grep 'All OK'"%(k,rf))
tryn+=1
for rkf in kf.readlines():
if rkf=="All OK\n":
print("Found password:",repr(k))
print("Tried combination count:",tryn)
print("It took",round(time.time()-start,3),"seconds")
print("Exiting...")
time.sleep(2)
sys.exit(1)
elif rf[-4:]==".zip" or rf[-3:]==".7z":
print("Trying:",k)
kf=os.popen("7za t -p%s %s 2>&1|grep 'Everything is Ok'"%(k,rf))
tryn+=1
for rkf in kf.readlines():
if rkf=="Everything is Ok\n":
print("Found password:",repr(k))
print("Tried combination count:",tryn)
print("It took",round(time.time()-start,3),"seconds")
print("Exiting...")
time.sleep(2)
sys.exit(1)
else:
print("ERROR: File isnt a RAR, ZIP or 7z file.\nExiting...")
#checking if the file exists/running the function
if len(sys.argv)==2:
if os.path.exists(sys.argv[1]):
rc(sys.argv[1])
else:
print("ERROR: File doesn't exist.\nExiting...")
else:
print("Usage:",os.path.basename(__file__),"[rar file]")
print("Example:",os.path.basename(__file__),"foobar.rar")
| rc | identifier_name |
pyrarcr-0.2.py | #!/usr/bin/env python3
##### ##### ##### ##### #### ####
# # # # # # # # # # #### #### # # # |
#finds the password of a desired rar or zip file using a brute-force algorithm
##will fail to find the password if the password has a character that isnt in
##the english alphabet or isnt a number (you can change the char. list though)
#now using itertools!
#importing needed modules
import time,os,sys,shutil,itertools
#checking if the user has unrar/p7zip installed
for which in ["unrar","p7zip"]:
if not shutil.which(which):
print("ERROR:",which,"isn't installed.\nExiting...")
sys.exit(-1)
#defining the function
def rc(rf):
alphabet="aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ1234567890"
start=time.time()
tryn=0
for a in range(1,len(alphabet)+1):
for b in itertools.product(alphabet,repeat=a):
k="".join(b)
if rf[-4:]==".rar":
print("Trying:",k)
kf=os.popen("unrar t -y -p%s %s 2>&1|grep 'All OK'"%(k,rf))
tryn+=1
for rkf in kf.readlines():
if rkf=="All OK\n":
print("Found password:",repr(k))
print("Tried combination count:",tryn)
print("It took",round(time.time()-start,3),"seconds")
print("Exiting...")
time.sleep(2)
sys.exit(1)
elif rf[-4:]==".zip" or rf[-3:]==".7z":
print("Trying:",k)
kf=os.popen("7za t -p%s %s 2>&1|grep 'Everything is Ok'"%(k,rf))
tryn+=1
for rkf in kf.readlines():
if rkf=="Everything is Ok\n":
print("Found password:",repr(k))
print("Tried combination count:",tryn)
print("It took",round(time.time()-start,3),"seconds")
print("Exiting...")
time.sleep(2)
sys.exit(1)
else:
print("ERROR: File isnt a RAR, ZIP or 7z file.\nExiting...")
#checking if the file exists/running the function
if len(sys.argv)==2:
if os.path.exists(sys.argv[1]):
rc(sys.argv[1])
else:
print("ERROR: File doesn't exist.\nExiting...")
else:
print("Usage:",os.path.basename(__file__),"[rar file]")
print("Example:",os.path.basename(__file__),"foobar.rar") | ##### #### ##### ##### ##### # # # # ####
# # # # # # # # # # # # #
# # # # # # # # #### # #### # #### | random_line_split |
pyrarcr-0.2.py | #!/usr/bin/env python3
##### ##### ##### ##### #### ####
# # # # # # # # # # #### #### # # #
##### #### ##### ##### ##### # # # # ####
# # # # # # # # # # # # #
# # # # # # # # #### # #### # ####
#finds the password of a desired rar or zip file using a brute-force algorithm
##will fail to find the password if the password has a character that isnt in
##the english alphabet or isnt a number (you can change the char. list though)
#now using itertools!
#importing needed modules
import time,os,sys,shutil,itertools
#checking if the user has unrar/p7zip installed
for which in ["unrar","p7zip"]:
if not shutil.which(which):
print("ERROR:",which,"isn't installed.\nExiting...")
sys.exit(-1)
#defining the function
def rc(rf):
|
#checking if the file exists/running the function
if len(sys.argv)==2:
if os.path.exists(sys.argv[1]):
rc(sys.argv[1])
else:
print("ERROR: File doesn't exist.\nExiting...")
else:
print("Usage:",os.path.basename(__file__),"[rar file]")
print("Example:",os.path.basename(__file__),"foobar.rar")
| alphabet="aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ1234567890"
start=time.time()
tryn=0
for a in range(1,len(alphabet)+1):
for b in itertools.product(alphabet,repeat=a):
k="".join(b)
if rf[-4:]==".rar":
print("Trying:",k)
kf=os.popen("unrar t -y -p%s %s 2>&1|grep 'All OK'"%(k,rf))
tryn+=1
for rkf in kf.readlines():
if rkf=="All OK\n":
print("Found password:",repr(k))
print("Tried combination count:",tryn)
print("It took",round(time.time()-start,3),"seconds")
print("Exiting...")
time.sleep(2)
sys.exit(1)
elif rf[-4:]==".zip" or rf[-3:]==".7z":
print("Trying:",k)
kf=os.popen("7za t -p%s %s 2>&1|grep 'Everything is Ok'"%(k,rf))
tryn+=1
for rkf in kf.readlines():
if rkf=="Everything is Ok\n":
print("Found password:",repr(k))
print("Tried combination count:",tryn)
print("It took",round(time.time()-start,3),"seconds")
print("Exiting...")
time.sleep(2)
sys.exit(1)
else:
print("ERROR: File isnt a RAR, ZIP or 7z file.\nExiting...") | identifier_body |
test_titles.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import docutils.core
from docutils.parsers import rst
from docutils.parsers.rst import directives
from docutils.parsers.rst import roles
import testtools
class | (rst.Directive):
has_content = True
def run(self):
return []
def fake_role(name, rawtext, text, lineno, inliner,
options=None, content=None):
return [], []
directives.register_directive('seqdiag', FakeDirective)
directives.register_directive('blockdiag', FakeDirective)
directives.register_directive('nwdiag', FakeDirective)
directives.register_directive('actdiag', FakeDirective)
directives.register_directive('graphviz', FakeDirective)
roles.register_local_role('doc', fake_role)
class TestTitles(testtools.TestCase):
def _get_title(self, section_tree):
section = {
'subtitles': [],
}
for node in section_tree:
if node.tagname == 'title':
section['name'] = node.rawsource
elif node.tagname == 'section':
subsection = self._get_title(node)
section['subtitles'].append(subsection['name'])
return section
def _get_titles(self, spec):
titles = {}
for node in spec:
if node.tagname == 'section':
section = self._get_title(node)
titles[section['name']] = section['subtitles']
return titles
def _check_titles(self, titles):
self.assertEqual(7, len(titles))
problem = 'Problem description'
self.assertIn(problem, titles)
self.assertEqual(0, len(titles[problem]))
proposed = 'Proposed change'
self.assertIn(proposed, titles)
self.assertIn('Alternatives', titles[proposed])
self.assertIn('Data model impact', titles[proposed])
self.assertIn('REST API impact', titles[proposed])
self.assertIn('Security impact', titles[proposed])
self.assertIn('Notifications impact', titles[proposed])
self.assertIn('Other end user impact', titles[proposed])
self.assertIn('Performance Impact', titles[proposed])
self.assertIn('Other deployer impact', titles[proposed])
self.assertIn('Developer impact', titles[proposed])
impl = 'Implementation'
self.assertIn(impl, titles)
self.assertEqual(2, len(titles[impl]))
self.assertIn('Assignee(s)', titles[impl])
self.assertIn('Work Items', titles[impl])
deps = 'Dependencies'
self.assertIn(deps, titles)
self.assertEqual(0, len(titles[deps]))
testing = 'Testing'
self.assertIn(testing, titles)
self.assertEqual(0, len(titles[testing]))
docs = 'Documentation Impact'
self.assertIn(docs, titles)
self.assertEqual(0, len(titles[docs]))
refs = 'References'
self.assertIn(refs, titles)
self.assertEqual(0, len(titles[refs]))
def test_template(self):
files = set(glob.glob('specs/*.rst') + glob.glob('specs/*/*'))
files = files - set(glob.glob('specs/*/*.dot'))
files = files - set(glob.glob('specs/*/*.diag'))
for filename in files:
self.assertTrue(filename.endswith(".rst"),
"spec's file must use 'rst' extension.")
with open(filename) as f:
data = f.read()
spec = docutils.core.publish_doctree(data)
titles = self._get_titles(spec)
self._check_titles(titles)
| FakeDirective | identifier_name |
test_titles.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import docutils.core
from docutils.parsers import rst
from docutils.parsers.rst import directives
from docutils.parsers.rst import roles
import testtools
class FakeDirective(rst.Directive):
has_content = True
def run(self):
return []
def fake_role(name, rawtext, text, lineno, inliner,
options=None, content=None):
return [], []
directives.register_directive('seqdiag', FakeDirective)
directives.register_directive('blockdiag', FakeDirective)
directives.register_directive('nwdiag', FakeDirective)
directives.register_directive('actdiag', FakeDirective)
directives.register_directive('graphviz', FakeDirective)
roles.register_local_role('doc', fake_role)
class TestTitles(testtools.TestCase):
def _get_title(self, section_tree):
section = {
'subtitles': [],
}
for node in section_tree:
if node.tagname == 'title':
section['name'] = node.rawsource
elif node.tagname == 'section':
subsection = self._get_title(node)
section['subtitles'].append(subsection['name'])
return section
def _get_titles(self, spec):
titles = {}
for node in spec: | return titles
def _check_titles(self, titles):
self.assertEqual(7, len(titles))
problem = 'Problem description'
self.assertIn(problem, titles)
self.assertEqual(0, len(titles[problem]))
proposed = 'Proposed change'
self.assertIn(proposed, titles)
self.assertIn('Alternatives', titles[proposed])
self.assertIn('Data model impact', titles[proposed])
self.assertIn('REST API impact', titles[proposed])
self.assertIn('Security impact', titles[proposed])
self.assertIn('Notifications impact', titles[proposed])
self.assertIn('Other end user impact', titles[proposed])
self.assertIn('Performance Impact', titles[proposed])
self.assertIn('Other deployer impact', titles[proposed])
self.assertIn('Developer impact', titles[proposed])
impl = 'Implementation'
self.assertIn(impl, titles)
self.assertEqual(2, len(titles[impl]))
self.assertIn('Assignee(s)', titles[impl])
self.assertIn('Work Items', titles[impl])
deps = 'Dependencies'
self.assertIn(deps, titles)
self.assertEqual(0, len(titles[deps]))
testing = 'Testing'
self.assertIn(testing, titles)
self.assertEqual(0, len(titles[testing]))
docs = 'Documentation Impact'
self.assertIn(docs, titles)
self.assertEqual(0, len(titles[docs]))
refs = 'References'
self.assertIn(refs, titles)
self.assertEqual(0, len(titles[refs]))
def test_template(self):
files = set(glob.glob('specs/*.rst') + glob.glob('specs/*/*'))
files = files - set(glob.glob('specs/*/*.dot'))
files = files - set(glob.glob('specs/*/*.diag'))
for filename in files:
self.assertTrue(filename.endswith(".rst"),
"spec's file must use 'rst' extension.")
with open(filename) as f:
data = f.read()
spec = docutils.core.publish_doctree(data)
titles = self._get_titles(spec)
self._check_titles(titles) | if node.tagname == 'section':
section = self._get_title(node)
titles[section['name']] = section['subtitles'] | random_line_split |
test_titles.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import docutils.core
from docutils.parsers import rst
from docutils.parsers.rst import directives
from docutils.parsers.rst import roles
import testtools
class FakeDirective(rst.Directive):
has_content = True
def run(self):
return []
def fake_role(name, rawtext, text, lineno, inliner,
options=None, content=None):
return [], []
directives.register_directive('seqdiag', FakeDirective)
directives.register_directive('blockdiag', FakeDirective)
directives.register_directive('nwdiag', FakeDirective)
directives.register_directive('actdiag', FakeDirective)
directives.register_directive('graphviz', FakeDirective)
roles.register_local_role('doc', fake_role)
class TestTitles(testtools.TestCase):
def _get_title(self, section_tree):
section = {
'subtitles': [],
}
for node in section_tree:
if node.tagname == 'title':
|
elif node.tagname == 'section':
subsection = self._get_title(node)
section['subtitles'].append(subsection['name'])
return section
def _get_titles(self, spec):
titles = {}
for node in spec:
if node.tagname == 'section':
section = self._get_title(node)
titles[section['name']] = section['subtitles']
return titles
def _check_titles(self, titles):
self.assertEqual(7, len(titles))
problem = 'Problem description'
self.assertIn(problem, titles)
self.assertEqual(0, len(titles[problem]))
proposed = 'Proposed change'
self.assertIn(proposed, titles)
self.assertIn('Alternatives', titles[proposed])
self.assertIn('Data model impact', titles[proposed])
self.assertIn('REST API impact', titles[proposed])
self.assertIn('Security impact', titles[proposed])
self.assertIn('Notifications impact', titles[proposed])
self.assertIn('Other end user impact', titles[proposed])
self.assertIn('Performance Impact', titles[proposed])
self.assertIn('Other deployer impact', titles[proposed])
self.assertIn('Developer impact', titles[proposed])
impl = 'Implementation'
self.assertIn(impl, titles)
self.assertEqual(2, len(titles[impl]))
self.assertIn('Assignee(s)', titles[impl])
self.assertIn('Work Items', titles[impl])
deps = 'Dependencies'
self.assertIn(deps, titles)
self.assertEqual(0, len(titles[deps]))
testing = 'Testing'
self.assertIn(testing, titles)
self.assertEqual(0, len(titles[testing]))
docs = 'Documentation Impact'
self.assertIn(docs, titles)
self.assertEqual(0, len(titles[docs]))
refs = 'References'
self.assertIn(refs, titles)
self.assertEqual(0, len(titles[refs]))
def test_template(self):
files = set(glob.glob('specs/*.rst') + glob.glob('specs/*/*'))
files = files - set(glob.glob('specs/*/*.dot'))
files = files - set(glob.glob('specs/*/*.diag'))
for filename in files:
self.assertTrue(filename.endswith(".rst"),
"spec's file must use 'rst' extension.")
with open(filename) as f:
data = f.read()
spec = docutils.core.publish_doctree(data)
titles = self._get_titles(spec)
self._check_titles(titles)
| section['name'] = node.rawsource | conditional_block |
test_titles.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import docutils.core
from docutils.parsers import rst
from docutils.parsers.rst import directives
from docutils.parsers.rst import roles
import testtools
class FakeDirective(rst.Directive):
has_content = True
def run(self):
|
def fake_role(name, rawtext, text, lineno, inliner,
options=None, content=None):
return [], []
directives.register_directive('seqdiag', FakeDirective)
directives.register_directive('blockdiag', FakeDirective)
directives.register_directive('nwdiag', FakeDirective)
directives.register_directive('actdiag', FakeDirective)
directives.register_directive('graphviz', FakeDirective)
roles.register_local_role('doc', fake_role)
class TestTitles(testtools.TestCase):
def _get_title(self, section_tree):
section = {
'subtitles': [],
}
for node in section_tree:
if node.tagname == 'title':
section['name'] = node.rawsource
elif node.tagname == 'section':
subsection = self._get_title(node)
section['subtitles'].append(subsection['name'])
return section
def _get_titles(self, spec):
titles = {}
for node in spec:
if node.tagname == 'section':
section = self._get_title(node)
titles[section['name']] = section['subtitles']
return titles
def _check_titles(self, titles):
self.assertEqual(7, len(titles))
problem = 'Problem description'
self.assertIn(problem, titles)
self.assertEqual(0, len(titles[problem]))
proposed = 'Proposed change'
self.assertIn(proposed, titles)
self.assertIn('Alternatives', titles[proposed])
self.assertIn('Data model impact', titles[proposed])
self.assertIn('REST API impact', titles[proposed])
self.assertIn('Security impact', titles[proposed])
self.assertIn('Notifications impact', titles[proposed])
self.assertIn('Other end user impact', titles[proposed])
self.assertIn('Performance Impact', titles[proposed])
self.assertIn('Other deployer impact', titles[proposed])
self.assertIn('Developer impact', titles[proposed])
impl = 'Implementation'
self.assertIn(impl, titles)
self.assertEqual(2, len(titles[impl]))
self.assertIn('Assignee(s)', titles[impl])
self.assertIn('Work Items', titles[impl])
deps = 'Dependencies'
self.assertIn(deps, titles)
self.assertEqual(0, len(titles[deps]))
testing = 'Testing'
self.assertIn(testing, titles)
self.assertEqual(0, len(titles[testing]))
docs = 'Documentation Impact'
self.assertIn(docs, titles)
self.assertEqual(0, len(titles[docs]))
refs = 'References'
self.assertIn(refs, titles)
self.assertEqual(0, len(titles[refs]))
def test_template(self):
files = set(glob.glob('specs/*.rst') + glob.glob('specs/*/*'))
files = files - set(glob.glob('specs/*/*.dot'))
files = files - set(glob.glob('specs/*/*.diag'))
for filename in files:
self.assertTrue(filename.endswith(".rst"),
"spec's file must use 'rst' extension.")
with open(filename) as f:
data = f.read()
spec = docutils.core.publish_doctree(data)
titles = self._get_titles(spec)
self._check_titles(titles)
| return [] | identifier_body |
RemoveParentModulesPlugin.js | /*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
function chunkContainsModule(chunk, module) {
var chunks = module.chunks;
var modules = chunk.modules;
if(chunks.length < modules.length) {
return chunks.indexOf(chunk) >= 0;
} else {
return modules.indexOf(module) >= 0;
}
}
function hasModule(chunk, module, checkedChunks) {
if(chunkContainsModule(chunk, module)) return [chunk];
if(chunk.entry) return false;
return allHaveModule(chunk.parents.filter(function(c) {
return checkedChunks.indexOf(c) < 0;
}), module, checkedChunks);
}
function allHaveModule(someChunks, module, checkedChunks) {
if(!checkedChunks) checkedChunks = [];
var chunks = [];
for(var i = 0; i < someChunks.length; i++) {
checkedChunks.push(someChunks[i]);
var subChunks = hasModule(someChunks[i], module, checkedChunks);
if(!subChunks) return false;
addToSet(chunks, subChunks);
}
return chunks;
}
function addToSet(set, items) {
items.forEach(function(item) {
if(set.indexOf(item) < 0)
set.push(item);
});
}
function debugIds(chunks) {
var list = chunks.map(function(chunk) {
return chunk.debugId;
});
if(list.some(function(dId) {
return typeof dId !== "number";
})) return "no";
list.sort();
return list.join(",");
}
function RemoveParentModulesPlugin() |
module.exports = RemoveParentModulesPlugin;
RemoveParentModulesPlugin.prototype.apply = function(compiler) {
compiler.plugin("compilation", function(compilation) {
compilation.plugin(["optimize-chunks-basic", "optimize-extracted-chunks-basic"], function(chunks) {
chunks.forEach(function(chunk) {
var cache = {};
chunk.modules.slice().forEach(function(module) {
if(chunk.entry) return;
var dId = "$" + debugIds(module.chunks);
var parentChunksWithModule;
if((dId in cache) && dId !== "$no") {
parentChunksWithModule = cache[dId];
} else {
parentChunksWithModule = cache[dId] = allHaveModule(chunk.parents, module);
}
if(parentChunksWithModule) {
module.rewriteChunkInReasons(chunk, parentChunksWithModule);
chunk.removeModule(module);
}
});
});
});
});
};
| {} | identifier_body |
RemoveParentModulesPlugin.js | /*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
function chunkContainsModule(chunk, module) {
var chunks = module.chunks;
var modules = chunk.modules;
if(chunks.length < modules.length) {
return chunks.indexOf(chunk) >= 0;
} else {
return modules.indexOf(module) >= 0;
}
}
function hasModule(chunk, module, checkedChunks) {
if(chunkContainsModule(chunk, module)) return [chunk];
if(chunk.entry) return false;
return allHaveModule(chunk.parents.filter(function(c) {
return checkedChunks.indexOf(c) < 0;
}), module, checkedChunks);
}
function allHaveModule(someChunks, module, checkedChunks) {
if(!checkedChunks) checkedChunks = [];
var chunks = [];
for(var i = 0; i < someChunks.length; i++) {
checkedChunks.push(someChunks[i]);
var subChunks = hasModule(someChunks[i], module, checkedChunks);
if(!subChunks) return false;
addToSet(chunks, subChunks);
}
return chunks;
}
function | (set, items) {
items.forEach(function(item) {
if(set.indexOf(item) < 0)
set.push(item);
});
}
function debugIds(chunks) {
var list = chunks.map(function(chunk) {
return chunk.debugId;
});
if(list.some(function(dId) {
return typeof dId !== "number";
})) return "no";
list.sort();
return list.join(",");
}
function RemoveParentModulesPlugin() {}
module.exports = RemoveParentModulesPlugin;
RemoveParentModulesPlugin.prototype.apply = function(compiler) {
compiler.plugin("compilation", function(compilation) {
compilation.plugin(["optimize-chunks-basic", "optimize-extracted-chunks-basic"], function(chunks) {
chunks.forEach(function(chunk) {
var cache = {};
chunk.modules.slice().forEach(function(module) {
if(chunk.entry) return;
var dId = "$" + debugIds(module.chunks);
var parentChunksWithModule;
if((dId in cache) && dId !== "$no") {
parentChunksWithModule = cache[dId];
} else {
parentChunksWithModule = cache[dId] = allHaveModule(chunk.parents, module);
}
if(parentChunksWithModule) {
module.rewriteChunkInReasons(chunk, parentChunksWithModule);
chunk.removeModule(module);
}
});
});
});
});
};
| addToSet | identifier_name |
RemoveParentModulesPlugin.js | /*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
function chunkContainsModule(chunk, module) {
var chunks = module.chunks;
var modules = chunk.modules;
if(chunks.length < modules.length) {
return chunks.indexOf(chunk) >= 0;
} else {
return modules.indexOf(module) >= 0;
} | function hasModule(chunk, module, checkedChunks) {
if(chunkContainsModule(chunk, module)) return [chunk];
if(chunk.entry) return false;
return allHaveModule(chunk.parents.filter(function(c) {
return checkedChunks.indexOf(c) < 0;
}), module, checkedChunks);
}
function allHaveModule(someChunks, module, checkedChunks) {
if(!checkedChunks) checkedChunks = [];
var chunks = [];
for(var i = 0; i < someChunks.length; i++) {
checkedChunks.push(someChunks[i]);
var subChunks = hasModule(someChunks[i], module, checkedChunks);
if(!subChunks) return false;
addToSet(chunks, subChunks);
}
return chunks;
}
function addToSet(set, items) {
items.forEach(function(item) {
if(set.indexOf(item) < 0)
set.push(item);
});
}
function debugIds(chunks) {
var list = chunks.map(function(chunk) {
return chunk.debugId;
});
if(list.some(function(dId) {
return typeof dId !== "number";
})) return "no";
list.sort();
return list.join(",");
}
function RemoveParentModulesPlugin() {}
module.exports = RemoveParentModulesPlugin;
RemoveParentModulesPlugin.prototype.apply = function(compiler) {
compiler.plugin("compilation", function(compilation) {
compilation.plugin(["optimize-chunks-basic", "optimize-extracted-chunks-basic"], function(chunks) {
chunks.forEach(function(chunk) {
var cache = {};
chunk.modules.slice().forEach(function(module) {
if(chunk.entry) return;
var dId = "$" + debugIds(module.chunks);
var parentChunksWithModule;
if((dId in cache) && dId !== "$no") {
parentChunksWithModule = cache[dId];
} else {
parentChunksWithModule = cache[dId] = allHaveModule(chunk.parents, module);
}
if(parentChunksWithModule) {
module.rewriteChunkInReasons(chunk, parentChunksWithModule);
chunk.removeModule(module);
}
});
});
});
});
}; | }
| random_line_split |
jquery.pretty-text-diff.min.js | // Generated by CoffeeScript 1.7.1
/*
@preserve jQuery.PrettyTextDiff 1.0.4
See https://github.com/arnab/jQuery.PrettyTextDiff/ | */
(function() {
var $;
$ = jQuery;
$.fn.extend({
prettyTextDiff: function(options) {
var dmp, settings;
settings = {
originalContainer: ".original",
changedContainer: ".changed",
diffContainer: ".diff",
cleanup: true,
debug: false
};
settings = $.extend(settings, options);
$.fn.prettyTextDiff.debug("Options: ", settings, settings);
dmp = new diff_match_patch();
return this.each(function() {
var changed, diff_as_html, diffs, original;
if (settings.originalContent && settings.changedContent) {
original = $('<div />').html(settings.originalContent).text();
changed = $('<div />').html(settings.changedContent).text();
} else {
original = $(settings.originalContainer, this).text();
changed = $(settings.changedContainer, this).text();
}
$.fn.prettyTextDiff.debug("Original text found: ", original, settings);
$.fn.prettyTextDiff.debug("Changed text found: ", changed, settings);
diffs = dmp.diff_main(original, changed);
if (settings.cleanup) {
dmp.diff_cleanupSemantic(diffs);
}
$.fn.prettyTextDiff.debug("Diffs: ", diffs, settings);
diff_as_html = $.map(diffs, function(diff) {
return $.fn.prettyTextDiff.createHTML(diff);
});
$(settings.diffContainer, this).html(diff_as_html.join(''));
return this;
});
}
});
$.fn.prettyTextDiff.debug = function(message, object, settings) {
if (settings.debug) {
return console.log(message, object);
}
};
$.fn.prettyTextDiff.createHTML = function(diff) {
var data, html, operation, pattern_amp, pattern_gt, pattern_lt, pattern_para, text;
html = [];
pattern_amp = /&/g;
pattern_lt = /</g;
pattern_gt = />/g;
pattern_para = /\n/g;
operation = diff[0], data = diff[1];
text = data.replace(pattern_amp, '&').replace(pattern_lt, '<').replace(pattern_gt, '>').replace(pattern_para, '<br>');
switch (operation) {
case DIFF_INSERT:
return '<ins>' + text + '</ins>';
case DIFF_DELETE:
return '<del>' + text + '</del>';
case DIFF_EQUAL:
return '<span>' + text + '</span>';
}
};
}).call(this); | random_line_split |
|
jquery.pretty-text-diff.min.js | // Generated by CoffeeScript 1.7.1
/*
@preserve jQuery.PrettyTextDiff 1.0.4
See https://github.com/arnab/jQuery.PrettyTextDiff/
*/
(function() {
var $;
$ = jQuery;
$.fn.extend({
prettyTextDiff: function(options) {
var dmp, settings;
settings = {
originalContainer: ".original",
changedContainer: ".changed",
diffContainer: ".diff",
cleanup: true,
debug: false
};
settings = $.extend(settings, options);
$.fn.prettyTextDiff.debug("Options: ", settings, settings);
dmp = new diff_match_patch();
return this.each(function() {
var changed, diff_as_html, diffs, original;
if (settings.originalContent && settings.changedContent) {
original = $('<div />').html(settings.originalContent).text();
changed = $('<div />').html(settings.changedContent).text();
} else |
$.fn.prettyTextDiff.debug("Original text found: ", original, settings);
$.fn.prettyTextDiff.debug("Changed text found: ", changed, settings);
diffs = dmp.diff_main(original, changed);
if (settings.cleanup) {
dmp.diff_cleanupSemantic(diffs);
}
$.fn.prettyTextDiff.debug("Diffs: ", diffs, settings);
diff_as_html = $.map(diffs, function(diff) {
return $.fn.prettyTextDiff.createHTML(diff);
});
$(settings.diffContainer, this).html(diff_as_html.join(''));
return this;
});
}
});
$.fn.prettyTextDiff.debug = function(message, object, settings) {
if (settings.debug) {
return console.log(message, object);
}
};
$.fn.prettyTextDiff.createHTML = function(diff) {
var data, html, operation, pattern_amp, pattern_gt, pattern_lt, pattern_para, text;
html = [];
pattern_amp = /&/g;
pattern_lt = /</g;
pattern_gt = />/g;
pattern_para = /\n/g;
operation = diff[0], data = diff[1];
text = data.replace(pattern_amp, '&').replace(pattern_lt, '<').replace(pattern_gt, '>').replace(pattern_para, '<br>');
switch (operation) {
case DIFF_INSERT:
return '<ins>' + text + '</ins>';
case DIFF_DELETE:
return '<del>' + text + '</del>';
case DIFF_EQUAL:
return '<span>' + text + '</span>';
}
};
}).call(this);
| {
original = $(settings.originalContainer, this).text();
changed = $(settings.changedContainer, this).text();
} | conditional_block |
write.rs | use std::fmt;
use std::io;
pub trait AnyWrite {
type wstr: ?Sized;
type Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error>;
fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error>;
}
impl<'a> AnyWrite for fmt::Write + 'a {
type wstr = str;
type Error = fmt::Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> {
fmt::Write::write_fmt(self, fmt)
}
fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error> {
fmt::Write::write_str(self, s)
}
}
impl<'a> AnyWrite for io::Write + 'a {
type wstr = [u8];
type Error = io::Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> {
io::Write::write_fmt(self, fmt)
}
fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error> |
}
| {
io::Write::write_all(self, s)
} | identifier_body |
write.rs | use std::fmt;
use std::io;
pub trait AnyWrite {
type wstr: ?Sized;
type Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error>;
fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error>;
}
impl<'a> AnyWrite for fmt::Write + 'a {
type wstr = str;
type Error = fmt::Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> {
fmt::Write::write_fmt(self, fmt)
}
fn | (&mut self, s: &Self::wstr) -> Result<(), Self::Error> {
fmt::Write::write_str(self, s)
}
}
impl<'a> AnyWrite for io::Write + 'a {
type wstr = [u8];
type Error = io::Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> {
io::Write::write_fmt(self, fmt)
}
fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error> {
io::Write::write_all(self, s)
}
}
| write_str | identifier_name |
write.rs | use std::fmt;
use std::io;
pub trait AnyWrite {
type wstr: ?Sized;
type Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error>;
fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error>;
}
impl<'a> AnyWrite for fmt::Write + 'a {
type wstr = str;
type Error = fmt::Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> {
fmt::Write::write_fmt(self, fmt)
}
fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error> {
fmt::Write::write_str(self, s)
}
}
impl<'a> AnyWrite for io::Write + 'a {
type wstr = [u8];
type Error = io::Error;
fn write_fmt(&mut self, fmt: fmt::Arguments) -> Result<(), Self::Error> {
io::Write::write_fmt(self, fmt)
} | fn write_str(&mut self, s: &Self::wstr) -> Result<(), Self::Error> {
io::Write::write_all(self, s)
}
} | random_line_split |
|
datelib.py | #!/usr/bin/env python
# Copyright 2002 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set of classes and functions for dealing with dates and timestamps.
The BaseTimestamp and Timestamp are timezone-aware wrappers around Python
datetime.datetime class.
"""
import calendar
import copy
import datetime
import re
import sys
import time
import types
import warnings
from dateutil import parser
import pytz
_MICROSECONDS_PER_SECOND = 1000000
_MICROSECONDS_PER_SECOND_F = float(_MICROSECONDS_PER_SECOND)
def SecondsToMicroseconds(seconds):
"""Convert seconds to microseconds.
Args:
seconds: number
Returns:
microseconds
"""
return seconds * _MICROSECONDS_PER_SECOND
def MicrosecondsToSeconds(microseconds):
"""Convert microseconds to seconds.
Args:
microseconds: A number representing some duration of time measured in
microseconds.
Returns:
A number representing the same duration of time measured in seconds.
"""
return microseconds / _MICROSECONDS_PER_SECOND_F
def _GetCurrentTimeMicros():
"""Get the current time in microseconds, in UTC.
Returns:
The number of microseconds since the epoch.
"""
return int(SecondsToMicroseconds(time.time()))
def GetSecondsSinceEpoch(time_tuple):
"""Convert time_tuple (in UTC) to seconds (also in UTC).
Args:
time_tuple: tuple with at least 6 items.
Returns:
seconds.
"""
return calendar.timegm(time_tuple[:6] + (0, 0, 0))
def GetTimeMicros(time_tuple):
"""Get a time in microseconds.
Arguments:
time_tuple: A (year, month, day, hour, minute, second) tuple (the python
time tuple format) in the UTC time zone.
Returns:
The number of microseconds since the epoch represented by the input tuple.
"""
return int(SecondsToMicroseconds(GetSecondsSinceEpoch(time_tuple)))
def DatetimeToUTCMicros(date):
"""Converts a datetime object to microseconds since the epoch in UTC.
Args:
date: A datetime to convert.
Returns:
The number of microseconds since the epoch, in UTC, represented by the input
datetime.
"""
# Using this guide: http://wiki.python.org/moin/WorkingWithTime
# And this conversion guide: http://docs.python.org/library/time.html
# Turn the date parameter into a tuple (struct_time) that can then be
# manipulated into a long value of seconds. During the conversion from
# struct_time to long, the source date in UTC, and so it follows that the
# correct transformation is calendar.timegm()
micros = calendar.timegm(date.utctimetuple()) * _MICROSECONDS_PER_SECOND
return micros + date.microsecond
def DatetimeToUTCMillis(date):
"""Converts a datetime object to milliseconds since the epoch in UTC.
Args:
date: A datetime to convert.
Returns:
The number of milliseconds since the epoch, in UTC, represented by the input
datetime.
"""
return DatetimeToUTCMicros(date) / 1000
def UTCMicrosToDatetime(micros, tz=None):
"""Converts a microsecond epoch time to a datetime object.
Args:
micros: A UTC time, expressed in microseconds since the epoch.
tz: The desired tzinfo for the datetime object. If None, the
datetime will be naive.
Returns:
The datetime represented by the input value.
"""
# The conversion from micros to seconds for input into the
# utcfromtimestamp function needs to be done as a float to make sure
# we dont lose the sub-second resolution of the input time.
dt = datetime.datetime.utcfromtimestamp(
micros / _MICROSECONDS_PER_SECOND_F)
if tz is not None:
dt = tz.fromutc(dt)
return dt
def UTCMillisToDatetime(millis, tz=None):
"""Converts a millisecond epoch time to a datetime object.
Args:
millis: A UTC time, expressed in milliseconds since the epoch.
tz: The desired tzinfo for the datetime object. If None, the
datetime will be naive.
Returns:
The datetime represented by the input value.
"""
return UTCMicrosToDatetime(millis * 1000, tz)
UTC = pytz.UTC
US_PACIFIC = pytz.timezone('US/Pacific')
class TimestampError(ValueError):
"""Generic timestamp-related error."""
pass
class TimezoneNotSpecifiedError(TimestampError):
"""This error is raised when timezone is not specified."""
pass
class TimeParseError(TimestampError):
"""This error is raised when we can't parse the input."""
pass
# TODO(user): this class needs to handle daylight better
class LocalTimezoneClass(datetime.tzinfo):
"""This class defines local timezone."""
ZERO = datetime.timedelta(0)
HOUR = datetime.timedelta(hours=1)
STDOFFSET = datetime.timedelta(seconds=-time.timezone)
if time.daylight:
DSTOFFSET = datetime.timedelta(seconds=-time.altzone)
else:
DSTOFFSET = STDOFFSET
DSTDIFF = DSTOFFSET - STDOFFSET
def utcoffset(self, dt):
"""datetime -> minutes east of UTC (negative for west of UTC)."""
if self._isdst(dt):
return self.DSTOFFSET
else:
return self.STDOFFSET
def dst(self, dt):
"""datetime -> DST offset in minutes east of UTC."""
if self._isdst(dt):
return self.DSTDIFF
else:
return self.ZERO
def tzname(self, dt):
"""datetime -> string name of time zone."""
return time.tzname[self._isdst(dt)]
def _isdst(self, dt):
"""Return true if given datetime is within local DST."""
tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
dt.weekday(), 0, -1)
stamp = time.mktime(tt)
tt = time.localtime(stamp)
return tt.tm_isdst > 0
def __repr__(self):
"""Return string '<Local>'."""
return '<Local>'
def localize(self, dt, unused_is_dst=False):
"""Convert naive time to local time."""
if dt.tzinfo is not None:
raise ValueError('Not naive datetime (tzinfo is already set)')
return dt.replace(tzinfo=self)
def normalize(self, dt, unused_is_dst=False):
"""Correct the timezone information on the given datetime."""
if dt.tzinfo is None:
raise ValueError('Naive time - no tzinfo set')
return dt.replace(tzinfo=self)
LocalTimezone = LocalTimezoneClass()
class BaseTimestamp(datetime.datetime):
"""Our kind of wrapper over datetime.datetime.
The objects produced by methods now, today, fromtimestamp, utcnow,
utcfromtimestamp are timezone-aware (with correct timezone).
We also overload __add__ and __sub__ method, to fix the result of arithmetic
operations.
"""
LocalTimezone = LocalTimezone
@classmethod
def AddLocalTimezone(cls, obj):
"""If obj is naive, add local timezone to it."""
if not obj.tzinfo:
return obj.replace(tzinfo=cls.LocalTimezone)
return obj
@classmethod
def Localize(cls, obj):
"""If obj is naive, localize it to cls.LocalTimezone."""
if not obj.tzinfo:
return cls.LocalTimezone.localize(obj)
return obj
def __add__(self, *args, **kwargs):
"""x.__add__(y) <==> x+y."""
r = super(BaseTimestamp, self).__add__(*args, **kwargs)
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
def __sub__(self, *args, **kwargs):
"""x.__add__(y) <==> x-y."""
r = super(BaseTimestamp, self).__sub__(*args, **kwargs)
if isinstance(r, datetime.datetime):
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
return r
@classmethod
def now(cls, *args, **kwargs):
"""Get a timestamp corresponding to right now.
Args:
args: Positional arguments to pass to datetime.datetime.now().
kwargs: Keyword arguments to pass to datetime.datetime.now(). If tz is not
specified, local timezone is assumed.
Returns:
A new BaseTimestamp with tz's local day and time.
"""
return cls.AddLocalTimezone(
super(BaseTimestamp, cls).now(*args, **kwargs))
@classmethod
def today(cls):
"""Current BaseTimestamp.
Same as self.__class__.fromtimestamp(time.time()).
Returns:
New self.__class__.
"""
return cls.AddLocalTimezone(super(BaseTimestamp, cls).today())
@classmethod
def fromtimestamp(cls, *args, **kwargs):
"""Get a new localized timestamp from a POSIX timestamp.
Args:
args: Positional arguments to pass to datetime.datetime.fromtimestamp().
kwargs: Keyword arguments to pass to datetime.datetime.fromtimestamp().
If tz is not specified, local timezone is assumed.
Returns:
A new BaseTimestamp with tz's local day and time.
"""
return cls.Localize(
super(BaseTimestamp, cls).fromtimestamp(*args, **kwargs))
@classmethod
def utcnow(cls):
"""Return a new BaseTimestamp representing UTC day and time."""
return super(BaseTimestamp, cls).utcnow().replace(tzinfo=pytz.utc)
@classmethod
def utcfromtimestamp(cls, *args, **kwargs):
"""timestamp -> UTC datetime from a POSIX timestamp (like time.time())."""
return super(BaseTimestamp, cls).utcfromtimestamp(
*args, **kwargs).replace(tzinfo=pytz.utc)
@classmethod
def strptime(cls, date_string, format, tz=None):
"""Parse date_string according to format and construct BaseTimestamp.
Args:
date_string: string passed to time.strptime.
format: format string passed to time.strptime.
tz: if not specified, local timezone assumed.
Returns:
New BaseTimestamp.
"""
if tz is None:
return cls.Localize(cls(*(time.strptime(date_string, format)[:6])))
return tz.localize(cls(*(time.strptime(date_string, format)[:6])))
def astimezone(self, *args, **kwargs):
"""tz -> convert to time in new timezone tz."""
r = super(BaseTimestamp, self).astimezone(*args, **kwargs)
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
@classmethod
def FromMicroTimestamp(cls, ts):
"""Create new Timestamp object from microsecond UTC timestamp value.
Args:
ts: integer microsecond UTC timestamp
Returns:
New cls()
"""
return cls.utcfromtimestamp(ts/_MICROSECONDS_PER_SECOND_F)
def AsSecondsSinceEpoch(self):
"""Return number of seconds since epoch (timestamp in seconds)."""
return GetSecondsSinceEpoch(self.utctimetuple())
def AsMicroTimestamp(self):
"""Return microsecond timestamp constructed from this object."""
return (SecondsToMicroseconds(self.AsSecondsSinceEpoch()) +
self.microsecond)
@classmethod
def combine(cls, datepart, timepart, tz=None):
"""Combine date and time into timestamp, timezone-aware.
Args:
datepart: datetime.date
timepart: datetime.time
tz: timezone or None
Returns:
timestamp object
"""
result = super(BaseTimestamp, cls).combine(datepart, timepart)
if tz:
result = tz.localize(result)
return result
# Conversions from interval suffixes to number of seconds.
# (m => 60s, d => 86400s, etc)
_INTERVAL_CONV_DICT = {'s': 1}
_INTERVAL_CONV_DICT['m'] = 60 * _INTERVAL_CONV_DICT['s']
_INTERVAL_CONV_DICT['h'] = 60 * _INTERVAL_CONV_DICT['m']
_INTERVAL_CONV_DICT['d'] = 24 * _INTERVAL_CONV_DICT['h']
_INTERVAL_CONV_DICT['D'] = _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['w'] = 7 * _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['W'] = _INTERVAL_CONV_DICT['w']
_INTERVAL_CONV_DICT['M'] = 30 * _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['Y'] = 365 * _INTERVAL_CONV_DICT['d']
_INTERVAL_REGEXP = re.compile('^([0-9]+)([%s])?' % ''.join(_INTERVAL_CONV_DICT))
def ConvertIntervalToSeconds(interval):
"""Convert a formatted string representing an interval into seconds.
Args:
interval: String to interpret as an interval. A basic interval looks like
"<number><suffix>". Complex intervals consisting of a chain of basic
intervals are also allowed.
Returns:
An integer representing the number of seconds represented by the interval
string, or None if the interval string could not be decoded.
"""
total = 0
while interval:
match = _INTERVAL_REGEXP.match(interval)
if not match:
return None
try:
num = int(match.group(1))
except ValueError:
return None
suffix = match.group(2)
if suffix:
multiplier = _INTERVAL_CONV_DICT.get(suffix)
if not multiplier:
return None
num *= multiplier
total += num
interval = interval[match.end(0):]
return total
class Timestamp(BaseTimestamp):
"""This subclass contains methods to parse W3C and interval date spec.
The interval date specification is in the form "1D", where "D" can be
"s"econds "m"inutes "h"ours "D"ays "W"eeks "M"onths "Y"ears.
"""
INTERVAL_CONV_DICT = _INTERVAL_CONV_DICT
INTERVAL_REGEXP = _INTERVAL_REGEXP
@classmethod
def | (cls, timestring, tz=None):
"""Use dateutil.parser to convert string into timestamp.
dateutil.parser understands ISO8601 which is really handy.
Args:
timestring: string with datetime
tz: optional timezone, if timezone is omitted from timestring.
Returns:
New Timestamp or None if unable to parse the timestring.
"""
try:
r = parser.parse(timestring)
except ValueError:
return None
if not r.tzinfo:
r = (tz or cls.LocalTimezone).localize(r)
result = cls(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
return result
@classmethod
def _IntStringToInterval(cls, timestring):
"""Parse interval date specification and create a timedelta object.
Args:
timestring: string interval.
Returns:
A datetime.timedelta representing the specified interval or None if
unable to parse the timestring.
"""
seconds = ConvertIntervalToSeconds(timestring)
return datetime.timedelta(seconds=seconds) if seconds else None
@classmethod
def FromString(cls, value, tz=None):
"""Create a Timestamp from a string.
Args:
value: String interval or datetime.
e.g. "2013-01-05 13:00:00" or "1d"
tz: optional timezone, if timezone is omitted from timestring.
Returns:
A new Timestamp.
Raises:
TimeParseError if unable to parse value.
"""
result = cls._StringToTime(value, tz=tz)
if result:
return result
result = cls._IntStringToInterval(value)
if result:
return cls.utcnow() - result
raise TimeParseError(value)
# What's written below is a clear python bug. I mean, okay, I can apply
# negative timezone to it and end result will be inconversible.
MAXIMUM_PYTHON_TIMESTAMP = Timestamp(
9999, 12, 31, 23, 59, 59, 999999, UTC)
# This is also a bug. It is called 32bit time_t. I hate it.
# This is fixed in 2.5, btw.
MAXIMUM_MICROSECOND_TIMESTAMP = 0x80000000 * _MICROSECONDS_PER_SECOND - 1
MAXIMUM_MICROSECOND_TIMESTAMP_AS_TS = Timestamp(2038, 1, 19, 3, 14, 7, 999999)
| _StringToTime | identifier_name |
datelib.py | #!/usr/bin/env python
# Copyright 2002 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set of classes and functions for dealing with dates and timestamps.
The BaseTimestamp and Timestamp are timezone-aware wrappers around Python
datetime.datetime class.
"""
import calendar
import copy
import datetime
import re
import sys
import time
import types
import warnings
from dateutil import parser
import pytz
_MICROSECONDS_PER_SECOND = 1000000
_MICROSECONDS_PER_SECOND_F = float(_MICROSECONDS_PER_SECOND)
def SecondsToMicroseconds(seconds):
"""Convert seconds to microseconds.
Args:
seconds: number
Returns:
microseconds
"""
return seconds * _MICROSECONDS_PER_SECOND
def MicrosecondsToSeconds(microseconds):
"""Convert microseconds to seconds.
Args:
microseconds: A number representing some duration of time measured in
microseconds.
Returns:
A number representing the same duration of time measured in seconds.
"""
return microseconds / _MICROSECONDS_PER_SECOND_F
def _GetCurrentTimeMicros():
"""Get the current time in microseconds, in UTC.
Returns:
The number of microseconds since the epoch.
"""
return int(SecondsToMicroseconds(time.time()))
def GetSecondsSinceEpoch(time_tuple):
"""Convert time_tuple (in UTC) to seconds (also in UTC).
Args:
time_tuple: tuple with at least 6 items.
Returns:
seconds.
"""
return calendar.timegm(time_tuple[:6] + (0, 0, 0))
def GetTimeMicros(time_tuple):
"""Get a time in microseconds.
Arguments:
time_tuple: A (year, month, day, hour, minute, second) tuple (the python
time tuple format) in the UTC time zone.
Returns:
The number of microseconds since the epoch represented by the input tuple.
"""
return int(SecondsToMicroseconds(GetSecondsSinceEpoch(time_tuple)))
def DatetimeToUTCMicros(date):
"""Converts a datetime object to microseconds since the epoch in UTC.
Args:
date: A datetime to convert.
Returns:
The number of microseconds since the epoch, in UTC, represented by the input
datetime.
"""
# Using this guide: http://wiki.python.org/moin/WorkingWithTime
# And this conversion guide: http://docs.python.org/library/time.html
# Turn the date parameter into a tuple (struct_time) that can then be
# manipulated into a long value of seconds. During the conversion from
# struct_time to long, the source date in UTC, and so it follows that the
# correct transformation is calendar.timegm()
micros = calendar.timegm(date.utctimetuple()) * _MICROSECONDS_PER_SECOND
return micros + date.microsecond
def DatetimeToUTCMillis(date):
"""Converts a datetime object to milliseconds since the epoch in UTC.
Args:
date: A datetime to convert.
Returns:
The number of milliseconds since the epoch, in UTC, represented by the input
datetime.
"""
return DatetimeToUTCMicros(date) / 1000
def UTCMicrosToDatetime(micros, tz=None):
"""Converts a microsecond epoch time to a datetime object.
Args:
micros: A UTC time, expressed in microseconds since the epoch.
tz: The desired tzinfo for the datetime object. If None, the
datetime will be naive.
Returns:
The datetime represented by the input value.
"""
# The conversion from micros to seconds for input into the
# utcfromtimestamp function needs to be done as a float to make sure
# we dont lose the sub-second resolution of the input time.
dt = datetime.datetime.utcfromtimestamp(
micros / _MICROSECONDS_PER_SECOND_F)
if tz is not None:
dt = tz.fromutc(dt)
return dt
def UTCMillisToDatetime(millis, tz=None):
"""Converts a millisecond epoch time to a datetime object.
Args:
millis: A UTC time, expressed in milliseconds since the epoch.
tz: The desired tzinfo for the datetime object. If None, the
datetime will be naive.
Returns:
The datetime represented by the input value.
"""
return UTCMicrosToDatetime(millis * 1000, tz)
UTC = pytz.UTC
US_PACIFIC = pytz.timezone('US/Pacific')
class TimestampError(ValueError):
"""Generic timestamp-related error."""
pass
class TimezoneNotSpecifiedError(TimestampError):
"""This error is raised when timezone is not specified."""
pass
class TimeParseError(TimestampError):
"""This error is raised when we can't parse the input."""
pass
# TODO(user): this class needs to handle daylight better
class LocalTimezoneClass(datetime.tzinfo):
"""This class defines local timezone."""
ZERO = datetime.timedelta(0)
HOUR = datetime.timedelta(hours=1)
STDOFFSET = datetime.timedelta(seconds=-time.timezone)
if time.daylight:
DSTOFFSET = datetime.timedelta(seconds=-time.altzone)
else:
DSTOFFSET = STDOFFSET
DSTDIFF = DSTOFFSET - STDOFFSET
def utcoffset(self, dt):
"""datetime -> minutes east of UTC (negative for west of UTC)."""
if self._isdst(dt):
return self.DSTOFFSET
else:
return self.STDOFFSET
def dst(self, dt):
"""datetime -> DST offset in minutes east of UTC."""
if self._isdst(dt):
return self.DSTDIFF
else:
return self.ZERO
def tzname(self, dt):
"""datetime -> string name of time zone."""
return time.tzname[self._isdst(dt)]
def _isdst(self, dt):
"""Return true if given datetime is within local DST."""
tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
dt.weekday(), 0, -1)
stamp = time.mktime(tt)
tt = time.localtime(stamp)
return tt.tm_isdst > 0
def __repr__(self):
"""Return string '<Local>'."""
return '<Local>'
def localize(self, dt, unused_is_dst=False):
|
def normalize(self, dt, unused_is_dst=False):
"""Correct the timezone information on the given datetime."""
if dt.tzinfo is None:
raise ValueError('Naive time - no tzinfo set')
return dt.replace(tzinfo=self)
LocalTimezone = LocalTimezoneClass()
class BaseTimestamp(datetime.datetime):
"""Our kind of wrapper over datetime.datetime.
The objects produced by methods now, today, fromtimestamp, utcnow,
utcfromtimestamp are timezone-aware (with correct timezone).
We also overload __add__ and __sub__ method, to fix the result of arithmetic
operations.
"""
LocalTimezone = LocalTimezone
@classmethod
def AddLocalTimezone(cls, obj):
"""If obj is naive, add local timezone to it."""
if not obj.tzinfo:
return obj.replace(tzinfo=cls.LocalTimezone)
return obj
@classmethod
def Localize(cls, obj):
"""If obj is naive, localize it to cls.LocalTimezone."""
if not obj.tzinfo:
return cls.LocalTimezone.localize(obj)
return obj
def __add__(self, *args, **kwargs):
"""x.__add__(y) <==> x+y."""
r = super(BaseTimestamp, self).__add__(*args, **kwargs)
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
def __sub__(self, *args, **kwargs):
"""x.__add__(y) <==> x-y."""
r = super(BaseTimestamp, self).__sub__(*args, **kwargs)
if isinstance(r, datetime.datetime):
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
return r
@classmethod
def now(cls, *args, **kwargs):
"""Get a timestamp corresponding to right now.
Args:
args: Positional arguments to pass to datetime.datetime.now().
kwargs: Keyword arguments to pass to datetime.datetime.now(). If tz is not
specified, local timezone is assumed.
Returns:
A new BaseTimestamp with tz's local day and time.
"""
return cls.AddLocalTimezone(
super(BaseTimestamp, cls).now(*args, **kwargs))
@classmethod
def today(cls):
"""Current BaseTimestamp.
Same as self.__class__.fromtimestamp(time.time()).
Returns:
New self.__class__.
"""
return cls.AddLocalTimezone(super(BaseTimestamp, cls).today())
@classmethod
def fromtimestamp(cls, *args, **kwargs):
"""Get a new localized timestamp from a POSIX timestamp.
Args:
args: Positional arguments to pass to datetime.datetime.fromtimestamp().
kwargs: Keyword arguments to pass to datetime.datetime.fromtimestamp().
If tz is not specified, local timezone is assumed.
Returns:
A new BaseTimestamp with tz's local day and time.
"""
return cls.Localize(
super(BaseTimestamp, cls).fromtimestamp(*args, **kwargs))
@classmethod
def utcnow(cls):
"""Return a new BaseTimestamp representing UTC day and time."""
return super(BaseTimestamp, cls).utcnow().replace(tzinfo=pytz.utc)
@classmethod
def utcfromtimestamp(cls, *args, **kwargs):
"""timestamp -> UTC datetime from a POSIX timestamp (like time.time())."""
return super(BaseTimestamp, cls).utcfromtimestamp(
*args, **kwargs).replace(tzinfo=pytz.utc)
@classmethod
def strptime(cls, date_string, format, tz=None):
"""Parse date_string according to format and construct BaseTimestamp.
Args:
date_string: string passed to time.strptime.
format: format string passed to time.strptime.
tz: if not specified, local timezone assumed.
Returns:
New BaseTimestamp.
"""
if tz is None:
return cls.Localize(cls(*(time.strptime(date_string, format)[:6])))
return tz.localize(cls(*(time.strptime(date_string, format)[:6])))
def astimezone(self, *args, **kwargs):
"""tz -> convert to time in new timezone tz."""
r = super(BaseTimestamp, self).astimezone(*args, **kwargs)
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
@classmethod
def FromMicroTimestamp(cls, ts):
"""Create new Timestamp object from microsecond UTC timestamp value.
Args:
ts: integer microsecond UTC timestamp
Returns:
New cls()
"""
return cls.utcfromtimestamp(ts/_MICROSECONDS_PER_SECOND_F)
def AsSecondsSinceEpoch(self):
"""Return number of seconds since epoch (timestamp in seconds)."""
return GetSecondsSinceEpoch(self.utctimetuple())
def AsMicroTimestamp(self):
"""Return microsecond timestamp constructed from this object."""
return (SecondsToMicroseconds(self.AsSecondsSinceEpoch()) +
self.microsecond)
@classmethod
def combine(cls, datepart, timepart, tz=None):
"""Combine date and time into timestamp, timezone-aware.
Args:
datepart: datetime.date
timepart: datetime.time
tz: timezone or None
Returns:
timestamp object
"""
result = super(BaseTimestamp, cls).combine(datepart, timepart)
if tz:
result = tz.localize(result)
return result
# Conversions from interval suffixes to number of seconds.
# (m => 60s, d => 86400s, etc)
_INTERVAL_CONV_DICT = {'s': 1}
_INTERVAL_CONV_DICT['m'] = 60 * _INTERVAL_CONV_DICT['s']
_INTERVAL_CONV_DICT['h'] = 60 * _INTERVAL_CONV_DICT['m']
_INTERVAL_CONV_DICT['d'] = 24 * _INTERVAL_CONV_DICT['h']
_INTERVAL_CONV_DICT['D'] = _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['w'] = 7 * _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['W'] = _INTERVAL_CONV_DICT['w']
_INTERVAL_CONV_DICT['M'] = 30 * _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['Y'] = 365 * _INTERVAL_CONV_DICT['d']
_INTERVAL_REGEXP = re.compile('^([0-9]+)([%s])?' % ''.join(_INTERVAL_CONV_DICT))
def ConvertIntervalToSeconds(interval):
"""Convert a formatted string representing an interval into seconds.
Args:
interval: String to interpret as an interval. A basic interval looks like
"<number><suffix>". Complex intervals consisting of a chain of basic
intervals are also allowed.
Returns:
An integer representing the number of seconds represented by the interval
string, or None if the interval string could not be decoded.
"""
total = 0
while interval:
match = _INTERVAL_REGEXP.match(interval)
if not match:
return None
try:
num = int(match.group(1))
except ValueError:
return None
suffix = match.group(2)
if suffix:
multiplier = _INTERVAL_CONV_DICT.get(suffix)
if not multiplier:
return None
num *= multiplier
total += num
interval = interval[match.end(0):]
return total
class Timestamp(BaseTimestamp):
"""This subclass contains methods to parse W3C and interval date spec.
The interval date specification is in the form "1D", where "D" can be
"s"econds "m"inutes "h"ours "D"ays "W"eeks "M"onths "Y"ears.
"""
INTERVAL_CONV_DICT = _INTERVAL_CONV_DICT
INTERVAL_REGEXP = _INTERVAL_REGEXP
@classmethod
def _StringToTime(cls, timestring, tz=None):
"""Use dateutil.parser to convert string into timestamp.
dateutil.parser understands ISO8601 which is really handy.
Args:
timestring: string with datetime
tz: optional timezone, if timezone is omitted from timestring.
Returns:
New Timestamp or None if unable to parse the timestring.
"""
try:
r = parser.parse(timestring)
except ValueError:
return None
if not r.tzinfo:
r = (tz or cls.LocalTimezone).localize(r)
result = cls(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
return result
@classmethod
def _IntStringToInterval(cls, timestring):
"""Parse interval date specification and create a timedelta object.
Args:
timestring: string interval.
Returns:
A datetime.timedelta representing the specified interval or None if
unable to parse the timestring.
"""
seconds = ConvertIntervalToSeconds(timestring)
return datetime.timedelta(seconds=seconds) if seconds else None
@classmethod
def FromString(cls, value, tz=None):
"""Create a Timestamp from a string.
Args:
value: String interval or datetime.
e.g. "2013-01-05 13:00:00" or "1d"
tz: optional timezone, if timezone is omitted from timestring.
Returns:
A new Timestamp.
Raises:
TimeParseError if unable to parse value.
"""
result = cls._StringToTime(value, tz=tz)
if result:
return result
result = cls._IntStringToInterval(value)
if result:
return cls.utcnow() - result
raise TimeParseError(value)
# What's written below is a clear python bug. I mean, okay, I can apply
# negative timezone to it and end result will be inconversible.
MAXIMUM_PYTHON_TIMESTAMP = Timestamp(
9999, 12, 31, 23, 59, 59, 999999, UTC)
# This is also a bug. It is called 32bit time_t. I hate it.
# This is fixed in 2.5, btw.
MAXIMUM_MICROSECOND_TIMESTAMP = 0x80000000 * _MICROSECONDS_PER_SECOND - 1
MAXIMUM_MICROSECOND_TIMESTAMP_AS_TS = Timestamp(2038, 1, 19, 3, 14, 7, 999999)
| """Convert naive time to local time."""
if dt.tzinfo is not None:
raise ValueError('Not naive datetime (tzinfo is already set)')
return dt.replace(tzinfo=self) | identifier_body |
datelib.py | #!/usr/bin/env python
# Copyright 2002 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set of classes and functions for dealing with dates and timestamps.
The BaseTimestamp and Timestamp are timezone-aware wrappers around Python
datetime.datetime class.
"""
import calendar
import copy
import datetime
import re
import sys
import time
import types
import warnings
from dateutil import parser
import pytz
_MICROSECONDS_PER_SECOND = 1000000
_MICROSECONDS_PER_SECOND_F = float(_MICROSECONDS_PER_SECOND)
def SecondsToMicroseconds(seconds):
"""Convert seconds to microseconds.
Args:
seconds: number
Returns:
microseconds
"""
return seconds * _MICROSECONDS_PER_SECOND
def MicrosecondsToSeconds(microseconds):
"""Convert microseconds to seconds.
Args:
microseconds: A number representing some duration of time measured in
microseconds.
Returns:
A number representing the same duration of time measured in seconds.
"""
return microseconds / _MICROSECONDS_PER_SECOND_F
def _GetCurrentTimeMicros():
"""Get the current time in microseconds, in UTC.
Returns:
The number of microseconds since the epoch.
"""
return int(SecondsToMicroseconds(time.time()))
def GetSecondsSinceEpoch(time_tuple):
"""Convert time_tuple (in UTC) to seconds (also in UTC).
Args:
time_tuple: tuple with at least 6 items.
Returns:
seconds.
"""
return calendar.timegm(time_tuple[:6] + (0, 0, 0))
def GetTimeMicros(time_tuple):
"""Get a time in microseconds.
Arguments:
time_tuple: A (year, month, day, hour, minute, second) tuple (the python
time tuple format) in the UTC time zone.
Returns:
The number of microseconds since the epoch represented by the input tuple.
"""
return int(SecondsToMicroseconds(GetSecondsSinceEpoch(time_tuple)))
def DatetimeToUTCMicros(date):
"""Converts a datetime object to microseconds since the epoch in UTC.
Args:
date: A datetime to convert.
Returns:
The number of microseconds since the epoch, in UTC, represented by the input
datetime.
"""
# Using this guide: http://wiki.python.org/moin/WorkingWithTime
# And this conversion guide: http://docs.python.org/library/time.html
# Turn the date parameter into a tuple (struct_time) that can then be
# manipulated into a long value of seconds. During the conversion from
# struct_time to long, the source date in UTC, and so it follows that the
# correct transformation is calendar.timegm()
micros = calendar.timegm(date.utctimetuple()) * _MICROSECONDS_PER_SECOND
return micros + date.microsecond
def DatetimeToUTCMillis(date):
"""Converts a datetime object to milliseconds since the epoch in UTC.
Args:
date: A datetime to convert.
Returns:
The number of milliseconds since the epoch, in UTC, represented by the input
datetime.
"""
return DatetimeToUTCMicros(date) / 1000
def UTCMicrosToDatetime(micros, tz=None):
"""Converts a microsecond epoch time to a datetime object.
Args:
micros: A UTC time, expressed in microseconds since the epoch.
tz: The desired tzinfo for the datetime object. If None, the
datetime will be naive.
Returns:
The datetime represented by the input value.
"""
# The conversion from micros to seconds for input into the
# utcfromtimestamp function needs to be done as a float to make sure
# we dont lose the sub-second resolution of the input time.
dt = datetime.datetime.utcfromtimestamp(
micros / _MICROSECONDS_PER_SECOND_F)
if tz is not None:
dt = tz.fromutc(dt)
return dt
def UTCMillisToDatetime(millis, tz=None):
"""Converts a millisecond epoch time to a datetime object.
Args:
millis: A UTC time, expressed in milliseconds since the epoch.
tz: The desired tzinfo for the datetime object. If None, the
datetime will be naive.
Returns:
The datetime represented by the input value.
"""
return UTCMicrosToDatetime(millis * 1000, tz)
UTC = pytz.UTC
US_PACIFIC = pytz.timezone('US/Pacific')
class TimestampError(ValueError):
"""Generic timestamp-related error."""
pass
class TimezoneNotSpecifiedError(TimestampError):
"""This error is raised when timezone is not specified."""
pass
class TimeParseError(TimestampError):
"""This error is raised when we can't parse the input."""
pass
# TODO(user): this class needs to handle daylight better
class LocalTimezoneClass(datetime.tzinfo):
"""This class defines local timezone."""
ZERO = datetime.timedelta(0)
HOUR = datetime.timedelta(hours=1)
STDOFFSET = datetime.timedelta(seconds=-time.timezone)
if time.daylight:
DSTOFFSET = datetime.timedelta(seconds=-time.altzone)
else:
DSTOFFSET = STDOFFSET
DSTDIFF = DSTOFFSET - STDOFFSET
def utcoffset(self, dt):
"""datetime -> minutes east of UTC (negative for west of UTC)."""
if self._isdst(dt):
return self.DSTOFFSET
else:
return self.STDOFFSET
def dst(self, dt):
"""datetime -> DST offset in minutes east of UTC."""
if self._isdst(dt):
return self.DSTDIFF
else:
return self.ZERO
def tzname(self, dt):
"""datetime -> string name of time zone."""
return time.tzname[self._isdst(dt)]
def _isdst(self, dt):
"""Return true if given datetime is within local DST."""
tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
dt.weekday(), 0, -1)
stamp = time.mktime(tt)
tt = time.localtime(stamp)
return tt.tm_isdst > 0
def __repr__(self):
"""Return string '<Local>'."""
return '<Local>'
def localize(self, dt, unused_is_dst=False):
"""Convert naive time to local time."""
if dt.tzinfo is not None:
raise ValueError('Not naive datetime (tzinfo is already set)')
return dt.replace(tzinfo=self)
def normalize(self, dt, unused_is_dst=False):
"""Correct the timezone information on the given datetime."""
if dt.tzinfo is None:
raise ValueError('Naive time - no tzinfo set')
return dt.replace(tzinfo=self)
LocalTimezone = LocalTimezoneClass()
class BaseTimestamp(datetime.datetime):
"""Our kind of wrapper over datetime.datetime.
The objects produced by methods now, today, fromtimestamp, utcnow,
utcfromtimestamp are timezone-aware (with correct timezone).
We also overload __add__ and __sub__ method, to fix the result of arithmetic
operations.
"""
LocalTimezone = LocalTimezone
@classmethod
def AddLocalTimezone(cls, obj):
"""If obj is naive, add local timezone to it."""
if not obj.tzinfo:
return obj.replace(tzinfo=cls.LocalTimezone)
return obj
@classmethod
def Localize(cls, obj):
"""If obj is naive, localize it to cls.LocalTimezone."""
if not obj.tzinfo:
return cls.LocalTimezone.localize(obj)
return obj
def __add__(self, *args, **kwargs):
"""x.__add__(y) <==> x+y."""
r = super(BaseTimestamp, self).__add__(*args, **kwargs)
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
def __sub__(self, *args, **kwargs):
"""x.__add__(y) <==> x-y."""
r = super(BaseTimestamp, self).__sub__(*args, **kwargs)
if isinstance(r, datetime.datetime):
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
return r
@classmethod
def now(cls, *args, **kwargs):
"""Get a timestamp corresponding to right now.
Args:
args: Positional arguments to pass to datetime.datetime.now().
kwargs: Keyword arguments to pass to datetime.datetime.now(). If tz is not
specified, local timezone is assumed.
Returns:
A new BaseTimestamp with tz's local day and time.
"""
return cls.AddLocalTimezone(
super(BaseTimestamp, cls).now(*args, **kwargs))
@classmethod
def today(cls):
"""Current BaseTimestamp.
Same as self.__class__.fromtimestamp(time.time()).
Returns:
New self.__class__.
"""
return cls.AddLocalTimezone(super(BaseTimestamp, cls).today())
@classmethod
def fromtimestamp(cls, *args, **kwargs):
"""Get a new localized timestamp from a POSIX timestamp.
Args:
args: Positional arguments to pass to datetime.datetime.fromtimestamp().
kwargs: Keyword arguments to pass to datetime.datetime.fromtimestamp().
If tz is not specified, local timezone is assumed.
Returns:
A new BaseTimestamp with tz's local day and time.
"""
return cls.Localize(
super(BaseTimestamp, cls).fromtimestamp(*args, **kwargs))
@classmethod
def utcnow(cls):
"""Return a new BaseTimestamp representing UTC day and time."""
return super(BaseTimestamp, cls).utcnow().replace(tzinfo=pytz.utc)
@classmethod
def utcfromtimestamp(cls, *args, **kwargs):
"""timestamp -> UTC datetime from a POSIX timestamp (like time.time())."""
return super(BaseTimestamp, cls).utcfromtimestamp(
*args, **kwargs).replace(tzinfo=pytz.utc)
@classmethod
def strptime(cls, date_string, format, tz=None):
"""Parse date_string according to format and construct BaseTimestamp.
Args:
date_string: string passed to time.strptime.
format: format string passed to time.strptime.
tz: if not specified, local timezone assumed.
Returns:
New BaseTimestamp.
"""
if tz is None:
return cls.Localize(cls(*(time.strptime(date_string, format)[:6])))
return tz.localize(cls(*(time.strptime(date_string, format)[:6])))
def astimezone(self, *args, **kwargs):
"""tz -> convert to time in new timezone tz."""
r = super(BaseTimestamp, self).astimezone(*args, **kwargs)
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
@classmethod
def FromMicroTimestamp(cls, ts):
"""Create new Timestamp object from microsecond UTC timestamp value.
Args:
ts: integer microsecond UTC timestamp
Returns:
New cls()
"""
return cls.utcfromtimestamp(ts/_MICROSECONDS_PER_SECOND_F)
def AsSecondsSinceEpoch(self):
"""Return number of seconds since epoch (timestamp in seconds)."""
return GetSecondsSinceEpoch(self.utctimetuple())
def AsMicroTimestamp(self):
"""Return microsecond timestamp constructed from this object."""
return (SecondsToMicroseconds(self.AsSecondsSinceEpoch()) +
self.microsecond)
@classmethod
def combine(cls, datepart, timepart, tz=None):
"""Combine date and time into timestamp, timezone-aware.
Args:
datepart: datetime.date
timepart: datetime.time
tz: timezone or None
Returns:
timestamp object
"""
result = super(BaseTimestamp, cls).combine(datepart, timepart)
if tz:
result = tz.localize(result)
return result
# Conversions from interval suffixes to number of seconds.
# (m => 60s, d => 86400s, etc)
_INTERVAL_CONV_DICT = {'s': 1}
_INTERVAL_CONV_DICT['m'] = 60 * _INTERVAL_CONV_DICT['s']
_INTERVAL_CONV_DICT['h'] = 60 * _INTERVAL_CONV_DICT['m']
_INTERVAL_CONV_DICT['d'] = 24 * _INTERVAL_CONV_DICT['h']
_INTERVAL_CONV_DICT['D'] = _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['w'] = 7 * _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['W'] = _INTERVAL_CONV_DICT['w']
_INTERVAL_CONV_DICT['M'] = 30 * _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['Y'] = 365 * _INTERVAL_CONV_DICT['d']
_INTERVAL_REGEXP = re.compile('^([0-9]+)([%s])?' % ''.join(_INTERVAL_CONV_DICT))
def ConvertIntervalToSeconds(interval):
"""Convert a formatted string representing an interval into seconds. |
Returns:
An integer representing the number of seconds represented by the interval
string, or None if the interval string could not be decoded.
"""
total = 0
while interval:
match = _INTERVAL_REGEXP.match(interval)
if not match:
return None
try:
num = int(match.group(1))
except ValueError:
return None
suffix = match.group(2)
if suffix:
multiplier = _INTERVAL_CONV_DICT.get(suffix)
if not multiplier:
return None
num *= multiplier
total += num
interval = interval[match.end(0):]
return total
class Timestamp(BaseTimestamp):
"""This subclass contains methods to parse W3C and interval date spec.
The interval date specification is in the form "1D", where "D" can be
"s"econds "m"inutes "h"ours "D"ays "W"eeks "M"onths "Y"ears.
"""
INTERVAL_CONV_DICT = _INTERVAL_CONV_DICT
INTERVAL_REGEXP = _INTERVAL_REGEXP
@classmethod
def _StringToTime(cls, timestring, tz=None):
"""Use dateutil.parser to convert string into timestamp.
dateutil.parser understands ISO8601 which is really handy.
Args:
timestring: string with datetime
tz: optional timezone, if timezone is omitted from timestring.
Returns:
New Timestamp or None if unable to parse the timestring.
"""
try:
r = parser.parse(timestring)
except ValueError:
return None
if not r.tzinfo:
r = (tz or cls.LocalTimezone).localize(r)
result = cls(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
return result
@classmethod
def _IntStringToInterval(cls, timestring):
"""Parse interval date specification and create a timedelta object.
Args:
timestring: string interval.
Returns:
A datetime.timedelta representing the specified interval or None if
unable to parse the timestring.
"""
seconds = ConvertIntervalToSeconds(timestring)
return datetime.timedelta(seconds=seconds) if seconds else None
@classmethod
def FromString(cls, value, tz=None):
"""Create a Timestamp from a string.
Args:
value: String interval or datetime.
e.g. "2013-01-05 13:00:00" or "1d"
tz: optional timezone, if timezone is omitted from timestring.
Returns:
A new Timestamp.
Raises:
TimeParseError if unable to parse value.
"""
result = cls._StringToTime(value, tz=tz)
if result:
return result
result = cls._IntStringToInterval(value)
if result:
return cls.utcnow() - result
raise TimeParseError(value)
# What's written below is a clear python bug. I mean, okay, I can apply
# negative timezone to it and end result will be inconversible.
MAXIMUM_PYTHON_TIMESTAMP = Timestamp(
9999, 12, 31, 23, 59, 59, 999999, UTC)
# This is also a bug. It is called 32bit time_t. I hate it.
# This is fixed in 2.5, btw.
MAXIMUM_MICROSECOND_TIMESTAMP = 0x80000000 * _MICROSECONDS_PER_SECOND - 1
MAXIMUM_MICROSECOND_TIMESTAMP_AS_TS = Timestamp(2038, 1, 19, 3, 14, 7, 999999) |
Args:
interval: String to interpret as an interval. A basic interval looks like
"<number><suffix>". Complex intervals consisting of a chain of basic
intervals are also allowed. | random_line_split |
datelib.py | #!/usr/bin/env python
# Copyright 2002 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set of classes and functions for dealing with dates and timestamps.
The BaseTimestamp and Timestamp are timezone-aware wrappers around Python
datetime.datetime class.
"""
import calendar
import copy
import datetime
import re
import sys
import time
import types
import warnings
from dateutil import parser
import pytz
_MICROSECONDS_PER_SECOND = 1000000
_MICROSECONDS_PER_SECOND_F = float(_MICROSECONDS_PER_SECOND)
def SecondsToMicroseconds(seconds):
"""Convert seconds to microseconds.
Args:
seconds: number
Returns:
microseconds
"""
return seconds * _MICROSECONDS_PER_SECOND
def MicrosecondsToSeconds(microseconds):
"""Convert microseconds to seconds.
Args:
microseconds: A number representing some duration of time measured in
microseconds.
Returns:
A number representing the same duration of time measured in seconds.
"""
return microseconds / _MICROSECONDS_PER_SECOND_F
def _GetCurrentTimeMicros():
"""Get the current time in microseconds, in UTC.
Returns:
The number of microseconds since the epoch.
"""
return int(SecondsToMicroseconds(time.time()))
def GetSecondsSinceEpoch(time_tuple):
"""Convert time_tuple (in UTC) to seconds (also in UTC).
Args:
time_tuple: tuple with at least 6 items.
Returns:
seconds.
"""
return calendar.timegm(time_tuple[:6] + (0, 0, 0))
def GetTimeMicros(time_tuple):
"""Get a time in microseconds.
Arguments:
time_tuple: A (year, month, day, hour, minute, second) tuple (the python
time tuple format) in the UTC time zone.
Returns:
The number of microseconds since the epoch represented by the input tuple.
"""
return int(SecondsToMicroseconds(GetSecondsSinceEpoch(time_tuple)))
def DatetimeToUTCMicros(date):
"""Converts a datetime object to microseconds since the epoch in UTC.
Args:
date: A datetime to convert.
Returns:
The number of microseconds since the epoch, in UTC, represented by the input
datetime.
"""
# Using this guide: http://wiki.python.org/moin/WorkingWithTime
# And this conversion guide: http://docs.python.org/library/time.html
# Turn the date parameter into a tuple (struct_time) that can then be
# manipulated into a long value of seconds. During the conversion from
# struct_time to long, the source date in UTC, and so it follows that the
# correct transformation is calendar.timegm()
micros = calendar.timegm(date.utctimetuple()) * _MICROSECONDS_PER_SECOND
return micros + date.microsecond
def DatetimeToUTCMillis(date):
"""Converts a datetime object to milliseconds since the epoch in UTC.
Args:
date: A datetime to convert.
Returns:
The number of milliseconds since the epoch, in UTC, represented by the input
datetime.
"""
return DatetimeToUTCMicros(date) / 1000
def UTCMicrosToDatetime(micros, tz=None):
"""Converts a microsecond epoch time to a datetime object.
Args:
micros: A UTC time, expressed in microseconds since the epoch.
tz: The desired tzinfo for the datetime object. If None, the
datetime will be naive.
Returns:
The datetime represented by the input value.
"""
# The conversion from micros to seconds for input into the
# utcfromtimestamp function needs to be done as a float to make sure
# we dont lose the sub-second resolution of the input time.
dt = datetime.datetime.utcfromtimestamp(
micros / _MICROSECONDS_PER_SECOND_F)
if tz is not None:
dt = tz.fromutc(dt)
return dt
def UTCMillisToDatetime(millis, tz=None):
"""Converts a millisecond epoch time to a datetime object.
Args:
millis: A UTC time, expressed in milliseconds since the epoch.
tz: The desired tzinfo for the datetime object. If None, the
datetime will be naive.
Returns:
The datetime represented by the input value.
"""
return UTCMicrosToDatetime(millis * 1000, tz)
UTC = pytz.UTC
US_PACIFIC = pytz.timezone('US/Pacific')
class TimestampError(ValueError):
"""Generic timestamp-related error."""
pass
class TimezoneNotSpecifiedError(TimestampError):
"""This error is raised when timezone is not specified."""
pass
class TimeParseError(TimestampError):
"""This error is raised when we can't parse the input."""
pass
# TODO(user): this class needs to handle daylight better
class LocalTimezoneClass(datetime.tzinfo):
"""This class defines local timezone."""
ZERO = datetime.timedelta(0)
HOUR = datetime.timedelta(hours=1)
STDOFFSET = datetime.timedelta(seconds=-time.timezone)
if time.daylight:
DSTOFFSET = datetime.timedelta(seconds=-time.altzone)
else:
DSTOFFSET = STDOFFSET
DSTDIFF = DSTOFFSET - STDOFFSET
def utcoffset(self, dt):
"""datetime -> minutes east of UTC (negative for west of UTC)."""
if self._isdst(dt):
return self.DSTOFFSET
else:
return self.STDOFFSET
def dst(self, dt):
"""datetime -> DST offset in minutes east of UTC."""
if self._isdst(dt):
return self.DSTDIFF
else:
return self.ZERO
def tzname(self, dt):
"""datetime -> string name of time zone."""
return time.tzname[self._isdst(dt)]
def _isdst(self, dt):
"""Return true if given datetime is within local DST."""
tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
dt.weekday(), 0, -1)
stamp = time.mktime(tt)
tt = time.localtime(stamp)
return tt.tm_isdst > 0
def __repr__(self):
"""Return string '<Local>'."""
return '<Local>'
def localize(self, dt, unused_is_dst=False):
"""Convert naive time to local time."""
if dt.tzinfo is not None:
raise ValueError('Not naive datetime (tzinfo is already set)')
return dt.replace(tzinfo=self)
def normalize(self, dt, unused_is_dst=False):
"""Correct the timezone information on the given datetime."""
if dt.tzinfo is None:
raise ValueError('Naive time - no tzinfo set')
return dt.replace(tzinfo=self)
LocalTimezone = LocalTimezoneClass()
class BaseTimestamp(datetime.datetime):
"""Our kind of wrapper over datetime.datetime.
The objects produced by methods now, today, fromtimestamp, utcnow,
utcfromtimestamp are timezone-aware (with correct timezone).
We also overload __add__ and __sub__ method, to fix the result of arithmetic
operations.
"""
LocalTimezone = LocalTimezone
@classmethod
def AddLocalTimezone(cls, obj):
"""If obj is naive, add local timezone to it."""
if not obj.tzinfo:
return obj.replace(tzinfo=cls.LocalTimezone)
return obj
@classmethod
def Localize(cls, obj):
"""If obj is naive, localize it to cls.LocalTimezone."""
if not obj.tzinfo:
return cls.LocalTimezone.localize(obj)
return obj
def __add__(self, *args, **kwargs):
"""x.__add__(y) <==> x+y."""
r = super(BaseTimestamp, self).__add__(*args, **kwargs)
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
def __sub__(self, *args, **kwargs):
"""x.__add__(y) <==> x-y."""
r = super(BaseTimestamp, self).__sub__(*args, **kwargs)
if isinstance(r, datetime.datetime):
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
return r
@classmethod
def now(cls, *args, **kwargs):
"""Get a timestamp corresponding to right now.
Args:
args: Positional arguments to pass to datetime.datetime.now().
kwargs: Keyword arguments to pass to datetime.datetime.now(). If tz is not
specified, local timezone is assumed.
Returns:
A new BaseTimestamp with tz's local day and time.
"""
return cls.AddLocalTimezone(
super(BaseTimestamp, cls).now(*args, **kwargs))
@classmethod
def today(cls):
"""Current BaseTimestamp.
Same as self.__class__.fromtimestamp(time.time()).
Returns:
New self.__class__.
"""
return cls.AddLocalTimezone(super(BaseTimestamp, cls).today())
@classmethod
def fromtimestamp(cls, *args, **kwargs):
"""Get a new localized timestamp from a POSIX timestamp.
Args:
args: Positional arguments to pass to datetime.datetime.fromtimestamp().
kwargs: Keyword arguments to pass to datetime.datetime.fromtimestamp().
If tz is not specified, local timezone is assumed.
Returns:
A new BaseTimestamp with tz's local day and time.
"""
return cls.Localize(
super(BaseTimestamp, cls).fromtimestamp(*args, **kwargs))
@classmethod
def utcnow(cls):
"""Return a new BaseTimestamp representing UTC day and time."""
return super(BaseTimestamp, cls).utcnow().replace(tzinfo=pytz.utc)
@classmethod
def utcfromtimestamp(cls, *args, **kwargs):
"""timestamp -> UTC datetime from a POSIX timestamp (like time.time())."""
return super(BaseTimestamp, cls).utcfromtimestamp(
*args, **kwargs).replace(tzinfo=pytz.utc)
@classmethod
def strptime(cls, date_string, format, tz=None):
"""Parse date_string according to format and construct BaseTimestamp.
Args:
date_string: string passed to time.strptime.
format: format string passed to time.strptime.
tz: if not specified, local timezone assumed.
Returns:
New BaseTimestamp.
"""
if tz is None:
return cls.Localize(cls(*(time.strptime(date_string, format)[:6])))
return tz.localize(cls(*(time.strptime(date_string, format)[:6])))
def astimezone(self, *args, **kwargs):
"""tz -> convert to time in new timezone tz."""
r = super(BaseTimestamp, self).astimezone(*args, **kwargs)
return type(self)(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
@classmethod
def FromMicroTimestamp(cls, ts):
"""Create new Timestamp object from microsecond UTC timestamp value.
Args:
ts: integer microsecond UTC timestamp
Returns:
New cls()
"""
return cls.utcfromtimestamp(ts/_MICROSECONDS_PER_SECOND_F)
def AsSecondsSinceEpoch(self):
"""Return number of seconds since epoch (timestamp in seconds)."""
return GetSecondsSinceEpoch(self.utctimetuple())
def AsMicroTimestamp(self):
"""Return microsecond timestamp constructed from this object."""
return (SecondsToMicroseconds(self.AsSecondsSinceEpoch()) +
self.microsecond)
@classmethod
def combine(cls, datepart, timepart, tz=None):
"""Combine date and time into timestamp, timezone-aware.
Args:
datepart: datetime.date
timepart: datetime.time
tz: timezone or None
Returns:
timestamp object
"""
result = super(BaseTimestamp, cls).combine(datepart, timepart)
if tz:
|
return result
# Conversions from interval suffixes to number of seconds.
# (m => 60s, d => 86400s, etc)
_INTERVAL_CONV_DICT = {'s': 1}
_INTERVAL_CONV_DICT['m'] = 60 * _INTERVAL_CONV_DICT['s']
_INTERVAL_CONV_DICT['h'] = 60 * _INTERVAL_CONV_DICT['m']
_INTERVAL_CONV_DICT['d'] = 24 * _INTERVAL_CONV_DICT['h']
_INTERVAL_CONV_DICT['D'] = _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['w'] = 7 * _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['W'] = _INTERVAL_CONV_DICT['w']
_INTERVAL_CONV_DICT['M'] = 30 * _INTERVAL_CONV_DICT['d']
_INTERVAL_CONV_DICT['Y'] = 365 * _INTERVAL_CONV_DICT['d']
_INTERVAL_REGEXP = re.compile('^([0-9]+)([%s])?' % ''.join(_INTERVAL_CONV_DICT))
def ConvertIntervalToSeconds(interval):
"""Convert a formatted string representing an interval into seconds.
Args:
interval: String to interpret as an interval. A basic interval looks like
"<number><suffix>". Complex intervals consisting of a chain of basic
intervals are also allowed.
Returns:
An integer representing the number of seconds represented by the interval
string, or None if the interval string could not be decoded.
"""
total = 0
while interval:
match = _INTERVAL_REGEXP.match(interval)
if not match:
return None
try:
num = int(match.group(1))
except ValueError:
return None
suffix = match.group(2)
if suffix:
multiplier = _INTERVAL_CONV_DICT.get(suffix)
if not multiplier:
return None
num *= multiplier
total += num
interval = interval[match.end(0):]
return total
class Timestamp(BaseTimestamp):
"""This subclass contains methods to parse W3C and interval date spec.
The interval date specification is in the form "1D", where "D" can be
"s"econds "m"inutes "h"ours "D"ays "W"eeks "M"onths "Y"ears.
"""
INTERVAL_CONV_DICT = _INTERVAL_CONV_DICT
INTERVAL_REGEXP = _INTERVAL_REGEXP
@classmethod
def _StringToTime(cls, timestring, tz=None):
"""Use dateutil.parser to convert string into timestamp.
dateutil.parser understands ISO8601 which is really handy.
Args:
timestring: string with datetime
tz: optional timezone, if timezone is omitted from timestring.
Returns:
New Timestamp or None if unable to parse the timestring.
"""
try:
r = parser.parse(timestring)
except ValueError:
return None
if not r.tzinfo:
r = (tz or cls.LocalTimezone).localize(r)
result = cls(r.year, r.month, r.day, r.hour, r.minute, r.second,
r.microsecond, r.tzinfo)
return result
@classmethod
def _IntStringToInterval(cls, timestring):
"""Parse interval date specification and create a timedelta object.
Args:
timestring: string interval.
Returns:
A datetime.timedelta representing the specified interval or None if
unable to parse the timestring.
"""
seconds = ConvertIntervalToSeconds(timestring)
return datetime.timedelta(seconds=seconds) if seconds else None
@classmethod
def FromString(cls, value, tz=None):
"""Create a Timestamp from a string.
Args:
value: String interval or datetime.
e.g. "2013-01-05 13:00:00" or "1d"
tz: optional timezone, if timezone is omitted from timestring.
Returns:
A new Timestamp.
Raises:
TimeParseError if unable to parse value.
"""
result = cls._StringToTime(value, tz=tz)
if result:
return result
result = cls._IntStringToInterval(value)
if result:
return cls.utcnow() - result
raise TimeParseError(value)
# What's written below is a clear python bug. I mean, okay, I can apply
# negative timezone to it and end result will be inconversible.
MAXIMUM_PYTHON_TIMESTAMP = Timestamp(
9999, 12, 31, 23, 59, 59, 999999, UTC)
# This is also a bug. It is called 32bit time_t. I hate it.
# This is fixed in 2.5, btw.
MAXIMUM_MICROSECOND_TIMESTAMP = 0x80000000 * _MICROSECONDS_PER_SECOND - 1
MAXIMUM_MICROSECOND_TIMESTAMP_AS_TS = Timestamp(2038, 1, 19, 3, 14, 7, 999999)
| result = tz.localize(result) | conditional_block |
netdata.py | # -*- coding: utf-8 -*-
"""
Display network speed and bandwidth usage.
Configuration parameters:
cache_timeout: refresh interval for this module (default 2)
format: display format for this module
*(default '{nic} [\?color=down LAN(Kb): {down}↓ {up}↑]
[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]')*
nic: network interface to use (default None)
thresholds: color thresholds to use
*(default {'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]})*
Format placeholders:
{nic} network interface
{down} number of download speed
{up} number of upload speed
{download} number of download usage
{upload} number of upload usage
{total} number of total usage
Color thresholds:
{down} color threshold of download speed
{total} color threshold of total usage
@author Shahin Azad <ishahinism at Gmail>
SAMPLE OUTPUT
[
{'full_text': 'eth0 '},
{'full_text': 'LAN(Kb): 77.8↓ 26.9↑ ', 'color': '#00FF00'},
{'full_text': 'T(Mb): 394↓ 45↑ 438↕', 'color': '#FFFF00'},
]
"""
class GetData:
"""
Get system status.
"""
def __init__(self, nic):
self.nic = nic
def netBytes(self):
"""
Get bytes directly from /proc.
"""
with open('/proc/net/dev') as fh:
net_data = fh.read().split()
interface_index = net_data.index(self.nic + ':')
received_bytes = int(net_data[interface_index + 1])
transmitted_bytes = int(net_data[interface_index + 9])
return received_bytes, transmitted_bytes
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 2
format = u'{nic} [\?color=down LAN(Kb): {down}↓ {up}↑] ' + \
u'[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]'
nic = None
thresholds = {
'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]
}
class Meta:
def deprecate_function(config):
return {
'thresholds': {
'down': [
(0, 'bad'),
(config.get('low_speed', 30), 'degraded'),
(config.get('med_speed', 60), 'good')
],
'total': [
(0, 'good'),
(config.get('low_traffic', 400), 'degraded'),
(config.get('med_traffic', 700), 'bad')
]
}
}
deprecated = {
'function': [
{'function': deprecate_function},
],
'remove': [
{
'param': 'low_speed',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'med_speed',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'low_traffic',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'med_traffic',
'msg': 'obsolete, set using thresholds parameter',
},
],
}
update_config = {
'update_placeholder_format': [
{
'placeholder_formats': {
'down': ':5.1f',
'up': ':5.1f',
'download': ':3.0f',
'upload': ':3.0f',
'total': ':3.0f',
},
'format_strings': ['format']
},
],
}
def post_config_hook(self):
"""
Get network interface.
"""
self.old_transmitted = 0
self.old_received = 0
if self.nic is None:
# Get default gateway directly from /proc.
with open('/proc/net/route') as fh:
for line in fh:
fields = line.strip().split()
if fields[1] == '00000000' and int(fields[3], 16) & 2:
self.nic = fields[0]
break
if self.nic is None:
self.nic = 'lo'
self.py3.log('selected nic: %s' % self.nic)
def netdata(self):
"""
| lculate network speed and network traffic.
"""
data = GetData(self.nic)
received_bytes, transmitted_bytes = data.netBytes()
# net_speed (statistic)
down = (received_bytes - self.old_received) / 1024.
up = (transmitted_bytes - self.old_transmitted) / 1024.
self.old_received = received_bytes
self.old_transmitted = transmitted_bytes
# net_traffic (statistic)
download = received_bytes / 1024 / 1024.
upload = transmitted_bytes / 1024 / 1024.
total = download + upload
# color threshold
self.py3.threshold_get_color(down, 'down')
self.py3.threshold_get_color(total, 'total')
netdata = self.py3.safe_format(self.format, {'down': down,
'up': up,
'download': download,
'upload': upload,
'total': total,
'nic': self.nic})
return {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': netdata
}
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
| Ca | identifier_name |
netdata.py | # -*- coding: utf-8 -*-
"""
Display network speed and bandwidth usage.
Configuration parameters:
cache_timeout: refresh interval for this module (default 2)
format: display format for this module
*(default '{nic} [\?color=down LAN(Kb): {down}↓ {up}↑]
[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]')*
nic: network interface to use (default None)
thresholds: color thresholds to use
*(default {'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]})*
Format placeholders:
{nic} network interface
{down} number of download speed
{up} number of upload speed
{download} number of download usage
{upload} number of upload usage
{total} number of total usage
Color thresholds:
{down} color threshold of download speed
{total} color threshold of total usage
@author Shahin Azad <ishahinism at Gmail>
SAMPLE OUTPUT
[
{'full_text': 'eth0 '},
{'full_text': 'LAN(Kb): 77.8↓ 26.9↑ ', 'color': '#00FF00'},
{'full_text': 'T(Mb): 394↓ 45↑ 438↕', 'color': '#FFFF00'},
]
"""
class GetData:
"""
Get system status.
"""
def __init__(self, nic):
self.nic = nic
def netBytes(self):
"""
Get bytes directly from /proc.
"""
with open('/proc/net/dev') as fh:
net_data = fh.read().split()
interface_index = net_data.index(self.nic + ':')
received_bytes = int(net_data[interface_index + 1])
transmitted_bytes = int(net_data[interface_index + 9])
return received_bytes, transmitted_bytes
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 2
format = u'{nic} [\?color=down LAN(Kb): {down}↓ {up}↑] ' + \
u'[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]'
nic = None
thresholds = {
'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]
}
class Meta:
def deprecate_function(config):
return {
'thresholds': {
'down': [
(0, 'bad'),
(config.get('low_speed', 30), 'degraded'),
(config.get('med_speed', 60), 'good')
],
'total': [
(0, 'good'),
(config.get('low_traffic', 400), 'degraded'),
(config.get('med_traffic', 700), 'bad')
]
}
}
deprecated = {
'function': [
{'function': deprecate_function},
],
'remove': [
{
'param': 'low_speed',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'med_speed',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'low_traffic',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'med_traffic',
'msg': 'obsolete, set using thresholds parameter',
},
],
}
update_config = {
'update_placeholder_format': [
{
'placeholder_formats': {
'down': ':5.1f',
'up': ':5.1f',
'download': ':3.0f',
'upload': ':3.0f',
'total': ':3.0f',
},
'format_strings': ['format']
},
],
}
def post_config_hook(self):
"""
Get network interface.
"""
self.old_transmitted = 0
self.old_received = 0
if self.nic is None:
# Get default gateway directly from /proc.
with open('/proc/net/route') as fh:
for line in fh:
fields = line.strip().split()
if fields[1] == '00000000' and int(fields[3], 16) & 2:
self.nic = fields[0]
break
if self.nic is None:
self.nic = 'lo'
self.py3.log('selected nic: %s' % self.nic)
def netdata(self):
"""
Calculate network | """
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
| speed and network traffic.
"""
data = GetData(self.nic)
received_bytes, transmitted_bytes = data.netBytes()
# net_speed (statistic)
down = (received_bytes - self.old_received) / 1024.
up = (transmitted_bytes - self.old_transmitted) / 1024.
self.old_received = received_bytes
self.old_transmitted = transmitted_bytes
# net_traffic (statistic)
download = received_bytes / 1024 / 1024.
upload = transmitted_bytes / 1024 / 1024.
total = download + upload
# color threshold
self.py3.threshold_get_color(down, 'down')
self.py3.threshold_get_color(total, 'total')
netdata = self.py3.safe_format(self.format, {'down': down,
'up': up,
'download': download,
'upload': upload,
'total': total,
'nic': self.nic})
return {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': netdata
}
if __name__ == "__main__":
| identifier_body |
netdata.py | # -*- coding: utf-8 -*-
"""
Display network speed and bandwidth usage.
Configuration parameters:
cache_timeout: refresh interval for this module (default 2)
format: display format for this module
*(default '{nic} [\?color=down LAN(Kb): {down}↓ {up}↑]
[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]')*
nic: network interface to use (default None)
thresholds: color thresholds to use
*(default {'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]})*
Format placeholders:
{nic} network interface
{down} number of download speed
{up} number of upload speed
{download} number of download usage
{upload} number of upload usage
{total} number of total usage
Color thresholds:
{down} color threshold of download speed
{total} color threshold of total usage
@author Shahin Azad <ishahinism at Gmail>
SAMPLE OUTPUT
[
{'full_text': 'eth0 '},
{'full_text': 'LAN(Kb): 77.8↓ 26.9↑ ', 'color': '#00FF00'},
{'full_text': 'T(Mb): 394↓ 45↑ 438↕', 'color': '#FFFF00'},
]
"""
class GetData:
"""
Get system status.
"""
def __init__(self, nic):
self.nic = nic
def netBytes(self):
"""
Get bytes directly from /proc.
"""
with open('/proc/net/dev') as fh:
net_data = fh.read().split()
interface_index = net_data.index(self.nic + ':')
received_bytes = int(net_data[interface_index + 1])
transmitted_bytes = int(net_data[interface_index + 9])
return received_bytes, transmitted_bytes
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 2
format = u'{nic} [\?color=down LAN(Kb): {down}↓ {up}↑] ' + \
u'[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]'
nic = None
thresholds = {
'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]
}
class Meta:
def deprecate_function(config):
return {
'thresholds': {
'down': [
(0, 'bad'),
(config.get('low_speed', 30), 'degraded'),
(config.get('med_speed', 60), 'good')
],
'total': [
(0, 'good'),
(config.get('low_traffic', 400), 'degraded'),
(config.get('med_traffic', 700), 'bad')
]
}
}
deprecated = {
'function': [
{'function': deprecate_function},
],
'remove': [
{
'param': 'low_speed',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'med_speed',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'low_traffic',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'med_traffic',
'msg': 'obsolete, set using thresholds parameter',
},
],
}
update_config = {
'update_placeholder_format': [
{
'placeholder_formats': {
'down': ':5.1f',
'up': ':5.1f',
'download': ':3.0f',
'upload': ':3.0f',
'total': ':3.0f',
},
'format_strings': ['format']
},
],
}
def post_config_hook(self):
"""
Get network interface.
"""
self.old_transmitted = 0
self.old_received = 0
if self.nic is None:
# Get default gateway directly from /proc.
with open('/proc/net/route') a | """
Calculate network speed and network traffic.
"""
data = GetData(self.nic)
received_bytes, transmitted_bytes = data.netBytes()
# net_speed (statistic)
down = (received_bytes - self.old_received) / 1024.
up = (transmitted_bytes - self.old_transmitted) / 1024.
self.old_received = received_bytes
self.old_transmitted = transmitted_bytes
# net_traffic (statistic)
download = received_bytes / 1024 / 1024.
upload = transmitted_bytes / 1024 / 1024.
total = download + upload
# color threshold
self.py3.threshold_get_color(down, 'down')
self.py3.threshold_get_color(total, 'total')
netdata = self.py3.safe_format(self.format, {'down': down,
'up': up,
'download': download,
'upload': upload,
'total': total,
'nic': self.nic})
return {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': netdata
}
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
| s fh:
for line in fh:
fields = line.strip().split()
if fields[1] == '00000000' and int(fields[3], 16) & 2:
self.nic = fields[0]
break
if self.nic is None:
self.nic = 'lo'
self.py3.log('selected nic: %s' % self.nic)
def netdata(self):
| conditional_block |
netdata.py | # -*- coding: utf-8 -*-
""" | Display network speed and bandwidth usage.
Configuration parameters:
cache_timeout: refresh interval for this module (default 2)
format: display format for this module
*(default '{nic} [\?color=down LAN(Kb): {down}↓ {up}↑]
[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]')*
nic: network interface to use (default None)
thresholds: color thresholds to use
*(default {'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]})*
Format placeholders:
{nic} network interface
{down} number of download speed
{up} number of upload speed
{download} number of download usage
{upload} number of upload usage
{total} number of total usage
Color thresholds:
{down} color threshold of download speed
{total} color threshold of total usage
@author Shahin Azad <ishahinism at Gmail>
SAMPLE OUTPUT
[
{'full_text': 'eth0 '},
{'full_text': 'LAN(Kb): 77.8↓ 26.9↑ ', 'color': '#00FF00'},
{'full_text': 'T(Mb): 394↓ 45↑ 438↕', 'color': '#FFFF00'},
]
"""
class GetData:
"""
Get system status.
"""
def __init__(self, nic):
self.nic = nic
def netBytes(self):
"""
Get bytes directly from /proc.
"""
with open('/proc/net/dev') as fh:
net_data = fh.read().split()
interface_index = net_data.index(self.nic + ':')
received_bytes = int(net_data[interface_index + 1])
transmitted_bytes = int(net_data[interface_index + 9])
return received_bytes, transmitted_bytes
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 2
format = u'{nic} [\?color=down LAN(Kb): {down}↓ {up}↑] ' + \
u'[\?color=total T(Mb): {download}↓ {upload}↑ {total}↕]'
nic = None
thresholds = {
'down': [(0, 'bad'), (30, 'degraded'), (60, 'good')],
'total': [(0, 'good'), (400, 'degraded'), (700, 'bad')]
}
class Meta:
def deprecate_function(config):
return {
'thresholds': {
'down': [
(0, 'bad'),
(config.get('low_speed', 30), 'degraded'),
(config.get('med_speed', 60), 'good')
],
'total': [
(0, 'good'),
(config.get('low_traffic', 400), 'degraded'),
(config.get('med_traffic', 700), 'bad')
]
}
}
deprecated = {
'function': [
{'function': deprecate_function},
],
'remove': [
{
'param': 'low_speed',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'med_speed',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'low_traffic',
'msg': 'obsolete, set using thresholds parameter',
},
{
'param': 'med_traffic',
'msg': 'obsolete, set using thresholds parameter',
},
],
}
update_config = {
'update_placeholder_format': [
{
'placeholder_formats': {
'down': ':5.1f',
'up': ':5.1f',
'download': ':3.0f',
'upload': ':3.0f',
'total': ':3.0f',
},
'format_strings': ['format']
},
],
}
def post_config_hook(self):
"""
Get network interface.
"""
self.old_transmitted = 0
self.old_received = 0
if self.nic is None:
# Get default gateway directly from /proc.
with open('/proc/net/route') as fh:
for line in fh:
fields = line.strip().split()
if fields[1] == '00000000' and int(fields[3], 16) & 2:
self.nic = fields[0]
break
if self.nic is None:
self.nic = 'lo'
self.py3.log('selected nic: %s' % self.nic)
def netdata(self):
"""
Calculate network speed and network traffic.
"""
data = GetData(self.nic)
received_bytes, transmitted_bytes = data.netBytes()
# net_speed (statistic)
down = (received_bytes - self.old_received) / 1024.
up = (transmitted_bytes - self.old_transmitted) / 1024.
self.old_received = received_bytes
self.old_transmitted = transmitted_bytes
# net_traffic (statistic)
download = received_bytes / 1024 / 1024.
upload = transmitted_bytes / 1024 / 1024.
total = download + upload
# color threshold
self.py3.threshold_get_color(down, 'down')
self.py3.threshold_get_color(total, 'total')
netdata = self.py3.safe_format(self.format, {'down': down,
'up': up,
'download': download,
'upload': upload,
'total': total,
'nic': self.nic})
return {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': netdata
}
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status) | random_line_split |
|
scopemeasure_stopover_adds_an_extra_line_to_the_log_upon_each_call.rs | use libnewsboat::{
logger::{self, Level},
scopemeasure::ScopeMeasure,
};
use std::fs::File;
use std::io::{BufRead, BufReader, Result};
use std::path::Path;
use tempfile::TempDir;
fn file_lines_count(logfile: &Path) -> Result<usize> {
let file = File::open(logfile)?;
let reader = BufReader::new(file);
Ok(reader.lines().count())
}
#[test]
fn stopover_adds_an_extra_line_to_the_log_upon_each_call() | {
for calls in &[1, 2, 5] {
let tmp = TempDir::new().unwrap();
let logfile = {
let mut logfile = tmp.path().to_owned();
logfile.push("example.log");
logfile
};
{
logger::get_instance().set_logfile(logfile.to_str().unwrap());
logger::get_instance().set_loglevel(Level::Debug);
let sm = ScopeMeasure::new(String::from("test"));
for i in 0..*calls {
sm.stopover(&format!("stopover No.{}", i));
}
}
// One line for each call to stopover(), plus one more for the call to drop()
assert_eq!(file_lines_count(&logfile).unwrap(), calls + 1usize);
}
} | identifier_body |
|
scopemeasure_stopover_adds_an_extra_line_to_the_log_upon_each_call.rs | use libnewsboat::{
logger::{self, Level},
scopemeasure::ScopeMeasure,
};
use std::fs::File;
use std::io::{BufRead, BufReader, Result};
use std::path::Path;
use tempfile::TempDir;
fn | (logfile: &Path) -> Result<usize> {
let file = File::open(logfile)?;
let reader = BufReader::new(file);
Ok(reader.lines().count())
}
#[test]
fn stopover_adds_an_extra_line_to_the_log_upon_each_call() {
for calls in &[1, 2, 5] {
let tmp = TempDir::new().unwrap();
let logfile = {
let mut logfile = tmp.path().to_owned();
logfile.push("example.log");
logfile
};
{
logger::get_instance().set_logfile(logfile.to_str().unwrap());
logger::get_instance().set_loglevel(Level::Debug);
let sm = ScopeMeasure::new(String::from("test"));
for i in 0..*calls {
sm.stopover(&format!("stopover No.{}", i));
}
}
// One line for each call to stopover(), plus one more for the call to drop()
assert_eq!(file_lines_count(&logfile).unwrap(), calls + 1usize);
}
}
| file_lines_count | identifier_name |
scopemeasure_stopover_adds_an_extra_line_to_the_log_upon_each_call.rs | use libnewsboat::{
logger::{self, Level},
scopemeasure::ScopeMeasure,
};
use std::fs::File;
use std::io::{BufRead, BufReader, Result};
use std::path::Path;
use tempfile::TempDir;
fn file_lines_count(logfile: &Path) -> Result<usize> {
let file = File::open(logfile)?;
let reader = BufReader::new(file);
Ok(reader.lines().count())
}
| for calls in &[1, 2, 5] {
let tmp = TempDir::new().unwrap();
let logfile = {
let mut logfile = tmp.path().to_owned();
logfile.push("example.log");
logfile
};
{
logger::get_instance().set_logfile(logfile.to_str().unwrap());
logger::get_instance().set_loglevel(Level::Debug);
let sm = ScopeMeasure::new(String::from("test"));
for i in 0..*calls {
sm.stopover(&format!("stopover No.{}", i));
}
}
// One line for each call to stopover(), plus one more for the call to drop()
assert_eq!(file_lines_count(&logfile).unwrap(), calls + 1usize);
}
} | #[test]
fn stopover_adds_an_extra_line_to_the_log_upon_each_call() { | random_line_split |
mod.rs | use std::time::{Instant, Duration};
use super::Figure;
/// This struct is responsible for logging the average frame duration to stdout
/// once a second.
pub struct FpsLog {
last_second: Instant,
avg_duration_ns: u64,
ticks: u64,
}
impl FpsLog {
pub fn new() -> FpsLog {
FpsLog {
last_second: Instant::now(), | /// Dump the frame time to std out
fn print(&self) {
let frame_time_ms = self.avg_duration_ns / 1000000;
println!("avg frame time: {}ns which is {}ms",
self.avg_duration_ns, frame_time_ms);
}
/// Reset internal state which is used to calculate frame duration
fn reset(&mut self) {
self.last_second = Instant::now();
self.avg_duration_ns = 0;
self.ticks = 0;
}
/// Update state for duration calculation
fn add_frame_duration(&mut self, duration: Duration) {
let scaled_avg = self.avg_duration_ns * self.ticks;
let frame_ns = duration.subsec_nanos() as u64;
self.ticks += 1;
self.avg_duration_ns = (scaled_avg + frame_ns) / self.ticks;
}
}
impl Figure for FpsLog {
fn update(&mut self, duration: Duration) {
if self.last_second.elapsed() > Duration::from_secs(1) {
self.print();
self.reset();
}
self.add_frame_duration(duration);
}
} | avg_duration_ns: 0,
ticks: 0
}
}
| random_line_split |
mod.rs | use std::time::{Instant, Duration};
use super::Figure;
/// This struct is responsible for logging the average frame duration to stdout
/// once a second.
pub struct FpsLog {
last_second: Instant,
avg_duration_ns: u64,
ticks: u64,
}
impl FpsLog {
pub fn new() -> FpsLog {
FpsLog {
last_second: Instant::now(),
avg_duration_ns: 0,
ticks: 0
}
}
/// Dump the frame time to std out
fn | (&self) {
let frame_time_ms = self.avg_duration_ns / 1000000;
println!("avg frame time: {}ns which is {}ms",
self.avg_duration_ns, frame_time_ms);
}
/// Reset internal state which is used to calculate frame duration
fn reset(&mut self) {
self.last_second = Instant::now();
self.avg_duration_ns = 0;
self.ticks = 0;
}
/// Update state for duration calculation
fn add_frame_duration(&mut self, duration: Duration) {
let scaled_avg = self.avg_duration_ns * self.ticks;
let frame_ns = duration.subsec_nanos() as u64;
self.ticks += 1;
self.avg_duration_ns = (scaled_avg + frame_ns) / self.ticks;
}
}
impl Figure for FpsLog {
fn update(&mut self, duration: Duration) {
if self.last_second.elapsed() > Duration::from_secs(1) {
self.print();
self.reset();
}
self.add_frame_duration(duration);
}
}
| print | identifier_name |
mod.rs | use std::time::{Instant, Duration};
use super::Figure;
/// This struct is responsible for logging the average frame duration to stdout
/// once a second.
pub struct FpsLog {
last_second: Instant,
avg_duration_ns: u64,
ticks: u64,
}
impl FpsLog {
pub fn new() -> FpsLog |
/// Dump the frame time to std out
fn print(&self) {
let frame_time_ms = self.avg_duration_ns / 1000000;
println!("avg frame time: {}ns which is {}ms",
self.avg_duration_ns, frame_time_ms);
}
/// Reset internal state which is used to calculate frame duration
fn reset(&mut self) {
self.last_second = Instant::now();
self.avg_duration_ns = 0;
self.ticks = 0;
}
/// Update state for duration calculation
fn add_frame_duration(&mut self, duration: Duration) {
let scaled_avg = self.avg_duration_ns * self.ticks;
let frame_ns = duration.subsec_nanos() as u64;
self.ticks += 1;
self.avg_duration_ns = (scaled_avg + frame_ns) / self.ticks;
}
}
impl Figure for FpsLog {
fn update(&mut self, duration: Duration) {
if self.last_second.elapsed() > Duration::from_secs(1) {
self.print();
self.reset();
}
self.add_frame_duration(duration);
}
}
| {
FpsLog {
last_second: Instant::now(),
avg_duration_ns: 0,
ticks: 0
}
} | identifier_body |
mod.rs | use std::time::{Instant, Duration};
use super::Figure;
/// This struct is responsible for logging the average frame duration to stdout
/// once a second.
pub struct FpsLog {
last_second: Instant,
avg_duration_ns: u64,
ticks: u64,
}
impl FpsLog {
pub fn new() -> FpsLog {
FpsLog {
last_second: Instant::now(),
avg_duration_ns: 0,
ticks: 0
}
}
/// Dump the frame time to std out
fn print(&self) {
let frame_time_ms = self.avg_duration_ns / 1000000;
println!("avg frame time: {}ns which is {}ms",
self.avg_duration_ns, frame_time_ms);
}
/// Reset internal state which is used to calculate frame duration
fn reset(&mut self) {
self.last_second = Instant::now();
self.avg_duration_ns = 0;
self.ticks = 0;
}
/// Update state for duration calculation
fn add_frame_duration(&mut self, duration: Duration) {
let scaled_avg = self.avg_duration_ns * self.ticks;
let frame_ns = duration.subsec_nanos() as u64;
self.ticks += 1;
self.avg_duration_ns = (scaled_avg + frame_ns) / self.ticks;
}
}
impl Figure for FpsLog {
fn update(&mut self, duration: Duration) {
if self.last_second.elapsed() > Duration::from_secs(1) |
self.add_frame_duration(duration);
}
}
| {
self.print();
self.reset();
} | conditional_block |
classadbase_1_1_timer_item.js | [ "start", "d0/db0/classadbase_1_1_timer_item.html#ad86504a79d82c1e25633ebf56a089120", null ],
[ "stop", "d0/db0/classadbase_1_1_timer_item.html#a04551c02abdd8803c1c7329be496a3c4", null ],
[ "timerHandler", "d0/db0/classadbase_1_1_timer_item.html#aadf5574e948f85d696ab571f3d9fcc69", null ]
]; | var classadbase_1_1_timer_item =
[
[ "TimerItem", "d0/db0/classadbase_1_1_timer_item.html#a72988b767c5f2d44d3e7d0da58837d33", null ],
[ "~TimerItem", "d0/db0/classadbase_1_1_timer_item.html#a605e9a82245eeccfe12f96ccf93aab1b", null ],
[ "setDelTimerCallback", "d0/db0/classadbase_1_1_timer_item.html#a9eb85cc856a4852fe77e9db30b9fc462", null ], | random_line_split |
|
simpleamt.py | import argparse, json
import boto3
from boto.mturk.connection import MTurkConnection
from boto.mturk.qualification import *
from jinja2 import Environment, FileSystemLoader
"""
A bunch of free functions that we use in all scripts.
"""
| """
Get a jinja2 Environment object that we can use to find templates.
"""
return Environment(loader=FileSystemLoader(config['template_directories']))
def json_file(filename):
with open(filename, 'r') as f:
return json.load(f)
def get_parent_parser():
"""
Get an argparse parser with arguments that are always needed
"""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('--prod', action='store_false', dest='sandbox',
default=True,
help="Whether to run on the production AMT site.")
parser.add_argument('--hit_ids_file')
parser.add_argument('--config', default='config.json',
type=json_file)
return parser
def get_mturk_connection_from_args(args):
"""
Utility method to get an MTurkConnection from argparse args.
"""
aws_access_key = args.config.get('aws_access_key')
aws_secret_key = args.config.get('aws_secret_key')
return get_mturk_connection(sandbox=args.sandbox,
aws_access_key=aws_access_key,
aws_secret_key=aws_secret_key)
def get_mturk_connection(sandbox=True, aws_access_key=None,
aws_secret_key=None):
"""
Get a boto mturk connection. This is a thin wrapper over the
MTurkConnection constructor; the only difference is a boolean
flag to indicate sandbox or not.
"""
kwargs = {}
if aws_access_key is not None:
kwargs['aws_access_key_id'] = aws_access_key
if aws_secret_key is not None:
kwargs['aws_secret_access_key'] = aws_secret_key
if sandbox:
host = 'mechanicalturk.sandbox.amazonaws.com'
else:
host='mechanicalturk.amazonaws.com'
return MTurkConnection(host=host, **kwargs)
def setup_qualifications(hit_properties):
"""
Replace some of the human-readable keys from the raw HIT properties
JSON data structure with boto-specific objects.
"""
qual = Qualifications()
if 'country' in hit_properties:
qual.add(LocaleRequirement('In', hit_properties['country']))
del hit_properties['country']
if 'hits_approved' in hit_properties:
qual.add(NumberHitsApprovedRequirement('GreaterThan',
hit_properties['hits_approved']))
del hit_properties['hits_approved']
if 'percent_approved' in hit_properties:
qual.add(PercentAssignmentsApprovedRequirement('GreaterThan',
hit_properties['percent_approved']))
del hit_properties['percent_approved']
# qual.add(Requirement(qualification_type_id="3TDQPWMDS877YXAXCWP6LHT0FJRANT",comparator='GreaterThan',integer_value=9))
# 3TDQPWMDS877YXAXCWP6LHT0FJRANT
hit_properties['qualifications'] = qual | def get_jinja_env(config): | random_line_split |
simpleamt.py | import argparse, json
import boto3
from boto.mturk.connection import MTurkConnection
from boto.mturk.qualification import *
from jinja2 import Environment, FileSystemLoader
"""
A bunch of free functions that we use in all scripts.
"""
def get_jinja_env(config):
"""
Get a jinja2 Environment object that we can use to find templates.
"""
return Environment(loader=FileSystemLoader(config['template_directories']))
def json_file(filename):
with open(filename, 'r') as f:
return json.load(f)
def get_parent_parser():
"""
Get an argparse parser with arguments that are always needed
"""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('--prod', action='store_false', dest='sandbox',
default=True,
help="Whether to run on the production AMT site.")
parser.add_argument('--hit_ids_file')
parser.add_argument('--config', default='config.json',
type=json_file)
return parser
def get_mturk_connection_from_args(args):
"""
Utility method to get an MTurkConnection from argparse args.
"""
aws_access_key = args.config.get('aws_access_key')
aws_secret_key = args.config.get('aws_secret_key')
return get_mturk_connection(sandbox=args.sandbox,
aws_access_key=aws_access_key,
aws_secret_key=aws_secret_key)
def get_mturk_connection(sandbox=True, aws_access_key=None,
aws_secret_key=None):
"""
Get a boto mturk connection. This is a thin wrapper over the
MTurkConnection constructor; the only difference is a boolean
flag to indicate sandbox or not.
"""
kwargs = {}
if aws_access_key is not None:
kwargs['aws_access_key_id'] = aws_access_key
if aws_secret_key is not None:
kwargs['aws_secret_access_key'] = aws_secret_key
if sandbox:
host = 'mechanicalturk.sandbox.amazonaws.com'
else:
host='mechanicalturk.amazonaws.com'
return MTurkConnection(host=host, **kwargs)
def | (hit_properties):
"""
Replace some of the human-readable keys from the raw HIT properties
JSON data structure with boto-specific objects.
"""
qual = Qualifications()
if 'country' in hit_properties:
qual.add(LocaleRequirement('In', hit_properties['country']))
del hit_properties['country']
if 'hits_approved' in hit_properties:
qual.add(NumberHitsApprovedRequirement('GreaterThan',
hit_properties['hits_approved']))
del hit_properties['hits_approved']
if 'percent_approved' in hit_properties:
qual.add(PercentAssignmentsApprovedRequirement('GreaterThan',
hit_properties['percent_approved']))
del hit_properties['percent_approved']
# qual.add(Requirement(qualification_type_id="3TDQPWMDS877YXAXCWP6LHT0FJRANT",comparator='GreaterThan',integer_value=9))
# 3TDQPWMDS877YXAXCWP6LHT0FJRANT
hit_properties['qualifications'] = qual
| setup_qualifications | identifier_name |
simpleamt.py | import argparse, json
import boto3
from boto.mturk.connection import MTurkConnection
from boto.mturk.qualification import *
from jinja2 import Environment, FileSystemLoader
"""
A bunch of free functions that we use in all scripts.
"""
def get_jinja_env(config):
"""
Get a jinja2 Environment object that we can use to find templates.
"""
return Environment(loader=FileSystemLoader(config['template_directories']))
def json_file(filename):
|
def get_parent_parser():
"""
Get an argparse parser with arguments that are always needed
"""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('--prod', action='store_false', dest='sandbox',
default=True,
help="Whether to run on the production AMT site.")
parser.add_argument('--hit_ids_file')
parser.add_argument('--config', default='config.json',
type=json_file)
return parser
def get_mturk_connection_from_args(args):
"""
Utility method to get an MTurkConnection from argparse args.
"""
aws_access_key = args.config.get('aws_access_key')
aws_secret_key = args.config.get('aws_secret_key')
return get_mturk_connection(sandbox=args.sandbox,
aws_access_key=aws_access_key,
aws_secret_key=aws_secret_key)
def get_mturk_connection(sandbox=True, aws_access_key=None,
aws_secret_key=None):
"""
Get a boto mturk connection. This is a thin wrapper over the
MTurkConnection constructor; the only difference is a boolean
flag to indicate sandbox or not.
"""
kwargs = {}
if aws_access_key is not None:
kwargs['aws_access_key_id'] = aws_access_key
if aws_secret_key is not None:
kwargs['aws_secret_access_key'] = aws_secret_key
if sandbox:
host = 'mechanicalturk.sandbox.amazonaws.com'
else:
host='mechanicalturk.amazonaws.com'
return MTurkConnection(host=host, **kwargs)
def setup_qualifications(hit_properties):
"""
Replace some of the human-readable keys from the raw HIT properties
JSON data structure with boto-specific objects.
"""
qual = Qualifications()
if 'country' in hit_properties:
qual.add(LocaleRequirement('In', hit_properties['country']))
del hit_properties['country']
if 'hits_approved' in hit_properties:
qual.add(NumberHitsApprovedRequirement('GreaterThan',
hit_properties['hits_approved']))
del hit_properties['hits_approved']
if 'percent_approved' in hit_properties:
qual.add(PercentAssignmentsApprovedRequirement('GreaterThan',
hit_properties['percent_approved']))
del hit_properties['percent_approved']
# qual.add(Requirement(qualification_type_id="3TDQPWMDS877YXAXCWP6LHT0FJRANT",comparator='GreaterThan',integer_value=9))
# 3TDQPWMDS877YXAXCWP6LHT0FJRANT
hit_properties['qualifications'] = qual
| with open(filename, 'r') as f:
return json.load(f) | identifier_body |
simpleamt.py | import argparse, json
import boto3
from boto.mturk.connection import MTurkConnection
from boto.mturk.qualification import *
from jinja2 import Environment, FileSystemLoader
"""
A bunch of free functions that we use in all scripts.
"""
def get_jinja_env(config):
"""
Get a jinja2 Environment object that we can use to find templates.
"""
return Environment(loader=FileSystemLoader(config['template_directories']))
def json_file(filename):
with open(filename, 'r') as f:
return json.load(f)
def get_parent_parser():
"""
Get an argparse parser with arguments that are always needed
"""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('--prod', action='store_false', dest='sandbox',
default=True,
help="Whether to run on the production AMT site.")
parser.add_argument('--hit_ids_file')
parser.add_argument('--config', default='config.json',
type=json_file)
return parser
def get_mturk_connection_from_args(args):
"""
Utility method to get an MTurkConnection from argparse args.
"""
aws_access_key = args.config.get('aws_access_key')
aws_secret_key = args.config.get('aws_secret_key')
return get_mturk_connection(sandbox=args.sandbox,
aws_access_key=aws_access_key,
aws_secret_key=aws_secret_key)
def get_mturk_connection(sandbox=True, aws_access_key=None,
aws_secret_key=None):
"""
Get a boto mturk connection. This is a thin wrapper over the
MTurkConnection constructor; the only difference is a boolean
flag to indicate sandbox or not.
"""
kwargs = {}
if aws_access_key is not None:
|
if aws_secret_key is not None:
kwargs['aws_secret_access_key'] = aws_secret_key
if sandbox:
host = 'mechanicalturk.sandbox.amazonaws.com'
else:
host='mechanicalturk.amazonaws.com'
return MTurkConnection(host=host, **kwargs)
def setup_qualifications(hit_properties):
"""
Replace some of the human-readable keys from the raw HIT properties
JSON data structure with boto-specific objects.
"""
qual = Qualifications()
if 'country' in hit_properties:
qual.add(LocaleRequirement('In', hit_properties['country']))
del hit_properties['country']
if 'hits_approved' in hit_properties:
qual.add(NumberHitsApprovedRequirement('GreaterThan',
hit_properties['hits_approved']))
del hit_properties['hits_approved']
if 'percent_approved' in hit_properties:
qual.add(PercentAssignmentsApprovedRequirement('GreaterThan',
hit_properties['percent_approved']))
del hit_properties['percent_approved']
# qual.add(Requirement(qualification_type_id="3TDQPWMDS877YXAXCWP6LHT0FJRANT",comparator='GreaterThan',integer_value=9))
# 3TDQPWMDS877YXAXCWP6LHT0FJRANT
hit_properties['qualifications'] = qual
| kwargs['aws_access_key_id'] = aws_access_key | conditional_block |
record_all_old.py | import time
import threading
import logging
import serial
import io
import sim900
import sys
if __name__ == "__main__":
#this is a bad file for recording the diode temps and voltages
#eventually it will be merged with recording the resistance bridges
#and actually use the sim900 file functions
#create an instance of the sim900 commands
sim = sim900.sim900()
#main function to records temps
try:
timestr = time.strftime("%Y%m%d-%H%M%S")
filename = "/home/heather/SRS/%s.txt" % timestr
f = open(filename, 'w+')
# The column headers for rox 3 were the opposite of the written data until 2014-10-10:
f.write("time, diode ch1 temp, dio ch 2 temp, dio 3 temp, dio 4 temp, dio 1 volts, dio 2 volts, dio 3 volts, dio 4 volts, rox 1 temp, rox 1 res, rox 2 temp, rox 2 res, rox 3 temp, rox 3 res\n")
while 1:
#get diode info
sim.connect_sim922()
dio_temps = sim.get_sim922_temp()
dio_temps = dio_temps.rstrip()
time.sleep(1)
dio_volts = sim.get_sim922_volts()
dio_volts = dio_volts.rstrip()
sim.close_sim922()
print "diode"
time.sleep(1)
#get rox1 info
sim.connect_sim921_1()
rox1_res = sim.get_resistance()
rox1_temp = sim.get_temp()
sim.close_sim921_1()
print "rox1"
time.sleep(1)
sim.connect_sim921()
rox2_res = sim.get_resistance()
rox2_temp = sim.get_temp()
sim.close_sim921()
#get rox3 info
sim.connect_sim921_6()
rox3_res = sim.get_resistance()
rox3_temp = sim.get_temp()
sim.close_sim921_6()
print "rox2"
time.sleep(1)
#write it all to file
current_time = time.strftime("%Y%m%d-%H%M%S")
f.write("%s, %s, %s, %s, %s, %s, %s, %s, %s\n" % (current_time, dio_temps, dio_volts, rox1_temp, rox1_res, rox2_temp, rox2_res, rox3_temp, rox3_res))
f.flush()
except KeyboardInterrupt:
f.close() | sim.close_sim900()
print "ports closed" | print "done writing"
sim.close_sim922() | random_line_split |
record_all_old.py | import time
import threading
import logging
import serial
import io
import sim900
import sys
if __name__ == "__main__":
#this is a bad file for recording the diode temps and voltages
#eventually it will be merged with recording the resistance bridges
#and actually use the sim900 file functions
#create an instance of the sim900 commands
sim = sim900.sim900()
#main function to records temps
try:
timestr = time.strftime("%Y%m%d-%H%M%S")
filename = "/home/heather/SRS/%s.txt" % timestr
f = open(filename, 'w+')
# The column headers for rox 3 were the opposite of the written data until 2014-10-10:
f.write("time, diode ch1 temp, dio ch 2 temp, dio 3 temp, dio 4 temp, dio 1 volts, dio 2 volts, dio 3 volts, dio 4 volts, rox 1 temp, rox 1 res, rox 2 temp, rox 2 res, rox 3 temp, rox 3 res\n")
while 1:
#get diode info
|
except KeyboardInterrupt:
f.close()
print "done writing"
sim.close_sim922()
sim.close_sim900()
print "ports closed"
| sim.connect_sim922()
dio_temps = sim.get_sim922_temp()
dio_temps = dio_temps.rstrip()
time.sleep(1)
dio_volts = sim.get_sim922_volts()
dio_volts = dio_volts.rstrip()
sim.close_sim922()
print "diode"
time.sleep(1)
#get rox1 info
sim.connect_sim921_1()
rox1_res = sim.get_resistance()
rox1_temp = sim.get_temp()
sim.close_sim921_1()
print "rox1"
time.sleep(1)
sim.connect_sim921()
rox2_res = sim.get_resistance()
rox2_temp = sim.get_temp()
sim.close_sim921()
#get rox3 info
sim.connect_sim921_6()
rox3_res = sim.get_resistance()
rox3_temp = sim.get_temp()
sim.close_sim921_6()
print "rox2"
time.sleep(1)
#write it all to file
current_time = time.strftime("%Y%m%d-%H%M%S")
f.write("%s, %s, %s, %s, %s, %s, %s, %s, %s\n" % (current_time, dio_temps, dio_volts, rox1_temp, rox1_res, rox2_temp, rox2_res, rox3_temp, rox3_res))
f.flush() | conditional_block |
greenlets.py | import distutils.version
try:
import greenlet
getcurrent = greenlet.greenlet.getcurrent
GreenletExit = greenlet.greenlet.GreenletExit
preserves_excinfo = (distutils.version.LooseVersion(greenlet.__version__)
>= distutils.version.LooseVersion('0.3.2'))
greenlet = greenlet.greenlet | from py.magic import greenlet
getcurrent = greenlet.getcurrent
GreenletExit = greenlet.GreenletExit
preserves_excinfo = False
except ImportError:
try:
from stackless import greenlet
getcurrent = greenlet.getcurrent
GreenletExit = greenlet.GreenletExit
preserves_excinfo = False
except ImportError:
try:
from support.stacklesss import greenlet, getcurrent, GreenletExit
preserves_excinfo = False
(greenlet, getcurrent, GreenletExit) # silence pyflakes
except ImportError, e:
raise ImportError("Unable to find an implementation of greenlet.") | except ImportError, e:
raise
try: | random_line_split |
test.rs | #![crate_name = "test"]
#![feature(libc)]
#![feature(start)]
extern crate libc;
extern crate wx;
use libc::c_void;
use wx::_unsafe::*;
use wx::defs::*;
use wx::base::*;
use wx::core::*;
mod macros;
wxApp!(wx_main);
extern "C"
fn wx_main() |
fn make_frame() -> Frame {
let frame = Frame::new(&Window::null(), ID_ANY, "Hello, wxRust!", -1, -1, -1, -1, DEFAULT_FRAME_STYLE);
let menubar = make_menubar();
frame.setMenuBar(&menubar);
make_button(&frame);
frame
}
fn make_menubar() -> MenuBar {
let menubar = MenuBar::new(0);
let fileMenu = Menu::new("", 0);
let fileNew = MenuItem::newEx(ID_ANY, "New", "Create a new file.", 0, &Menu::null());
fileMenu.appendItem(&fileNew);
menubar.append(&fileMenu, "File");
menubar
}
extern "C"
fn MyButton_clicked(fun: *mut c_void, data: *mut c_void, evt: *mut c_void) {
if evt == 0 as *mut c_void {
// Comes here when the target widget is destroyed.
return;
}
println!("hello!");
let parent = Window::from(data);
let msgDlg = MessageDialog::new(&parent, "Pushed!!", "The Button", OK);
msgDlg.showModal();
}
fn make_button<T: WindowMethods>(parent: &T) -> Button {
let button = Button::new(parent, ID_ANY, "Push me!", 10, 10, 50, 30, 0);
let closure = Closure::new(MyButton_clicked as *mut c_void, parent.ptr());
unsafe {
button.connect(ID_ANY, ID_ANY, expEVT_COMMAND_BUTTON_CLICKED(), closure.ptr());
}
button
}
| {
let frame = make_frame();
frame.show();
frame.raise();
} | identifier_body |
test.rs | #![crate_name = "test"]
#![feature(libc)]
#![feature(start)]
extern crate libc;
extern crate wx;
use libc::c_void;
use wx::_unsafe::*;
use wx::defs::*;
use wx::base::*;
use wx::core::*;
mod macros;
wxApp!(wx_main);
extern "C"
fn wx_main() {
let frame = make_frame();
frame.show();
frame.raise();
}
fn make_frame() -> Frame {
let frame = Frame::new(&Window::null(), ID_ANY, "Hello, wxRust!", -1, -1, -1, -1, DEFAULT_FRAME_STYLE);
let menubar = make_menubar();
frame.setMenuBar(&menubar);
make_button(&frame);
frame
}
fn make_menubar() -> MenuBar {
let menubar = MenuBar::new(0);
let fileMenu = Menu::new("", 0);
let fileNew = MenuItem::newEx(ID_ANY, "New", "Create a new file.", 0, &Menu::null());
fileMenu.appendItem(&fileNew);
menubar.append(&fileMenu, "File");
menubar
}
extern "C"
fn MyButton_clicked(fun: *mut c_void, data: *mut c_void, evt: *mut c_void) {
if evt == 0 as *mut c_void |
println!("hello!");
let parent = Window::from(data);
let msgDlg = MessageDialog::new(&parent, "Pushed!!", "The Button", OK);
msgDlg.showModal();
}
fn make_button<T: WindowMethods>(parent: &T) -> Button {
let button = Button::new(parent, ID_ANY, "Push me!", 10, 10, 50, 30, 0);
let closure = Closure::new(MyButton_clicked as *mut c_void, parent.ptr());
unsafe {
button.connect(ID_ANY, ID_ANY, expEVT_COMMAND_BUTTON_CLICKED(), closure.ptr());
}
button
}
| {
// Comes here when the target widget is destroyed.
return;
} | conditional_block |
test.rs | #![crate_name = "test"]
#![feature(libc)]
#![feature(start)]
extern crate libc;
extern crate wx;
use libc::c_void;
use wx::_unsafe::*;
use wx::defs::*;
use wx::base::*;
use wx::core::*;
mod macros;
wxApp!(wx_main);
extern "C"
fn wx_main() {
let frame = make_frame();
frame.show();
frame.raise();
}
fn make_frame() -> Frame {
let frame = Frame::new(&Window::null(), ID_ANY, "Hello, wxRust!", -1, -1, -1, -1, DEFAULT_FRAME_STYLE);
let menubar = make_menubar();
frame.setMenuBar(&menubar);
make_button(&frame);
frame
}
fn make_menubar() -> MenuBar {
let menubar = MenuBar::new(0);
let fileMenu = Menu::new("", 0);
let fileNew = MenuItem::newEx(ID_ANY, "New", "Create a new file.", 0, &Menu::null());
fileMenu.appendItem(&fileNew);
menubar.append(&fileMenu, "File");
menubar
}
extern "C"
fn MyButton_clicked(fun: *mut c_void, data: *mut c_void, evt: *mut c_void) {
if evt == 0 as *mut c_void {
// Comes here when the target widget is destroyed.
return;
}
println!("hello!");
let parent = Window::from(data);
let msgDlg = MessageDialog::new(&parent, "Pushed!!", "The Button", OK);
msgDlg.showModal();
}
fn | <T: WindowMethods>(parent: &T) -> Button {
let button = Button::new(parent, ID_ANY, "Push me!", 10, 10, 50, 30, 0);
let closure = Closure::new(MyButton_clicked as *mut c_void, parent.ptr());
unsafe {
button.connect(ID_ANY, ID_ANY, expEVT_COMMAND_BUTTON_CLICKED(), closure.ptr());
}
button
}
| make_button | identifier_name |
test.rs | #![crate_name = "test"]
#![feature(libc)]
#![feature(start)]
extern crate libc;
extern crate wx;
use libc::c_void;
use wx::_unsafe::*;
use wx::defs::*;
use wx::base::*;
use wx::core::*;
mod macros;
wxApp!(wx_main);
extern "C"
fn wx_main() {
let frame = make_frame();
frame.show();
frame.raise();
}
fn make_frame() -> Frame {
let frame = Frame::new(&Window::null(), ID_ANY, "Hello, wxRust!", -1, -1, -1, -1, DEFAULT_FRAME_STYLE);
let menubar = make_menubar();
frame.setMenuBar(&menubar);
| }
fn make_menubar() -> MenuBar {
let menubar = MenuBar::new(0);
let fileMenu = Menu::new("", 0);
let fileNew = MenuItem::newEx(ID_ANY, "New", "Create a new file.", 0, &Menu::null());
fileMenu.appendItem(&fileNew);
menubar.append(&fileMenu, "File");
menubar
}
extern "C"
fn MyButton_clicked(fun: *mut c_void, data: *mut c_void, evt: *mut c_void) {
if evt == 0 as *mut c_void {
// Comes here when the target widget is destroyed.
return;
}
println!("hello!");
let parent = Window::from(data);
let msgDlg = MessageDialog::new(&parent, "Pushed!!", "The Button", OK);
msgDlg.showModal();
}
fn make_button<T: WindowMethods>(parent: &T) -> Button {
let button = Button::new(parent, ID_ANY, "Push me!", 10, 10, 50, 30, 0);
let closure = Closure::new(MyButton_clicked as *mut c_void, parent.ptr());
unsafe {
button.connect(ID_ANY, ID_ANY, expEVT_COMMAND_BUTTON_CLICKED(), closure.ptr());
}
button
} | make_button(&frame);
frame | random_line_split |
outlook.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains an Outlook Registry parser."""
from plaso.lib import event
from plaso.parsers.winreg_plugins import interface
__author__ = 'David Nides ([email protected])'
class OutlookSearchMRUPlugin(interface.KeyPlugin):
"""Windows Registry plugin parsing Outlook Search MRU keys."""
NAME = 'winreg_outlook_mru'
DESCRIPTION = 'PST Paths'
REG_KEYS = [
u'\\Software\\Microsoft\\Office\\15.0\\Outlook\\Search',
u'\\Software\\Microsoft\\Office\\14.0\\Outlook\\Search']
# TODO: The catalog for Office 2013 (15.0) contains binary values not
# dword values. Check if Office 2007 and 2010 have the same. Re-enable the
# plug-ins once confirmed and OutlookSearchMRUPlugin has been extended to
# handle the binary data or create a OutlookSearchCatalogMRUPlugin.
# Registry keys for:
# MS Outlook 2007 Search Catalog: | # '\\Software\\Microsoft\\Office\\15.0\\Outlook\\Search\\Catalog'
REG_TYPE = 'NTUSER'
def GetEntries(self, key, **unused_kwargs):
"""Collect the values under Outlook and return event for each one."""
value_index = 0
for value in key.GetValues():
# Ignore the default value.
if not value.name:
continue
# Ignore any value that is empty or that does not contain an integer.
if not value.data or not value.DataIsInteger():
continue
# TODO: change this 32-bit integer into something meaningful, for now
# the value name is the most interesting part.
text_dict = {}
text_dict[value.name] = '0x{0:08x}'.format(value.data)
if value_index == 0:
timestamp = key.last_written_timestamp
else:
timestamp = 0
yield event.WinRegistryEvent(
key.path, text_dict, timestamp=timestamp,
source_append=': {0:s}'.format(self.DESCRIPTION))
value_index += 1 | # '\\Software\\Microsoft\\Office\\12.0\\Outlook\\Catalog'
# MS Outlook 2010 Search Catalog:
# '\\Software\\Microsoft\\Office\\14.0\\Outlook\\Search\\Catalog'
# MS Outlook 2013 Search Catalog: | random_line_split |
outlook.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains an Outlook Registry parser."""
from plaso.lib import event
from plaso.parsers.winreg_plugins import interface
__author__ = 'David Nides ([email protected])'
class OutlookSearchMRUPlugin(interface.KeyPlugin):
"""Windows Registry plugin parsing Outlook Search MRU keys."""
NAME = 'winreg_outlook_mru'
DESCRIPTION = 'PST Paths'
REG_KEYS = [
u'\\Software\\Microsoft\\Office\\15.0\\Outlook\\Search',
u'\\Software\\Microsoft\\Office\\14.0\\Outlook\\Search']
# TODO: The catalog for Office 2013 (15.0) contains binary values not
# dword values. Check if Office 2007 and 2010 have the same. Re-enable the
# plug-ins once confirmed and OutlookSearchMRUPlugin has been extended to
# handle the binary data or create a OutlookSearchCatalogMRUPlugin.
# Registry keys for:
# MS Outlook 2007 Search Catalog:
# '\\Software\\Microsoft\\Office\\12.0\\Outlook\\Catalog'
# MS Outlook 2010 Search Catalog:
# '\\Software\\Microsoft\\Office\\14.0\\Outlook\\Search\\Catalog'
# MS Outlook 2013 Search Catalog:
# '\\Software\\Microsoft\\Office\\15.0\\Outlook\\Search\\Catalog'
REG_TYPE = 'NTUSER'
def GetEntries(self, key, **unused_kwargs):
| """Collect the values under Outlook and return event for each one."""
value_index = 0
for value in key.GetValues():
# Ignore the default value.
if not value.name:
continue
# Ignore any value that is empty or that does not contain an integer.
if not value.data or not value.DataIsInteger():
continue
# TODO: change this 32-bit integer into something meaningful, for now
# the value name is the most interesting part.
text_dict = {}
text_dict[value.name] = '0x{0:08x}'.format(value.data)
if value_index == 0:
timestamp = key.last_written_timestamp
else:
timestamp = 0
yield event.WinRegistryEvent(
key.path, text_dict, timestamp=timestamp,
source_append=': {0:s}'.format(self.DESCRIPTION))
value_index += 1 | identifier_body |
|
outlook.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains an Outlook Registry parser."""
from plaso.lib import event
from plaso.parsers.winreg_plugins import interface
__author__ = 'David Nides ([email protected])'
class OutlookSearchMRUPlugin(interface.KeyPlugin):
"""Windows Registry plugin parsing Outlook Search MRU keys."""
NAME = 'winreg_outlook_mru'
DESCRIPTION = 'PST Paths'
REG_KEYS = [
u'\\Software\\Microsoft\\Office\\15.0\\Outlook\\Search',
u'\\Software\\Microsoft\\Office\\14.0\\Outlook\\Search']
# TODO: The catalog for Office 2013 (15.0) contains binary values not
# dword values. Check if Office 2007 and 2010 have the same. Re-enable the
# plug-ins once confirmed and OutlookSearchMRUPlugin has been extended to
# handle the binary data or create a OutlookSearchCatalogMRUPlugin.
# Registry keys for:
# MS Outlook 2007 Search Catalog:
# '\\Software\\Microsoft\\Office\\12.0\\Outlook\\Catalog'
# MS Outlook 2010 Search Catalog:
# '\\Software\\Microsoft\\Office\\14.0\\Outlook\\Search\\Catalog'
# MS Outlook 2013 Search Catalog:
# '\\Software\\Microsoft\\Office\\15.0\\Outlook\\Search\\Catalog'
REG_TYPE = 'NTUSER'
def | (self, key, **unused_kwargs):
"""Collect the values under Outlook and return event for each one."""
value_index = 0
for value in key.GetValues():
# Ignore the default value.
if not value.name:
continue
# Ignore any value that is empty or that does not contain an integer.
if not value.data or not value.DataIsInteger():
continue
# TODO: change this 32-bit integer into something meaningful, for now
# the value name is the most interesting part.
text_dict = {}
text_dict[value.name] = '0x{0:08x}'.format(value.data)
if value_index == 0:
timestamp = key.last_written_timestamp
else:
timestamp = 0
yield event.WinRegistryEvent(
key.path, text_dict, timestamp=timestamp,
source_append=': {0:s}'.format(self.DESCRIPTION))
value_index += 1
| GetEntries | identifier_name |
outlook.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains an Outlook Registry parser."""
from plaso.lib import event
from plaso.parsers.winreg_plugins import interface
__author__ = 'David Nides ([email protected])'
class OutlookSearchMRUPlugin(interface.KeyPlugin):
"""Windows Registry plugin parsing Outlook Search MRU keys."""
NAME = 'winreg_outlook_mru'
DESCRIPTION = 'PST Paths'
REG_KEYS = [
u'\\Software\\Microsoft\\Office\\15.0\\Outlook\\Search',
u'\\Software\\Microsoft\\Office\\14.0\\Outlook\\Search']
# TODO: The catalog for Office 2013 (15.0) contains binary values not
# dword values. Check if Office 2007 and 2010 have the same. Re-enable the
# plug-ins once confirmed and OutlookSearchMRUPlugin has been extended to
# handle the binary data or create a OutlookSearchCatalogMRUPlugin.
# Registry keys for:
# MS Outlook 2007 Search Catalog:
# '\\Software\\Microsoft\\Office\\12.0\\Outlook\\Catalog'
# MS Outlook 2010 Search Catalog:
# '\\Software\\Microsoft\\Office\\14.0\\Outlook\\Search\\Catalog'
# MS Outlook 2013 Search Catalog:
# '\\Software\\Microsoft\\Office\\15.0\\Outlook\\Search\\Catalog'
REG_TYPE = 'NTUSER'
def GetEntries(self, key, **unused_kwargs):
"""Collect the values under Outlook and return event for each one."""
value_index = 0
for value in key.GetValues():
# Ignore the default value.
if not value.name:
|
# Ignore any value that is empty or that does not contain an integer.
if not value.data or not value.DataIsInteger():
continue
# TODO: change this 32-bit integer into something meaningful, for now
# the value name is the most interesting part.
text_dict = {}
text_dict[value.name] = '0x{0:08x}'.format(value.data)
if value_index == 0:
timestamp = key.last_written_timestamp
else:
timestamp = 0
yield event.WinRegistryEvent(
key.path, text_dict, timestamp=timestamp,
source_append=': {0:s}'.format(self.DESCRIPTION))
value_index += 1
| continue | conditional_block |
autorizada.js | var mongoose = require('mongoose');
var u = require('../utils');
var autorizadaSchema = new mongoose.Schema({
nome: String,
cpf: { type: String, unique: true },
celular: String,
dtInicial: Date,
dtFinal: Date,
autorizador: String,
apto: Number,
bloco: String,
contato: String
});
var A = mongoose.model('Autorizada', autorizadaSchema);
[{
nome: 'Mickey',
cpf: '99933366600',
celular: '11 9 9633-3366',
dtInicial: new Date(2013, 11, 1),
dtFinal: new Date(2015, 0, 1),
autorizador: 'Pateta',
apto: 432,
bloco: 'D',
contato: '11 9 9833-3388'
}].forEach(function(f) {
var model = new A();
u.objetoExtends(model, f);
model.save();
}); | module.exports = A; | random_line_split |
|
state.rs | use nalgebra::{Point2, Scalar, Vector2};
use std::collections::HashSet;
use std::hash::Hash;
use event::{ElementState, React};
/// An atomic state of an input element.
pub trait State: Copy + Eq {
// TODO: Use a default type (`Self`) here once that feature stabilizes.
/// Representation of a difference between states.
type Difference;
/// Gets the transition between a live and snapshot state. If no transition
/// has occurred, returns `None`.
fn transition(live: Self, snapshot: Self) -> Option<Self> {
if live == snapshot {
None
}
else {
Some(live)
}
}
}
impl State for bool {
type Difference = Self;
}
impl State for ElementState {
type Difference = Self;
}
impl<T> State for Point2<T>
where | }
/// An input element, such as a button, key, or position.
pub trait Element: Copy + Sized {
/// Representation of the state of the element.
type State: State;
}
/// A state with a composite representation. This is used for input elements
/// which have a cardinality greater than one. For example, a mouse may have
/// more than one button.
pub trait CompositeState<E>
where
E: Element,
{
// TODO: Use a default type (`E::State`) here once that feature stabilizes.
/// Representation of the composite state.
type Composite;
/// Gets the composite state.
fn composite(&self) -> &Self::Composite;
}
/// Provides a state for an input element.
pub trait InputState<E>
where
E: Element,
{
/// Gets the state of an input element.
fn state(&self, element: E) -> E::State;
}
// Blanket implementation for `InputState` for composite states represented by
// a `HashSet`, such as keys and buttons.
impl<E, T> InputState<E> for T
where
T: CompositeState<E, Composite = HashSet<E>>,
E: Element<State = ElementState> + Eq + Hash,
{
fn state(&self, element: E) -> E::State {
if self.composite().contains(&element) {
ElementState::Pressed
}
else {
ElementState::Released
}
}
}
/// Provides a transition state for an input element.
pub trait InputTransition<E>
where
E: Element,
{
/// Gets the transition state of an input element.
fn transition(&self, element: E) -> Option<E::State>;
}
impl<E, T> InputTransition<E> for T
where
T: Input,
T::State: InputState<E>,
E: Element,
{
fn transition(&self, element: E) -> Option<E::State> {
E::State::transition(self.live().state(element), self.snapshot().state(element))
}
}
/// Determines the difference in state for an input element.
pub trait InputDifference<E>
where
E: Element,
{
/// Iterable representation of differences in state.
type Difference: IntoIterator<Item = (E, <E::State as State>::Difference)>;
/// Gets the difference in state for an input element.
fn difference(&self) -> Self::Difference;
}
// Blanket implementation for `InputDifference` for composite states
// represented by a `HashSet`, such as keys and buttons.
impl<E, S, T> InputDifference<E> for T
where
T: Input,
T::State: CompositeState<E, Composite = HashSet<E>> + InputState<E>,
E: Element<State = S> + Eq + Hash,
S: State<Difference = S>,
{
type Difference = Vec<(E, <E::State as State>::Difference)>;
fn difference(&self) -> Self::Difference {
self.live()
.composite()
.symmetric_difference(self.snapshot().composite())
.map(|element| (*element, self.live().state(*element)))
.collect()
}
}
/// An input device with a live state and snapshot state. These are updated via
/// `React` and `Snapshot` and provide information about the live state and
/// changes based on the snapshot state.
pub trait Input: React + Snapshot {
/// Aggregate state for the input device.
type State;
/// Gets the live state.
fn live(&self) -> &Self::State;
// TODO: The term "snapshot" is ambiguous. Here, it refers to the snapshot
// of the state of an input device. In the `Snapshot` trait, it is
// used as a verb for the operation of taking a snapshot (copying the
// live state into the snapshot state). However, the `Input` trait is
// not exposed outside of this module, so this shouldn't affect
// client code.
/// Gets the snapshot state.
fn snapshot(&self) -> &Self::State;
}
/// Provides snapshotting for an input device. Input devices maintain a live
/// state and snapshot state, which are updated via `React` and this trait,
/// respectively.
pub trait Snapshot {
/// Snapshots the live state.
fn snapshot(&mut self);
} | T: Eq + Scalar,
{
type Difference = Vector2<T>; | random_line_split |
state.rs | use nalgebra::{Point2, Scalar, Vector2};
use std::collections::HashSet;
use std::hash::Hash;
use event::{ElementState, React};
/// An atomic state of an input element.
pub trait State: Copy + Eq {
// TODO: Use a default type (`Self`) here once that feature stabilizes.
/// Representation of a difference between states.
type Difference;
/// Gets the transition between a live and snapshot state. If no transition
/// has occurred, returns `None`.
fn transition(live: Self, snapshot: Self) -> Option<Self> |
}
impl State for bool {
type Difference = Self;
}
impl State for ElementState {
type Difference = Self;
}
impl<T> State for Point2<T>
where
T: Eq + Scalar,
{
type Difference = Vector2<T>;
}
/// An input element, such as a button, key, or position.
pub trait Element: Copy + Sized {
/// Representation of the state of the element.
type State: State;
}
/// A state with a composite representation. This is used for input elements
/// which have a cardinality greater than one. For example, a mouse may have
/// more than one button.
pub trait CompositeState<E>
where
E: Element,
{
// TODO: Use a default type (`E::State`) here once that feature stabilizes.
/// Representation of the composite state.
type Composite;
/// Gets the composite state.
fn composite(&self) -> &Self::Composite;
}
/// Provides a state for an input element.
pub trait InputState<E>
where
E: Element,
{
/// Gets the state of an input element.
fn state(&self, element: E) -> E::State;
}
// Blanket implementation for `InputState` for composite states represented by
// a `HashSet`, such as keys and buttons.
impl<E, T> InputState<E> for T
where
T: CompositeState<E, Composite = HashSet<E>>,
E: Element<State = ElementState> + Eq + Hash,
{
fn state(&self, element: E) -> E::State {
if self.composite().contains(&element) {
ElementState::Pressed
}
else {
ElementState::Released
}
}
}
/// Provides a transition state for an input element.
pub trait InputTransition<E>
where
E: Element,
{
/// Gets the transition state of an input element.
fn transition(&self, element: E) -> Option<E::State>;
}
impl<E, T> InputTransition<E> for T
where
T: Input,
T::State: InputState<E>,
E: Element,
{
fn transition(&self, element: E) -> Option<E::State> {
E::State::transition(self.live().state(element), self.snapshot().state(element))
}
}
/// Determines the difference in state for an input element.
pub trait InputDifference<E>
where
E: Element,
{
/// Iterable representation of differences in state.
type Difference: IntoIterator<Item = (E, <E::State as State>::Difference)>;
/// Gets the difference in state for an input element.
fn difference(&self) -> Self::Difference;
}
// Blanket implementation for `InputDifference` for composite states
// represented by a `HashSet`, such as keys and buttons.
impl<E, S, T> InputDifference<E> for T
where
T: Input,
T::State: CompositeState<E, Composite = HashSet<E>> + InputState<E>,
E: Element<State = S> + Eq + Hash,
S: State<Difference = S>,
{
type Difference = Vec<(E, <E::State as State>::Difference)>;
fn difference(&self) -> Self::Difference {
self.live()
.composite()
.symmetric_difference(self.snapshot().composite())
.map(|element| (*element, self.live().state(*element)))
.collect()
}
}
/// An input device with a live state and snapshot state. These are updated via
/// `React` and `Snapshot` and provide information about the live state and
/// changes based on the snapshot state.
pub trait Input: React + Snapshot {
/// Aggregate state for the input device.
type State;
/// Gets the live state.
fn live(&self) -> &Self::State;
// TODO: The term "snapshot" is ambiguous. Here, it refers to the snapshot
// of the state of an input device. In the `Snapshot` trait, it is
// used as a verb for the operation of taking a snapshot (copying the
// live state into the snapshot state). However, the `Input` trait is
// not exposed outside of this module, so this shouldn't affect
// client code.
/// Gets the snapshot state.
fn snapshot(&self) -> &Self::State;
}
/// Provides snapshotting for an input device. Input devices maintain a live
/// state and snapshot state, which are updated via `React` and this trait,
/// respectively.
pub trait Snapshot {
/// Snapshots the live state.
fn snapshot(&mut self);
}
| {
if live == snapshot {
None
}
else {
Some(live)
}
} | identifier_body |
state.rs | use nalgebra::{Point2, Scalar, Vector2};
use std::collections::HashSet;
use std::hash::Hash;
use event::{ElementState, React};
/// An atomic state of an input element.
pub trait State: Copy + Eq {
// TODO: Use a default type (`Self`) here once that feature stabilizes.
/// Representation of a difference between states.
type Difference;
/// Gets the transition between a live and snapshot state. If no transition
/// has occurred, returns `None`.
fn transition(live: Self, snapshot: Self) -> Option<Self> {
if live == snapshot {
None
}
else {
Some(live)
}
}
}
impl State for bool {
type Difference = Self;
}
impl State for ElementState {
type Difference = Self;
}
impl<T> State for Point2<T>
where
T: Eq + Scalar,
{
type Difference = Vector2<T>;
}
/// An input element, such as a button, key, or position.
pub trait Element: Copy + Sized {
/// Representation of the state of the element.
type State: State;
}
/// A state with a composite representation. This is used for input elements
/// which have a cardinality greater than one. For example, a mouse may have
/// more than one button.
pub trait CompositeState<E>
where
E: Element,
{
// TODO: Use a default type (`E::State`) here once that feature stabilizes.
/// Representation of the composite state.
type Composite;
/// Gets the composite state.
fn composite(&self) -> &Self::Composite;
}
/// Provides a state for an input element.
pub trait InputState<E>
where
E: Element,
{
/// Gets the state of an input element.
fn state(&self, element: E) -> E::State;
}
// Blanket implementation for `InputState` for composite states represented by
// a `HashSet`, such as keys and buttons.
impl<E, T> InputState<E> for T
where
T: CompositeState<E, Composite = HashSet<E>>,
E: Element<State = ElementState> + Eq + Hash,
{
fn state(&self, element: E) -> E::State {
if self.composite().contains(&element) |
else {
ElementState::Released
}
}
}
/// Provides a transition state for an input element.
pub trait InputTransition<E>
where
E: Element,
{
/// Gets the transition state of an input element.
fn transition(&self, element: E) -> Option<E::State>;
}
impl<E, T> InputTransition<E> for T
where
T: Input,
T::State: InputState<E>,
E: Element,
{
fn transition(&self, element: E) -> Option<E::State> {
E::State::transition(self.live().state(element), self.snapshot().state(element))
}
}
/// Determines the difference in state for an input element.
pub trait InputDifference<E>
where
E: Element,
{
/// Iterable representation of differences in state.
type Difference: IntoIterator<Item = (E, <E::State as State>::Difference)>;
/// Gets the difference in state for an input element.
fn difference(&self) -> Self::Difference;
}
// Blanket implementation for `InputDifference` for composite states
// represented by a `HashSet`, such as keys and buttons.
impl<E, S, T> InputDifference<E> for T
where
T: Input,
T::State: CompositeState<E, Composite = HashSet<E>> + InputState<E>,
E: Element<State = S> + Eq + Hash,
S: State<Difference = S>,
{
type Difference = Vec<(E, <E::State as State>::Difference)>;
fn difference(&self) -> Self::Difference {
self.live()
.composite()
.symmetric_difference(self.snapshot().composite())
.map(|element| (*element, self.live().state(*element)))
.collect()
}
}
/// An input device with a live state and snapshot state. These are updated via
/// `React` and `Snapshot` and provide information about the live state and
/// changes based on the snapshot state.
pub trait Input: React + Snapshot {
/// Aggregate state for the input device.
type State;
/// Gets the live state.
fn live(&self) -> &Self::State;
// TODO: The term "snapshot" is ambiguous. Here, it refers to the snapshot
// of the state of an input device. In the `Snapshot` trait, it is
// used as a verb for the operation of taking a snapshot (copying the
// live state into the snapshot state). However, the `Input` trait is
// not exposed outside of this module, so this shouldn't affect
// client code.
/// Gets the snapshot state.
fn snapshot(&self) -> &Self::State;
}
/// Provides snapshotting for an input device. Input devices maintain a live
/// state and snapshot state, which are updated via `React` and this trait,
/// respectively.
pub trait Snapshot {
/// Snapshots the live state.
fn snapshot(&mut self);
}
| {
ElementState::Pressed
} | conditional_block |
state.rs | use nalgebra::{Point2, Scalar, Vector2};
use std::collections::HashSet;
use std::hash::Hash;
use event::{ElementState, React};
/// An atomic state of an input element.
pub trait State: Copy + Eq {
// TODO: Use a default type (`Self`) here once that feature stabilizes.
/// Representation of a difference between states.
type Difference;
/// Gets the transition between a live and snapshot state. If no transition
/// has occurred, returns `None`.
fn transition(live: Self, snapshot: Self) -> Option<Self> {
if live == snapshot {
None
}
else {
Some(live)
}
}
}
impl State for bool {
type Difference = Self;
}
impl State for ElementState {
type Difference = Self;
}
impl<T> State for Point2<T>
where
T: Eq + Scalar,
{
type Difference = Vector2<T>;
}
/// An input element, such as a button, key, or position.
pub trait Element: Copy + Sized {
/// Representation of the state of the element.
type State: State;
}
/// A state with a composite representation. This is used for input elements
/// which have a cardinality greater than one. For example, a mouse may have
/// more than one button.
pub trait CompositeState<E>
where
E: Element,
{
// TODO: Use a default type (`E::State`) here once that feature stabilizes.
/// Representation of the composite state.
type Composite;
/// Gets the composite state.
fn composite(&self) -> &Self::Composite;
}
/// Provides a state for an input element.
///
/// Implemented by aggregate device states that can report the state of
/// any single element they track.
pub trait InputState<E>
where
    E: Element,
{
    /// Gets the state of an input element.
    fn state(&self, element: E) -> E::State;
}
// Blanket implementation for `InputState` for composite states represented by
// a `HashSet`, such as keys and buttons.
impl<E, T> InputState<E> for T
where
T: CompositeState<E, Composite = HashSet<E>>,
E: Element<State = ElementState> + Eq + Hash,
{
fn | (&self, element: E) -> E::State {
if self.composite().contains(&element) {
ElementState::Pressed
}
else {
ElementState::Released
}
}
}
/// Provides a transition state for an input element.
pub trait InputTransition<E>
where
    E: Element,
{
    /// Gets the transition state of an input element.
    ///
    /// Returns `None` when the element's state has not changed since the
    /// last snapshot.
    fn transition(&self, element: E) -> Option<E::State>;
}
// Blanket implementation: any `Input` device whose aggregate state can
// report per-element states gets transitions for free by comparing the
// live and snapshot states through `State::transition`.
impl<E, T> InputTransition<E> for T
where
    T: Input,
    T::State: InputState<E>,
    E: Element,
{
    fn transition(&self, element: E) -> Option<E::State> {
        E::State::transition(self.live().state(element), self.snapshot().state(element))
    }
}
/// Determines the difference in state for an input element.
pub trait InputDifference<E>
where
    E: Element,
{
    /// Iterable representation of differences in state: one
    /// `(element, difference)` pair per changed element.
    type Difference: IntoIterator<Item = (E, <E::State as State>::Difference)>;
    /// Gets the difference in state for an input element.
    fn difference(&self) -> Self::Difference;
}
// Blanket implementation for `InputDifference` for composite states
// represented by a `HashSet`, such as keys and buttons.
//
// The symmetric difference yields every element present in exactly one of
// the live and snapshot sets, i.e. each element that was pressed or
// released since the snapshot.  Each such element is paired with its
// *live* state (presumably `Released` for elements that just left the
// live set -- confirm against the `InputState` blanket impl).
impl<E, S, T> InputDifference<E> for T
where
    T: Input,
    T::State: CompositeState<E, Composite = HashSet<E>> + InputState<E>,
    E: Element<State = S> + Eq + Hash,
    S: State<Difference = S>,
{
    type Difference = Vec<(E, <E::State as State>::Difference)>;
    fn difference(&self) -> Self::Difference {
        self.live()
            .composite()
            .symmetric_difference(self.snapshot().composite())
            .map(|element| (*element, self.live().state(*element)))
            .collect()
    }
}
/// An input device with a live state and snapshot state. These are updated via
/// `React` and `Snapshot` and provide information about the live state and
/// changes based on the snapshot state.
pub trait Input: React + Snapshot {
    /// Aggregate state for the input device.
    type State;
    /// Gets the live state (the one continuously updated by `React`).
    fn live(&self) -> &Self::State;
    // TODO: The term "snapshot" is ambiguous. Here, it refers to the snapshot
    //       of the state of an input device. In the `Snapshot` trait, it is
    //       used as a verb for the operation of taking a snapshot (copying the
    //       live state into the snapshot state). However, the `Input` trait is
    //       not exposed outside of this module, so this shouldn't affect
    //       client code.
    /// Gets the snapshot state.
    fn snapshot(&self) -> &Self::State;
}
/// Provides snapshotting for an input device. Input devices maintain a live
/// state and snapshot state, which are updated via `React` and this trait,
/// respectively.
pub trait Snapshot {
    /// Snapshots the live state, i.e. copies the live state into the
    /// snapshot state.
    fn snapshot(&mut self);
}
| state | identifier_name |
dispnew.rs | //! Updating of data structures for redisplay.
use std::{cmp, ptr};
use remacs_lib::current_timespec;
use remacs_macros::lisp_fn;
use crate::{
eval::unbind_to,
frame::selected_frame,
frame::{LispFrameLiveOrSelected, LispFrameRef},
lisp::{ExternalPtr, LispObject},
lists::{LispConsCircularChecks, LispConsEndChecks},
remacs_sys::{
clear_current_matrices, detect_input_pending_run_timers, dtotimespec, fset_redisplay,
mark_window_display_accurate, putchar_unlocked, redisplay_preserve_echo_area, ring_bell,
specbind, swallow_events, timespec_add, timespec_sub, wait_reading_process_output,
},
remacs_sys::{
globals, noninteractive, redisplaying_p, Qnil, Qredisplay_dont_pause, Qt, Vframe_list,
WAIT_READING_MAX,
},
remacs_sys::{EmacsDouble, EmacsInt, Lisp_Glyph},
terminal::{clear_frame, update_begin, update_end},
threads::c_specpdl_index,
windows::{LispWindowOrSelected, LispWindowRef},
};
pub type LispGlyphRef = ExternalPtr<Lisp_Glyph>;
/// Pause, without updating display, for SECONDS seconds.
/// SECONDS may be a floating-point value, meaning that you can wait for a
/// fraction of a second. Optional second arg MILLISECONDS specifies an
/// additional wait period, in milliseconds; this is for backwards compatibility.
/// (Not all operating systems support waiting for a fraction of a second.)
#[lisp_fn(min = "1")]
pub fn sleep_for(seconds: EmacsDouble, milliseconds: Option<EmacsInt>) {
let duration = seconds + (milliseconds.unwrap_or(0) as f64 / 1000.0);
if duration > 0.0 {
let mut t = unsafe { dtotimespec(duration) };
let tend = unsafe { timespec_add(current_timespec(), t) };
while !t.tv_sec < 0 && (t.tv_sec > 0 || t.tv_nsec > 0) {
unsafe {
wait_reading_process_output(
cmp::min(t.tv_sec as i64, WAIT_READING_MAX),
t.tv_nsec as i32,
0,
true,
Qnil,
ptr::null_mut(),
0,
)
};
t = unsafe { timespec_sub(tend, current_timespec()) };
}
}
}
/**********************************************************************
Redrawing Frames
**********************************************************************/
/// Redraw frame FRAME.
#[no_mangle]
pub extern "C" fn redraw_frame(mut frame: LispFrameRef) {
unsafe {
// Error if FRAME has no glyphs.
debug_assert!(frame.glyphs_initialized_p());
update_begin(frame);
clear_frame(frame);
clear_current_matrices(frame.as_mut());
update_end(frame);
fset_redisplay(frame.as_mut());
// Mark all windows as inaccurate, so that every window will have
// its redisplay done.
mark_window_display_accurate(frame.root_window, false);
set_window_update_flags(frame.root_window.into(), true);
frame.set_garbaged(false);
} | #[lisp_fn(c_name = "redraw_frame", name = "redraw-frame", min = "0")]
pub fn redraw_frame_lisp(frame: LispFrameLiveOrSelected) {
redraw_frame(frame.into());
}
/// Clear and redisplay all visible frames.
#[lisp_fn]
pub fn redraw_display() {
for_each_frame!(frame => {
if frame.visible() != 0 {
redraw_frame(frame);
}
});
}
/// Set WINDOW->must_be_updated_p to ON_P for all windows in
/// the window tree rooted at W.
// Make private once all C usages are ported in this file
#[no_mangle]
pub extern "C" fn set_window_update_flags(w: LispWindowRef, on_p: bool) {
let mut w = Some(w);
while let Some(mut win) = w {
if let Some(contents) = win.contents.as_window() {
set_window_update_flags(contents, on_p);
} else {
win.set_must_be_updated_p(on_p);
}
let next = win.next;
w = if next.is_nil() {
None
} else {
Some(next.into())
};
}
}
/***********************************************************************
Blinking cursor
***********************************************************************/
/// Set the cursor-visibility flag of WINDOW to SHOW.
/// WINDOW nil means use the selected window. SHOW non-nil means
/// show a cursor in WINDOW in the next redisplay. SHOW nil means
/// don't show a cursor.
#[lisp_fn]
pub fn internal_show_cursor(window: LispWindowOrSelected, show: bool) {
let mut win: LispWindowRef = window.into();
// Don't change cursor state while redisplaying. This could confuse
// output routines.
if !unsafe { redisplaying_p } {
win.set_cursor_off_p(!show)
}
}
/// Value is non-nil if next redisplay will display a cursor in WINDOW.
/// WINDOW nil or omitted means report on the selected window.
#[lisp_fn(min = "0")]
pub fn internal_show_cursor_p(window: LispWindowOrSelected) -> bool {
let win: LispWindowRef = window.into();
!win.cursor_off_p()
}
/// Return whether input is coming from the keyboard.
// Corresponds to the INTERACTIVE macro in commands.h.
pub fn is_interactive() -> bool {
unsafe { globals.Vexecuting_kbd_macro.is_nil() && !noninteractive }
}
#[no_mangle]
pub extern "C" fn ding_internal(terminate_macro: bool) {
unsafe {
if noninteractive {
putchar_unlocked(0o7);
} else if terminate_macro && !is_interactive() {
// Stop executing a keyboard macro.
user_error!("Keyboard macro terminated by a command ringing the bell");
} else {
ring_bell(selected_frame().as_mut())
}
}
}
/// Beep, or flash the screen.
/// Also, unless an argument is given,
/// terminate any keyboard macro currently executing.
#[lisp_fn(min = "0")]
pub fn ding(arg: LispObject) {
ding_internal(arg.is_nil())
}
/// Perform redisplay.
/// Optional arg FORCE, if non-nil, prevents redisplay from being
/// preempted by arriving input, even if `redisplay-dont-pause' is nil.
/// If `redisplay-dont-pause' is non-nil (the default), redisplay is never
/// preempted by arriving input, so FORCE does nothing.
///
/// Return t if redisplay was performed, nil if redisplay was preempted
/// immediately by pending input.
#[lisp_fn(min = "0")]
pub fn redisplay(force: LispObject) -> bool {
    let force: bool = force.is_not_nil();
    unsafe {
        swallow_events(true);
        // Redisplay is preempted (returning false, i.e. nil) when input
        // is pending and neither FORCE nor `redisplay-dont-pause'
        // overrides it, or when a keyboard macro is executing -- this
        // mirrors Fredisplay in C Emacs.  The previous code lacked the
        // negation, so it redisplayed exactly in the preempted cases and
        // returned the inverted result.
        let ret = !((detect_input_pending_run_timers(true) && !force
            && !globals.redisplay_dont_pause)
            || globals.Vexecuting_kbd_macro.is_not_nil());
        if ret {
            let count = c_specpdl_index();
            // FORCE temporarily binds `redisplay-dont-pause' to t so the
            // redisplay below cannot be interrupted.
            if force && !globals.redisplay_dont_pause {
                specbind(Qredisplay_dont_pause, Qt);
            }
            redisplay_preserve_echo_area(2);
            unbind_to(count, Qnil);
        }
        ret
    }
}
include!(concat!(env!("OUT_DIR"), "/dispnew_exports.rs")); | }
/// Clear frame FRAME and output again what is supposed to appear on it.
/// If FRAME is omitted or nil, the selected frame is used. | random_line_split |
dispnew.rs | //! Updating of data structures for redisplay.
use std::{cmp, ptr};
use remacs_lib::current_timespec;
use remacs_macros::lisp_fn;
use crate::{
eval::unbind_to,
frame::selected_frame,
frame::{LispFrameLiveOrSelected, LispFrameRef},
lisp::{ExternalPtr, LispObject},
lists::{LispConsCircularChecks, LispConsEndChecks},
remacs_sys::{
clear_current_matrices, detect_input_pending_run_timers, dtotimespec, fset_redisplay,
mark_window_display_accurate, putchar_unlocked, redisplay_preserve_echo_area, ring_bell,
specbind, swallow_events, timespec_add, timespec_sub, wait_reading_process_output,
},
remacs_sys::{
globals, noninteractive, redisplaying_p, Qnil, Qredisplay_dont_pause, Qt, Vframe_list,
WAIT_READING_MAX,
},
remacs_sys::{EmacsDouble, EmacsInt, Lisp_Glyph},
terminal::{clear_frame, update_begin, update_end},
threads::c_specpdl_index,
windows::{LispWindowOrSelected, LispWindowRef},
};
pub type LispGlyphRef = ExternalPtr<Lisp_Glyph>;
/// Pause, without updating display, for SECONDS seconds.
/// SECONDS may be a floating-point value, meaning that you can wait for a
/// fraction of a second. Optional second arg MILLISECONDS specifies an
/// additional wait period, in milliseconds; this is for backwards compatibility.
/// (Not all operating systems support waiting for a fraction of a second.)
#[lisp_fn(min = "1")]
pub fn sleep_for(seconds: EmacsDouble, milliseconds: Option<EmacsInt>) {
    // Fold the legacy milliseconds argument into one duration in seconds.
    let duration = seconds + (milliseconds.unwrap_or(0) as f64 / 1000.0);
    if duration > 0.0 {
        let mut t = unsafe { dtotimespec(duration) };
        let tend = unsafe { timespec_add(current_timespec(), t) };
        // Loop while the remaining time `t` is positive.  The original
        // condition `!t.tv_sec < 0` relied on integer bitwise NOT
        // (`!x == -x - 1`, hence `!x < 0` iff `x >= 0`); spell the
        // equivalent test plainly.
        while t.tv_sec >= 0 && (t.tv_sec > 0 || t.tv_nsec > 0) {
            unsafe {
                wait_reading_process_output(
                    cmp::min(t.tv_sec as i64, WAIT_READING_MAX),
                    t.tv_nsec as i32,
                    0,
                    true,
                    Qnil,
                    ptr::null_mut(),
                    0,
                )
            };
            // Recompute how much time remains until the deadline.
            t = unsafe { timespec_sub(tend, current_timespec()) };
        }
    }
}
/**********************************************************************
Redrawing Frames
**********************************************************************/
/// Redraw frame FRAME.
///
/// Clears the frame's display and its cached current glyph matrices, then
/// marks the whole window tree so the next redisplay regenerates
/// everything from scratch.
#[no_mangle]
pub extern "C" fn redraw_frame(mut frame: LispFrameRef) {
    unsafe {
        // Error if FRAME has no glyphs.
        debug_assert!(frame.glyphs_initialized_p());
        // Bracket the physical clearing in a begin/end update pair and
        // invalidate the cached current matrices.
        update_begin(frame);
        clear_frame(frame);
        clear_current_matrices(frame.as_mut());
        update_end(frame);
        fset_redisplay(frame.as_mut());
        // Mark all windows as inaccurate, so that every window will have
        // its redisplay done.
        mark_window_display_accurate(frame.root_window, false);
        set_window_update_flags(frame.root_window.into(), true);
        // The frame's contents are about to be regenerated, so it is no
        // longer considered garbaged.
        frame.set_garbaged(false);
    }
}
/// Clear frame FRAME and output again what is supposed to appear on it.
/// If FRAME is omitted or nil, the selected frame is used.
#[lisp_fn(c_name = "redraw_frame", name = "redraw-frame", min = "0")]
pub fn redraw_frame_lisp(frame: LispFrameLiveOrSelected) {
    // Thin Lisp-visible wrapper: the `LispFrameLiveOrSelected` conversion
    // resolves nil/omitted to the selected frame.
    redraw_frame(frame.into());
}
/// Clear and redisplay all visible frames.
#[lisp_fn]
pub fn redraw_display() {
    // Walk the global frame list, redrawing each frame whose visibility
    // flag is nonzero.
    for_each_frame!(frame => {
        if frame.visible() != 0 {
            redraw_frame(frame);
        }
    });
}
/// Set `must_be_updated_p` to ON_P for every window in the window tree
/// rooted at W: sibling chains are walked iteratively, while child trees
/// of internal windows are handled by recursion.
// Make private once all C usages are ported in this file
#[no_mangle]
pub extern "C" fn set_window_update_flags(w: LispWindowRef, on_p: bool) {
    let mut cursor = Some(w);
    while let Some(mut window) = cursor {
        match window.contents.as_window() {
            // Internal node: descend into its child window tree.
            Some(child) => set_window_update_flags(child, on_p),
            // Leaf window: flag it directly.
            None => window.set_must_be_updated_p(on_p),
        }
        // Advance along the sibling chain; a nil `next` ends the walk.
        let sibling = window.next;
        cursor = if sibling.is_nil() {
            None
        } else {
            Some(sibling.into())
        };
    }
}
/***********************************************************************
Blinking cursor
***********************************************************************/
/// Set the cursor-visibility flag of WINDOW to SHOW.
/// WINDOW nil means use the selected window. SHOW non-nil means
/// show a cursor in WINDOW in the next redisplay. SHOW nil means
/// don't show a cursor.
#[lisp_fn]
pub fn internal_show_cursor(window: LispWindowOrSelected, show: bool) {
    let mut win: LispWindowRef = window.into();
    // Don't change cursor state while redisplaying. This could confuse
    // output routines.
    if !unsafe { redisplaying_p } {
        // The flag is stored inverted: `cursor_off_p` means "hidden".
        win.set_cursor_off_p(!show)
    }
}
/// Value is non-nil if next redisplay will display a cursor in WINDOW.
/// WINDOW nil or omitted means report on the selected window.
#[lisp_fn(min = "0")]
pub fn internal_show_cursor_p(window: LispWindowOrSelected) -> bool {
    let win: LispWindowRef = window.into();
    // Invert the stored "cursor off" flag (see `internal-show-cursor`).
    !win.cursor_off_p()
}
/// Return whether input is coming from the keyboard.
///
/// True only when no keyboard macro is supplying input and Emacs is not
/// running in batch (noninteractive) mode.
// Corresponds to the INTERACTIVE macro in commands.h.
pub fn is_interactive() -> bool {
    unsafe { globals.Vexecuting_kbd_macro.is_nil() && !noninteractive }
}
/// Ring the bell, or in batch mode print the BEL character.
///
/// With `terminate_macro` true, ringing the bell while a keyboard macro
/// is executing signals an error instead, which stops the macro.
#[no_mangle]
pub extern "C" fn ding_internal(terminate_macro: bool) {
    unsafe {
        if noninteractive {
            // Batch mode: emit ASCII BEL (0o7) to stdout.
            putchar_unlocked(0o7);
        } else if terminate_macro && !is_interactive() {
            // Stop executing a keyboard macro.
            user_error!("Keyboard macro terminated by a command ringing the bell");
        } else {
            ring_bell(selected_frame().as_mut())
        }
    }
}
/// Beep, or flash the screen.
/// Also, unless an argument is given,
/// terminate any keyboard macro currently executing.
#[lisp_fn(min = "0")]
pub fn ding(arg: LispObject) {
    // A nil ARG (no argument) permits terminating a running keyboard macro.
    ding_internal(arg.is_nil())
}
/// Perform redisplay.
/// Optional arg FORCE, if non-nil, prevents redisplay from being
/// preempted by arriving input, even if `redisplay-dont-pause' is nil.
/// If `redisplay-dont-pause' is non-nil (the default), redisplay is never
/// preempted by arriving input, so FORCE does nothing.
///
/// Return t if redisplay was performed, nil if redisplay was preempted
/// immediately by pending input.
#[lisp_fn(min = "0")]
pub fn redisplay(force: LispObject) -> bool |
include!(concat!(env!("OUT_DIR"), "/dispnew_exports.rs"));
| {
let force: bool = force.is_not_nil();
unsafe {
swallow_events(true);
let ret =
(detect_input_pending_run_timers(true) && !force && !globals.redisplay_dont_pause)
|| globals.Vexecuting_kbd_macro.is_not_nil();
if ret {
let count = c_specpdl_index();
if force && !globals.redisplay_dont_pause {
specbind(Qredisplay_dont_pause, Qt);
}
redisplay_preserve_echo_area(2);
unbind_to(count, Qnil);
}
ret
}
} | identifier_body |
dispnew.rs | //! Updating of data structures for redisplay.
use std::{cmp, ptr};
use remacs_lib::current_timespec;
use remacs_macros::lisp_fn;
use crate::{
eval::unbind_to,
frame::selected_frame,
frame::{LispFrameLiveOrSelected, LispFrameRef},
lisp::{ExternalPtr, LispObject},
lists::{LispConsCircularChecks, LispConsEndChecks},
remacs_sys::{
clear_current_matrices, detect_input_pending_run_timers, dtotimespec, fset_redisplay,
mark_window_display_accurate, putchar_unlocked, redisplay_preserve_echo_area, ring_bell,
specbind, swallow_events, timespec_add, timespec_sub, wait_reading_process_output,
},
remacs_sys::{
globals, noninteractive, redisplaying_p, Qnil, Qredisplay_dont_pause, Qt, Vframe_list,
WAIT_READING_MAX,
},
remacs_sys::{EmacsDouble, EmacsInt, Lisp_Glyph},
terminal::{clear_frame, update_begin, update_end},
threads::c_specpdl_index,
windows::{LispWindowOrSelected, LispWindowRef},
};
pub type LispGlyphRef = ExternalPtr<Lisp_Glyph>;
/// Pause, without updating display, for SECONDS seconds.
/// SECONDS may be a floating-point value, meaning that you can wait for a
/// fraction of a second. Optional second arg MILLISECONDS specifies an
/// additional wait period, in milliseconds; this is for backwards compatibility.
/// (Not all operating systems support waiting for a fraction of a second.)
#[lisp_fn(min = "1")]
pub fn sleep_for(seconds: EmacsDouble, milliseconds: Option<EmacsInt>) {
let duration = seconds + (milliseconds.unwrap_or(0) as f64 / 1000.0);
if duration > 0.0 {
let mut t = unsafe { dtotimespec(duration) };
let tend = unsafe { timespec_add(current_timespec(), t) };
while !t.tv_sec < 0 && (t.tv_sec > 0 || t.tv_nsec > 0) {
unsafe {
wait_reading_process_output(
cmp::min(t.tv_sec as i64, WAIT_READING_MAX),
t.tv_nsec as i32,
0,
true,
Qnil,
ptr::null_mut(),
0,
)
};
t = unsafe { timespec_sub(tend, current_timespec()) };
}
}
}
/**********************************************************************
Redrawing Frames
**********************************************************************/
/// Redraw frame FRAME.
#[no_mangle]
pub extern "C" fn redraw_frame(mut frame: LispFrameRef) {
unsafe {
// Error if FRAME has no glyphs.
debug_assert!(frame.glyphs_initialized_p());
update_begin(frame);
clear_frame(frame);
clear_current_matrices(frame.as_mut());
update_end(frame);
fset_redisplay(frame.as_mut());
// Mark all windows as inaccurate, so that every window will have
// its redisplay done.
mark_window_display_accurate(frame.root_window, false);
set_window_update_flags(frame.root_window.into(), true);
frame.set_garbaged(false);
}
}
/// Clear frame FRAME and output again what is supposed to appear on it.
/// If FRAME is omitted or nil, the selected frame is used.
#[lisp_fn(c_name = "redraw_frame", name = "redraw-frame", min = "0")]
pub fn redraw_frame_lisp(frame: LispFrameLiveOrSelected) {
redraw_frame(frame.into());
}
/// Clear and redisplay all visible frames.
#[lisp_fn]
pub fn redraw_display() {
for_each_frame!(frame => {
if frame.visible() != 0 {
redraw_frame(frame);
}
});
}
/// Set WINDOW->must_be_updated_p to ON_P for all windows in
/// the window tree rooted at W.
// Make private once all C usages are ported in this file
#[no_mangle]
pub extern "C" fn | (w: LispWindowRef, on_p: bool) {
let mut w = Some(w);
while let Some(mut win) = w {
if let Some(contents) = win.contents.as_window() {
set_window_update_flags(contents, on_p);
} else {
win.set_must_be_updated_p(on_p);
}
let next = win.next;
w = if next.is_nil() {
None
} else {
Some(next.into())
};
}
}
/***********************************************************************
Blinking cursor
***********************************************************************/
/// Set the cursor-visibility flag of WINDOW to SHOW.
/// WINDOW nil means use the selected window. SHOW non-nil means
/// show a cursor in WINDOW in the next redisplay. SHOW nil means
/// don't show a cursor.
#[lisp_fn]
pub fn internal_show_cursor(window: LispWindowOrSelected, show: bool) {
let mut win: LispWindowRef = window.into();
// Don't change cursor state while redisplaying. This could confuse
// output routines.
if !unsafe { redisplaying_p } {
win.set_cursor_off_p(!show)
}
}
/// Value is non-nil if next redisplay will display a cursor in WINDOW.
/// WINDOW nil or omitted means report on the selected window.
#[lisp_fn(min = "0")]
pub fn internal_show_cursor_p(window: LispWindowOrSelected) -> bool {
let win: LispWindowRef = window.into();
!win.cursor_off_p()
}
/// Return whether input is coming from the keyboard.
// Corresponds to the INTERACTIVE macro in commands.h.
pub fn is_interactive() -> bool {
unsafe { globals.Vexecuting_kbd_macro.is_nil() && !noninteractive }
}
#[no_mangle]
pub extern "C" fn ding_internal(terminate_macro: bool) {
unsafe {
if noninteractive {
putchar_unlocked(0o7);
} else if terminate_macro && !is_interactive() {
// Stop executing a keyboard macro.
user_error!("Keyboard macro terminated by a command ringing the bell");
} else {
ring_bell(selected_frame().as_mut())
}
}
}
/// Beep, or flash the screen.
/// Also, unless an argument is given,
/// terminate any keyboard macro currently executing.
#[lisp_fn(min = "0")]
pub fn ding(arg: LispObject) {
ding_internal(arg.is_nil())
}
/// Perform redisplay.
/// Optional arg FORCE, if non-nil, prevents redisplay from being
/// preempted by arriving input, even if `redisplay-dont-pause' is nil.
/// If `redisplay-dont-pause' is non-nil (the default), redisplay is never
/// preempted by arriving input, so FORCE does nothing.
///
/// Return t if redisplay was performed, nil if redisplay was preempted
/// immediately by pending input.
#[lisp_fn(min = "0")]
pub fn redisplay(force: LispObject) -> bool {
    let force: bool = force.is_not_nil();
    unsafe {
        swallow_events(true);
        // Redisplay is preempted (returning false, i.e. nil) when input
        // is pending and neither FORCE nor `redisplay-dont-pause'
        // overrides it, or when a keyboard macro is executing -- this
        // mirrors Fredisplay in C Emacs.  The previous code lacked the
        // negation, so it redisplayed exactly in the preempted cases and
        // returned the inverted result.
        let ret = !((detect_input_pending_run_timers(true) && !force
            && !globals.redisplay_dont_pause)
            || globals.Vexecuting_kbd_macro.is_not_nil());
        if ret {
            let count = c_specpdl_index();
            // FORCE temporarily binds `redisplay-dont-pause' to t so the
            // redisplay below cannot be interrupted.
            if force && !globals.redisplay_dont_pause {
                specbind(Qredisplay_dont_pause, Qt);
            }
            redisplay_preserve_echo_area(2);
            unbind_to(count, Qnil);
        }
        ret
    }
}
include!(concat!(env!("OUT_DIR"), "/dispnew_exports.rs"));
| set_window_update_flags | identifier_name |
scheduler.rs | use std::io;
use std::io::prelude::*;
use std::collections::{HashSet, HashMap};
use std::str::FromStr;
use time;
use regex::Regex;
use error::Error;
use error::Error::ErrCronFormat;
/// Building a `Scheduler` can fail with a cron-format error.
pub type SchedulerResult<'a> = Result<Scheduler<'a>, Error>;
/// A cron-style schedule parsed from an interval string of 5 fields
/// (minute hour day month weekday) or 6 fields (leading seconds field).
#[derive(Debug)]
pub struct Scheduler<'a> {
    // Raw text of each cron field, borrowed from the input string.
    seconds: &'a str,
    minutes: &'a str,
    hours: &'a str,
    days: &'a str,
    months: &'a str,
    weekdays: &'a str,
    // 5 or 6, depending on whether a seconds field was supplied.
    timeFiledsLength: usize,
    // Expanded sets of matching values, keyed by field name
    // ("seconds", "minutes", ...), filled by `parse_time_fields`.
    timePoints: HashMap<&'a str, HashSet<u32>>,
    // Regex used to validate the overall interval format.
    re: Regex,
}
impl<'a> Scheduler<'a> {
    /// Builds a `Scheduler` from a cron-style interval string and
    /// pre-computes the matching time points for every field.
    ///
    /// Accepts 5 fields (minute hour day month weekday) or 6 fields
    /// (seconds first); returns `ErrCronFormat` on malformed input.
    pub fn new(intervals: &'a str) -> SchedulerResult {
        // Overall shape check: 5 or 6 whitespace-separated fields, each
        // either `*`, `*/n`, or a list/range built from digits, `-`, `,`, `/`.
        let reRes = Regex::new(r"^\s*((\*(/\d+)?)|[0-9-,/]+)(\s+((\*(/\d+)?)|[0-9-,/]+)){4,5}\s*$");
        match reRes {
            Ok(re) => {
                if !re.is_match(intervals) {
                    return Err(ErrCronFormat(format!("invalid format: {}", intervals)));
                }
                let timeFileds: Vec<&str> = intervals.split_whitespace().collect();
                let timeFiledsLength = timeFileds.len();
                if timeFiledsLength != 5 && timeFiledsLength != 6 {
                    return Err(ErrCronFormat(format!("length of itervals should be 5 or 6, \
                                                      but got {}",
                                                     timeFiledsLength)));
                }
                // With 6 fields the first one is the seconds field and the
                // remaining five are shifted right by one.
                let mut sec = "";
                let mut startIndex: usize = 0;
                if timeFiledsLength == 6 {
                    sec = timeFileds[0].clone();
                    startIndex = 1;
                }
                let mut sch = Scheduler {
                    seconds: sec,
                    minutes: timeFileds[startIndex],
                    hours: timeFileds[startIndex + 1],
                    days: timeFileds[startIndex + 2],
                    months: timeFileds[startIndex + 3],
                    weekdays: timeFileds[startIndex + 4],
                    timeFiledsLength: timeFiledsLength,
                    timePoints: HashMap::new(),
                    re: re,
                };
                // Eagerly expand every field so later matching is set lookups.
                try!(sch.parse_time_fields().map_err(|e| ErrCronFormat(e.to_string())));
                Ok(sch)
            }
            Err(e) => Err(ErrCronFormat(e.to_string())),
        }
    }
    /// Expands each raw cron field into its set of matching values and
    /// caches them in `timePoints`, keyed by field name.
    pub fn parse_time_fields(&mut self) -> Result<(), Error> {
        // Without an explicit seconds field, default to {0} so a 6-field
        // check would only match at second 0 of a matching minute.
        if self.seconds != "" {
            self.timePoints.insert("seconds", try!(parse_intervals_field(self.seconds, 0, 59)));
        } else {
            self.timePoints.insert("seconds", [0].iter().cloned().collect::<HashSet<u32>>());
        }
        self.timePoints.insert("minutes", try!(parse_intervals_field(self.minutes, 0, 59)));
        self.timePoints.insert("hours", try!(parse_intervals_field(self.hours, 0, 23)));
        self.timePoints.insert("days", try!(parse_intervals_field(self.days, 1, 31)));
        self.timePoints.insert("months", try!(parse_intervals_field(self.months, 1, 12)));
        self.timePoints.insert("weekdays", try!(parse_intervals_field(self.weekdays, 0, 6)));
        Ok(())
    }
pub fn is_time_up(&self, t: &time::Tm) -> bool |
}
/// Expands a single cron field into the set of matching values.
///
/// Supports comma-separated parts, each of which is a bare value (`5`),
/// a range (`1-8`), a wildcard (`*`, meaning the whole `[min, max]`
/// range), or a stepped wildcard (`*/n`).  Returns `ErrCronFormat` for
/// anything else.
fn parse_intervals_field(inter: &str, min: u32, max: u32) -> Result<HashSet<u32>, Error> {
    let mut points = HashSet::new();
    let parts: Vec<&str> = inter.split(",").collect();
    for part in parts {
        let x: Vec<&str> = part.split("/").collect();
        let y: Vec<&str> = x[0].split("-").collect();
        let mut _min = min;
        let mut _max = max;
        let mut step = 1u32;
        let (xLen, yLen) = (x.len(), y.len());
        if xLen == 1 && yLen == 1 {
            // Bare value, or `*` which keeps the full [min, max] range.
            if y[0] != "*" {
                _min = try!(y[0].parse::<u32>());
                _max = _min;
            }
        } else if xLen == 1 && yLen == 2 {
            // Range `a-b`.
            _min = try!(y[0].parse::<u32>());
            _max = try!(y[1].parse::<u32>());
        } else if xLen == 2 && yLen == 1 && x[0] == "*" {
            // Stepped wildcard `*/n`.
            step = try!(x[1].parse::<u32>());
        } else {
            return Err(ErrCronFormat(String::from(part)));
        }
        // `*/0` would make `x % step` below panic with a division by
        // zero; reject it as a format error instead.
        if step == 0 {
            return Err(ErrCronFormat(String::from(part)));
        }
        // NOTE(review): values outside [min, max] (e.g. minute `99`) are
        // accepted here and simply never match; confirm whether they
        // should be rejected as format errors instead.
        for i in (_min.._max + 1).filter(|x| x % step == 0).collect::<Vec<u32>>() {
            points.insert(i);
        }
    }
    Ok(points)
}
#[test]
fn test_parse_intervals() {
    // Valid: 5- and 6-field specs using steps, ranges and value lists.
    assert!(Scheduler::new("*/2 1-8,11 * * *").is_ok());
    assert!(Scheduler::new("0 */2 1-8,11 * * *").is_ok());
    assert!(Scheduler::new("*/2 1-4,16,11,17 * * *").is_ok());
    // Invalid: seven fields, and a dangling `*/` with no step value.
    assert!(Scheduler::new("05 */2 1-8,11 * * * *").is_err());
    assert!(Scheduler::new("05 */ 1-8,11 * * *").is_err());
}
| {
let (second, minute, hour, day, month, weekday) = (t.tm_sec as u32,
t.tm_min as u32,
t.tm_hour as u32,
t.tm_mday as u32,
t.tm_mon as u32,
t.tm_wday as u32);
let isSecond = self.timePoints.get("seconds").unwrap().contains(&second);
let isLeft = self.timePoints.get("minutes").unwrap().contains(&minute) &&
self.timePoints.get("hours").unwrap().contains(&hour) &&
self.timePoints.get("days").unwrap().contains(&day) &&
self.timePoints.get("months").unwrap().contains(&month) &&
self.timePoints.get("weekdays").unwrap().contains(&weekday);
if self.timeFiledsLength == 5 {
isLeft
} else {
isSecond && isLeft
}
} | identifier_body |
scheduler.rs | use std::io;
use std::io::prelude::*;
use std::collections::{HashSet, HashMap};
use std::str::FromStr;
use time;
use regex::Regex;
use error::Error;
use error::Error::ErrCronFormat;
pub type SchedulerResult<'a> = Result<Scheduler<'a>, Error>;
#[derive(Debug)]
pub struct Scheduler<'a> {
seconds: &'a str,
minutes: &'a str,
hours: &'a str,
days: &'a str,
months: &'a str,
weekdays: &'a str,
timeFiledsLength: usize,
timePoints: HashMap<&'a str, HashSet<u32>>,
re: Regex,
}
impl<'a> Scheduler<'a> {
pub fn | (intervals: &'a str) -> SchedulerResult {
let reRes = Regex::new(r"^\s*((\*(/\d+)?)|[0-9-,/]+)(\s+((\*(/\d+)?)|[0-9-,/]+)){4,5}\s*$");
match reRes {
Ok(re) => {
if !re.is_match(intervals) {
return Err(ErrCronFormat(format!("invalid format: {}", intervals)));
}
let timeFileds: Vec<&str> = intervals.split_whitespace().collect();
let timeFiledsLength = timeFileds.len();
if timeFiledsLength != 5 && timeFiledsLength != 6 {
return Err(ErrCronFormat(format!("length of itervals should be 5 or 6, \
but got {}",
timeFiledsLength)));
}
let mut sec = "";
let mut startIndex: usize = 0;
if timeFiledsLength == 6 {
sec = timeFileds[0].clone();
startIndex = 1;
}
let mut sch = Scheduler {
seconds: sec,
minutes: timeFileds[startIndex],
hours: timeFileds[startIndex + 1],
days: timeFileds[startIndex + 2],
months: timeFileds[startIndex + 3],
weekdays: timeFileds[startIndex + 4],
timeFiledsLength: timeFiledsLength,
timePoints: HashMap::new(),
re: re,
};
try!(sch.parse_time_fields().map_err(|e| ErrCronFormat(e.to_string())));
Ok(sch)
}
Err(e) => Err(ErrCronFormat(e.to_string())),
}
}
pub fn parse_time_fields(&mut self) -> Result<(), Error> {
if self.seconds != "" {
self.timePoints.insert("seconds", try!(parse_intervals_field(self.seconds, 0, 59)));
} else {
self.timePoints.insert("seconds", [0].iter().cloned().collect::<HashSet<u32>>());
}
self.timePoints.insert("minutes", try!(parse_intervals_field(self.minutes, 0, 59)));
self.timePoints.insert("hours", try!(parse_intervals_field(self.hours, 0, 23)));
self.timePoints.insert("days", try!(parse_intervals_field(self.days, 1, 31)));
self.timePoints.insert("months", try!(parse_intervals_field(self.months, 1, 12)));
self.timePoints.insert("weekdays", try!(parse_intervals_field(self.weekdays, 0, 6)));
Ok(())
}
pub fn is_time_up(&self, t: &time::Tm) -> bool {
let (second, minute, hour, day, month, weekday) = (t.tm_sec as u32,
t.tm_min as u32,
t.tm_hour as u32,
t.tm_mday as u32,
t.tm_mon as u32,
t.tm_wday as u32);
let isSecond = self.timePoints.get("seconds").unwrap().contains(&second);
let isLeft = self.timePoints.get("minutes").unwrap().contains(&minute) &&
self.timePoints.get("hours").unwrap().contains(&hour) &&
self.timePoints.get("days").unwrap().contains(&day) &&
self.timePoints.get("months").unwrap().contains(&month) &&
self.timePoints.get("weekdays").unwrap().contains(&weekday);
if self.timeFiledsLength == 5 {
isLeft
} else {
isSecond && isLeft
}
}
}
fn parse_intervals_field(inter: &str, min: u32, max: u32) -> Result<HashSet<u32>, Error> {
let mut points = HashSet::new();
let parts: Vec<&str> = inter.split(",").collect();
for part in parts {
let x: Vec<&str> = part.split("/").collect();
let y: Vec<&str> = x[0].split("-").collect();
let mut _min = min;
let mut _max = max;
let mut step = 1u32;
let (xLen, yLen) = (x.len(), y.len());
if xLen == 1 && yLen == 1 {
if y[0] != "*" {
_min = try!(y[0].parse::<u32>());
_max = _min;
}
} else if xLen == 1 && yLen == 2 {
_min = try!(y[0].parse::<u32>());
_max = try!(y[1].parse::<u32>());
} else if xLen == 2 && yLen == 1 && x[0] == "*" {
step = try!(x[1].parse::<u32>());
} else {
return Err(ErrCronFormat(String::from(part)));
}
for i in (_min.._max + 1).filter(|x| x % step == 0).collect::<Vec<u32>>() {
points.insert(i);
}
}
Ok(points)
}
#[test]
fn test_parse_intervals() {
assert!(Scheduler::new("*/2 1-8,11 * * *").is_ok());
assert!(Scheduler::new("0 */2 1-8,11 * * *").is_ok());
assert!(Scheduler::new("*/2 1-4,16,11,17 * * *").is_ok());
assert!(Scheduler::new("05 */2 1-8,11 * * * *").is_err());
assert!(Scheduler::new("05 */ 1-8,11 * * *").is_err());
}
| new | identifier_name |
scheduler.rs | use std::io;
use std::io::prelude::*;
use std::collections::{HashSet, HashMap};
use std::str::FromStr;
use time;
use regex::Regex;
use error::Error;
use error::Error::ErrCronFormat;
pub type SchedulerResult<'a> = Result<Scheduler<'a>, Error>;
#[derive(Debug)]
pub struct Scheduler<'a> {
seconds: &'a str,
minutes: &'a str,
hours: &'a str,
days: &'a str,
months: &'a str,
weekdays: &'a str,
timeFiledsLength: usize,
timePoints: HashMap<&'a str, HashSet<u32>>,
re: Regex,
}
impl<'a> Scheduler<'a> {
pub fn new(intervals: &'a str) -> SchedulerResult {
let reRes = Regex::new(r"^\s*((\*(/\d+)?)|[0-9-,/]+)(\s+((\*(/\d+)?)|[0-9-,/]+)){4,5}\s*$");
match reRes {
Ok(re) => {
if !re.is_match(intervals) {
return Err(ErrCronFormat(format!("invalid format: {}", intervals)));
}
let timeFileds: Vec<&str> = intervals.split_whitespace().collect();
let timeFiledsLength = timeFileds.len();
if timeFiledsLength != 5 && timeFiledsLength != 6 {
return Err(ErrCronFormat(format!("length of itervals should be 5 or 6, \
but got {}",
timeFiledsLength)));
}
let mut sec = "";
let mut startIndex: usize = 0;
if timeFiledsLength == 6 {
sec = timeFileds[0].clone();
startIndex = 1;
}
let mut sch = Scheduler {
seconds: sec,
minutes: timeFileds[startIndex],
hours: timeFileds[startIndex + 1],
days: timeFileds[startIndex + 2],
months: timeFileds[startIndex + 3],
weekdays: timeFileds[startIndex + 4],
timeFiledsLength: timeFiledsLength,
timePoints: HashMap::new(),
re: re,
};
try!(sch.parse_time_fields().map_err(|e| ErrCronFormat(e.to_string())));
Ok(sch)
}
Err(e) => Err(ErrCronFormat(e.to_string())),
}
}
pub fn parse_time_fields(&mut self) -> Result<(), Error> {
if self.seconds != "" {
self.timePoints.insert("seconds", try!(parse_intervals_field(self.seconds, 0, 59)));
} else {
self.timePoints.insert("seconds", [0].iter().cloned().collect::<HashSet<u32>>());
}
self.timePoints.insert("minutes", try!(parse_intervals_field(self.minutes, 0, 59)));
self.timePoints.insert("hours", try!(parse_intervals_field(self.hours, 0, 23)));
self.timePoints.insert("days", try!(parse_intervals_field(self.days, 1, 31)));
self.timePoints.insert("months", try!(parse_intervals_field(self.months, 1, 12)));
self.timePoints.insert("weekdays", try!(parse_intervals_field(self.weekdays, 0, 6)));
Ok(())
}
pub fn is_time_up(&self, t: &time::Tm) -> bool {
let (second, minute, hour, day, month, weekday) = (t.tm_sec as u32,
t.tm_min as u32,
t.tm_hour as u32,
t.tm_mday as u32,
t.tm_mon as u32,
t.tm_wday as u32);
let isSecond = self.timePoints.get("seconds").unwrap().contains(&second);
let isLeft = self.timePoints.get("minutes").unwrap().contains(&minute) &&
self.timePoints.get("hours").unwrap().contains(&hour) &&
self.timePoints.get("days").unwrap().contains(&day) &&
self.timePoints.get("months").unwrap().contains(&month) &&
self.timePoints.get("weekdays").unwrap().contains(&weekday);
if self.timeFiledsLength == 5 {
isLeft
} else {
isSecond && isLeft
}
}
}
fn parse_intervals_field(inter: &str, min: u32, max: u32) -> Result<HashSet<u32>, Error> {
let mut points = HashSet::new();
let parts: Vec<&str> = inter.split(",").collect();
for part in parts {
let x: Vec<&str> = part.split("/").collect();
let y: Vec<&str> = x[0].split("-").collect();
let mut _min = min;
let mut _max = max;
let mut step = 1u32;
let (xLen, yLen) = (x.len(), y.len());
if xLen == 1 && yLen == 1 {
if y[0] != "*" {
_min = try!(y[0].parse::<u32>());
_max = _min;
}
} else if xLen == 1 && yLen == 2 {
_min = try!(y[0].parse::<u32>());
_max = try!(y[1].parse::<u32>());
} else if xLen == 2 && yLen == 1 && x[0] == "*" {
step = try!(x[1].parse::<u32>());
} else {
return Err(ErrCronFormat(String::from(part)));
}
for i in (_min.._max + 1).filter(|x| x % step == 0).collect::<Vec<u32>>() {
points.insert(i);
}
} | }
#[test]
fn test_parse_intervals() {
assert!(Scheduler::new("*/2 1-8,11 * * *").is_ok());
assert!(Scheduler::new("0 */2 1-8,11 * * *").is_ok());
assert!(Scheduler::new("*/2 1-4,16,11,17 * * *").is_ok());
assert!(Scheduler::new("05 */2 1-8,11 * * * *").is_err());
assert!(Scheduler::new("05 */ 1-8,11 * * *").is_err());
} |
Ok(points) | random_line_split |
linux.py | # Copyright (C) 2010-2013 Claudio Guarnieri.
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import os
import logging
import datetime
import re
import dateutil.parser
from lib.cuckoo.common.abstracts import BehaviorHandler
log = logging.getLogger(__name__)
class FilteredProcessLog(list):
def __init__(self, eventstream, **kwfilters):
|
def __iter__(self):
for event in self.eventstream:
for k, v in self.kwfilters.items():
if event[k] != v:
continue
del event["type"]
yield event
def __nonzero__(self):
return True
class LinuxSystemTap(BehaviorHandler):
"""Parses systemtap generated plaintext logs (see data/strace.stp)."""
key = "processes"
def __init__(self, *args, **kwargs):
super(LinuxSystemTap, self).__init__(*args, **kwargs)
self.processes = []
self.pids_seen = set()
self.forkmap = {}
self.matched = False
self._check_for_probelkm()
def _check_for_probelkm(self):
path_lkm = os.path.join(self.analysis.logs_path, "all.lkm")
if os.path.exists(path_lkm):
lines = open(path_lkm).readlines()
forks = [re.findall("task (\d+)@0x[0-9a-f]+ forked to (\d+)@0x[0-9a-f]+", line) for line in lines]
self.forkmap = dict((j, i) for i, j in reduce(lambda x, y: x+y, forks, []))
# self.results["source"].append("probelkm")
def handles_path(self, path):
if path.endswith(".stap"):
self.matched = True
return True
def parse(self, path):
parser = StapParser(open(path))
for event in parser:
pid = event["pid"]
if pid not in self.pids_seen:
self.pids_seen.add(pid)
ppid = self.forkmap.get(pid, -1)
process = {
"pid": pid,
"ppid": ppid,
"process_name": event["process_name"],
"first_seen": event["time"],
}
# create a process event as we don't have those with linux+systemtap
pevent = dict(process)
pevent["type"] = "process"
yield pevent
process["calls"] = FilteredProcessLog(parser, pid=pid)
self.processes.append(process)
yield event
def run(self):
if not self.matched:
return
self.processes.sort(key=lambda process: process["first_seen"])
return self.processes
class StapParser(object):
"""Handle .stap logs from the Linux analyzer."""
def __init__(self, fd):
self.fd = fd
def __iter__(self):
self.fd.seek(0)
for line in self.fd:
# 'Thu May 7 14:58:43 2015.390178 python@7f798cb95240[2114] close(6) = 0\n'
# datetime is 31 characters
datetimepart, rest = line[:31], line[32:]
# incredibly sophisticated date time handling
dtms = datetime.timedelta(0, 0, int(datetimepart.split(".", 1)[1]))
dt = dateutil.parser.parse(datetimepart.split(".", 1)[0]) + dtms
parts = re.match("^(.+)@([a-f0-9]+)\[(\d+)\] (\w+)\((.*)\) = (\S+){0,1}\s{0,1}(\(\w+\)){0,1}$", rest)
if not parts:
log.warning("Could not parse syscall trace line: %s", line)
continue
pname, ip, pid, fn, arguments, retval, ecode = parts.groups()
argsplit = arguments.split(", ")
arguments = dict(("p%u" % pos, argsplit[pos]) for pos in range(len(argsplit)))
pid = int(pid) if pid.isdigit() else -1
yield {
"time": dt, "process_name": pname, "pid": pid,
"instruction_pointer": ip, "api": fn, "arguments": arguments,
"return_value": retval, "status": ecode,
"type": "apicall", "raw": line,
}
| self.eventstream = eventstream
self.kwfilters = kwfilters | identifier_body |
linux.py | # Copyright (C) 2010-2013 Claudio Guarnieri.
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import os
import logging
import datetime
import re
import dateutil.parser
from lib.cuckoo.common.abstracts import BehaviorHandler
log = logging.getLogger(__name__)
class FilteredProcessLog(list):
def | (self, eventstream, **kwfilters):
self.eventstream = eventstream
self.kwfilters = kwfilters
def __iter__(self):
for event in self.eventstream:
for k, v in self.kwfilters.items():
if event[k] != v:
continue
del event["type"]
yield event
def __nonzero__(self):
return True
class LinuxSystemTap(BehaviorHandler):
"""Parses systemtap generated plaintext logs (see data/strace.stp)."""
key = "processes"
def __init__(self, *args, **kwargs):
super(LinuxSystemTap, self).__init__(*args, **kwargs)
self.processes = []
self.pids_seen = set()
self.forkmap = {}
self.matched = False
self._check_for_probelkm()
def _check_for_probelkm(self):
path_lkm = os.path.join(self.analysis.logs_path, "all.lkm")
if os.path.exists(path_lkm):
lines = open(path_lkm).readlines()
forks = [re.findall("task (\d+)@0x[0-9a-f]+ forked to (\d+)@0x[0-9a-f]+", line) for line in lines]
self.forkmap = dict((j, i) for i, j in reduce(lambda x, y: x+y, forks, []))
# self.results["source"].append("probelkm")
def handles_path(self, path):
if path.endswith(".stap"):
self.matched = True
return True
def parse(self, path):
parser = StapParser(open(path))
for event in parser:
pid = event["pid"]
if pid not in self.pids_seen:
self.pids_seen.add(pid)
ppid = self.forkmap.get(pid, -1)
process = {
"pid": pid,
"ppid": ppid,
"process_name": event["process_name"],
"first_seen": event["time"],
}
# create a process event as we don't have those with linux+systemtap
pevent = dict(process)
pevent["type"] = "process"
yield pevent
process["calls"] = FilteredProcessLog(parser, pid=pid)
self.processes.append(process)
yield event
def run(self):
if not self.matched:
return
self.processes.sort(key=lambda process: process["first_seen"])
return self.processes
class StapParser(object):
"""Handle .stap logs from the Linux analyzer."""
def __init__(self, fd):
self.fd = fd
def __iter__(self):
self.fd.seek(0)
for line in self.fd:
# 'Thu May 7 14:58:43 2015.390178 python@7f798cb95240[2114] close(6) = 0\n'
# datetime is 31 characters
datetimepart, rest = line[:31], line[32:]
# incredibly sophisticated date time handling
dtms = datetime.timedelta(0, 0, int(datetimepart.split(".", 1)[1]))
dt = dateutil.parser.parse(datetimepart.split(".", 1)[0]) + dtms
parts = re.match("^(.+)@([a-f0-9]+)\[(\d+)\] (\w+)\((.*)\) = (\S+){0,1}\s{0,1}(\(\w+\)){0,1}$", rest)
if not parts:
log.warning("Could not parse syscall trace line: %s", line)
continue
pname, ip, pid, fn, arguments, retval, ecode = parts.groups()
argsplit = arguments.split(", ")
arguments = dict(("p%u" % pos, argsplit[pos]) for pos in range(len(argsplit)))
pid = int(pid) if pid.isdigit() else -1
yield {
"time": dt, "process_name": pname, "pid": pid,
"instruction_pointer": ip, "api": fn, "arguments": arguments,
"return_value": retval, "status": ecode,
"type": "apicall", "raw": line,
}
| __init__ | identifier_name |
linux.py | # Copyright (C) 2010-2013 Claudio Guarnieri.
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import os
import logging
import datetime
import re
import dateutil.parser
from lib.cuckoo.common.abstracts import BehaviorHandler
log = logging.getLogger(__name__)
class FilteredProcessLog(list):
def __init__(self, eventstream, **kwfilters):
self.eventstream = eventstream
self.kwfilters = kwfilters
def __iter__(self):
for event in self.eventstream:
for k, v in self.kwfilters.items():
if event[k] != v:
continue
del event["type"]
yield event
def __nonzero__(self):
return True
class LinuxSystemTap(BehaviorHandler):
"""Parses systemtap generated plaintext logs (see data/strace.stp)."""
key = "processes"
def __init__(self, *args, **kwargs):
super(LinuxSystemTap, self).__init__(*args, **kwargs)
self.processes = []
self.pids_seen = set()
self.forkmap = {}
self.matched = False
self._check_for_probelkm()
def _check_for_probelkm(self):
path_lkm = os.path.join(self.analysis.logs_path, "all.lkm")
if os.path.exists(path_lkm):
lines = open(path_lkm).readlines()
forks = [re.findall("task (\d+)@0x[0-9a-f]+ forked to (\d+)@0x[0-9a-f]+", line) for line in lines]
self.forkmap = dict((j, i) for i, j in reduce(lambda x, y: x+y, forks, []))
# self.results["source"].append("probelkm")
def handles_path(self, path):
if path.endswith(".stap"):
|
def parse(self, path):
parser = StapParser(open(path))
for event in parser:
pid = event["pid"]
if pid not in self.pids_seen:
self.pids_seen.add(pid)
ppid = self.forkmap.get(pid, -1)
process = {
"pid": pid,
"ppid": ppid,
"process_name": event["process_name"],
"first_seen": event["time"],
}
# create a process event as we don't have those with linux+systemtap
pevent = dict(process)
pevent["type"] = "process"
yield pevent
process["calls"] = FilteredProcessLog(parser, pid=pid)
self.processes.append(process)
yield event
def run(self):
if not self.matched:
return
self.processes.sort(key=lambda process: process["first_seen"])
return self.processes
class StapParser(object):
"""Handle .stap logs from the Linux analyzer."""
def __init__(self, fd):
self.fd = fd
def __iter__(self):
self.fd.seek(0)
for line in self.fd:
# 'Thu May 7 14:58:43 2015.390178 python@7f798cb95240[2114] close(6) = 0\n'
# datetime is 31 characters
datetimepart, rest = line[:31], line[32:]
# incredibly sophisticated date time handling
dtms = datetime.timedelta(0, 0, int(datetimepart.split(".", 1)[1]))
dt = dateutil.parser.parse(datetimepart.split(".", 1)[0]) + dtms
parts = re.match("^(.+)@([a-f0-9]+)\[(\d+)\] (\w+)\((.*)\) = (\S+){0,1}\s{0,1}(\(\w+\)){0,1}$", rest)
if not parts:
log.warning("Could not parse syscall trace line: %s", line)
continue
pname, ip, pid, fn, arguments, retval, ecode = parts.groups()
argsplit = arguments.split(", ")
arguments = dict(("p%u" % pos, argsplit[pos]) for pos in range(len(argsplit)))
pid = int(pid) if pid.isdigit() else -1
yield {
"time": dt, "process_name": pname, "pid": pid,
"instruction_pointer": ip, "api": fn, "arguments": arguments,
"return_value": retval, "status": ecode,
"type": "apicall", "raw": line,
}
| self.matched = True
return True | conditional_block |
linux.py | # Copyright (C) 2010-2013 Claudio Guarnieri.
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import os
import logging
import datetime
import re
import dateutil.parser
from lib.cuckoo.common.abstracts import BehaviorHandler
log = logging.getLogger(__name__)
class FilteredProcessLog(list):
def __init__(self, eventstream, **kwfilters):
self.eventstream = eventstream
self.kwfilters = kwfilters
def __iter__(self):
for event in self.eventstream:
for k, v in self.kwfilters.items():
if event[k] != v:
continue
del event["type"]
yield event
def __nonzero__(self):
return True
class LinuxSystemTap(BehaviorHandler):
"""Parses systemtap generated plaintext logs (see data/strace.stp)."""
key = "processes"
def __init__(self, *args, **kwargs):
super(LinuxSystemTap, self).__init__(*args, **kwargs)
self.processes = []
self.pids_seen = set()
self.forkmap = {}
self.matched = False
self._check_for_probelkm()
def _check_for_probelkm(self):
path_lkm = os.path.join(self.analysis.logs_path, "all.lkm")
if os.path.exists(path_lkm):
lines = open(path_lkm).readlines()
forks = [re.findall("task (\d+)@0x[0-9a-f]+ forked to (\d+)@0x[0-9a-f]+", line) for line in lines]
self.forkmap = dict((j, i) for i, j in reduce(lambda x, y: x+y, forks, []))
# self.results["source"].append("probelkm")
def handles_path(self, path):
if path.endswith(".stap"): | return True
def parse(self, path):
parser = StapParser(open(path))
for event in parser:
pid = event["pid"]
if pid not in self.pids_seen:
self.pids_seen.add(pid)
ppid = self.forkmap.get(pid, -1)
process = {
"pid": pid,
"ppid": ppid,
"process_name": event["process_name"],
"first_seen": event["time"],
}
# create a process event as we don't have those with linux+systemtap
pevent = dict(process)
pevent["type"] = "process"
yield pevent
process["calls"] = FilteredProcessLog(parser, pid=pid)
self.processes.append(process)
yield event
def run(self):
if not self.matched:
return
self.processes.sort(key=lambda process: process["first_seen"])
return self.processes
class StapParser(object):
"""Handle .stap logs from the Linux analyzer."""
def __init__(self, fd):
self.fd = fd
def __iter__(self):
self.fd.seek(0)
for line in self.fd:
# 'Thu May 7 14:58:43 2015.390178 python@7f798cb95240[2114] close(6) = 0\n'
# datetime is 31 characters
datetimepart, rest = line[:31], line[32:]
# incredibly sophisticated date time handling
dtms = datetime.timedelta(0, 0, int(datetimepart.split(".", 1)[1]))
dt = dateutil.parser.parse(datetimepart.split(".", 1)[0]) + dtms
parts = re.match("^(.+)@([a-f0-9]+)\[(\d+)\] (\w+)\((.*)\) = (\S+){0,1}\s{0,1}(\(\w+\)){0,1}$", rest)
if not parts:
log.warning("Could not parse syscall trace line: %s", line)
continue
pname, ip, pid, fn, arguments, retval, ecode = parts.groups()
argsplit = arguments.split(", ")
arguments = dict(("p%u" % pos, argsplit[pos]) for pos in range(len(argsplit)))
pid = int(pid) if pid.isdigit() else -1
yield {
"time": dt, "process_name": pname, "pid": pid,
"instruction_pointer": ip, "api": fn, "arguments": arguments,
"return_value": retval, "status": ecode,
"type": "apicall", "raw": line,
} | self.matched = True | random_line_split |
test_instance.py | #!/usr/bin/env python
from __future__ import print_function
from __future__ import absolute_import
import os
from . import spd
# K temps: [0.0, 100.0, 150.0, 200.0, 225.0, 250.0, 275.0, 300.0, 325.0, 350.0, 375.0, 400.0, 425.0, 450.0, 475.0, 500.0, 525.0, 550.0]
# C temps: [273, 373.0, 423.0, 473.0, 498.0, 523.0, 548.0, 573.0, 598.0, 623.0, 648.0, 673.0, 698.0, 723.0, 748.0, 773.0, 798.0, 823.0]
from . import new_lj_thellier_gui_spd as tgs
cwd = os.getcwd()
main_dir = cwd + '/SPD'
calculate = ['int_n', 'frac', 'fvds', 'b_sigma', 'b_beta', 'scat', 'g', 'k', 'k_sse', 'z', 'int_mad_anc', 'int_dang', 'int_alpha', 'alpha_prime', 'theta', 'gamma', 'int_ptrm_n', 'ptrm', 'drat', 'mdrat', 'maxdev', 'dpal', 'md', 'tail_drat', 'dtr', 'dac', 'DANG']
#calculate = ['int_n', 'frac', 'f', 'fvds', 'b_sigma', 'b_beta', 'scat', 'g', 'k', 'k_sse', 'z', 'z_md', 'q', 'r_sq', 'coeff_det_sq', 'int_mad', 'int_mad_anc', 'int_dang', 'int_alpha', 'alpha_prime', 'theta', 'int_crm', 'gamma', 'int_ptrm_n', 'ptrm', 'drat', 'drats', 'cdrat', 'mdrat', 'dck', 'maxdev', 'mdev', 'dpal', 'int_ptrm_tail_n', 'md', 'tail_drat', 'dtr', 'dt', 'ac_n', 'dac', 'gmax']
#calculate = ['int_n', 'int_alpha', 'f', 'k', 'drats', 'int_ptrm_tail_n']
#calculate = ['drats']
gui = tgs.Arai_GUI('/magic_measurements.txt', main_dir)
specimens = list(gui.Data.keys())
example = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', calculate)
example.calculate_all_statistics()
PintPars_example = example
def | (calculate=calculate):
for stat in calculate:
spec = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', [stat])
spec.reqd_stats()
print('---------')
print(calculate)
def many_specimens(calculate=calculate):
from itertools import combinations
c = combinations(calculate, 2)
for combo in c:
print('combo', combo)
spec = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', combo)
spec.reqd_stats()
print('XXXXXXXXXXXXXXX')
#spec.calculate_all_statistics()
SCAT_spec = spd.PintPars(gui.Data, '0238x6011044', 273., 673.) # 0, 400
SCAT_spec2 = spd.PintPars(gui.Data, '0238x6011044', 273., 698.) # 0, 425
SCAT_spec.York_Regression()
SCAT_spec2.York_Regression()
#new_spec = spd.PintPars(gui.Data, '0238x5721062', 100. + 273., 525. + 273.)
#new_spec.calculate_all_statistics()
#gui2 = tgs.Arai_GUI('/consistency_tests/Yamamoto_Hushi_2008_magic_measurements.txt', cwd)
#thing2 = spd.PintPars(gui2.Data, 'SW01-01A-2', 100. + 273., 480. + 273.)
#thing2 = PintPars(gui.Data, specimens[0], 473., 623.)
#thing2.calculate_all_statistics()
#thing3 = PintPars(gui.Data, specimens[1], 473., 623.)
#thing3.calculate_all_statistics()
#thing4 = PintPars(gui.Data, specimens[2], 473., 623.)
#thing4.calculate_all_statistics()
#thing5 = PintPars(gui.Data, specimens[3], 473., 623.)
#thing5.calculate_all_statistics()
#thing6 = PintPars(gui.Data, specimens[4], 473., 623.)
#thing6.calculate_all_statistics()
#gui2 = tgs.Arai_GUI('new_magic_measurements.txt')
#gui3 = tgs.Arai_GUI('consistency_tests/Bowles_etal_2006_magic_measurements.txt')
#gui4 = tgs.Arai_GUI('consistency_tests/Donadini_etal_2007_magic_measurements.txt')
#gui5 = tgs.Arai_GUI('consistency_tests/Krasa_2000_magic_measurements.txt')
#gui6 = tgs.Arai_GUI('consistency_tests/Muxworthy_etal_2011_magic_measurements.txt')
#gui7 = tgs.Arai_GUI('consistency_tests/Paterson_etal_2010_magic_measurements.txt')
#gui8 = tgs.Arai_GUI('consistency_tests/Selkin_etal_2000_magic_measurements.txt')
#gui10 = tgs.Arai_GUI('consistency_tests/Yamamoto_etal_2003_magic_measurements.txt')
| make_specimens | identifier_name |
test_instance.py | #!/usr/bin/env python
from __future__ import print_function
from __future__ import absolute_import
import os
from . import spd
# K temps: [0.0, 100.0, 150.0, 200.0, 225.0, 250.0, 275.0, 300.0, 325.0, 350.0, 375.0, 400.0, 425.0, 450.0, 475.0, 500.0, 525.0, 550.0]
# C temps: [273, 373.0, 423.0, 473.0, 498.0, 523.0, 548.0, 573.0, 598.0, 623.0, 648.0, 673.0, 698.0, 723.0, 748.0, 773.0, 798.0, 823.0]
from . import new_lj_thellier_gui_spd as tgs
cwd = os.getcwd()
main_dir = cwd + '/SPD'
calculate = ['int_n', 'frac', 'fvds', 'b_sigma', 'b_beta', 'scat', 'g', 'k', 'k_sse', 'z', 'int_mad_anc', 'int_dang', 'int_alpha', 'alpha_prime', 'theta', 'gamma', 'int_ptrm_n', 'ptrm', 'drat', 'mdrat', 'maxdev', 'dpal', 'md', 'tail_drat', 'dtr', 'dac', 'DANG']
#calculate = ['int_n', 'frac', 'f', 'fvds', 'b_sigma', 'b_beta', 'scat', 'g', 'k', 'k_sse', 'z', 'z_md', 'q', 'r_sq', 'coeff_det_sq', 'int_mad', 'int_mad_anc', 'int_dang', 'int_alpha', 'alpha_prime', 'theta', 'int_crm', 'gamma', 'int_ptrm_n', 'ptrm', 'drat', 'drats', 'cdrat', 'mdrat', 'dck', 'maxdev', 'mdev', 'dpal', 'int_ptrm_tail_n', 'md', 'tail_drat', 'dtr', 'dt', 'ac_n', 'dac', 'gmax']
#calculate = ['int_n', 'int_alpha', 'f', 'k', 'drats', 'int_ptrm_tail_n']
#calculate = ['drats']
gui = tgs.Arai_GUI('/magic_measurements.txt', main_dir)
specimens = list(gui.Data.keys())
example = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', calculate)
example.calculate_all_statistics()
PintPars_example = example
def make_specimens(calculate=calculate):
for stat in calculate:
spec = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', [stat])
spec.reqd_stats()
print('---------')
print(calculate)
def many_specimens(calculate=calculate):
from itertools import combinations
c = combinations(calculate, 2)
for combo in c:
|
#spec.calculate_all_statistics()
SCAT_spec = spd.PintPars(gui.Data, '0238x6011044', 273., 673.) # 0, 400
SCAT_spec2 = spd.PintPars(gui.Data, '0238x6011044', 273., 698.) # 0, 425
SCAT_spec.York_Regression()
SCAT_spec2.York_Regression()
#new_spec = spd.PintPars(gui.Data, '0238x5721062', 100. + 273., 525. + 273.)
#new_spec.calculate_all_statistics()
#gui2 = tgs.Arai_GUI('/consistency_tests/Yamamoto_Hushi_2008_magic_measurements.txt', cwd)
#thing2 = spd.PintPars(gui2.Data, 'SW01-01A-2', 100. + 273., 480. + 273.)
#thing2 = PintPars(gui.Data, specimens[0], 473., 623.)
#thing2.calculate_all_statistics()
#thing3 = PintPars(gui.Data, specimens[1], 473., 623.)
#thing3.calculate_all_statistics()
#thing4 = PintPars(gui.Data, specimens[2], 473., 623.)
#thing4.calculate_all_statistics()
#thing5 = PintPars(gui.Data, specimens[3], 473., 623.)
#thing5.calculate_all_statistics()
#thing6 = PintPars(gui.Data, specimens[4], 473., 623.)
#thing6.calculate_all_statistics()
#gui2 = tgs.Arai_GUI('new_magic_measurements.txt')
#gui3 = tgs.Arai_GUI('consistency_tests/Bowles_etal_2006_magic_measurements.txt')
#gui4 = tgs.Arai_GUI('consistency_tests/Donadini_etal_2007_magic_measurements.txt')
#gui5 = tgs.Arai_GUI('consistency_tests/Krasa_2000_magic_measurements.txt')
#gui6 = tgs.Arai_GUI('consistency_tests/Muxworthy_etal_2011_magic_measurements.txt')
#gui7 = tgs.Arai_GUI('consistency_tests/Paterson_etal_2010_magic_measurements.txt')
#gui8 = tgs.Arai_GUI('consistency_tests/Selkin_etal_2000_magic_measurements.txt')
#gui10 = tgs.Arai_GUI('consistency_tests/Yamamoto_etal_2003_magic_measurements.txt')
| print('combo', combo)
spec = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', combo)
spec.reqd_stats()
print('XXXXXXXXXXXXXXX') | conditional_block |
test_instance.py | #!/usr/bin/env python
from __future__ import print_function
from __future__ import absolute_import
import os
from . import spd
# K temps: [0.0, 100.0, 150.0, 200.0, 225.0, 250.0, 275.0, 300.0, 325.0, 350.0, 375.0, 400.0, 425.0, 450.0, 475.0, 500.0, 525.0, 550.0]
# C temps: [273, 373.0, 423.0, 473.0, 498.0, 523.0, 548.0, 573.0, 598.0, 623.0, 648.0, 673.0, 698.0, 723.0, 748.0, 773.0, 798.0, 823.0]
from . import new_lj_thellier_gui_spd as tgs
cwd = os.getcwd()
main_dir = cwd + '/SPD'
calculate = ['int_n', 'frac', 'fvds', 'b_sigma', 'b_beta', 'scat', 'g', 'k', 'k_sse', 'z', 'int_mad_anc', 'int_dang', 'int_alpha', 'alpha_prime', 'theta', 'gamma', 'int_ptrm_n', 'ptrm', 'drat', 'mdrat', 'maxdev', 'dpal', 'md', 'tail_drat', 'dtr', 'dac', 'DANG']
#calculate = ['int_n', 'frac', 'f', 'fvds', 'b_sigma', 'b_beta', 'scat', 'g', 'k', 'k_sse', 'z', 'z_md', 'q', 'r_sq', 'coeff_det_sq', 'int_mad', 'int_mad_anc', 'int_dang', 'int_alpha', 'alpha_prime', 'theta', 'int_crm', 'gamma', 'int_ptrm_n', 'ptrm', 'drat', 'drats', 'cdrat', 'mdrat', 'dck', 'maxdev', 'mdev', 'dpal', 'int_ptrm_tail_n', 'md', 'tail_drat', 'dtr', 'dt', 'ac_n', 'dac', 'gmax']
#calculate = ['int_n', 'int_alpha', 'f', 'k', 'drats', 'int_ptrm_tail_n']
#calculate = ['drats']
gui = tgs.Arai_GUI('/magic_measurements.txt', main_dir)
specimens = list(gui.Data.keys())
example = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', calculate)
example.calculate_all_statistics()
PintPars_example = example
def make_specimens(calculate=calculate):
for stat in calculate:
spec = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', [stat])
spec.reqd_stats()
print('---------')
print(calculate)
def many_specimens(calculate=calculate):
from itertools import combinations
c = combinations(calculate, 2)
for combo in c:
print('combo', combo)
spec = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', combo)
spec.reqd_stats()
print('XXXXXXXXXXXXXXX')
#spec.calculate_all_statistics()
SCAT_spec = spd.PintPars(gui.Data, '0238x6011044', 273., 673.) # 0, 400
SCAT_spec2 = spd.PintPars(gui.Data, '0238x6011044', 273., 698.) # 0, 425
SCAT_spec.York_Regression()
SCAT_spec2.York_Regression()
#new_spec = spd.PintPars(gui.Data, '0238x5721062', 100. + 273., 525. + 273.)
#new_spec.calculate_all_statistics()
#gui2 = tgs.Arai_GUI('/consistency_tests/Yamamoto_Hushi_2008_magic_measurements.txt', cwd)
#thing2 = spd.PintPars(gui2.Data, 'SW01-01A-2', 100. + 273., 480. + 273.)
#thing2 = PintPars(gui.Data, specimens[0], 473., 623.)
#thing2.calculate_all_statistics()
#thing3 = PintPars(gui.Data, specimens[1], 473., 623.)
#thing3.calculate_all_statistics()
#thing4 = PintPars(gui.Data, specimens[2], 473., 623.)
#thing4.calculate_all_statistics()
#thing5 = PintPars(gui.Data, specimens[3], 473., 623.)
#thing5.calculate_all_statistics()
#thing6 = PintPars(gui.Data, specimens[4], 473., 623.)
#thing6.calculate_all_statistics()
#gui2 = tgs.Arai_GUI('new_magic_measurements.txt')
#gui3 = tgs.Arai_GUI('consistency_tests/Bowles_etal_2006_magic_measurements.txt')
#gui4 = tgs.Arai_GUI('consistency_tests/Donadini_etal_2007_magic_measurements.txt')
#gui5 = tgs.Arai_GUI('consistency_tests/Krasa_2000_magic_measurements.txt') | #gui10 = tgs.Arai_GUI('consistency_tests/Yamamoto_etal_2003_magic_measurements.txt') | #gui6 = tgs.Arai_GUI('consistency_tests/Muxworthy_etal_2011_magic_measurements.txt')
#gui7 = tgs.Arai_GUI('consistency_tests/Paterson_etal_2010_magic_measurements.txt')
#gui8 = tgs.Arai_GUI('consistency_tests/Selkin_etal_2000_magic_measurements.txt') | random_line_split |
test_instance.py | #!/usr/bin/env python
from __future__ import print_function
from __future__ import absolute_import
import os
from . import spd
# K temps: [0.0, 100.0, 150.0, 200.0, 225.0, 250.0, 275.0, 300.0, 325.0, 350.0, 375.0, 400.0, 425.0, 450.0, 475.0, 500.0, 525.0, 550.0]
# C temps: [273, 373.0, 423.0, 473.0, 498.0, 523.0, 548.0, 573.0, 598.0, 623.0, 648.0, 673.0, 698.0, 723.0, 748.0, 773.0, 798.0, 823.0]
from . import new_lj_thellier_gui_spd as tgs
cwd = os.getcwd()
main_dir = cwd + '/SPD'
calculate = ['int_n', 'frac', 'fvds', 'b_sigma', 'b_beta', 'scat', 'g', 'k', 'k_sse', 'z', 'int_mad_anc', 'int_dang', 'int_alpha', 'alpha_prime', 'theta', 'gamma', 'int_ptrm_n', 'ptrm', 'drat', 'mdrat', 'maxdev', 'dpal', 'md', 'tail_drat', 'dtr', 'dac', 'DANG']
#calculate = ['int_n', 'frac', 'f', 'fvds', 'b_sigma', 'b_beta', 'scat', 'g', 'k', 'k_sse', 'z', 'z_md', 'q', 'r_sq', 'coeff_det_sq', 'int_mad', 'int_mad_anc', 'int_dang', 'int_alpha', 'alpha_prime', 'theta', 'int_crm', 'gamma', 'int_ptrm_n', 'ptrm', 'drat', 'drats', 'cdrat', 'mdrat', 'dck', 'maxdev', 'mdev', 'dpal', 'int_ptrm_tail_n', 'md', 'tail_drat', 'dtr', 'dt', 'ac_n', 'dac', 'gmax']
#calculate = ['int_n', 'int_alpha', 'f', 'k', 'drats', 'int_ptrm_tail_n']
#calculate = ['drats']
gui = tgs.Arai_GUI('/magic_measurements.txt', main_dir)
specimens = list(gui.Data.keys())
example = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', calculate)
example.calculate_all_statistics()
PintPars_example = example
def make_specimens(calculate=calculate):
|
def many_specimens(calculate=calculate):
from itertools import combinations
c = combinations(calculate, 2)
for combo in c:
print('combo', combo)
spec = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', combo)
spec.reqd_stats()
print('XXXXXXXXXXXXXXX')
#spec.calculate_all_statistics()
SCAT_spec = spd.PintPars(gui.Data, '0238x6011044', 273., 673.) # 0, 400
SCAT_spec2 = spd.PintPars(gui.Data, '0238x6011044', 273., 698.) # 0, 425
SCAT_spec.York_Regression()
SCAT_spec2.York_Regression()
#new_spec = spd.PintPars(gui.Data, '0238x5721062', 100. + 273., 525. + 273.)
#new_spec.calculate_all_statistics()
#gui2 = tgs.Arai_GUI('/consistency_tests/Yamamoto_Hushi_2008_magic_measurements.txt', cwd)
#thing2 = spd.PintPars(gui2.Data, 'SW01-01A-2', 100. + 273., 480. + 273.)
#thing2 = PintPars(gui.Data, specimens[0], 473., 623.)
#thing2.calculate_all_statistics()
#thing3 = PintPars(gui.Data, specimens[1], 473., 623.)
#thing3.calculate_all_statistics()
#thing4 = PintPars(gui.Data, specimens[2], 473., 623.)
#thing4.calculate_all_statistics()
#thing5 = PintPars(gui.Data, specimens[3], 473., 623.)
#thing5.calculate_all_statistics()
#thing6 = PintPars(gui.Data, specimens[4], 473., 623.)
#thing6.calculate_all_statistics()
#gui2 = tgs.Arai_GUI('new_magic_measurements.txt')
#gui3 = tgs.Arai_GUI('consistency_tests/Bowles_etal_2006_magic_measurements.txt')
#gui4 = tgs.Arai_GUI('consistency_tests/Donadini_etal_2007_magic_measurements.txt')
#gui5 = tgs.Arai_GUI('consistency_tests/Krasa_2000_magic_measurements.txt')
#gui6 = tgs.Arai_GUI('consistency_tests/Muxworthy_etal_2011_magic_measurements.txt')
#gui7 = tgs.Arai_GUI('consistency_tests/Paterson_etal_2010_magic_measurements.txt')
#gui8 = tgs.Arai_GUI('consistency_tests/Selkin_etal_2000_magic_measurements.txt')
#gui10 = tgs.Arai_GUI('consistency_tests/Yamamoto_etal_2003_magic_measurements.txt')
| for stat in calculate:
spec = spd.PintPars(gui.Data, '0238x6011044', 473., 623., 'magic', [stat])
spec.reqd_stats()
print('---------')
print(calculate) | identifier_body |
projects.component.spec.ts | import { Observable } from 'rxjs';
import { provide } from '@angular/core';
import {
describe, it, inject, beforeEachProviders, expect
} from '@angular/core/testing';
import { ProjectsService } from './projects.service.ts';
import { ProjectsComponent } from './projects.component.ts';
/**
* The test mock for a {{#crossLink "ProjectsService"}}{{/crossLink}}.
*
* @module projects
* @class ProjectsServiceStub
*/
class ProjectsServiceStub {
/**
*
* @method getProjects
* @param project {string} the project name
* @return {Observable} the mock project objects sequence
*/
getProjects(project: string): Observable<Object[]> {
let values = [
{name: 'QIN_Test', description: 'Test'},
{name: 'QIN', description: 'Production'}
];
return Observable.of(values);
}
}
beforeEachProviders(() => {
return [
ProjectsComponent,
provide(ProjectsService, {useClass: ProjectsServiceStub})
];
});
/**
* Runs the given test body on the injected component and service.
*
* @function test
* @param body {function(CollectionsComponent, CollectionsService)} the
* test body
* @private
*/
function test(body) |
/**
* The {{#crossLink "ProjectsComponent"}}{{/crossLink}} validator.
* This test validates that the projects are listed in sort order.
*
* @module projects
* @class ProjectsComponentSpec
*/
describe('The Projects component', function() {
it('should sort the projects', test((component, service) => {
// The mocked projects are in reverse sort order.
service.getProjects().subscribe(projects => {
let expected = projects.reverse();
// Compare to the component projects property.
expect(component.projects, 'Projects are incorrect').to.eql(expected);
});
}));
});
| {
return inject(
[ProjectsComponent, ProjectsService],
(component: ProjectsComponent, service: ProjectsService) => {
body(component, service);
}
);
} | identifier_body |
projects.component.spec.ts | import { Observable } from 'rxjs';
import { provide } from '@angular/core';
import {
describe, it, inject, beforeEachProviders, expect
} from '@angular/core/testing';
import { ProjectsService } from './projects.service.ts';
import { ProjectsComponent } from './projects.component.ts';
/**
* The test mock for a {{#crossLink "ProjectsService"}}{{/crossLink}}.
*
* @module projects
* @class ProjectsServiceStub
*/
class ProjectsServiceStub {
/**
*
* @method getProjects
* @param project {string} the project name
* @return {Observable} the mock project objects sequence
*/
getProjects(project: string): Observable<Object[]> {
let values = [
{name: 'QIN_Test', description: 'Test'},
{name: 'QIN', description: 'Production'}
];
return Observable.of(values);
}
}
beforeEachProviders(() => {
return [
ProjectsComponent,
provide(ProjectsService, {useClass: ProjectsServiceStub})
];
});
/**
* Runs the given test body on the injected component and service.
*
* @function test
* @param body {function(CollectionsComponent, CollectionsService)} the
* test body
* @private
*/
function test(body) {
return inject(
[ProjectsComponent, ProjectsService],
(component: ProjectsComponent, service: ProjectsService) => {
body(component, service);
}
);
}
/**
* The {{#crossLink "ProjectsComponent"}}{{/crossLink}} validator.
* This test validates that the projects are listed in sort order.
*
* @module projects
* @class ProjectsComponentSpec
*/
describe('The Projects component', function() {
it('should sort the projects', test((component, service) => {
// The mocked projects are in reverse sort order.
service.getProjects().subscribe(projects => {
let expected = projects.reverse();
// Compare to the component projects property.
expect(component.projects, 'Projects are incorrect').to.eql(expected);
});
})); | }); | random_line_split |
|
projects.component.spec.ts | import { Observable } from 'rxjs';
import { provide } from '@angular/core';
import {
describe, it, inject, beforeEachProviders, expect
} from '@angular/core/testing';
import { ProjectsService } from './projects.service.ts';
import { ProjectsComponent } from './projects.component.ts';
/**
* The test mock for a {{#crossLink "ProjectsService"}}{{/crossLink}}.
*
* @module projects
* @class ProjectsServiceStub
*/
class ProjectsServiceStub {
/**
*
* @method getProjects
* @param project {string} the project name
* @return {Observable} the mock project objects sequence
*/
| (project: string): Observable<Object[]> {
let values = [
{name: 'QIN_Test', description: 'Test'},
{name: 'QIN', description: 'Production'}
];
return Observable.of(values);
}
}
beforeEachProviders(() => {
return [
ProjectsComponent,
provide(ProjectsService, {useClass: ProjectsServiceStub})
];
});
/**
* Runs the given test body on the injected component and service.
*
* @function test
* @param body {function(CollectionsComponent, CollectionsService)} the
* test body
* @private
*/
function test(body) {
return inject(
[ProjectsComponent, ProjectsService],
(component: ProjectsComponent, service: ProjectsService) => {
body(component, service);
}
);
}
/**
* The {{#crossLink "ProjectsComponent"}}{{/crossLink}} validator.
* This test validates that the projects are listed in sort order.
*
* @module projects
* @class ProjectsComponentSpec
*/
describe('The Projects component', function() {
it('should sort the projects', test((component, service) => {
// The mocked projects are in reverse sort order.
service.getProjects().subscribe(projects => {
let expected = projects.reverse();
// Compare to the component projects property.
expect(component.projects, 'Projects are incorrect').to.eql(expected);
});
}));
});
| getProjects | identifier_name |
eventloop.js | // JavaScript 运行机制详解:再谈Event Loop
// http://www.ruanyifeng.com/blog/2014/10/event-loop.html
// 除了放置异步任务的事件,"任务队列"还可以放置定时事件,即指定某些代码在多少时间之后执行。
// 这叫做"定时器"(timer)功能,也就是定时执行的代码。
// 定时器功能主要由setTimeout()和setInterval()这两个函数来完成,
// 它们的内部运行机制完全一样,区别在于前者指定的代码是一次性执行,后者则为反复执行。
// 以下主要讨论setTimeout()。
// setTimeout()接受两个参数,第一个是回调函数,第二个是推迟执行的毫秒数。
console.log(1);
setTimeout(function(){console.log(2);},1000);
console.log(3);
// 上面代码的执行结果是1,3,2,因为setTimeout()将第二行推迟到1000毫秒之后执行。
setTimeout(function(){console.log(11);}, 0);
console.log(22);
// 上面代码的执行结果总是2,1,因为只有在执行完第二行以后,系统才会去执行"任务队列"中的回调函数。
// Node.js也是单线程的Event Loop,但是它的运行机制不同于浏览器环境。
// Node.js的运行机制如下。
// (1)V8引擎解析JavaScript脚本。
// (2)解析后的代码,调用Node API。
// (3)libuv库负责Node API的执行。它将不同的任务分配给不同的线程,形成一个Event Loop(事件循环),以异步的方式将任务的执行结果返回给V8引擎。
// (4)V8引擎再将结果返回给用户。
// 除了setTimeout和setInterval这两个方法,Node.js还提供了另外两个与"任务队列"有关的方法:
// process.nextTick和setImmediate。它们可以帮助我们加深对"任务队列"的理解。
// process.nextTick方法可以在当前"执行栈"的尾部----下一次Event Loop(主线程读取"任务队列")之前----触发回调函数。
// 也就是说,它指定的任务总是发生在所有异步任务之前。
// setImmediate方法则是在当前"任务队列"的尾部添加事件,也就是说,它指定的任务总是在下一次Event Loop时执行,
// 这与setTimeout(fn, 0)很像。
// 请看下面的例子(via StackOverflow)。
process.nextTick(function A() {
console.log(111);
process.nextTick(function B(){console.log(222);});
});
setTimeout(function timeout() {
console.log('nextTick TIMEOUT FIRED');
}, 0);
// 111 |
// 现在,再看setImmediate。
setImmediate(function A() {
console.log(1111);
setImmediate(function B(){console.log(2222);});
});
setTimeout(function timeout() {
console.log('setImmediate TIMEOUT FIRED');
}, 0);
// 上面代码中,setImmediate与setTimeout(fn,0)各自添加了一个回调函数A和timeout,都是在下一次Event Loop触发。
// 那么,哪个回调函数先执行呢?答案是不确定。
// 运行结果可能是1--TIMEOUT FIRED--2,也可能是TIMEOUT FIRED--1--2。
// 令人困惑的是,Node.js文档中称,setImmediate指定的回调函数,总是排在setTimeout前面。
// 实际上,这种情况只发生在递归调用的时候。
setImmediate(function (){
setImmediate(function A() {
console.log(11111);
setImmediate(function B(){console.log(22222);});
});
setTimeout(function timeout() {
console.log('11111 TIMEOUT FIRED');
}, 0);
});
// 1
// TIMEOUT FIRED
// 2
// 上面代码中,setImmediate和setTimeout被封装在一个setImmediate里面,
// 它的运行结果总是1--TIMEOUT FIRED--2,这时函数A一定在timeout前面触发。
// 至于2排在TIMEOUT FIRED的后面(即函数B在timeout后面触发),是因为setImmediate总是将事件注册到下一轮Event Loop,
// 所以函数A和timeout是在同一轮Loop执行,而函数B在下一轮Loop执行。
// 我们由此得到了process.nextTick和setImmediate的一个重要区别:
// 多个process.nextTick语句总是在当前"执行栈"一次执行完,多个setImmediate可能则需要多次loop才能执行完。
// 事实上,这正是Node.js 10.0版添加setImmediate方法的原因,
// 否则像下面这样的递归调用process.nextTick,将会没完没了,主线程根本不会去读取"事件队列"!
// process.nextTick(function foo() {
// process.nextTick(foo);
// });
// setImmediate(function foo() {
// setImmediate(foo);
// });
// 事实上,现在要是你写出递归的process.nextTick,Node.js会抛出一个警告,要求你改成setImmediate。
// 另外,由于process.nextTick指定的回调函数是在本次"事件循环"触发,而setImmediate指定的是在下次"事件循环"触发,
// 所以很显然,前者总是比后者发生得早,而且执行效率也高(因为不用检查"任务队列")。 | // 2
// TIMEOUT FIRED
// 上面代码中,由于process.nextTick方法指定的回调函数,总是在当前"执行栈"的尾部触发,
// 所以不仅函数A比setTimeout指定的回调函数timeout先执行,而且函数B也比timeout先执行。
// 这说明,如果有多个process.nextTick语句(不管它们是否嵌套),将全部在当前"执行栈"执行。 | random_line_split |
certificate.rs | //
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use crate::{
get_sha256,
report::{AttestationInfo, Report},
};
use anyhow::Context;
use log::info;
use openssl::{
asn1::Asn1Time,
bn::{BigNum, MsbOption},
hash::MessageDigest,
pkey::{HasPublic, PKey, PKeyRef, Private},
rsa::Rsa,
stack::Stack,
x509::{
extension::{
AuthorityKeyIdentifier, BasicConstraints, KeyUsage, SubjectAlternativeName,
SubjectKeyIdentifier,
},
X509Builder, X509NameBuilder, X509Ref, X509Req, X509,
},
};
// X.509 certificate parameters.
//
// <https://tools.ietf.org/html/rfc5280>
const RSA_KEY_SIZE: u32 = 2048;
// Version is zero-indexed, so the value of `2` corresponds to the version `3`.
const CERTIFICATE_VERSION: i32 = 2;
// Length of the randomly generated X.509 certificate serial number (which is 20 bytes).
//
// The most significant bit is excluded because it's passed as a separate argument to:
// https://docs.rs/openssl/0.10.33/openssl/bn/struct.BigNum.html#method.rand
const SERIAL_NUMBER_SIZE: i32 = 159;
const CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS: u32 = 1;
const DEFAULT_DNS_NAME: &str = "localhost";
/// Indicates whether to add a custom TEE extension to a certificate.
#[derive(PartialEq)]
pub enum AddTeeExtension {
/// Enum value contains a PEM encoded TEE Provider's X.509 certificate that signs TEE firmware
/// key.
Yes(Vec<u8>),
No,
}
/// Convenience structure for generating X.509 certificates.
///
/// <https://tools.ietf.org/html/rfc5280>
pub struct CertificateAuthority {
pub key_pair: PKey<Private>,
pub root_certificate: X509,
}
impl CertificateAuthority {
/// Generates a root X.509 certificate and a corresponding private/public key pair.
///
/// `add_tee_extension` indicates whether to add a custom extension containing a TEE report to
/// the root certificate.
pub fn create(add_tee_extension: AddTeeExtension) -> anyhow::Result<Self> {
let key_pair = CertificateAuthority::generate_key_pair()?;
let root_certificate =
CertificateAuthority::generate_root_certificate(&key_pair, add_tee_extension)?;
Ok(Self {
key_pair,
root_certificate,
})
}
/// Generates RSA private/public key pair.
fn generate_key_pair() -> anyhow::Result<PKey<Private>> {
let rsa = Rsa::generate(RSA_KEY_SIZE).context("Couldn't generate RSA key")?;
PKey::from_rsa(rsa).context("Couldn't parse RSA key")
}
/// Creates a root X.509 certificate.
fn generate_root_certificate(
key_pair: &PKey<Private>,
add_tee_extension: AddTeeExtension,
) -> anyhow::Result<X509> {
info!("Generating root certificate");
let mut builder = CertificateBuilder::create()?;
builder.set_version(CERTIFICATE_VERSION)?;
builder.set_serial_number(SERIAL_NUMBER_SIZE)?;
builder.set_name()?;
builder.set_public_key(key_pair)?;
builder.set_expiration_interval(CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS)?;
builder.add_basic_constraints_extension(true)?;
builder.add_key_usage_extension(true)?;
builder.add_subject_key_identifier_extension(None)?;
builder.add_subject_alt_name_extension()?;
// Bind the certificate to the TEE firmware using an X.509 TEE extension.
if let AddTeeExtension::Yes(tee_certificate) = add_tee_extension {
builder.add_tee_extension(key_pair, tee_certificate)?;
}
let certificate = builder.build(key_pair)?;
Ok(certificate)
}
/// Generates an X.509 certificate based on the certificate signing `request`.
///
/// `add_tee_extension` indicates whether to add a custom extension containing a TEE report.
pub fn sign_certificate(
&self,
request: X509Req,
add_tee_extension: AddTeeExtension,
) -> anyhow::Result<X509> {
info!("Signing certificate");
let mut builder = CertificateBuilder::create()?;
builder.set_version(CERTIFICATE_VERSION)?;
builder.set_serial_number(SERIAL_NUMBER_SIZE)?;
builder.set_name()?;
builder.set_public_key(request.public_key()?.as_ref())?;
builder.set_expiration_interval(CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS)?;
builder.add_basic_constraints_extension(false)?;
builder.add_key_usage_extension(false)?;
builder.add_subject_key_identifier_extension(Some(&self.root_certificate))?;
builder.add_auth_key_identifier_extension(&self.root_certificate)?;
// Add X.509 extensions from the certificate signing request.
builder.add_extensions(request.extensions()?)?;
// Bind the certificate to the TEE firmware using an X.509 TEE extension.
if let AddTeeExtension::Yes(tee_certificate) = add_tee_extension {
builder.add_tee_extension(request.public_key()?.as_ref(), tee_certificate)?;
}
let certificate = builder.build(&self.key_pair)?;
Ok(certificate)
}
/// Get RSA key pair encoded in PEM format.
///
/// <https://tools.ietf.org/html/rfc7468>
pub fn get_private_key_pem(&self) -> anyhow::Result<Vec<u8>> {
self.key_pair
.private_key_to_pem_pkcs8()
.context("Couldn't encode key pair in PEM format")
}
/// Get a root X.509 certificate encoded in PEM format.
///
/// <https://tools.ietf.org/html/rfc7468>
pub fn get_root_certificate_pem(&self) -> anyhow::Result<Vec<u8>> {
self.root_certificate
.to_pem()
.context("Couldn't encode root certificate in PEM format")
}
}
/// Helper struct that implements certificate creation using `openssl`.
struct CertificateBuilder {
builder: X509Builder,
}
impl CertificateBuilder {
fn create() -> anyhow::Result<Self> {
let builder = X509::builder()?;
Ok(Self { builder })
}
fn set_version(&mut self, version: i32) -> anyhow::Result<&mut Self> {
self.builder.set_version(version)?;
Ok(self)
}
fn set_serial_number(&mut self, serial_number_size: i32) -> anyhow::Result<&mut Self> {
let serial_number = {
let mut serial = BigNum::new()?;
serial.rand(serial_number_size, MsbOption::MAYBE_ZERO, false)?;
serial.to_asn1_integer()?
};
self.builder.set_serial_number(&serial_number)?;
Ok(self)
}
fn set_name(&mut self) -> anyhow::Result<&mut Self> {
let mut name = X509NameBuilder::new()?;
name.append_entry_by_text("O", "Oak")?;
name.append_entry_by_text("CN", "Proxy Attestation Service")?;
let name = name.build();
self.builder.set_subject_name(&name)?;
self.builder.set_issuer_name(&name)?;
Ok(self)
}
fn set_public_key<T>(&mut self, public_key: &PKeyRef<T>) -> anyhow::Result<&mut Self>
where
T: HasPublic,
{
self.builder.set_pubkey(public_key)?;
Ok(self)
}
fn set_expiration_interval(&mut self, expiration_interval: u32) -> anyhow::Result<&mut Self> {
let not_before = Asn1Time::days_from_now(0)?;
self.builder.set_not_before(¬_before)?;
let not_after = Asn1Time::days_from_now(expiration_interval)?;
self.builder.set_not_after(¬_after)?;
Ok(self)
}
fn add_basic_constraints_extension(&mut self, is_critical: bool) -> anyhow::Result<&mut Self> {
if is_critical {
self.builder
.append_extension(BasicConstraints::new().critical().build()?)?;
} else {
self.builder
.append_extension(BasicConstraints::new().build()?)?;
}
Ok(self)
}
fn add_key_usage_extension(&mut self, is_root_certificate: bool) -> anyhow::Result<&mut Self> {
if is_root_certificate {
self.builder.append_extension(
KeyUsage::new()
.critical()
.key_cert_sign()
.crl_sign()
.build()?,
)?;
} else {
self.builder.append_extension(
KeyUsage::new()
.critical()
.non_repudiation()
.digital_signature()
.key_encipherment()
.build()?,
)?;
}
Ok(self)
}
fn add_subject_key_identifier_extension(
&mut self,
root_certificate: Option<&X509Ref>,
) -> anyhow::Result<&mut Self> {
let subject_key_identifier = SubjectKeyIdentifier::new()
.build(&self.builder.x509v3_context(root_certificate, None))?;
self.builder.append_extension(subject_key_identifier)?;
Ok(self)
}
fn add_subject_alt_name_extension(&mut self) -> anyhow::Result<&mut Self> {
let subject_alt_name = SubjectAlternativeName::new()
.dns(DEFAULT_DNS_NAME)
.build(&self.builder.x509v3_context(None, None))?;
self.builder.append_extension(subject_alt_name)?;
Ok(self)
}
fn add_auth_key_identifier_extension(
&mut self,
root_certificate: &X509Ref,
) -> anyhow::Result<&mut Self> {
let auth_key_identifier = AuthorityKeyIdentifier::new()
.keyid(false)
.issuer(false)
.build(&self.builder.x509v3_context(Some(root_certificate), None))?;
self.builder.append_extension(auth_key_identifier)?;
Ok(self)
}
// Generates a TEE report with the public key hash as data and add it to the certificate as a
// custom extension. This is required to bind the certificate to the TEE firmware.
fn | <T>(
&mut self,
public_key: &PKeyRef<T>,
tee_certificate: Vec<u8>,
) -> anyhow::Result<&mut Self>
where
T: HasPublic,
{
let public_key_hash = get_sha256(&public_key.public_key_to_der()?);
let tee_report = Report::new(&public_key_hash);
let attestation_info = AttestationInfo {
report: tee_report,
certificate: tee_certificate,
};
let tee_extension = attestation_info.to_extension()?;
self.builder.append_extension(tee_extension)?;
Ok(self)
}
fn add_extensions(
&mut self,
extensions: Stack<openssl::x509::X509Extension>,
) -> anyhow::Result<&mut Self> {
for extension in extensions.iter() {
self.builder.append_extension2(extension)?;
}
Ok(self)
}
fn build(mut self, private_key: &PKey<Private>) -> anyhow::Result<X509> {
self.builder.sign(private_key, MessageDigest::sha256())?;
Ok(self.builder.build())
}
}
| add_tee_extension | identifier_name |
certificate.rs | //
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use crate::{
get_sha256,
report::{AttestationInfo, Report},
};
use anyhow::Context;
use log::info;
use openssl::{
asn1::Asn1Time,
bn::{BigNum, MsbOption},
hash::MessageDigest,
pkey::{HasPublic, PKey, PKeyRef, Private},
rsa::Rsa,
stack::Stack,
x509::{
extension::{
AuthorityKeyIdentifier, BasicConstraints, KeyUsage, SubjectAlternativeName,
SubjectKeyIdentifier,
},
X509Builder, X509NameBuilder, X509Ref, X509Req, X509,
},
};
// X.509 certificate parameters.
//
// <https://tools.ietf.org/html/rfc5280>
const RSA_KEY_SIZE: u32 = 2048;
// Version is zero-indexed, so the value of `2` corresponds to the version `3`.
const CERTIFICATE_VERSION: i32 = 2;
// Length of the randomly generated X.509 certificate serial number (which is 20 bytes).
//
// The most significant bit is excluded because it's passed as a separate argument to:
// https://docs.rs/openssl/0.10.33/openssl/bn/struct.BigNum.html#method.rand
const SERIAL_NUMBER_SIZE: i32 = 159;
const CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS: u32 = 1;
const DEFAULT_DNS_NAME: &str = "localhost";
/// Indicates whether to add a custom TEE extension to a certificate.
#[derive(PartialEq)]
pub enum AddTeeExtension {
/// Enum value contains a PEM encoded TEE Provider's X.509 certificate that signs TEE firmware
/// key.
Yes(Vec<u8>),
No,
}
/// Convenience structure for generating X.509 certificates.
///
/// <https://tools.ietf.org/html/rfc5280>
pub struct CertificateAuthority {
pub key_pair: PKey<Private>,
pub root_certificate: X509,
}
impl CertificateAuthority {
/// Generates a root X.509 certificate and a corresponding private/public key pair.
///
/// `add_tee_extension` indicates whether to add a custom extension containing a TEE report to
/// the root certificate.
pub fn create(add_tee_extension: AddTeeExtension) -> anyhow::Result<Self> {
let key_pair = CertificateAuthority::generate_key_pair()?;
let root_certificate =
CertificateAuthority::generate_root_certificate(&key_pair, add_tee_extension)?;
Ok(Self {
key_pair,
root_certificate,
})
}
/// Generates RSA private/public key pair.
fn generate_key_pair() -> anyhow::Result<PKey<Private>> {
let rsa = Rsa::generate(RSA_KEY_SIZE).context("Couldn't generate RSA key")?;
PKey::from_rsa(rsa).context("Couldn't parse RSA key")
}
/// Creates a root X.509 certificate.
fn generate_root_certificate(
key_pair: &PKey<Private>,
add_tee_extension: AddTeeExtension,
) -> anyhow::Result<X509> {
info!("Generating root certificate");
let mut builder = CertificateBuilder::create()?;
builder.set_version(CERTIFICATE_VERSION)?;
builder.set_serial_number(SERIAL_NUMBER_SIZE)?;
builder.set_name()?;
builder.set_public_key(key_pair)?;
builder.set_expiration_interval(CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS)?;
builder.add_basic_constraints_extension(true)?;
builder.add_key_usage_extension(true)?;
builder.add_subject_key_identifier_extension(None)?;
builder.add_subject_alt_name_extension()?;
// Bind the certificate to the TEE firmware using an X.509 TEE extension.
if let AddTeeExtension::Yes(tee_certificate) = add_tee_extension {
builder.add_tee_extension(key_pair, tee_certificate)?;
}
let certificate = builder.build(key_pair)?;
Ok(certificate)
}
/// Generates an X.509 certificate based on the certificate signing `request`.
///
/// `add_tee_extension` indicates whether to add a custom extension containing a TEE report.
pub fn sign_certificate(
&self,
request: X509Req,
add_tee_extension: AddTeeExtension,
) -> anyhow::Result<X509> {
info!("Signing certificate");
let mut builder = CertificateBuilder::create()?;
builder.set_version(CERTIFICATE_VERSION)?;
builder.set_serial_number(SERIAL_NUMBER_SIZE)?;
builder.set_name()?;
builder.set_public_key(request.public_key()?.as_ref())?;
builder.set_expiration_interval(CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS)?;
builder.add_basic_constraints_extension(false)?;
builder.add_key_usage_extension(false)?;
builder.add_subject_key_identifier_extension(Some(&self.root_certificate))?;
builder.add_auth_key_identifier_extension(&self.root_certificate)?;
// Add X.509 extensions from the certificate signing request.
builder.add_extensions(request.extensions()?)?;
// Bind the certificate to the TEE firmware using an X.509 TEE extension.
if let AddTeeExtension::Yes(tee_certificate) = add_tee_extension {
builder.add_tee_extension(request.public_key()?.as_ref(), tee_certificate)?;
}
let certificate = builder.build(&self.key_pair)?;
Ok(certificate)
}
/// Get RSA key pair encoded in PEM format.
///
/// <https://tools.ietf.org/html/rfc7468>
pub fn get_private_key_pem(&self) -> anyhow::Result<Vec<u8>> {
self.key_pair
.private_key_to_pem_pkcs8()
.context("Couldn't encode key pair in PEM format")
}
/// Get a root X.509 certificate encoded in PEM format.
///
/// <https://tools.ietf.org/html/rfc7468>
pub fn get_root_certificate_pem(&self) -> anyhow::Result<Vec<u8>> {
self.root_certificate
.to_pem()
.context("Couldn't encode root certificate in PEM format")
}
}
/// Helper struct that implements certificate creation using `openssl`.
struct CertificateBuilder {
builder: X509Builder,
}
impl CertificateBuilder {
fn create() -> anyhow::Result<Self> |
fn set_version(&mut self, version: i32) -> anyhow::Result<&mut Self> {
self.builder.set_version(version)?;
Ok(self)
}
fn set_serial_number(&mut self, serial_number_size: i32) -> anyhow::Result<&mut Self> {
let serial_number = {
let mut serial = BigNum::new()?;
serial.rand(serial_number_size, MsbOption::MAYBE_ZERO, false)?;
serial.to_asn1_integer()?
};
self.builder.set_serial_number(&serial_number)?;
Ok(self)
}
fn set_name(&mut self) -> anyhow::Result<&mut Self> {
let mut name = X509NameBuilder::new()?;
name.append_entry_by_text("O", "Oak")?;
name.append_entry_by_text("CN", "Proxy Attestation Service")?;
let name = name.build();
self.builder.set_subject_name(&name)?;
self.builder.set_issuer_name(&name)?;
Ok(self)
}
fn set_public_key<T>(&mut self, public_key: &PKeyRef<T>) -> anyhow::Result<&mut Self>
where
T: HasPublic,
{
self.builder.set_pubkey(public_key)?;
Ok(self)
}
fn set_expiration_interval(&mut self, expiration_interval: u32) -> anyhow::Result<&mut Self> {
let not_before = Asn1Time::days_from_now(0)?;
self.builder.set_not_before(¬_before)?;
let not_after = Asn1Time::days_from_now(expiration_interval)?;
self.builder.set_not_after(¬_after)?;
Ok(self)
}
fn add_basic_constraints_extension(&mut self, is_critical: bool) -> anyhow::Result<&mut Self> {
if is_critical {
self.builder
.append_extension(BasicConstraints::new().critical().build()?)?;
} else {
self.builder
.append_extension(BasicConstraints::new().build()?)?;
}
Ok(self)
}
fn add_key_usage_extension(&mut self, is_root_certificate: bool) -> anyhow::Result<&mut Self> {
if is_root_certificate {
self.builder.append_extension(
KeyUsage::new()
.critical()
.key_cert_sign()
.crl_sign()
.build()?,
)?;
} else {
self.builder.append_extension(
KeyUsage::new()
.critical()
.non_repudiation()
.digital_signature()
.key_encipherment()
.build()?,
)?;
}
Ok(self)
}
fn add_subject_key_identifier_extension(
&mut self,
root_certificate: Option<&X509Ref>,
) -> anyhow::Result<&mut Self> {
let subject_key_identifier = SubjectKeyIdentifier::new()
.build(&self.builder.x509v3_context(root_certificate, None))?;
self.builder.append_extension(subject_key_identifier)?;
Ok(self)
}
fn add_subject_alt_name_extension(&mut self) -> anyhow::Result<&mut Self> {
let subject_alt_name = SubjectAlternativeName::new()
.dns(DEFAULT_DNS_NAME)
.build(&self.builder.x509v3_context(None, None))?;
self.builder.append_extension(subject_alt_name)?;
Ok(self)
}
fn add_auth_key_identifier_extension(
&mut self,
root_certificate: &X509Ref,
) -> anyhow::Result<&mut Self> {
let auth_key_identifier = AuthorityKeyIdentifier::new()
.keyid(false)
.issuer(false)
.build(&self.builder.x509v3_context(Some(root_certificate), None))?;
self.builder.append_extension(auth_key_identifier)?;
Ok(self)
}
// Generates a TEE report with the public key hash as data and add it to the certificate as a
// custom extension. This is required to bind the certificate to the TEE firmware.
fn add_tee_extension<T>(
&mut self,
public_key: &PKeyRef<T>,
tee_certificate: Vec<u8>,
) -> anyhow::Result<&mut Self>
where
T: HasPublic,
{
let public_key_hash = get_sha256(&public_key.public_key_to_der()?);
let tee_report = Report::new(&public_key_hash);
let attestation_info = AttestationInfo {
report: tee_report,
certificate: tee_certificate,
};
let tee_extension = attestation_info.to_extension()?;
self.builder.append_extension(tee_extension)?;
Ok(self)
}
fn add_extensions(
&mut self,
extensions: Stack<openssl::x509::X509Extension>,
) -> anyhow::Result<&mut Self> {
for extension in extensions.iter() {
self.builder.append_extension2(extension)?;
}
Ok(self)
}
fn build(mut self, private_key: &PKey<Private>) -> anyhow::Result<X509> {
self.builder.sign(private_key, MessageDigest::sha256())?;
Ok(self.builder.build())
}
}
| {
let builder = X509::builder()?;
Ok(Self { builder })
} | identifier_body |
certificate.rs | //
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use crate::{
get_sha256,
report::{AttestationInfo, Report},
};
use anyhow::Context;
use log::info;
use openssl::{
asn1::Asn1Time,
bn::{BigNum, MsbOption},
hash::MessageDigest,
pkey::{HasPublic, PKey, PKeyRef, Private},
rsa::Rsa,
stack::Stack,
x509::{
extension::{
AuthorityKeyIdentifier, BasicConstraints, KeyUsage, SubjectAlternativeName,
SubjectKeyIdentifier,
},
X509Builder, X509NameBuilder, X509Ref, X509Req, X509,
},
};
// X.509 certificate parameters.
//
// <https://tools.ietf.org/html/rfc5280>
const RSA_KEY_SIZE: u32 = 2048;
// Version is zero-indexed, so the value of `2` corresponds to the version `3`.
const CERTIFICATE_VERSION: i32 = 2;
// Length of the randomly generated X.509 certificate serial number (which is 20 bytes).
//
// The most significant bit is excluded because it's passed as a separate argument to:
// https://docs.rs/openssl/0.10.33/openssl/bn/struct.BigNum.html#method.rand
const SERIAL_NUMBER_SIZE: i32 = 159;
const CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS: u32 = 1;
const DEFAULT_DNS_NAME: &str = "localhost";
/// Indicates whether to add a custom TEE extension to a certificate.
#[derive(PartialEq)]
pub enum AddTeeExtension {
/// Enum value contains a PEM encoded TEE Provider's X.509 certificate that signs TEE firmware
/// key.
Yes(Vec<u8>),
No,
}
/// Convenience structure for generating X.509 certificates.
///
/// <https://tools.ietf.org/html/rfc5280>
pub struct CertificateAuthority {
pub key_pair: PKey<Private>,
pub root_certificate: X509,
}
impl CertificateAuthority {
/// Generates a root X.509 certificate and a corresponding private/public key pair.
///
/// `add_tee_extension` indicates whether to add a custom extension containing a TEE report to
/// the root certificate.
pub fn create(add_tee_extension: AddTeeExtension) -> anyhow::Result<Self> {
let key_pair = CertificateAuthority::generate_key_pair()?;
let root_certificate =
CertificateAuthority::generate_root_certificate(&key_pair, add_tee_extension)?;
Ok(Self {
key_pair,
root_certificate,
})
}
/// Generates RSA private/public key pair.
fn generate_key_pair() -> anyhow::Result<PKey<Private>> {
let rsa = Rsa::generate(RSA_KEY_SIZE).context("Couldn't generate RSA key")?;
PKey::from_rsa(rsa).context("Couldn't parse RSA key")
}
/// Creates a root X.509 certificate.
fn generate_root_certificate(
key_pair: &PKey<Private>,
add_tee_extension: AddTeeExtension,
) -> anyhow::Result<X509> {
info!("Generating root certificate");
let mut builder = CertificateBuilder::create()?;
builder.set_version(CERTIFICATE_VERSION)?;
builder.set_serial_number(SERIAL_NUMBER_SIZE)?;
builder.set_name()?;
builder.set_public_key(key_pair)?;
builder.set_expiration_interval(CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS)?;
builder.add_basic_constraints_extension(true)?;
builder.add_key_usage_extension(true)?;
builder.add_subject_key_identifier_extension(None)?;
builder.add_subject_alt_name_extension()?;
// Bind the certificate to the TEE firmware using an X.509 TEE extension.
if let AddTeeExtension::Yes(tee_certificate) = add_tee_extension |
let certificate = builder.build(key_pair)?;
Ok(certificate)
}
/// Generates an X.509 certificate based on the certificate signing `request`.
///
/// `add_tee_extension` indicates whether to add a custom extension containing a TEE report.
pub fn sign_certificate(
&self,
request: X509Req,
add_tee_extension: AddTeeExtension,
) -> anyhow::Result<X509> {
info!("Signing certificate");
let mut builder = CertificateBuilder::create()?;
builder.set_version(CERTIFICATE_VERSION)?;
builder.set_serial_number(SERIAL_NUMBER_SIZE)?;
builder.set_name()?;
builder.set_public_key(request.public_key()?.as_ref())?;
builder.set_expiration_interval(CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS)?;
builder.add_basic_constraints_extension(false)?;
builder.add_key_usage_extension(false)?;
builder.add_subject_key_identifier_extension(Some(&self.root_certificate))?;
builder.add_auth_key_identifier_extension(&self.root_certificate)?;
// Add X.509 extensions from the certificate signing request.
builder.add_extensions(request.extensions()?)?;
// Bind the certificate to the TEE firmware using an X.509 TEE extension.
if let AddTeeExtension::Yes(tee_certificate) = add_tee_extension {
builder.add_tee_extension(request.public_key()?.as_ref(), tee_certificate)?;
}
let certificate = builder.build(&self.key_pair)?;
Ok(certificate)
}
/// Get RSA key pair encoded in PEM format.
///
/// <https://tools.ietf.org/html/rfc7468>
pub fn get_private_key_pem(&self) -> anyhow::Result<Vec<u8>> {
self.key_pair
.private_key_to_pem_pkcs8()
.context("Couldn't encode key pair in PEM format")
}
/// Get a root X.509 certificate encoded in PEM format.
///
/// <https://tools.ietf.org/html/rfc7468>
pub fn get_root_certificate_pem(&self) -> anyhow::Result<Vec<u8>> {
self.root_certificate
.to_pem()
.context("Couldn't encode root certificate in PEM format")
}
}
/// Helper struct that implements certificate creation using `openssl`.
struct CertificateBuilder {
builder: X509Builder,
}
impl CertificateBuilder {
fn create() -> anyhow::Result<Self> {
let builder = X509::builder()?;
Ok(Self { builder })
}
fn set_version(&mut self, version: i32) -> anyhow::Result<&mut Self> {
self.builder.set_version(version)?;
Ok(self)
}
fn set_serial_number(&mut self, serial_number_size: i32) -> anyhow::Result<&mut Self> {
let serial_number = {
let mut serial = BigNum::new()?;
serial.rand(serial_number_size, MsbOption::MAYBE_ZERO, false)?;
serial.to_asn1_integer()?
};
self.builder.set_serial_number(&serial_number)?;
Ok(self)
}
fn set_name(&mut self) -> anyhow::Result<&mut Self> {
let mut name = X509NameBuilder::new()?;
name.append_entry_by_text("O", "Oak")?;
name.append_entry_by_text("CN", "Proxy Attestation Service")?;
let name = name.build();
self.builder.set_subject_name(&name)?;
self.builder.set_issuer_name(&name)?;
Ok(self)
}
fn set_public_key<T>(&mut self, public_key: &PKeyRef<T>) -> anyhow::Result<&mut Self>
where
T: HasPublic,
{
self.builder.set_pubkey(public_key)?;
Ok(self)
}
fn set_expiration_interval(&mut self, expiration_interval: u32) -> anyhow::Result<&mut Self> {
let not_before = Asn1Time::days_from_now(0)?;
self.builder.set_not_before(¬_before)?;
let not_after = Asn1Time::days_from_now(expiration_interval)?;
self.builder.set_not_after(¬_after)?;
Ok(self)
}
fn add_basic_constraints_extension(&mut self, is_critical: bool) -> anyhow::Result<&mut Self> {
if is_critical {
self.builder
.append_extension(BasicConstraints::new().critical().build()?)?;
} else {
self.builder
.append_extension(BasicConstraints::new().build()?)?;
}
Ok(self)
}
fn add_key_usage_extension(&mut self, is_root_certificate: bool) -> anyhow::Result<&mut Self> {
if is_root_certificate {
self.builder.append_extension(
KeyUsage::new()
.critical()
.key_cert_sign()
.crl_sign()
.build()?,
)?;
} else {
self.builder.append_extension(
KeyUsage::new()
.critical()
.non_repudiation()
.digital_signature()
.key_encipherment()
.build()?,
)?;
}
Ok(self)
}
fn add_subject_key_identifier_extension(
&mut self,
root_certificate: Option<&X509Ref>,
) -> anyhow::Result<&mut Self> {
let subject_key_identifier = SubjectKeyIdentifier::new()
.build(&self.builder.x509v3_context(root_certificate, None))?;
self.builder.append_extension(subject_key_identifier)?;
Ok(self)
}
fn add_subject_alt_name_extension(&mut self) -> anyhow::Result<&mut Self> {
let subject_alt_name = SubjectAlternativeName::new()
.dns(DEFAULT_DNS_NAME)
.build(&self.builder.x509v3_context(None, None))?;
self.builder.append_extension(subject_alt_name)?;
Ok(self)
}
fn add_auth_key_identifier_extension(
&mut self,
root_certificate: &X509Ref,
) -> anyhow::Result<&mut Self> {
let auth_key_identifier = AuthorityKeyIdentifier::new()
.keyid(false)
.issuer(false)
.build(&self.builder.x509v3_context(Some(root_certificate), None))?;
self.builder.append_extension(auth_key_identifier)?;
Ok(self)
}
// Generates a TEE report with the public key hash as data and add it to the certificate as a
// custom extension. This is required to bind the certificate to the TEE firmware.
fn add_tee_extension<T>(
&mut self,
public_key: &PKeyRef<T>,
tee_certificate: Vec<u8>,
) -> anyhow::Result<&mut Self>
where
T: HasPublic,
{
let public_key_hash = get_sha256(&public_key.public_key_to_der()?);
let tee_report = Report::new(&public_key_hash);
let attestation_info = AttestationInfo {
report: tee_report,
certificate: tee_certificate,
};
let tee_extension = attestation_info.to_extension()?;
self.builder.append_extension(tee_extension)?;
Ok(self)
}
fn add_extensions(
&mut self,
extensions: Stack<openssl::x509::X509Extension>,
) -> anyhow::Result<&mut Self> {
for extension in extensions.iter() {
self.builder.append_extension2(extension)?;
}
Ok(self)
}
fn build(mut self, private_key: &PKey<Private>) -> anyhow::Result<X509> {
self.builder.sign(private_key, MessageDigest::sha256())?;
Ok(self.builder.build())
}
}
| {
builder.add_tee_extension(key_pair, tee_certificate)?;
} | conditional_block |
certificate.rs | //
// Copyright 2021 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use crate::{
get_sha256,
report::{AttestationInfo, Report},
};
use anyhow::Context;
use log::info;
use openssl::{
asn1::Asn1Time,
bn::{BigNum, MsbOption},
hash::MessageDigest,
pkey::{HasPublic, PKey, PKeyRef, Private},
rsa::Rsa,
stack::Stack,
x509::{
extension::{
AuthorityKeyIdentifier, BasicConstraints, KeyUsage, SubjectAlternativeName,
SubjectKeyIdentifier,
},
X509Builder, X509NameBuilder, X509Ref, X509Req, X509,
},
};
// X.509 certificate parameters.
//
// <https://tools.ietf.org/html/rfc5280>
const RSA_KEY_SIZE: u32 = 2048;
// Version is zero-indexed, so the value of `2` corresponds to the version `3`.
const CERTIFICATE_VERSION: i32 = 2;
// Length of the randomly generated X.509 certificate serial number (which is 20 bytes).
//
// The most significant bit is excluded because it's passed as a separate argument to:
// https://docs.rs/openssl/0.10.33/openssl/bn/struct.BigNum.html#method.rand
const SERIAL_NUMBER_SIZE: i32 = 159;
const CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS: u32 = 1;
const DEFAULT_DNS_NAME: &str = "localhost";
/// Indicates whether to add a custom TEE extension to a certificate.
#[derive(PartialEq)]
pub enum AddTeeExtension {
/// Enum value contains a PEM encoded TEE Provider's X.509 certificate that signs TEE firmware
/// key.
Yes(Vec<u8>),
No,
}
/// Convenience structure for generating X.509 certificates.
///
/// <https://tools.ietf.org/html/rfc5280>
pub struct CertificateAuthority {
pub key_pair: PKey<Private>,
pub root_certificate: X509,
}
impl CertificateAuthority {
/// Generates a root X.509 certificate and a corresponding private/public key pair.
///
/// `add_tee_extension` indicates whether to add a custom extension containing a TEE report to
/// the root certificate.
pub fn create(add_tee_extension: AddTeeExtension) -> anyhow::Result<Self> {
let key_pair = CertificateAuthority::generate_key_pair()?;
let root_certificate =
CertificateAuthority::generate_root_certificate(&key_pair, add_tee_extension)?;
Ok(Self {
key_pair,
root_certificate,
})
}
/// Generates RSA private/public key pair.
fn generate_key_pair() -> anyhow::Result<PKey<Private>> {
let rsa = Rsa::generate(RSA_KEY_SIZE).context("Couldn't generate RSA key")?;
PKey::from_rsa(rsa).context("Couldn't parse RSA key")
}
/// Creates a root X.509 certificate.
fn generate_root_certificate(
key_pair: &PKey<Private>,
add_tee_extension: AddTeeExtension,
) -> anyhow::Result<X509> {
info!("Generating root certificate");
let mut builder = CertificateBuilder::create()?;
builder.set_version(CERTIFICATE_VERSION)?;
builder.set_serial_number(SERIAL_NUMBER_SIZE)?;
builder.set_name()?;
builder.set_public_key(key_pair)?;
builder.set_expiration_interval(CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS)?;
builder.add_basic_constraints_extension(true)?;
builder.add_key_usage_extension(true)?;
builder.add_subject_key_identifier_extension(None)?;
builder.add_subject_alt_name_extension()?;
// Bind the certificate to the TEE firmware using an X.509 TEE extension.
if let AddTeeExtension::Yes(tee_certificate) = add_tee_extension {
builder.add_tee_extension(key_pair, tee_certificate)?;
}
let certificate = builder.build(key_pair)?;
Ok(certificate)
}
/// Generates an X.509 certificate based on the certificate signing `request`.
///
/// `add_tee_extension` indicates whether to add a custom extension containing a TEE report.
pub fn sign_certificate(
&self,
request: X509Req,
add_tee_extension: AddTeeExtension,
) -> anyhow::Result<X509> {
info!("Signing certificate");
let mut builder = CertificateBuilder::create()?;
builder.set_version(CERTIFICATE_VERSION)?;
builder.set_serial_number(SERIAL_NUMBER_SIZE)?;
builder.set_name()?;
builder.set_public_key(request.public_key()?.as_ref())?;
builder.set_expiration_interval(CERTIFICATE_EXPIRATION_INTERVAL_IN_DAYS)?;
builder.add_basic_constraints_extension(false)?;
builder.add_key_usage_extension(false)?;
builder.add_subject_key_identifier_extension(Some(&self.root_certificate))?;
builder.add_auth_key_identifier_extension(&self.root_certificate)?;
// Add X.509 extensions from the certificate signing request.
builder.add_extensions(request.extensions()?)?;
// Bind the certificate to the TEE firmware using an X.509 TEE extension.
if let AddTeeExtension::Yes(tee_certificate) = add_tee_extension {
builder.add_tee_extension(request.public_key()?.as_ref(), tee_certificate)?;
}
let certificate = builder.build(&self.key_pair)?;
Ok(certificate)
}
/// Get RSA key pair encoded in PEM format.
///
/// <https://tools.ietf.org/html/rfc7468>
pub fn get_private_key_pem(&self) -> anyhow::Result<Vec<u8>> {
self.key_pair
.private_key_to_pem_pkcs8()
.context("Couldn't encode key pair in PEM format")
}
/// Get a root X.509 certificate encoded in PEM format.
///
/// <https://tools.ietf.org/html/rfc7468>
pub fn get_root_certificate_pem(&self) -> anyhow::Result<Vec<u8>> {
self.root_certificate
.to_pem()
.context("Couldn't encode root certificate in PEM format")
}
}
/// Helper struct that implements certificate creation using `openssl`.
struct CertificateBuilder {
builder: X509Builder,
}
impl CertificateBuilder {
fn create() -> anyhow::Result<Self> {
let builder = X509::builder()?;
Ok(Self { builder })
}
fn set_version(&mut self, version: i32) -> anyhow::Result<&mut Self> {
self.builder.set_version(version)?;
Ok(self)
}
fn set_serial_number(&mut self, serial_number_size: i32) -> anyhow::Result<&mut Self> {
let serial_number = {
let mut serial = BigNum::new()?;
serial.rand(serial_number_size, MsbOption::MAYBE_ZERO, false)?;
serial.to_asn1_integer()?
};
self.builder.set_serial_number(&serial_number)?;
Ok(self)
}
fn set_name(&mut self) -> anyhow::Result<&mut Self> {
let mut name = X509NameBuilder::new()?;
name.append_entry_by_text("O", "Oak")?;
name.append_entry_by_text("CN", "Proxy Attestation Service")?;
let name = name.build();
self.builder.set_subject_name(&name)?;
self.builder.set_issuer_name(&name)?;
Ok(self)
}
fn set_public_key<T>(&mut self, public_key: &PKeyRef<T>) -> anyhow::Result<&mut Self>
where
T: HasPublic,
{
self.builder.set_pubkey(public_key)?;
Ok(self)
}
fn set_expiration_interval(&mut self, expiration_interval: u32) -> anyhow::Result<&mut Self> {
let not_before = Asn1Time::days_from_now(0)?;
self.builder.set_not_before(¬_before)?;
let not_after = Asn1Time::days_from_now(expiration_interval)?;
self.builder.set_not_after(¬_after)?;
Ok(self)
}
fn add_basic_constraints_extension(&mut self, is_critical: bool) -> anyhow::Result<&mut Self> {
if is_critical {
self.builder
.append_extension(BasicConstraints::new().critical().build()?)?;
} else {
self.builder
.append_extension(BasicConstraints::new().build()?)?;
}
Ok(self)
}
fn add_key_usage_extension(&mut self, is_root_certificate: bool) -> anyhow::Result<&mut Self> {
if is_root_certificate {
self.builder.append_extension(
KeyUsage::new()
.critical()
.key_cert_sign()
.crl_sign()
.build()?,
)?;
} else {
self.builder.append_extension(
KeyUsage::new()
.critical()
.non_repudiation()
.digital_signature()
.key_encipherment()
.build()?,
)?;
}
Ok(self)
}
fn add_subject_key_identifier_extension(
&mut self,
root_certificate: Option<&X509Ref>,
) -> anyhow::Result<&mut Self> {
let subject_key_identifier = SubjectKeyIdentifier::new() | .build(&self.builder.x509v3_context(root_certificate, None))?;
self.builder.append_extension(subject_key_identifier)?;
Ok(self)
}
fn add_subject_alt_name_extension(&mut self) -> anyhow::Result<&mut Self> {
let subject_alt_name = SubjectAlternativeName::new()
.dns(DEFAULT_DNS_NAME)
.build(&self.builder.x509v3_context(None, None))?;
self.builder.append_extension(subject_alt_name)?;
Ok(self)
}
fn add_auth_key_identifier_extension(
&mut self,
root_certificate: &X509Ref,
) -> anyhow::Result<&mut Self> {
let auth_key_identifier = AuthorityKeyIdentifier::new()
.keyid(false)
.issuer(false)
.build(&self.builder.x509v3_context(Some(root_certificate), None))?;
self.builder.append_extension(auth_key_identifier)?;
Ok(self)
}
// Generates a TEE report with the public key hash as data and add it to the certificate as a
// custom extension. This is required to bind the certificate to the TEE firmware.
fn add_tee_extension<T>(
&mut self,
public_key: &PKeyRef<T>,
tee_certificate: Vec<u8>,
) -> anyhow::Result<&mut Self>
where
T: HasPublic,
{
let public_key_hash = get_sha256(&public_key.public_key_to_der()?);
let tee_report = Report::new(&public_key_hash);
let attestation_info = AttestationInfo {
report: tee_report,
certificate: tee_certificate,
};
let tee_extension = attestation_info.to_extension()?;
self.builder.append_extension(tee_extension)?;
Ok(self)
}
fn add_extensions(
&mut self,
extensions: Stack<openssl::x509::X509Extension>,
) -> anyhow::Result<&mut Self> {
for extension in extensions.iter() {
self.builder.append_extension2(extension)?;
}
Ok(self)
}
fn build(mut self, private_key: &PKey<Private>) -> anyhow::Result<X509> {
self.builder.sign(private_key, MessageDigest::sha256())?;
Ok(self.builder.build())
}
} | random_line_split |
|
variables_3.js | var searchData=
[
['direction',['direction',['../structplatform.html#a886d551d5381dc3e53f17825ffc51641',1,'platform::direction()'],['../structprojectile.html#a886d551d5381dc3e53f17825ffc51641',1,'projectile::direction()']]],
['dirx',['dirX',['../struct_character.html#ab1761c91e3594dec827fe60e992d2e1a',1,'Character']]],
['diry',['dirY',['../struct_character.html#a72324d3d2f391a353c57aaa03b148d8d',1,'Character']]], | ['doublejump',['doubleJump',['../struct_character.html#a917917ad1fee47a2101d4ece8dbd33e8',1,'Character']]]
]; | random_line_split |
|
viewport.followedge.js | Crafty.viewport.followEdge = (function() {
var oldTarget, offx, offy, edx, edy
function change() |
function stopFollow() {
if (oldTarget) {
oldTarget.unbind('Move', change)
oldTarget.unbind('ViewportScale', change)
oldTarget.unbind('ViewportResize', change)
}
}
Crafty._preBind("StopCamera", stopFollow)
return function(target, offsetx, offsety, edgex, edgey) {
if (!target || !target.has('2D'))
return
Crafty.trigger("StopCamera")
oldTarget = target
offx = (typeof offsetx !== 'undefined') ? offsetx : 0
offy = (typeof offsety !== 'undefined') ? offsety : 0
edy = (typeof edgex !== 'undefined') ? edgex : 0
edx = (typeof edgey !== 'undefined') ? edgey : 0
target.bind('Move', change)
target.bind('ViewportScale', change)
target.bind('ViewportResize', change)
change.call(target)
}
})()
| {
var scale = Crafty.viewport._scale
// if (this.x > -Crafty.viewport.x + Crafty.viewport.width / 2 + edx - 10) {
Crafty.viewport.scroll('_x', -(this.x + (this.w / 2) - (Crafty.viewport.width / 2 / scale) - offx * scale) + edx)
Crafty.viewport.scroll('_y', -(this.y + (this.h / 2) - (Crafty.viewport.height / 2 / scale) - offy * scale))
Crafty.viewport._clamp()
// }
} | identifier_body |
viewport.followedge.js | Crafty.viewport.followEdge = (function() {
var oldTarget, offx, offy, edx, edy
function | () {
var scale = Crafty.viewport._scale
// if (this.x > -Crafty.viewport.x + Crafty.viewport.width / 2 + edx - 10) {
Crafty.viewport.scroll('_x', -(this.x + (this.w / 2) - (Crafty.viewport.width / 2 / scale) - offx * scale) + edx)
Crafty.viewport.scroll('_y', -(this.y + (this.h / 2) - (Crafty.viewport.height / 2 / scale) - offy * scale))
Crafty.viewport._clamp()
// }
}
function stopFollow() {
if (oldTarget) {
oldTarget.unbind('Move', change)
oldTarget.unbind('ViewportScale', change)
oldTarget.unbind('ViewportResize', change)
}
}
Crafty._preBind("StopCamera", stopFollow)
return function(target, offsetx, offsety, edgex, edgey) {
if (!target || !target.has('2D'))
return
Crafty.trigger("StopCamera")
oldTarget = target
offx = (typeof offsetx !== 'undefined') ? offsetx : 0
offy = (typeof offsety !== 'undefined') ? offsety : 0
edy = (typeof edgex !== 'undefined') ? edgex : 0
edx = (typeof edgey !== 'undefined') ? edgey : 0
target.bind('Move', change)
target.bind('ViewportScale', change)
target.bind('ViewportResize', change)
change.call(target)
}
})()
| change | identifier_name |
viewport.followedge.js | Crafty.viewport.followEdge = (function() {
var oldTarget, offx, offy, edx, edy
function change() {
var scale = Crafty.viewport._scale
// if (this.x > -Crafty.viewport.x + Crafty.viewport.width / 2 + edx - 10) {
Crafty.viewport.scroll('_x', -(this.x + (this.w / 2) - (Crafty.viewport.width / 2 / scale) - offx * scale) + edx)
Crafty.viewport.scroll('_y', -(this.y + (this.h / 2) - (Crafty.viewport.height / 2 / scale) - offy * scale))
Crafty.viewport._clamp()
// }
}
function stopFollow() {
if (oldTarget) |
}
Crafty._preBind("StopCamera", stopFollow)
return function(target, offsetx, offsety, edgex, edgey) {
if (!target || !target.has('2D'))
return
Crafty.trigger("StopCamera")
oldTarget = target
offx = (typeof offsetx !== 'undefined') ? offsetx : 0
offy = (typeof offsety !== 'undefined') ? offsety : 0
edy = (typeof edgex !== 'undefined') ? edgex : 0
edx = (typeof edgey !== 'undefined') ? edgey : 0
target.bind('Move', change)
target.bind('ViewportScale', change)
target.bind('ViewportResize', change)
change.call(target)
}
})()
| {
oldTarget.unbind('Move', change)
oldTarget.unbind('ViewportScale', change)
oldTarget.unbind('ViewportResize', change)
} | conditional_block |
viewport.followedge.js | Crafty.viewport.followEdge = (function() {
var oldTarget, offx, offy, edx, edy
function change() {
var scale = Crafty.viewport._scale
// if (this.x > -Crafty.viewport.x + Crafty.viewport.width / 2 + edx - 10) {
Crafty.viewport.scroll('_x', -(this.x + (this.w / 2) - (Crafty.viewport.width / 2 / scale) - offx * scale) + edx)
Crafty.viewport.scroll('_y', -(this.y + (this.h / 2) - (Crafty.viewport.height / 2 / scale) - offy * scale))
Crafty.viewport._clamp()
// }
}
function stopFollow() {
if (oldTarget) {
oldTarget.unbind('Move', change)
oldTarget.unbind('ViewportScale', change)
oldTarget.unbind('ViewportResize', change)
}
}
Crafty._preBind("StopCamera", stopFollow)
return function(target, offsetx, offsety, edgex, edgey) {
if (!target || !target.has('2D'))
return
Crafty.trigger("StopCamera")
oldTarget = target
offx = (typeof offsetx !== 'undefined') ? offsetx : 0
offy = (typeof offsety !== 'undefined') ? offsety : 0
edy = (typeof edgex !== 'undefined') ? edgex : 0
edx = (typeof edgey !== 'undefined') ? edgey : 0
target.bind('Move', change)
target.bind('ViewportScale', change)
target.bind('ViewportResize', change)
change.call(target) | })() | } | random_line_split |
webpack.config.ts | import * as path from 'path';
import * as webpack from 'webpack';
import TerserPlugin from 'terser-webpack-plugin';
import * as pkg from '../package.json';
const debug = process.argv.indexOf('--mode=development') > 0;
const license = `opensource.org/licenses/${pkg.license}`;
const copyright = `(c) ${new Date().getFullYear()} ${pkg.author}, ${license}`;
const banner = `kdbxweb v${pkg.version}, ${copyright}`;
module.exports = {
context: path.join(__dirname, '../lib'),
entry: './index.ts',
output: {
path: path.join(__dirname, '../dist'),
filename: 'kdbxweb' + (debug ? '' : '.min') + '.js',
library: 'kdbxweb', | libraryTarget: 'umd',
globalObject: 'this'
},
module: {
rules: [
{
test: /\.ts$/,
exclude: /node_modules/,
use: {
loader: 'ts-loader',
options: {
configFile: path.join(
__dirname,
`tsconfig.build-${debug ? 'debug' : 'prod'}.json`
)
}
}
}
]
},
resolve: {
extensions: ['.ts', '.js'],
modules: [path.join(__dirname, '../lib'), path.join(__dirname, '../node_modules')],
fallback: {
console: false,
process: false,
Buffer: false,
crypto: false,
zlib: false
}
},
plugins: [new webpack.BannerPlugin({ banner })],
node: {
__filename: false,
__dirname: false
},
optimization: {
minimize: !debug,
minimizer: debug
? []
: [
new TerserPlugin({
extractComments: false
})
]
},
externals: {
fs: true,
path: true,
'@xmldom/xmldom': true,
crypto: true,
zlib: true
},
performance: {
hints: false
}
}; | random_line_split |
|
main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# PROJETO LAVAGEM A SECO
#
# MAIN
#
# Felipe Bandeira da Silva
# 26 jul 15
#
import logging
import tornado.escape
import tornado.ioloop
import tornado.web
import tornado.options
import tornado.websocket
import tornado.httpserver
import os.path
from tornado.concurrent import Future
from tornado import gen
from tornado.options import define, options, parse_command_line
import socket
import fcntl
import struct
import random
define("port", default=8888, help="run on the given port", type=int)
define("debug", default=False, help="run in debug mode")
import multiprocessing
import controle
import time
import os
import signal
import subprocess
import sys
from platform import uname
#NAVEGADOR = 'epiphany'
NAVEGADOR = 'midori -e Fullscreen -a'
# A pagina HTML contém informações interessantes e que devem ser
# apresentadas ao usuário. Quanto menor o tempo maior o processamento
# por parte do cliente ou dependendo do caso pelo servidor.
TEMPO_MS_ATUALIZACAO_HTML = 500
# Via websocket é possível mais um cliente conectado e todos devem
# receber as mensagens do servidor, bem como enviar.
# clientes do websocket
clients = []
# tarefa para atualizacao do pagina html
queue_joyx = multiprocessing.Queue()
queue_joyy = multiprocessing.Queue()
queue_joyz = multiprocessing.Queue()
# anemometro
queue_velocidade = multiprocessing.Queue()
queue_direcao = multiprocessing.Queue()
queue_distancia = multiprocessing.Queue()
# usado para o controle da página pelo joystick
queue_joy_botoes = multiprocessing.Queue()
#class NavegadorWEB(multiprocessing.Process):
# def __init__(self):
# multiprocessing.Process.__init__(self)
#
# self.navegador = subprocess.Popen(['epiphany-browser 192.168.42.1:8888'], stdout=subprocess.PIPE, \
# shell=True, preexec_fn=os.setsid)
#
# def run(self):
# while True:
# time.sleep(0.01)
def inicia_navegador():
navegador = subprocess.Popen([NAVEGADOR+' 192.168.42.1:8888'], \
stdout=subprocess.PIPE, \
shell=True, preexec_fn=os.setsid)
def fecha_navegador():
processos = subprocess.Popen(['pgrep', NAVEGADOR], stdout=subprocess.PIPE)
print 'PID dos processos', processos.stdout
for pid in processos.stdout:
os.kill(int(pid), signal.SIGTERM)
try:
time.sleep(3)
os.kill(int(pid), 0)
print u'erro: o processo %d ainda existe' % pid
except OSError as ex:
continue
def get_ip_address():
# Informa o endereço IP da primeira conexão funcionando
# visto em:
# http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
ifname = 'eth0'
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
try:
ifname = 'wlan0'
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
return "127.0.0.1"
def get_ip_address_interface(ifname):
# Informa o endereço de IP de uma rede <ifname>
# visto em:
# http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
return "0.0.0.0"
class MainHandler(tornado.web.RequestHandler):
# Atende ao GET e POST do cliente
def get(self):
# é possível via argumento renderizar a página html com
# informações interessantes, os comentários devem ter o mesmo
# nome da variável da página
self.render("index.html", title="LAVAGEM A SECO", \
ip_host=get_ip_address()+":"+str(options.port), \
msg_status="LIGADO")
class WebSocketHandler(tornado.websocket.WebSocketHandler):
# Todo cliente se encarrega de conectar-se ao servidor websocket.
# Quando existe uma nova conexão é salvo qual cliente foi.
def open(self):
print 'tornado: websocket: aviso: nova conexão de um cliente'
clients.append(self)
self.write_message("connected")
# Quando um cliente envia uma mensagem, esta é a função responsável
# por ler e aqui deve ficar a chamada dos get das filas(queue)
def on_message(self, message):
print 'tornado: websocket: aviso: nova mensagem: %s' % message
q = self.application.settings.get('queue')
q.put(message)
# Para evitar envios de informações a clientes que não existem mais
# é necessário retirá-los da lista
def on_close(self):
print 'tornado: websocket: aviso: conexão finalizada/perdida'
clients.remove(self)
fecha_navegador()
inicia_navegador()
def envia_cmd_websocket(cmd, arg):
# Facilita o trabalho repetitivo de envia mensagem para todo os clientes
# Envia um comando e seu argumento para todos os clientes
for c in clients:
c.write_message(cmd+";"+arg)
def tarefa_atualizacao_html():
# Esta função tem uma chamada periódica, responsável por atualizar os
# elementos atualizáveis na página html
envia_cmd_websocket("lan", get_ip_address())
envia_cmd_websocket("random", str(random.randint(0,1000)))
# para envia algo é necessário que fila tenha algo
if not queue_joyx.empty():
resultado = queue_joyx.get()
envia_cmd_websocket("joyx", str(resultado)[:6])
if not queue_joyy.empty():
resultado = queue_joyy.get()
envia_cmd_websocket("joyy", str(resultado)[:6])
if not queue_joyz.empty():
resultado = queue_joyz.get()
envia_cmd_websocket("joyz", str(resultado)[:6])
if not queue_joy_botoes.empty():
resultado = queue_joy_botoes.get()
envia_cmd_websocket("b", str(resultado))
if not queue_velocidade.empty():
resultado = queue_velocidade.get()
envia_cmd_websocket("v", str(resultado))
if not queue_direcao.empty():
resultado = queue_direcao.get()
envia_cmd_websocket("d", str(resultado))
if not queue_distancia.empty():
resultado = queue_distancia.get()
envia_cmd_websocket("x", str(resultado)[:6])
def main():
print u"Iniciando o servidor Tornado"
fecha_navegador()
tarefa_controle = multiprocessing.Queue()
# esse loop ler os dados do joystick e envia para o lavos
# sem ele, nenhuma resposta do Joystick é atendida.
controle_loop = controle.ControleLavagem(tarefa_controle, \
queue_joyx, \
queue_joyy, \
queue_joyz, \
queue_joy_botoes, \
queue_velocidade, \
queue_direcao, \
queue_distancia)
controle_loop.daemon = True
controle_loop.start()
# espera um pouco para que a tarefa esteja realmente pronta
# sincronismo é mais interessante?
time.sleep(1)
tarefa_controle.put("Testando Tarefa :)")
parse_command_line()
app = tornado.web.Application(
[
(r"/", MainHandler),
(r"/ws", WebSocketHandler)
],
cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__",
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
xsrf_cookies=True,
debug=options.debug,
autoreload=True,
queue=tarefa_controle,
)
| # porta que o servidor irá usar
app.listen(options.port)
# carrega o servidor mas não inicia
main_loop = tornado.ioloop.IOLoop.instance()
# Aqui será a principal tarefa do lavagem, leitura e acionamento
tarefa_atualizacao_html_loop = tornado.ioloop.PeriodicCallback(tarefa_atualizacao_html,\
TEMPO_MS_ATUALIZACAO_HTML, \
io_loop = main_loop)
print u"aviso: tornado: start"
tarefa_atualizacao_html_loop.start()
inicia_navegador()
# o loop do servidor deve ser o último, já que não um daemon
main_loop.start()
if __name__ == "__main__":
main() | random_line_split |
|
main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# PROJETO LAVAGEM A SECO
#
# MAIN
#
# Felipe Bandeira da Silva
# 26 jul 15
#
import logging
import tornado.escape
import tornado.ioloop
import tornado.web
import tornado.options
import tornado.websocket
import tornado.httpserver
import os.path
from tornado.concurrent import Future
from tornado import gen
from tornado.options import define, options, parse_command_line
import socket
import fcntl
import struct
import random
define("port", default=8888, help="run on the given port", type=int)
define("debug", default=False, help="run in debug mode")
import multiprocessing
import controle
import time
import os
import signal
import subprocess
import sys
from platform import uname
#NAVEGADOR = 'epiphany'
NAVEGADOR = 'midori -e Fullscreen -a'
# A pagina HTML contém informações interessantes e que devem ser
# apresentadas ao usuário. Quanto menor o tempo maior o processamento
# por parte do cliente ou dependendo do caso pelo servidor.
TEMPO_MS_ATUALIZACAO_HTML = 500
# Via websocket é possível mais um cliente conectado e todos devem
# receber as mensagens do servidor, bem como enviar.
# clientes do websocket
clients = []
# tarefa para atualizacao do pagina html
queue_joyx = multiprocessing.Queue()
queue_joyy = multiprocessing.Queue()
queue_joyz = multiprocessing.Queue()
# anemometro
queue_velocidade = multiprocessing.Queue()
queue_direcao = multiprocessing.Queue()
queue_distancia = multiprocessing.Queue()
# usado para o controle da página pelo joystick
queue_joy_botoes = multiprocessing.Queue()
#class NavegadorWEB(multiprocessing.Process):
# def __init__(self):
# multiprocessing.Process.__init__(self)
#
# self.navegador = subprocess.Popen(['epiphany-browser 192.168.42.1:8888'], stdout=subprocess.PIPE, \
# shell=True, preexec_fn=os.setsid)
#
# def run(self):
# while True:
# time.sleep(0.01)
def inicia_navegador():
navegador = subprocess.Popen([NAVEGADOR+' 192.168.42.1:8888'], \
stdout=subprocess.PIPE, \
shell=True, preexec_fn=os.setsid)
def fecha_navegador():
processos = subprocess.Popen(['pgrep', NAVEGADOR], stdout=subprocess.PIPE)
print 'PID dos processos', processos.stdout
for pid in processos.stdout:
os.kill(int(pid), signal.SIGTERM)
try:
time.sleep(3)
os.kill(int(pid), 0)
print u'erro: o processo %d ainda existe' % pid
except OSError as ex:
continue
def get_ip_address():
# Informa o endereço IP da primeira conexão funcionando
# visto em:
# http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
ifname = 'eth0'
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
try:
ifname = 'wlan0'
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
return "127.0.0.1"
def get_ip_address_interface(ifname):
# Informa o endereço de IP de uma rede <ifname>
# visto em:
# http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
return "0.0.0.0"
class MainHandler(tornado.web.RequestHandler):
# Atende ao GET e POST do cliente
def get(self):
# é possível via argumento renderizar a página html com
# informações interessantes, os comentários devem ter o mesmo
# nome da variável da página
self.render("index.html", title="LAVAGEM A SECO", \
ip_host=get_ip_address()+":"+str(options.port), \
msg_status="LIGADO")
class WebSocketHandler(tornado.websocket.WebSocketHandler):
# Todo cliente se encarrega de conectar-se ao servidor websocket.
# Quando existe uma nova conexão é salvo qual cliente foi.
def open(self):
print 'tornado: websocket: aviso: nova conexão de um cliente'
clients.append(self)
self.write_message("connected")
# Quando um cliente envia uma mensagem, esta é a função responsável
# por ler e aqui deve ficar a chamada dos get das filas(queue)
def on_message(self, message):
print 'tornado: websocket: aviso: nova mensagem: %s' % message
q = self.application.settings.get('queue')
q.put(message)
# Para evitar envios de informações a clientes que não existem mais
# é necessário retirá-los da lista
def on_close(self):
print ' | websocket: aviso: conexão finalizada/perdida'
clients.remove(self)
fecha_navegador()
inicia_navegador()
def envia_cmd_websocket(cmd, arg):
# Facilita o trabalho repetitivo de envia mensagem para todo os clientes
# Envia um comando e seu argumento para todos os clientes
for c in clients:
c.write_message(cmd+";"+arg)
def tarefa_atualizacao_html():
# Esta função tem uma chamada periódica, responsável por atualizar os
# elementos atualizáveis na página html
envia_cmd_websocket("lan", get_ip_address())
envia_cmd_websocket("random", str(random.randint(0,1000)))
# para envia algo é necessário que fila tenha algo
if not queue_joyx.empty():
resultado = queue_joyx.get()
envia_cmd_websocket("joyx", str(resultado)[:6])
if not queue_joyy.empty():
resultado = queue_joyy.get()
envia_cmd_websocket("joyy", str(resultado)[:6])
if not queue_joyz.empty():
resultado = queue_joyz.get()
envia_cmd_websocket("joyz", str(resultado)[:6])
if not queue_joy_botoes.empty():
resultado = queue_joy_botoes.get()
envia_cmd_websocket("b", str(resultado))
if not queue_velocidade.empty():
resultado = queue_velocidade.get()
envia_cmd_websocket("v", str(resultado))
if not queue_direcao.empty():
resultado = queue_direcao.get()
envia_cmd_websocket("d", str(resultado))
if not queue_distancia.empty():
resultado = queue_distancia.get()
envia_cmd_websocket("x", str(resultado)[:6])
def main():
print u"Iniciando o servidor Tornado"
fecha_navegador()
tarefa_controle = multiprocessing.Queue()
# esse loop ler os dados do joystick e envia para o lavos
# sem ele, nenhuma resposta do Joystick é atendida.
controle_loop = controle.ControleLavagem(tarefa_controle, \
queue_joyx, \
queue_joyy, \
queue_joyz, \
queue_joy_botoes, \
queue_velocidade, \
queue_direcao, \
queue_distancia)
controle_loop.daemon = True
controle_loop.start()
# espera um pouco para que a tarefa esteja realmente pronta
# sincronismo é mais interessante?
time.sleep(1)
tarefa_controle.put("Testando Tarefa :)")
parse_command_line()
app = tornado.web.Application(
[
(r"/", MainHandler),
(r"/ws", WebSocketHandler)
],
cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__",
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
xsrf_cookies=True,
debug=options.debug,
autoreload=True,
queue=tarefa_controle,
)
# porta que o servidor irá usar
app.listen(options.port)
# carrega o servidor mas não inicia
main_loop = tornado.ioloop.IOLoop.instance()
# Aqui será a principal tarefa do lavagem, leitura e acionamento
tarefa_atualizacao_html_loop = tornado.ioloop.PeriodicCallback(tarefa_atualizacao_html,\
TEMPO_MS_ATUALIZACAO_HTML, \
io_loop = main_loop)
print u"aviso: tornado: start"
tarefa_atualizacao_html_loop.start()
inicia_navegador()
# o loop do servidor deve ser o último, já que não um daemon
main_loop.start()
if __name__ == "__main__":
main()
| tornado: | identifier_name |
main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# PROJETO LAVAGEM A SECO
#
# MAIN
#
# Felipe Bandeira da Silva
# 26 jul 15
#
import logging
import tornado.escape
import tornado.ioloop
import tornado.web
import tornado.options
import tornado.websocket
import tornado.httpserver
import os.path
from tornado.concurrent import Future
from tornado import gen
from tornado.options import define, options, parse_command_line
import socket
import fcntl
import struct
import random
define("port", default=8888, help="run on the given port", type=int)
define("debug", default=False, help="run in debug mode")
import multiprocessing
import controle
import time
import os
import signal
import subprocess
import sys
from platform import uname
#NAVEGADOR = 'epiphany'
NAVEGADOR = 'midori -e Fullscreen -a'
# A pagina HTML contém informações interessantes e que devem ser
# apresentadas ao usuário. Quanto menor o tempo maior o processamento
# por parte do cliente ou dependendo do caso pelo servidor.
TEMPO_MS_ATUALIZACAO_HTML = 500
# Via websocket é possível mais um cliente conectado e todos devem
# receber as mensagens do servidor, bem como enviar.
# clientes do websocket
clients = []
# tarefa para atualizacao do pagina html
queue_joyx = multiprocessing.Queue()
queue_joyy = multiprocessing.Queue()
queue_joyz = multiprocessing.Queue()
# anemometro
queue_velocidade = multiprocessing.Queue()
queue_direcao = multiprocessing.Queue()
queue_distancia = multiprocessing.Queue()
# usado para o controle da página pelo joystick
queue_joy_botoes = multiprocessing.Queue()
#class NavegadorWEB(multiprocessing.Process):
# def __init__(self):
# multiprocessing.Process.__init__(self)
#
# self.navegador = subprocess.Popen(['epiphany-browser 192.168.42.1:8888'], stdout=subprocess.PIPE, \
# shell=True, preexec_fn=os.setsid)
#
# def run(self):
# while True:
# time.sleep(0.01)
def inicia_navegador():
navegador = subprocess.Popen([NAVEGADOR+' 192.168.42.1:8888'], \
stdout=subprocess.PIPE, \
shell=True, preexec_fn=os.setsid)
def fecha_navegador():
processos = subprocess.Popen(['pgrep', NAVEGADOR], stdout=subprocess.PIPE)
print 'PID dos processos', processos.stdout
for pid in processos.stdout:
os.kill(int(pid), signal.SIGTERM)
try:
time.sleep(3)
os.kill(int(pid), 0)
print u'erro: o processo %d ainda existe' % pid
except OSError as ex:
continue
def get_ip_address():
# Informa o endereço IP da primeira conexão funcionando
# visto em:
# http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
ifname = 'eth0'
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
try:
ifname = 'wlan0'
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
return "127.0.0.1"
def get_ip_address_interface(ifname):
# Informa o endereço de IP de uma rede <ifname>
# visto em:
# http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
return "0.0.0.0"
class MainHandler(tornado.web.RequestHandler):
# Atende ao GET e POST do cliente
def get(self):
# é possível via argumento renderizar a página html com
# informações interessantes, os comentários devem ter o mesmo
# nome da variável da página
self.render("index.html", title="LAVAGEM A SECO", \
ip_host=get_ip_address()+":"+str(options.port), \
msg_status="LIGADO")
class WebSocketHandler(tornado.websocket.WebSocketHandler):
# Todo cliente se encarrega de conectar-se ao servidor websocket.
# Quando existe uma nova conexão é salvo qual cliente foi.
def open(self):
print 'tornado: websocket: aviso: nova conexão de um cliente'
clients.append(self)
self.write_message("connected")
# Quando um cliente envia uma mensagem, esta é a função responsável
# por ler e aqui deve ficar a chamada dos get das filas(queue)
def on_message(self, message):
print 'tornado: websocket: aviso: nova mensagem: %s' % message
q = self.application.settings.get('queue')
q.put(message)
# Para evitar envios de informações a clientes que não existem mais
# é necessário retirá-los da lista
def on_close(self):
print 'tornado: websocket: aviso: conexão finalizada/perdida'
clients.remove(self)
fecha_navegador()
inicia_navegador()
def envia_cmd_websocket(cmd, arg):
# Facilita o trabalho repetitivo de envia mensagem para todo os clientes
# Envia um comando e seu argumento para todos os clientes
for c in clients:
c.write_message(cmd+";"+arg)
def tarefa_atualizacao_html():
# Esta função tem uma chamada periódica, responsável por atualizar os
# elementos atualizáveis na página html
envia_cmd_websocket("lan", get_ip_address())
envia_cmd_websocket("random", str(random.randint(0,1000)))
# para envia algo é necessário que fila tenha algo
if not queue_joyx.empty():
resultado = queue_joyx.get()
envia_cmd_websocket("joyx", str(resultado)[:6])
if not queue_joyy.empty():
resultado = queue_joyy.get()
envia_cmd_websocket("joyy", str(resultado)[:6])
if not queue_joyz.empty():
resultado = queue_joyz.get()
envia_cmd_websocket("joyz", str(resultado)[:6])
if not queue_joy_botoes.empty():
resultado = queue_joy_botoes.get()
envia_cmd_websocket("b", str(resultado))
if not queue_velocidade.empty():
resultado = queue_velocidade.get()
| resultado = queue_direcao.get()
envia_cmd_websocket("d", str(resultado))
if not queue_distancia.empty():
resultado = queue_distancia.get()
envia_cmd_websocket("x", str(resultado)[:6])
def main():
print u"Iniciando o servidor Tornado"
fecha_navegador()
tarefa_controle = multiprocessing.Queue()
# esse loop ler os dados do joystick e envia para o lavos
# sem ele, nenhuma resposta do Joystick é atendida.
controle_loop = controle.ControleLavagem(tarefa_controle, \
queue_joyx, \
queue_joyy, \
queue_joyz, \
queue_joy_botoes, \
queue_velocidade, \
queue_direcao, \
queue_distancia)
controle_loop.daemon = True
controle_loop.start()
# espera um pouco para que a tarefa esteja realmente pronta
# sincronismo é mais interessante?
time.sleep(1)
tarefa_controle.put("Testando Tarefa :)")
parse_command_line()
app = tornado.web.Application(
[
(r"/", MainHandler),
(r"/ws", WebSocketHandler)
],
cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__",
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
xsrf_cookies=True,
debug=options.debug,
autoreload=True,
queue=tarefa_controle,
)
# porta que o servidor irá usar
app.listen(options.port)
# carrega o servidor mas não inicia
main_loop = tornado.ioloop.IOLoop.instance()
# Aqui será a principal tarefa do lavagem, leitura e acionamento
tarefa_atualizacao_html_loop = tornado.ioloop.PeriodicCallback(tarefa_atualizacao_html,\
TEMPO_MS_ATUALIZACAO_HTML, \
io_loop = main_loop)
print u"aviso: tornado: start"
tarefa_atualizacao_html_loop.start()
inicia_navegador()
# o loop do servidor deve ser o último, já que não um daemon
main_loop.start()
if __name__ == "__main__":
main()
| envia_cmd_websocket("v", str(resultado))
if not queue_direcao.empty():
| conditional_block |
main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# PROJETO LAVAGEM A SECO
#
# MAIN
#
# Felipe Bandeira da Silva
# 26 jul 15
#
import logging
import tornado.escape
import tornado.ioloop
import tornado.web
import tornado.options
import tornado.websocket
import tornado.httpserver
import os.path
from tornado.concurrent import Future
from tornado import gen
from tornado.options import define, options, parse_command_line
import socket
import fcntl
import struct
import random
define("port", default=8888, help="run on the given port", type=int)
define("debug", default=False, help="run in debug mode")
import multiprocessing
import controle
import time
import os
import signal
import subprocess
import sys
from platform import uname
#NAVEGADOR = 'epiphany'
NAVEGADOR = 'midori -e Fullscreen -a'
# A pagina HTML contém informações interessantes e que devem ser
# apresentadas ao usuário. Quanto menor o tempo maior o processamento
# por parte do cliente ou dependendo do caso pelo servidor.
TEMPO_MS_ATUALIZACAO_HTML = 500
# Via websocket é possível mais um cliente conectado e todos devem
# receber as mensagens do servidor, bem como enviar.
# clientes do websocket
clients = []
# tarefa para atualizacao do pagina html
queue_joyx = multiprocessing.Queue()
queue_joyy = multiprocessing.Queue()
queue_joyz = multiprocessing.Queue()
# anemometro
queue_velocidade = multiprocessing.Queue()
queue_direcao = multiprocessing.Queue()
queue_distancia = multiprocessing.Queue()
# usado para o controle da página pelo joystick
queue_joy_botoes = multiprocessing.Queue()
#class NavegadorWEB(multiprocessing.Process):
# def __init__(self):
# multiprocessing.Process.__init__(self)
#
# self.navegador = subprocess.Popen(['epiphany-browser 192.168.42.1:8888'], stdout=subprocess.PIPE, \
# shell=True, preexec_fn=os.setsid)
#
# def run(self):
# while True:
# time.sleep(0.01)
def inicia_navegador():
navegador = subprocess.Popen([NAVEGADOR+' 192.168.42.1:8888'], \
stdout=subprocess.PIPE, \
shell=True, preexec_fn=os.setsid)
def fecha_navegador():
processos = subprocess.Popen(['pgrep', NAVEGADOR], stdout=subprocess.PIPE)
print 'PID dos processos', processos.stdout
for pid in processos.stdout:
os.kill(int(pid), signal.SIGTERM)
try:
time.sleep(3)
os.kill(int(pid), 0)
print u'erro: o processo %d ainda existe' % pid
except OSError as ex:
continue
def get_ip_address():
# Informa o endereço IP da primeira conexão funcionando
# visto em:
# http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
ifname = 'eth0'
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
try:
ifname = 'wlan0'
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
return "127.0.0.1"
def get_ip_address_interface(ifname):
# Informa o endereço de IP de uma rede <ifname>
# visto em:
# http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
return socket.inet_ntoa(fcntl.ioctl( \
s.fileno(), \
0x8915, # SIOCGIFADDR \
struct.pack('256s', ifname[:15]) \
)[20:24])
except:
return "0.0.0.0"
class MainHandler(tornado.web.RequestHandler):
# Atende ao GET e POST do cliente
def get(self):
# é possível via argumento renderizar a página html com
# informações interessantes, os comentários devem ter o mesmo
# nome da variável da página
self.render("index | andler(tornado.websocket.WebSocketHandler):
# Todo cliente se encarrega de conectar-se ao servidor websocket.
# Quando existe uma nova conexão é salvo qual cliente foi.
def open(self):
print 'tornado: websocket: aviso: nova conexão de um cliente'
clients.append(self)
self.write_message("connected")
# Quando um cliente envia uma mensagem, esta é a função responsável
# por ler e aqui deve ficar a chamada dos get das filas(queue)
def on_message(self, message):
print 'tornado: websocket: aviso: nova mensagem: %s' % message
q = self.application.settings.get('queue')
q.put(message)
# Para evitar envios de informações a clientes que não existem mais
# é necessário retirá-los da lista
def on_close(self):
print 'tornado: websocket: aviso: conexão finalizada/perdida'
clients.remove(self)
fecha_navegador()
inicia_navegador()
def envia_cmd_websocket(cmd, arg):
# Facilita o trabalho repetitivo de envia mensagem para todo os clientes
# Envia um comando e seu argumento para todos os clientes
for c in clients:
c.write_message(cmd+";"+arg)
def tarefa_atualizacao_html():
# Esta função tem uma chamada periódica, responsável por atualizar os
# elementos atualizáveis na página html
envia_cmd_websocket("lan", get_ip_address())
envia_cmd_websocket("random", str(random.randint(0,1000)))
# para envia algo é necessário que fila tenha algo
if not queue_joyx.empty():
resultado = queue_joyx.get()
envia_cmd_websocket("joyx", str(resultado)[:6])
if not queue_joyy.empty():
resultado = queue_joyy.get()
envia_cmd_websocket("joyy", str(resultado)[:6])
if not queue_joyz.empty():
resultado = queue_joyz.get()
envia_cmd_websocket("joyz", str(resultado)[:6])
if not queue_joy_botoes.empty():
resultado = queue_joy_botoes.get()
envia_cmd_websocket("b", str(resultado))
if not queue_velocidade.empty():
resultado = queue_velocidade.get()
envia_cmd_websocket("v", str(resultado))
if not queue_direcao.empty():
resultado = queue_direcao.get()
envia_cmd_websocket("d", str(resultado))
if not queue_distancia.empty():
resultado = queue_distancia.get()
envia_cmd_websocket("x", str(resultado)[:6])
def main():
print u"Iniciando o servidor Tornado"
fecha_navegador()
tarefa_controle = multiprocessing.Queue()
# esse loop ler os dados do joystick e envia para o lavos
# sem ele, nenhuma resposta do Joystick é atendida.
controle_loop = controle.ControleLavagem(tarefa_controle, \
queue_joyx, \
queue_joyy, \
queue_joyz, \
queue_joy_botoes, \
queue_velocidade, \
queue_direcao, \
queue_distancia)
controle_loop.daemon = True
controle_loop.start()
# espera um pouco para que a tarefa esteja realmente pronta
# sincronismo é mais interessante?
time.sleep(1)
tarefa_controle.put("Testando Tarefa :)")
parse_command_line()
app = tornado.web.Application(
[
(r"/", MainHandler),
(r"/ws", WebSocketHandler)
],
cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__",
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
xsrf_cookies=True,
debug=options.debug,
autoreload=True,
queue=tarefa_controle,
)
# porta que o servidor irá usar
app.listen(options.port)
# carrega o servidor mas não inicia
main_loop = tornado.ioloop.IOLoop.instance()
# Aqui será a principal tarefa do lavagem, leitura e acionamento
tarefa_atualizacao_html_loop = tornado.ioloop.PeriodicCallback(tarefa_atualizacao_html,\
TEMPO_MS_ATUALIZACAO_HTML, \
io_loop = main_loop)
print u"aviso: tornado: start"
tarefa_atualizacao_html_loop.start()
inicia_navegador()
# o loop do servidor deve ser o último, já que não um daemon
main_loop.start()
if __name__ == "__main__":
main()
| .html", title="LAVAGEM A SECO", \
ip_host=get_ip_address()+":"+str(options.port), \
msg_status="LIGADO")
class WebSocketH | identifier_body |
print_context.rs | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use PageSetup; | use std::mem;
glib_wrapper! {
pub struct PrintContext(Object<ffi::GtkPrintContext, PrintContextClass>);
match fn {
get_type => || ffi::gtk_print_context_get_type(),
}
}
impl PrintContext {
pub fn create_pango_context(&self) -> Option<pango::Context> {
unsafe {
from_glib_full(ffi::gtk_print_context_create_pango_context(self.to_glib_none().0))
}
}
pub fn create_pango_layout(&self) -> Option<pango::Layout> {
unsafe {
from_glib_full(ffi::gtk_print_context_create_pango_layout(self.to_glib_none().0))
}
}
pub fn get_cairo_context(&self) -> Option<cairo::Context> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_cairo_context(self.to_glib_none().0))
}
}
pub fn get_dpi_x(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_dpi_x(self.to_glib_none().0)
}
}
pub fn get_dpi_y(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_dpi_y(self.to_glib_none().0)
}
}
pub fn get_hard_margins(&self) -> Option<(f64, f64, f64, f64)> {
unsafe {
let mut top = mem::uninitialized();
let mut bottom = mem::uninitialized();
let mut left = mem::uninitialized();
let mut right = mem::uninitialized();
let ret = from_glib(ffi::gtk_print_context_get_hard_margins(self.to_glib_none().0, &mut top, &mut bottom, &mut left, &mut right));
if ret { Some((top, bottom, left, right)) } else { None }
}
}
pub fn get_height(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_height(self.to_glib_none().0)
}
}
pub fn get_page_setup(&self) -> Option<PageSetup> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_page_setup(self.to_glib_none().0))
}
}
pub fn get_pango_fontmap(&self) -> Option<pango::FontMap> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_pango_fontmap(self.to_glib_none().0))
}
}
pub fn get_width(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_width(self.to_glib_none().0)
}
}
pub fn set_cairo_context(&self, cr: &cairo::Context, dpi_x: f64, dpi_y: f64) {
unsafe {
ffi::gtk_print_context_set_cairo_context(self.to_glib_none().0, mut_override(cr.to_glib_none().0), dpi_x, dpi_y);
}
}
}
impl fmt::Display for PrintContext {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "PrintContext")
}
} | use cairo;
use ffi;
use glib::translate::*;
use pango;
use std::fmt; | random_line_split |
print_context.rs | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use PageSetup;
use cairo;
use ffi;
use glib::translate::*;
use pango;
use std::fmt;
use std::mem;
glib_wrapper! {
pub struct PrintContext(Object<ffi::GtkPrintContext, PrintContextClass>);
match fn {
get_type => || ffi::gtk_print_context_get_type(),
}
}
impl PrintContext {
pub fn create_pango_context(&self) -> Option<pango::Context> {
unsafe {
from_glib_full(ffi::gtk_print_context_create_pango_context(self.to_glib_none().0))
}
}
pub fn create_pango_layout(&self) -> Option<pango::Layout> {
unsafe {
from_glib_full(ffi::gtk_print_context_create_pango_layout(self.to_glib_none().0))
}
}
pub fn get_cairo_context(&self) -> Option<cairo::Context> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_cairo_context(self.to_glib_none().0))
}
}
pub fn get_dpi_x(&self) -> f64 |
pub fn get_dpi_y(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_dpi_y(self.to_glib_none().0)
}
}
pub fn get_hard_margins(&self) -> Option<(f64, f64, f64, f64)> {
unsafe {
let mut top = mem::uninitialized();
let mut bottom = mem::uninitialized();
let mut left = mem::uninitialized();
let mut right = mem::uninitialized();
let ret = from_glib(ffi::gtk_print_context_get_hard_margins(self.to_glib_none().0, &mut top, &mut bottom, &mut left, &mut right));
if ret { Some((top, bottom, left, right)) } else { None }
}
}
pub fn get_height(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_height(self.to_glib_none().0)
}
}
pub fn get_page_setup(&self) -> Option<PageSetup> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_page_setup(self.to_glib_none().0))
}
}
pub fn get_pango_fontmap(&self) -> Option<pango::FontMap> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_pango_fontmap(self.to_glib_none().0))
}
}
pub fn get_width(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_width(self.to_glib_none().0)
}
}
pub fn set_cairo_context(&self, cr: &cairo::Context, dpi_x: f64, dpi_y: f64) {
unsafe {
ffi::gtk_print_context_set_cairo_context(self.to_glib_none().0, mut_override(cr.to_glib_none().0), dpi_x, dpi_y);
}
}
}
impl fmt::Display for PrintContext {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "PrintContext")
}
}
| {
unsafe {
ffi::gtk_print_context_get_dpi_x(self.to_glib_none().0)
}
} | identifier_body |
print_context.rs | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use PageSetup;
use cairo;
use ffi;
use glib::translate::*;
use pango;
use std::fmt;
use std::mem;
glib_wrapper! {
pub struct PrintContext(Object<ffi::GtkPrintContext, PrintContextClass>);
match fn {
get_type => || ffi::gtk_print_context_get_type(),
}
}
impl PrintContext {
pub fn create_pango_context(&self) -> Option<pango::Context> {
unsafe {
from_glib_full(ffi::gtk_print_context_create_pango_context(self.to_glib_none().0))
}
}
pub fn create_pango_layout(&self) -> Option<pango::Layout> {
unsafe {
from_glib_full(ffi::gtk_print_context_create_pango_layout(self.to_glib_none().0))
}
}
pub fn get_cairo_context(&self) -> Option<cairo::Context> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_cairo_context(self.to_glib_none().0))
}
}
pub fn get_dpi_x(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_dpi_x(self.to_glib_none().0)
}
}
pub fn get_dpi_y(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_dpi_y(self.to_glib_none().0)
}
}
pub fn get_hard_margins(&self) -> Option<(f64, f64, f64, f64)> {
unsafe {
let mut top = mem::uninitialized();
let mut bottom = mem::uninitialized();
let mut left = mem::uninitialized();
let mut right = mem::uninitialized();
let ret = from_glib(ffi::gtk_print_context_get_hard_margins(self.to_glib_none().0, &mut top, &mut bottom, &mut left, &mut right));
if ret { Some((top, bottom, left, right)) } else { None }
}
}
pub fn get_height(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_height(self.to_glib_none().0)
}
}
pub fn get_page_setup(&self) -> Option<PageSetup> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_page_setup(self.to_glib_none().0))
}
}
pub fn | (&self) -> Option<pango::FontMap> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_pango_fontmap(self.to_glib_none().0))
}
}
pub fn get_width(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_width(self.to_glib_none().0)
}
}
pub fn set_cairo_context(&self, cr: &cairo::Context, dpi_x: f64, dpi_y: f64) {
unsafe {
ffi::gtk_print_context_set_cairo_context(self.to_glib_none().0, mut_override(cr.to_glib_none().0), dpi_x, dpi_y);
}
}
}
impl fmt::Display for PrintContext {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "PrintContext")
}
}
| get_pango_fontmap | identifier_name |
print_context.rs | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use PageSetup;
use cairo;
use ffi;
use glib::translate::*;
use pango;
use std::fmt;
use std::mem;
glib_wrapper! {
pub struct PrintContext(Object<ffi::GtkPrintContext, PrintContextClass>);
match fn {
get_type => || ffi::gtk_print_context_get_type(),
}
}
impl PrintContext {
pub fn create_pango_context(&self) -> Option<pango::Context> {
unsafe {
from_glib_full(ffi::gtk_print_context_create_pango_context(self.to_glib_none().0))
}
}
pub fn create_pango_layout(&self) -> Option<pango::Layout> {
unsafe {
from_glib_full(ffi::gtk_print_context_create_pango_layout(self.to_glib_none().0))
}
}
pub fn get_cairo_context(&self) -> Option<cairo::Context> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_cairo_context(self.to_glib_none().0))
}
}
pub fn get_dpi_x(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_dpi_x(self.to_glib_none().0)
}
}
pub fn get_dpi_y(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_dpi_y(self.to_glib_none().0)
}
}
pub fn get_hard_margins(&self) -> Option<(f64, f64, f64, f64)> {
unsafe {
let mut top = mem::uninitialized();
let mut bottom = mem::uninitialized();
let mut left = mem::uninitialized();
let mut right = mem::uninitialized();
let ret = from_glib(ffi::gtk_print_context_get_hard_margins(self.to_glib_none().0, &mut top, &mut bottom, &mut left, &mut right));
if ret | else { None }
}
}
pub fn get_height(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_height(self.to_glib_none().0)
}
}
pub fn get_page_setup(&self) -> Option<PageSetup> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_page_setup(self.to_glib_none().0))
}
}
pub fn get_pango_fontmap(&self) -> Option<pango::FontMap> {
unsafe {
from_glib_none(ffi::gtk_print_context_get_pango_fontmap(self.to_glib_none().0))
}
}
pub fn get_width(&self) -> f64 {
unsafe {
ffi::gtk_print_context_get_width(self.to_glib_none().0)
}
}
pub fn set_cairo_context(&self, cr: &cairo::Context, dpi_x: f64, dpi_y: f64) {
unsafe {
ffi::gtk_print_context_set_cairo_context(self.to_glib_none().0, mut_override(cr.to_glib_none().0), dpi_x, dpi_y);
}
}
}
impl fmt::Display for PrintContext {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "PrintContext")
}
}
| { Some((top, bottom, left, right)) } | conditional_block |
trie.rs | /* Copyright 2017 Joel Pedraza
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/*
* A Radix 26 Trie
*
* I'd prefer if if each letter was represented as Enum rather than a u8 (for safety)
* Can they be used without sacrifing perf?
*/
use boggle_util;
use bitset::BitSet32;
use bitset::IndexIter32;
use std::mem;
type Node = Option<Box<Trie>>;
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum NodeType {
Prefix,
Word(usize),
}
#[derive(Debug)]
pub struct | {
node_type: NodeType,
children: [Node; boggle_util::ALPHABET_SIZE],
child_set: BitSet32,
}
impl Trie {
pub fn new() -> Self {
Trie {
node_type: NodeType::Prefix,
children: Default::default(),
child_set: BitSet32::new(),
}
}
pub fn node_type(&self) -> NodeType {
self.node_type
}
pub fn insert(&mut self, s: &str, id: usize) -> bool {
if boggle_util::is_alpha(s) {
self.ins(s.to_lowercase().as_bytes(), id);
true
} else {
false
}
}
#[inline]
fn ins(&mut self, s: &[u8], id: usize) -> () {
let first = boggle_util::ascii_byte_to_idx(s[0]);
if self.children[first].is_none() {
self.child_set.add(first as u32);
mem::replace(&mut (self.children[first]), Some(Box::new(Trie::new())));
}
let child = self.children[first].as_mut().unwrap();
if s.len() > 1 {
child.ins(&s[1..], id);
} else {
child.node_type = NodeType::Word(id);
}
}
#[allow(dead_code)]
pub fn contains(&self, s: &str) -> Option<NodeType> {
if boggle_util::is_alpha(s) {
self.cns(s.to_lowercase().as_bytes())
} else {
None
}
}
#[inline]
fn cns(&self, s: &[u8]) -> Option<NodeType> {
let first = boggle_util::ascii_byte_to_idx(s[0]);
if let Some(child) = self.children[first].as_ref() {
if s.len() == 1 {
Some(child.node_type)
} else {
let rest = &s[1..];
child.cns(rest)
}
} else {
None
}
}
pub fn iter(&self) -> TrieIterator {
TrieIterator::new(self)
}
}
pub struct TrieIterator<'a> {
trie: &'a Trie,
iter: IndexIter32<'a>,
}
impl<'a> TrieIterator<'a> {
fn new(trie: &'a Trie) -> TrieIterator<'a> {
TrieIterator {
trie: trie,
iter: trie.child_set.iter_ones(),
}
}
}
impl<'a> Iterator for TrieIterator<'a> {
type Item = (&'a Trie, u8);
fn next(&mut self) -> Option<(&'a Trie, u8)> {
match self.iter.next() {
Some(i) => {
match self.trie.children[i as usize] {
Some(ref trie) => Some((trie, i as u8)),
None => None
}
},
None => None
}
}
}
//==============================================================================
#[cfg(test)]
mod test{
use std::str;
use super::Trie;
use super::NodeType;
#[test]
fn valid_words_are_inserted() {
let mut trie = Trie::new();
assert_eq!(trie.contains("a"), None);
assert_eq!(trie.contains("abba"), None);
assert!(trie.insert("abba", 0));
assert_eq!(trie.contains("a"), Some(NodeType::Prefix));
assert_eq!(trie.contains("ab"), Some(NodeType::Prefix));
assert_eq!(trie.contains("abb"), Some(NodeType::Prefix));
assert_eq!(trie.contains("abba"), Some(NodeType::Word(0)));
}
#[test]
fn invalid_words_are_not_inserted() {
let mut trie = Trie::new();
let mut id = 0;
for s in ('\u{0}' as u8 .. 'A' as u8)
.chain('[' as u8 .. 'a' as u8)
.chain('{' as u8 .. '\u{ff}' as u8)
.map(|b| unsafe { str::from_utf8_unchecked(&[b]) }.to_owned() ) {
id += 1;
assert!(!trie.insert(&s, id));
assert_eq!(trie.contains(&s), None);
}
}
#[test]
fn is_case_insensitive() {
let mut trie = Trie::new();
trie.insert("a", 0);
assert_eq!(trie.contains("a"), Some(NodeType::Word(0)));
assert_eq!(trie.contains("A"), Some(NodeType::Word(0)));
trie.insert("B", 1);
assert_eq!(trie.contains("b"), Some(NodeType::Word(1)));
assert_eq!(trie.contains("B"), Some(NodeType::Word(1)));
}
} | Trie | identifier_name |
trie.rs | /* Copyright 2017 Joel Pedraza
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/*
* A Radix 26 Trie
*
* I'd prefer if if each letter was represented as Enum rather than a u8 (for safety)
* Can they be used without sacrifing perf?
*/
use boggle_util;
use bitset::BitSet32;
use bitset::IndexIter32;
use std::mem;
type Node = Option<Box<Trie>>;
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum NodeType {
Prefix,
Word(usize),
}
#[derive(Debug)]
pub struct Trie {
node_type: NodeType,
children: [Node; boggle_util::ALPHABET_SIZE],
child_set: BitSet32,
}
impl Trie {
pub fn new() -> Self {
Trie {
node_type: NodeType::Prefix,
children: Default::default(),
child_set: BitSet32::new(),
}
}
pub fn node_type(&self) -> NodeType {
self.node_type
}
pub fn insert(&mut self, s: &str, id: usize) -> bool {
if boggle_util::is_alpha(s) {
self.ins(s.to_lowercase().as_bytes(), id);
true
} else {
false
}
}
#[inline]
fn ins(&mut self, s: &[u8], id: usize) -> () {
let first = boggle_util::ascii_byte_to_idx(s[0]);
if self.children[first].is_none() {
self.child_set.add(first as u32);
mem::replace(&mut (self.children[first]), Some(Box::new(Trie::new())));
}
let child = self.children[first].as_mut().unwrap();
if s.len() > 1 {
child.ins(&s[1..], id);
} else {
child.node_type = NodeType::Word(id);
}
}
#[allow(dead_code)]
pub fn contains(&self, s: &str) -> Option<NodeType> {
if boggle_util::is_alpha(s) {
self.cns(s.to_lowercase().as_bytes())
} else {
None
}
}
#[inline]
fn cns(&self, s: &[u8]) -> Option<NodeType> {
let first = boggle_util::ascii_byte_to_idx(s[0]);
if let Some(child) = self.children[first].as_ref() {
if s.len() == 1 {
Some(child.node_type)
} else {
let rest = &s[1..];
child.cns(rest)
}
} else {
None
}
}
pub fn iter(&self) -> TrieIterator {
TrieIterator::new(self)
}
}
| trie: &'a Trie,
iter: IndexIter32<'a>,
}
impl<'a> TrieIterator<'a> {
fn new(trie: &'a Trie) -> TrieIterator<'a> {
TrieIterator {
trie: trie,
iter: trie.child_set.iter_ones(),
}
}
}
impl<'a> Iterator for TrieIterator<'a> {
type Item = (&'a Trie, u8);
fn next(&mut self) -> Option<(&'a Trie, u8)> {
match self.iter.next() {
Some(i) => {
match self.trie.children[i as usize] {
Some(ref trie) => Some((trie, i as u8)),
None => None
}
},
None => None
}
}
}
//==============================================================================
#[cfg(test)]
mod test{
use std::str;
use super::Trie;
use super::NodeType;
#[test]
fn valid_words_are_inserted() {
let mut trie = Trie::new();
assert_eq!(trie.contains("a"), None);
assert_eq!(trie.contains("abba"), None);
assert!(trie.insert("abba", 0));
assert_eq!(trie.contains("a"), Some(NodeType::Prefix));
assert_eq!(trie.contains("ab"), Some(NodeType::Prefix));
assert_eq!(trie.contains("abb"), Some(NodeType::Prefix));
assert_eq!(trie.contains("abba"), Some(NodeType::Word(0)));
}
#[test]
fn invalid_words_are_not_inserted() {
let mut trie = Trie::new();
let mut id = 0;
for s in ('\u{0}' as u8 .. 'A' as u8)
.chain('[' as u8 .. 'a' as u8)
.chain('{' as u8 .. '\u{ff}' as u8)
.map(|b| unsafe { str::from_utf8_unchecked(&[b]) }.to_owned() ) {
id += 1;
assert!(!trie.insert(&s, id));
assert_eq!(trie.contains(&s), None);
}
}
#[test]
fn is_case_insensitive() {
let mut trie = Trie::new();
trie.insert("a", 0);
assert_eq!(trie.contains("a"), Some(NodeType::Word(0)));
assert_eq!(trie.contains("A"), Some(NodeType::Word(0)));
trie.insert("B", 1);
assert_eq!(trie.contains("b"), Some(NodeType::Word(1)));
assert_eq!(trie.contains("B"), Some(NodeType::Word(1)));
}
} |
pub struct TrieIterator<'a> { | random_line_split |
trie.rs | /* Copyright 2017 Joel Pedraza
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/*
* A Radix 26 Trie
*
* I'd prefer if if each letter was represented as Enum rather than a u8 (for safety)
* Can they be used without sacrifing perf?
*/
use boggle_util;
use bitset::BitSet32;
use bitset::IndexIter32;
use std::mem;
type Node = Option<Box<Trie>>;
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum NodeType {
Prefix,
Word(usize),
}
#[derive(Debug)]
pub struct Trie {
node_type: NodeType,
children: [Node; boggle_util::ALPHABET_SIZE],
child_set: BitSet32,
}
impl Trie {
pub fn new() -> Self {
Trie {
node_type: NodeType::Prefix,
children: Default::default(),
child_set: BitSet32::new(),
}
}
pub fn node_type(&self) -> NodeType {
self.node_type
}
pub fn insert(&mut self, s: &str, id: usize) -> bool {
if boggle_util::is_alpha(s) {
self.ins(s.to_lowercase().as_bytes(), id);
true
} else {
false
}
}
#[inline]
fn ins(&mut self, s: &[u8], id: usize) -> () {
let first = boggle_util::ascii_byte_to_idx(s[0]);
if self.children[first].is_none() {
self.child_set.add(first as u32);
mem::replace(&mut (self.children[first]), Some(Box::new(Trie::new())));
}
let child = self.children[first].as_mut().unwrap();
if s.len() > 1 {
child.ins(&s[1..], id);
} else {
child.node_type = NodeType::Word(id);
}
}
#[allow(dead_code)]
pub fn contains(&self, s: &str) -> Option<NodeType> {
if boggle_util::is_alpha(s) {
self.cns(s.to_lowercase().as_bytes())
} else {
None
}
}
#[inline]
fn cns(&self, s: &[u8]) -> Option<NodeType> {
let first = boggle_util::ascii_byte_to_idx(s[0]);
if let Some(child) = self.children[first].as_ref() {
if s.len() == 1 {
Some(child.node_type)
} else |
} else {
None
}
}
pub fn iter(&self) -> TrieIterator {
TrieIterator::new(self)
}
}
pub struct TrieIterator<'a> {
trie: &'a Trie,
iter: IndexIter32<'a>,
}
impl<'a> TrieIterator<'a> {
fn new(trie: &'a Trie) -> TrieIterator<'a> {
TrieIterator {
trie: trie,
iter: trie.child_set.iter_ones(),
}
}
}
impl<'a> Iterator for TrieIterator<'a> {
type Item = (&'a Trie, u8);
fn next(&mut self) -> Option<(&'a Trie, u8)> {
match self.iter.next() {
Some(i) => {
match self.trie.children[i as usize] {
Some(ref trie) => Some((trie, i as u8)),
None => None
}
},
None => None
}
}
}
//==============================================================================
#[cfg(test)]
mod test{
use std::str;
use super::Trie;
use super::NodeType;
#[test]
fn valid_words_are_inserted() {
let mut trie = Trie::new();
assert_eq!(trie.contains("a"), None);
assert_eq!(trie.contains("abba"), None);
assert!(trie.insert("abba", 0));
assert_eq!(trie.contains("a"), Some(NodeType::Prefix));
assert_eq!(trie.contains("ab"), Some(NodeType::Prefix));
assert_eq!(trie.contains("abb"), Some(NodeType::Prefix));
assert_eq!(trie.contains("abba"), Some(NodeType::Word(0)));
}
#[test]
fn invalid_words_are_not_inserted() {
let mut trie = Trie::new();
let mut id = 0;
for s in ('\u{0}' as u8 .. 'A' as u8)
.chain('[' as u8 .. 'a' as u8)
.chain('{' as u8 .. '\u{ff}' as u8)
.map(|b| unsafe { str::from_utf8_unchecked(&[b]) }.to_owned() ) {
id += 1;
assert!(!trie.insert(&s, id));
assert_eq!(trie.contains(&s), None);
}
}
#[test]
fn is_case_insensitive() {
let mut trie = Trie::new();
trie.insert("a", 0);
assert_eq!(trie.contains("a"), Some(NodeType::Word(0)));
assert_eq!(trie.contains("A"), Some(NodeType::Word(0)));
trie.insert("B", 1);
assert_eq!(trie.contains("b"), Some(NodeType::Word(1)));
assert_eq!(trie.contains("B"), Some(NodeType::Word(1)));
}
} | {
let rest = &s[1..];
child.cns(rest)
} | conditional_block |
_hasnolatorlon.py | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gen.filters.rules/Place/_HasNoLatOrLon.py
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .. import Rule
#-------------------------------------------------------------------------
#
# HasNoLatOrLon
#
#-------------------------------------------------------------------------
class HasNoLatOrLon(Rule):
"""Rule that checks if Latitude or Longitude are not given"""
labels = []
name = _('Places with no latitude or longitude given')
description = _("Matches places with empty latitude or longitude")
category = _('Position filters')
def | (self,db,place):
if place.get_latitude().strip and place.get_longitude().strip():
return False
return True
| apply | identifier_name |
_hasnolatorlon.py | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gen.filters.rules/Place/_HasNoLatOrLon.py
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .. import Rule
#-------------------------------------------------------------------------
#
# HasNoLatOrLon
#
#-------------------------------------------------------------------------
class HasNoLatOrLon(Rule):
"""Rule that checks if Latitude or Longitude are not given"""
labels = []
name = _('Places with no latitude or longitude given')
description = _("Matches places with empty latitude or longitude")
category = _('Position filters')
def apply(self,db,place):
if place.get_latitude().strip and place.get_longitude().strip():
|
return True
| return False | conditional_block |
_hasnolatorlon.py | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gen.filters.rules/Place/_HasNoLatOrLon.py
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .. import Rule
#-------------------------------------------------------------------------
#
# HasNoLatOrLon
#
#-------------------------------------------------------------------------
class HasNoLatOrLon(Rule):
"""Rule that checks if Latitude or Longitude are not given"""
labels = []
name = _('Places with no latitude or longitude given')
description = _("Matches places with empty latitude or longitude")
category = _('Position filters')
def apply(self,db,place):
| if place.get_latitude().strip and place.get_longitude().strip():
return False
return True | identifier_body |
|
_hasnolatorlon.py | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | # You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gen.filters.rules/Place/_HasNoLatOrLon.py
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .. import Rule
#-------------------------------------------------------------------------
#
# HasNoLatOrLon
#
#-------------------------------------------------------------------------
class HasNoLatOrLon(Rule):
"""Rule that checks if Latitude or Longitude are not given"""
labels = []
name = _('Places with no latitude or longitude given')
description = _("Matches places with empty latitude or longitude")
category = _('Position filters')
def apply(self,db,place):
if place.get_latitude().strip and place.get_longitude().strip():
return False
return True | # GNU General Public License for more details.
# | random_line_split |
sort.py | #!/usr/bin/env python
import sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', type=str, action='store', dest='input', default=None, help="Input file")
args = parser.parse_args()
stats = dict()
if args.input is None:
print "Error: No input file"
with open(args.input) as in_file:
for line in in_file.readlines():
time = int(line.split()[0])
tx_bytes = int(line.split()[1])
stats[time] = tx_bytes
stats = sorted(stats.items())
start_time = stats[0][0]
prev_tx = stats[0][1]
no_traffic_flag = True
for time, tx_bytes in stats:
if no_traffic_flag:
if tx_bytes > (prev_tx+100000):
no_traffic_flag = False
start_time, prev_tx = time, tx_bytes
else:
print (time-start_time), (tx_bytes-prev_tx)
prev_tx = tx_bytes | if __name__ == "__main__":
main() | random_line_split |
|
sort.py | #!/usr/bin/env python
import sys, argparse
def | ():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', type=str, action='store', dest='input', default=None, help="Input file")
args = parser.parse_args()
stats = dict()
if args.input is None:
print "Error: No input file"
with open(args.input) as in_file:
for line in in_file.readlines():
time = int(line.split()[0])
tx_bytes = int(line.split()[1])
stats[time] = tx_bytes
stats = sorted(stats.items())
start_time = stats[0][0]
prev_tx = stats[0][1]
no_traffic_flag = True
for time, tx_bytes in stats:
if no_traffic_flag:
if tx_bytes > (prev_tx+100000):
no_traffic_flag = False
start_time, prev_tx = time, tx_bytes
else:
print (time-start_time), (tx_bytes-prev_tx)
prev_tx = tx_bytes
if __name__ == "__main__":
main()
| main | identifier_name |
sort.py | #!/usr/bin/env python
import sys, argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', type=str, action='store', dest='input', default=None, help="Input file")
args = parser.parse_args()
stats = dict()
if args.input is None:
|
with open(args.input) as in_file:
for line in in_file.readlines():
time = int(line.split()[0])
tx_bytes = int(line.split()[1])
stats[time] = tx_bytes
stats = sorted(stats.items())
start_time = stats[0][0]
prev_tx = stats[0][1]
no_traffic_flag = True
for time, tx_bytes in stats:
if no_traffic_flag:
if tx_bytes > (prev_tx+100000):
no_traffic_flag = False
start_time, prev_tx = time, tx_bytes
else:
print (time-start_time), (tx_bytes-prev_tx)
prev_tx = tx_bytes
if __name__ == "__main__":
main()
| print "Error: No input file" | conditional_block |
sort.py | #!/usr/bin/env python
import sys, argparse
def main():
|
if __name__ == "__main__":
main()
| parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', type=str, action='store', dest='input', default=None, help="Input file")
args = parser.parse_args()
stats = dict()
if args.input is None:
print "Error: No input file"
with open(args.input) as in_file:
for line in in_file.readlines():
time = int(line.split()[0])
tx_bytes = int(line.split()[1])
stats[time] = tx_bytes
stats = sorted(stats.items())
start_time = stats[0][0]
prev_tx = stats[0][1]
no_traffic_flag = True
for time, tx_bytes in stats:
if no_traffic_flag:
if tx_bytes > (prev_tx+100000):
no_traffic_flag = False
start_time, prev_tx = time, tx_bytes
else:
print (time-start_time), (tx_bytes-prev_tx)
prev_tx = tx_bytes | identifier_body |
menu.js | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file | * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var App = require('app');
// This logic is substituted by MainAdminView for now.
App.MainAdminMenuView = Em.CollectionView.extend({
//contentBinding: 'controller',
/*content: [
{
route:'user',
label:'Users'
},
{
route:'security',
label:'Security'
},
{
route:'cluster',
label:'Cluster'
}
/*,
{
route:'authentication',
label:'Authentication'
},
{
route: 'user',
label: 'Users'
},
{
route: 'security',
label: 'Security'
}/*,
{
route:'authentication',
label:'Authentication'
},
{
route:'audit',
label:'Audit'
}*/
/*,
{
route:'advanced',
label:'Advanced'
}
],
tagName: "ul",
classNames: ["nav", "nav-list"],
init: function () {
this._super();
this.activateView(); // default selected menu
},
activateView: function () {
var route = App.get('router.mainAdminController.category');
$.each(this._childViews, function () {
this.set('active', (this.get('content.route') == route ? "active" : ""));
});
}.observes('App.router.mainAdminController.category'),
itemViewClass:Em.View.extend({
classNameBindings:["active"],
active:"",
template:Ember.Handlebars.compile('<a class="text-center" {{action adminNavigate view.content.route }} href="#"> {{unbound view.content.label}}</a>')
})
*/
}); | * to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* | random_line_split |
visitors.js | import * as virtualTypes from "./path/lib/virtual-types";
import * as messages from "babel-messages";
import * as t from "babel-types";
import clone from "lodash/clone";
/**
* explode() will take a visitor object with all of the various shorthands
* that we support, and validates & normalizes it into a common format, ready
* to be used in traversal
*
* The various shorthands are:
* * `Identifier() { ... }` -> `Identifier: { enter() { ... } }`
* * `"Identifier|NumericLiteral": { ... }` -> `Identifier: { ... }, NumericLiteral: { ... }`
* * Aliases in `babel-types`: e.g. `Property: { ... }` -> `ObjectProperty: { ... }, ClassProperty: { ... }`
*
* Other normalizations are:
* * Visitors of virtual types are wrapped, so that they are only visited when
* their dynamic check passes
* * `enter` and `exit` functions are wrapped in arrays, to ease merging of
* visitors
*/
export function explode(visitor) {
if (visitor._exploded) return visitor;
visitor._exploded = true;
// normalise pipes
for (let nodeType in visitor) {
if (shouldIgnoreKey(nodeType)) continue;
let parts: Array<string> = nodeType.split("|");
if (parts.length === 1) continue;
let fns = visitor[nodeType];
delete visitor[nodeType];
for (let part of parts) {
visitor[part] = fns;
}
}
// verify data structure
verify(visitor);
// make sure there's no __esModule type since this is because we're using loose mode
// and it sets __esModule to be enumerable on all modules :(
delete visitor.__esModule;
// ensure visitors are objects
ensureEntranceObjects(visitor);
// ensure enter/exit callbacks are arrays
ensureCallbackArrays(visitor);
// add type wrappers
for (let nodeType of (Object.keys(visitor): Array)) {
if (shouldIgnoreKey(nodeType)) continue;
let wrapper = virtualTypes[nodeType];
if (!wrapper) continue;
// wrap all the functions
let fns = visitor[nodeType];
for (let type in fns) {
fns[type] = wrapCheck(wrapper, fns[type]);
}
// clear it from the visitor
delete visitor[nodeType];
if (wrapper.types) {
for (let type of (wrapper.types: Array<string>)) {
// merge the visitor if necessary or just put it back in
if (visitor[type]) {
mergePair(visitor[type], fns);
} else {
visitor[type] = fns;
}
}
} else {
mergePair(visitor, fns);
}
}
// add aliases
for (let nodeType in visitor) {
if (shouldIgnoreKey(nodeType)) continue;
let fns = visitor[nodeType];
let aliases: ?Array<string> = t.FLIPPED_ALIAS_KEYS[nodeType];
let deprecratedKey = t.DEPRECATED_KEYS[nodeType];
if (deprecratedKey) {
console.trace(`Visitor defined for ${nodeType} but it has been renamed to ${deprecratedKey}`);
aliases = [deprecratedKey];
}
if (!aliases) continue;
// clear it from the visitor
delete visitor[nodeType];
for (let alias of aliases) {
let existing = visitor[alias];
if (existing) {
mergePair(existing, fns);
} else {
visitor[alias] = clone(fns);
}
}
}
for (let nodeType in visitor) {
if (shouldIgnoreKey(nodeType)) continue;
ensureCallbackArrays(visitor[nodeType]);
}
return visitor;
}
export function verify(visitor) {
if (visitor._verified) return;
if (typeof visitor === "function") {
throw new Error(messages.get("traverseVerifyRootFunction"));
}
for (let nodeType in visitor) {
if (nodeType === "enter" || nodeType === "exit") {
validateVisitorMethods(nodeType, visitor[nodeType]);
}
if (shouldIgnoreKey(nodeType)) continue;
if (t.TYPES.indexOf(nodeType) < 0) {
throw new Error(messages.get("traverseVerifyNodeType", nodeType));
}
let visitors = visitor[nodeType];
if (typeof visitors === "object") {
for (let visitorKey in visitors) {
if (visitorKey === "enter" || visitorKey === "exit") {
// verify that it just contains functions
validateVisitorMethods(`${nodeType}.${visitorKey}`, visitors[visitorKey]);
} else {
throw new Error(messages.get("traverseVerifyVisitorProperty", nodeType, visitorKey));
}
}
}
}
visitor._verified = true;
}
function validateVisitorMethods(path, val) |
export function merge(visitors: Array, states: Array = []) {
let rootVisitor = {};
for (let i = 0; i < visitors.length; i++) {
let visitor = visitors[i];
let state = states[i];
explode(visitor);
for (let type in visitor) {
let visitorType = visitor[type];
// if we have state then overload the callbacks to take it
if (state) visitorType = wrapWithState(visitorType, state);
let nodeVisitor = rootVisitor[type] = rootVisitor[type] || {};
mergePair(nodeVisitor, visitorType);
}
}
return rootVisitor;
}
function wrapWithState(oldVisitor, state) {
let newVisitor = {};
for (let key in oldVisitor) {
let fns = oldVisitor[key];
// not an enter/exit array of callbacks
if (!Array.isArray(fns)) continue;
fns = fns.map(function (fn) {
let newFn = function (path) {
return fn.call(state, path, state);
};
newFn.toString = () => fn.toString();
return newFn;
});
newVisitor[key] = fns;
}
return newVisitor;
}
function ensureEntranceObjects(obj) {
for (let key in obj) {
if (shouldIgnoreKey(key)) continue;
let fns = obj[key];
if (typeof fns === "function") {
obj[key] = { enter: fns };
}
}
}
function ensureCallbackArrays(obj) {
if (obj.enter && !Array.isArray(obj.enter)) obj.enter = [obj.enter];
if (obj.exit && !Array.isArray(obj.exit)) obj.exit = [obj.exit];
}
function wrapCheck(wrapper, fn) {
let newFn = function (path) {
if (wrapper.checkPath(path)) {
return fn.apply(this, arguments);
}
};
newFn.toString = () => fn.toString();
return newFn;
}
function shouldIgnoreKey(key) {
// internal/hidden key
if (key[0] === "_") return true;
// ignore function keys
if (key === "enter" || key === "exit" || key === "shouldSkip") return true;
// ignore other options
if (key === "blacklist" || key === "noScope" || key === "skipKeys") return true;
return false;
}
function mergePair(dest, src) {
for (let key in src) {
dest[key] = [].concat(dest[key] || [], src[key]);
}
}
| {
let fns = [].concat(val);
for (let fn of fns) {
if (typeof fn !== "function") {
throw new TypeError(`Non-function found defined in ${path} with type ${typeof fn}`);
}
}
} | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.