commit (stringlengths 40) | subject (stringlengths 4–1.73k) | repos (stringlengths 5–127k) | old_file (stringlengths 2–751) | new_file (stringlengths 2–751) | new_contents (stringlengths 1–8.98k) | old_contents (stringlengths 0–6.59k) | license (stringclasses 13 values) | lang (stringclasses 23 values)
---|---|---|---|---|---|---|---|---
e84b2e11088878d44433bfc767b8abba79eca0a7
|
use environment variable for config folder
|
Rediker-Software/litle-sdk-for-python,LitleCo/litle-sdk-for-python
|
litleSdkPython/Configuration.py
|
litleSdkPython/Configuration.py
|
#Copyright (c) 2011-2012 Litle & Co.
#
#Permission is hereby granted, free of charge, to any person
#obtaining a copy of this software and associated documentation
#files (the "Software"), to deal in the Software without
#restriction, including without limitation the rights to use,
#copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the
#Software is furnished to do so, subject to the following
#conditions:
#
#The above copyright notice and this permission notice shall be
#included in all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
#EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
#OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
#NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
#HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
#WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
#FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
#OTHER DEALINGS IN THE SOFTWARE.
import os
class Configuration(object):
    def __init__(self):
        self.version = 8.25
        self.reportGroup = 'Default Report Group'
        self._url = 'Sandbox'
        self.proxy = None
        self.timeout = 65
        self.printXml = False
        self.configFolder = os.environ['LITLE_SDK_CONFIG'] \
            if 'LITLE_SDK_CONFIG' in os.environ else os.path.expanduser('~')
        self.__LITLE_SDK_CONFIG = '.litle_Python_SDK_config'

    @property
    def url(self):
        return self._urlMapper(self._url)

    @url.setter
    def url(self, value):
        self._url = value

    def getConfigFileName(self):
        return self.__LITLE_SDK_CONFIG

    def _urlMapper(self, target):
        if target == "Cert":
            return 'https://cert.litle.com/vap/communicator/online'
        elif target == "Sandbox":
            return 'https://www.testlitle.com/sandbox/communicator/online'
        elif target == "Precert":
            return 'https://precert.litle.com/vap/communicator/online'
        elif target == "Prod":
            return 'https://production.litle.com/vap/communicator/online'
        else:
            return target
|
#Copyright (c) 2011-2012 Litle & Co.
#
#Permission is hereby granted, free of charge, to any person
#obtaining a copy of this software and associated documentation
#files (the "Software"), to deal in the Software without
#restriction, including without limitation the rights to use,
#copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the
#Software is furnished to do so, subject to the following
#conditions:
#
#The above copyright notice and this permission notice shall be
#included in all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
#EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
#OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
#NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
#HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
#WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
#FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
#OTHER DEALINGS IN THE SOFTWARE.
import os
class Configuration(object):
    def __init__(self):
        self.version = 8.25
        self.reportGroup = 'Default Report Group'
        self._url = 'Sandbox'
        self.proxy = None
        self.timeout = 65
        self.printXml = False
        self.configFolder = os.path.expanduser('~')
        self.__LITLE_SDK_CONFIG = '.litle_Python_SDK_config'

    @property
    def url(self):
        return self._urlMapper(self._url)

    @url.setter
    def url(self, value):
        self._url = value

    def getConfigFileName(self):
        return self.__LITLE_SDK_CONFIG

    def _urlMapper(self, target):
        if target == "Cert":
            return 'https://cert.litle.com/vap/communicator/online'
        elif target == "Sandbox":
            return 'https://www.testlitle.com/sandbox/communicator/online'
        elif target == "Precert":
            return 'https://precert.litle.com/vap/communicator/online'
        elif target == "Prod":
            return 'https://production.litle.com/vap/communicator/online'
        else:
            return target
|
mit
|
Python
|
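A quick usage sketch for the change above (hypothetical: the import path simply mirrors the file shown, and `/etc/litle` is an arbitrary example folder):

```python
import os
from litleSdkPython.Configuration import Configuration

print(Configuration().configFolder)  # no env var set: os.path.expanduser('~')

os.environ['LITLE_SDK_CONFIG'] = '/etc/litle'  # hypothetical folder
print(Configuration().configFolder)  # now '/etc/litle'
```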
24e42d5d4a21c1f3ffd36a163b89ee7f39375945
|
Update P05_trafficLight add assertion to check for red light
|
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
|
books/AutomateTheBoringStuffWithPython/Chapter10/P05_trafficLight.py
|
books/AutomateTheBoringStuffWithPython/Chapter10/P05_trafficLight.py
|
# This program emulates traffic lights at intersections with assertions
market_2nd = {"ns": "green", "ew": "red"}
mission_16th = {"ns": "red", "ew": "green"}
def switchLights(stoplight):
    for key in stoplight.keys():
        if stoplight[key] == "green":
            stoplight[key] = "yellow"
        elif stoplight[key] == "yellow":
            stoplight[key] = "red"
        elif stoplight[key] == "red":
            stoplight[key] = "green"
    assert "red" in stoplight.values(), "Neither light is red! " + str(stoplight)
switchLights(market_2nd)
|
# This program emulates traffic lights at intersections with assertions
market_2nd = {"ns": "green", "ew": "red"}
mission_16th = {"ns": "red", "ew": "green"}
def switchLights(stoplight):
    for key in stoplight.keys():
        if stoplight[key] == "green":
            stoplight[key] = "yellow"
        elif stoplight[key] == "yellow":
            stoplight[key] = "red"
        elif stoplight[key] == "red":
            stoplight[key] = "green"
switchLights(market_2nd)
|
mit
|
Python
|
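Why the added assertion matters: starting from the state above, one call to `switchLights` turns "ns" yellow and "ew" green, leaving no light red, so the new assert fires immediately. A minimal sketch using the function above:

```python
market_2nd = {"ns": "green", "ew": "red"}
switchLights(market_2nd)
# AssertionError: Neither light is red! {'ns': 'yellow', 'ew': 'green'}
```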
304a220e99694ec6b41a31db8150c7f4604f6ef5
|
Remove old logging import
|
ianstalk/Flexget,crawln45/Flexget,malkavi/Flexget,crawln45/Flexget,Flexget/Flexget,ianstalk/Flexget,malkavi/Flexget,Flexget/Flexget,Flexget/Flexget,malkavi/Flexget,Flexget/Flexget,crawln45/Flexget,crawln45/Flexget,malkavi/Flexget,ianstalk/Flexget
|
flexget/components/notify/notifiers/gotify.py
|
flexget/components/notify/notifiers/gotify.py
|
from http import HTTPStatus
from requests.exceptions import RequestException
from urllib.parse import urljoin
from flexget import plugin
from flexget.event import event
from flexget.plugin import PluginWarning
from flexget.utils.requests import Session as RequestSession, TimedLimiter
plugin_name = 'gotify'
requests = RequestSession(max_retries=3)
class GotifyNotifier(object):
    """
    Example::

      notify:
        entries:
          via:
            - gotify:
                url: <GOTIFY_SERVER_URL>
                token: <GOTIFY_TOKEN>
                priority: <PRIORITY>

    Configuration parameters are also supported from entries (eg. through set).
    """

    schema = {
        'type': 'object',
        'properties': {
            'url': {'format': 'url'},
            'token': {'type': 'string'},
            'priority': {'type': 'integer', 'default': 4},
        },
        'required': [
            'token',
            'url',
        ],
        'additionalProperties': False,
    }

    def notify(self, title, message, config):
        """
        Send a Gotify notification
        """
        base_url = config['url']
        api_endpoint = '/message'
        url = urljoin(base_url, api_endpoint)
        params = {'token': config['token']}
        priority = config['priority']
        notification = {'title': title, 'message': message, 'priority': priority}
        # Make the request
        try:
            response = requests.post(url, params=params, json=notification)
        except RequestException as e:
            if e.response is not None:
                if e.response.status_code in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN):
                    message = 'Invalid Gotify access token'
                else:
                    message = e.response.json()['error']['message']
            else:
                message = str(e)
            raise PluginWarning(message)


@event('plugin.register')
def register_plugin():
    plugin.register(GotifyNotifier, plugin_name, api_ver=2, interfaces=['notifiers'])
|
import logging
from http import HTTPStatus
from requests.exceptions import RequestException
from urllib.parse import urljoin
from flexget import plugin
from flexget.event import event
from flexget.plugin import PluginWarning
from flexget.utils.requests import Session as RequestSession, TimedLimiter
plugin_name = 'gotify'
log = logging.getLogger(plugin_name)
requests = RequestSession(max_retries=3)
class GotifyNotifier(object):
    """
    Example::

      notify:
        entries:
          via:
            - gotify:
                url: <GOTIFY_SERVER_URL>
                token: <GOTIFY_TOKEN>
                priority: <PRIORITY>

    Configuration parameters are also supported from entries (eg. through set).
    """

    schema = {
        'type': 'object',
        'properties': {
            'url': {'format': 'url'},
            'token': {'type': 'string'},
            'priority': {'type': 'integer', 'default': 4},
        },
        'required': [
            'token',
            'url',
        ],
        'additionalProperties': False,
    }

    def notify(self, title, message, config):
        """
        Send a Gotify notification
        """
        base_url = config['url']
        api_endpoint = '/message'
        url = urljoin(base_url, api_endpoint)
        params = {'token': config['token']}
        priority = config['priority']
        notification = {'title': title, 'message': message, 'priority': priority}
        # Make the request
        try:
            response = requests.post(url, params=params, json=notification)
        except RequestException as e:
            if e.response is not None:
                if e.response.status_code in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN):
                    message = 'Invalid Gotify access token'
                else:
                    message = e.response.json()['error']['message']
            else:
                message = str(e)
            raise PluginWarning(message)


@event('plugin.register')
def register_plugin():
    plugin.register(GotifyNotifier, plugin_name, api_ver=2, interfaces=['notifiers'])
|
mit
|
Python
|
77ac4b3cc97731c0fcb387a10fadd1509e057a6d
|
update with main function and header
|
NCSSM-CS/CSAssess,NCSSM-CS/CSAssess,NCSSM-CS/CSAssess,NCSSM-CS/CSAssess
|
controller/test.py
|
controller/test.py
|
#!/usr/bin/python2.7
"""
created_by: Micah Halter
created_date: 2/28/2015
last_modified_by: Micah Halter
last_modified_date: 3/2/2015
"""
#imports
import constants
import sys
sys.path.insert(0, "./view/")
import viewAssessment
import viewQuestion
import viewTopic
import viewSection
import viewCourse
import viewUser
sys.path.insert(0, "./edit/")
import editAssessment
import editQuestion
import editTopic
import editSection
import editCourse
sys.path.insert(0, "./objects/")
from assessment import Assessment
from question import Question
from topic import Topic
from section import Section
from course import Course
from user import User
# functions
def main():
    print(viewAssessment.byID(1).sortByTopic())


# running code
if __name__ == "__main__":
    main()
|
#!/usr/bin/python2.7
import constants
import sys
sys.path.insert(0, "./view/")
import viewAssessment
import viewQuestion
import viewTopic
import viewSection
import viewCourse
import viewUser
sys.path.insert(0, "./edit/")
import editAssessment
import editQuestion
import editTopic
import editSection
import editCourse
sys.path.insert(0, "./objects/")
from assessment import Assessment
from question import Question
from topic import Topic
from section import Section
from course import Course
from user import User
print(viewAssessment.byID(1).sortByTopic())
|
mit
|
Python
|
b8cec88e733237b94fafb2aa978dcb6b758c954f
|
Add string representation of Log
|
BlankOn/irclogview,fajran/irclogview,BlankOn/irclogview,fajran/irclogview
|
irclogview/models.py
|
irclogview/models.py
|
from django.db import models
from django.core.urlresolvers import reverse
from picklefield.fields import PickledObjectField
from . import utils
class Channel(models.Model):
    name = models.SlugField(max_length=50, unique=True)
    updated = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ['name']

    def __unicode__(self):
        return u'#%s' % self.name

    def get_absolute_url(self):
        return reverse('irclogview_channel', args=[self.name])


class Log(models.Model):
    channel = models.ForeignKey(Channel, db_index=True)
    date = models.DateField(db_index=True)
    mtime = models.DateTimeField()
    updated = models.DateTimeField(auto_now=True)
    content = PickledObjectField()

    class Meta:
        ordering = ['-date']
        unique_together = ('channel', 'date')

    def __unicode__(self):
        return u'#%s - %s' % (self.channel.name,
                              self.date.strftime('%Y-%m-%d'))

    def get_absolute_url(self):
        date = self.date
        return reverse('irclogview_show',
                       args=[self.channel.name,
                             '%04d' % date.year,
                             '%02d' % date.month,
                             '%02d' % date.day])

    def content_dict(self):
        colors = utils.RainbowColor()
        for data in self.content:
            item = dict(zip(['time', 'type', 'name', 'text'], data))
            item['name_color'] = item['type'] == 'act' \
                and 'inherit' \
                or colors.get_color(item['name'])
            yield item
|
from django.db import models
from django.core.urlresolvers import reverse
from picklefield.fields import PickledObjectField
from . import utils
class Channel(models.Model):
    name = models.SlugField(max_length=50, unique=True)
    updated = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ['name']

    def __unicode__(self):
        return u'#%s' % self.name

    def get_absolute_url(self):
        return reverse('irclogview_channel', args=[self.name])


class Log(models.Model):
    channel = models.ForeignKey(Channel, db_index=True)
    date = models.DateField(db_index=True)
    mtime = models.DateTimeField()
    updated = models.DateTimeField(auto_now=True)
    content = PickledObjectField()

    class Meta:
        ordering = ['-date']
        unique_together = ('channel', 'date')

    def get_absolute_url(self):
        date = self.date
        return reverse('irclogview_show',
                       args=[self.channel.name,
                             '%04d' % date.year,
                             '%02d' % date.month,
                             '%02d' % date.day])

    def content_dict(self):
        colors = utils.RainbowColor()
        for data in self.content:
            item = dict(zip(['time', 'type', 'name', 'text'], data))
            item['name_color'] = item['type'] == 'act' \
                and 'inherit' \
                or colors.get_color(item['name'])
            yield item
|
agpl-3.0
|
Python
|
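A small sketch (Python 2, unsaved model instances are enough for illustration) of what the added `__unicode__` produces:

```python
import datetime

log = Log(channel=Channel(name='ubuntu'), date=datetime.date(2012, 5, 1))
print(unicode(log))  # u'#ubuntu - 2012-05-01'
```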
c62a658eb469e449372207f146f60375d7497f63
|
update dataset api
|
ghisvail/ismrmrdpy
|
ismrmrdpy/dataset.py
|
ismrmrdpy/dataset.py
|
# Copyright (c) 2014-2015 Ghislain Antony Vaillant.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from __future__ import absolute_import, division, print_function
class Dataset(object):
"""
"""
def __init__(self, *args, **kwargs):
pass
def open(self):
pass
def close(self):
pass
def read_header(self):
pass
def write_header(self, xmlstring):
pass
def append_acquisition(self, acq):
pass
def read_acquisition(self, index):
pass
def number_of_acquisitions(self):
pass
def append_image(self, img):
pass
def read_image(self, index):
pass
def number_of_images(self):
pass
def append_array(self, arr):
pass
def read_array(self, index):
pass
def number_of_arrays(self):
pass
|
# Copyright (c) 2014-2015 Ghislain Antony Vaillant.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
from __future__ import absolute_import, division, print_function
class Dataset(object):
"""
"""
def __init__(self, *args, **kwargs):
pass
def open(self):
pass
def close(self):
pass
def read_header(self):
pass
def write_header(self, xmlstring):
pass
def append_acquisition(self, acq):
pass
def read_acquisition(self, index):
pass
def append_image(self, img):
pass
def read_image(self, index):
pass
def append_array(self, arr):
pass
def read_array(self, index):
pass
|
bsd-2-clause
|
Python
|
3effb540220f4ce1918d0210e882d926e268473f
|
Bump P4Runtime to v1.2.0
|
gkatsikas/onos,opennetworkinglab/onos,gkatsikas/onos,opennetworkinglab/onos,opennetworkinglab/onos,gkatsikas/onos,opennetworkinglab/onos,opennetworkinglab/onos,gkatsikas/onos,opennetworkinglab/onos,gkatsikas/onos,gkatsikas/onos
|
tools/build/bazel/p4lang_workspace.bzl
|
tools/build/bazel/p4lang_workspace.bzl
|
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
P4RUNTIME_VER = "1.2.0"
P4RUNTIME_SHA = "0fce7e06c63e60a8cddfe56f3db3d341953560c054d4c09ffda0e84476124f5a"
def generate_p4lang():
    http_archive(
        name = "com_github_p4lang_p4runtime",
        urls = ["https://github.com/p4lang/p4runtime/archive/v%s.zip" % P4RUNTIME_VER],
        sha256 = P4RUNTIME_SHA,
        strip_prefix = "p4runtime-%s/proto" % P4RUNTIME_VER,
        build_file = "//tools/build/bazel:p4runtime_BUILD",
    )
|
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
P4RUNTIME_VER = "1.0.0"
P4RUNTIME_SHA = "667464bd369b40b58dc9552be2c84e190a160b6e77137b735bd86e5b81c6adc0"
def generate_p4lang():
    http_archive(
        name = "com_github_p4lang_p4runtime",
        urls = ["https://github.com/p4lang/p4runtime/archive/v%s.zip" % P4RUNTIME_VER],
        sha256 = P4RUNTIME_SHA,
        strip_prefix = "p4runtime-%s/proto" % P4RUNTIME_VER,
        build_file = "//tools/build/bazel:p4runtime_BUILD",
    )
|
apache-2.0
|
Python
|
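When bumping `P4RUNTIME_VER`, `P4RUNTIME_SHA` must be recomputed from the matching release archive. A hedged sketch of one way to do that, reusing the URL pattern from the rule above:

```python
import hashlib
import urllib.request

url = "https://github.com/p4lang/p4runtime/archive/v1.2.0.zip"
archive = urllib.request.urlopen(url).read()
print(hashlib.sha256(archive).hexdigest())  # expect the P4RUNTIME_SHA value above
```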
64e902fae3117c246272cbde943d013da1345b7b
|
Fix RenameField alteration
|
thorrak/fermentrack,thorrak/fermentrack,thorrak/fermentrack,thorrak/fermentrack,thorrak/fermentrack
|
gravity/migrations/0003_tiltbridge_mdns_id.py
|
gravity/migrations/0003_tiltbridge_mdns_id.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2019-03-18 23:46
from __future__ import unicode_literals
from django.db import migrations, models
import django.core.validators
class Migration(migrations.Migration):

    dependencies = [
        ('gravity', '0002_tilt_refactor'),
    ]

    operations = [
        # Converting from AlterField to RemoveField/AddField because of issues with Django 2.0+ migration:
        # https://docs.djangoproject.com/en/3.0/releases/2.0/#foreign-key-constraints-are-now-enabled-on-sqlite
        migrations.RemoveField(
            model_name='tiltbridge',
            name='api_key',
        ),
        migrations.AddField(
            model_name='tiltbridge',
            name='mdns_id',
            field=models.CharField(help_text="mDNS ID used by the TiltBridge to identify itself both on your network and to Fermentrack. NOTE - Prefix only - do not include '.local'", max_length=64, primary_key=True, serialize=False, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9]+$')]),
        ),
        migrations.AlterField(
            model_name='tiltbridge',
            name='mdns_id',
            field=models.CharField(default='tiltbridge', help_text="mDNS ID used by the TiltBridge to identify itself both on your network and to Fermentrack. NOTE - Prefix only - do not include '.local'", max_length=64, primary_key=True, serialize=False),
            preserve_default=False,
        ),
    ]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2019-03-18 23:46
from __future__ import unicode_literals
from django.db import migrations, models
import django.core.validators
class Migration(migrations.Migration):

    dependencies = [
        ('gravity', '0002_tilt_refactor'),
    ]

    operations = [
        # Converting from AlterField to RemoveField/AddField because of issues with Django 2.0+ migration:
        # https://docs.djangoproject.com/en/3.0/releases/2.0/#foreign-key-constraints-are-now-enabled-on-sqlite
        migrations.RemoveField(
            model_name='tiltbridge',
            name='api_key',
        ),
        migrations.AddField(
            model_name='tiltbridge',
            name='mdns_id',
            field=models.CharField(help_text="mDNS ID used by the TiltBridge to identify itself both on your network and to Fermentrack. NOTE - Prefix only - do not include '.local'", max_length=64, primary_key=True, serialize=False, validators=[django.core.validators.RegexValidator(regex='^[a-zA-Z0-9]+$')]),
        ),
        migrations.RenameField(
            model_name='tiltbridge',
            old_name='api_key',
            new_name='mdns_id',
        ),
        migrations.AlterField(
            model_name='tiltbridge',
            name='mdns_id',
            field=models.CharField(default='tiltbridge', help_text="mDNS ID used by the TiltBridge to identify itself both on your network and to Fermentrack. NOTE - Prefix only - do not include '.local'", max_length=64, primary_key=True, serialize=False),
            preserve_default=False,
        ),
    ]
|
mit
|
Python
|
fc4aada050fd995ecf5375871fa1e6ed1884293f
|
fix hail-apiserver.py module path (#4850)
|
cseed/hail,danking/hail,hail-is/hail,cseed/hail,hail-is/hail,cseed/hail,hail-is/hail,cseed/hail,danking/hail,danking/hail,danking/hail,cseed/hail,danking/hail,cseed/hail,hail-is/hail,danking/hail,cseed/hail,danking/hail,hail-is/hail,hail-is/hail,danking/hail,hail-is/hail,cseed/hail,hail-is/hail
|
hail/python/hail-apiserver/hail-apiserver.py
|
hail/python/hail-apiserver/hail-apiserver.py
|
import hail as hl
from hail.utils.java import Env, info
import logging
import flask
hl.init()
app = flask.Flask('hail-apiserver')
@app.route('/execute', methods=['POST'])
def execute():
    code = flask.request.json
    info(f'execute: {code}')
    jir = Env.hail().expr.ir.IRParser.parse_value_ir(code, {}, {})
    typ = hl.HailType._from_java(jir.typ())
    value = Env.hail().expr.ir.Interpret.interpretPyIR(code, {}, {})
    result = {
        'type': str(typ),
        'value': value
    }
    info(f'result: {result}')
    return flask.jsonify(result)
app.run(threaded=False, host='0.0.0.0')
|
import hail as hl
from hail.utils.java import Env, info
import logging
import flask
hl.init()
app = flask.Flask('hail-apiserver')
@app.route('/execute', methods=['POST'])
def execute():
    code = flask.request.json
    info(f'execute: {code}')
    jir = Env.hail().expr.Parser.parse_value_ir(code, {}, {})
    typ = hl.HailType._from_java(jir.typ())
    value = Env.hail().expr.ir.Interpret.interpretPyIR(code, {}, {})
    result = {
        'type': str(typ),
        'value': value
    }
    info(f'result: {result}')
    return flask.jsonify(result)
app.run(threaded=False, host='0.0.0.0')
|
mit
|
Python
|
1d10582d622ce6867a85d9e4e8c279ab7e4ab5ab
|
Revert "Don't complain about \r when core.autocrlf is on in Git"
|
ejjeong/rust,omasanori/rust,quornian/rust,mvdnes/rust,barosl/rust,aturon/rust,carols10cents/rust,mdinger/rust,AerialX/rust,krzysz00/rust,krzysz00/rust,sarojaba/rust-doc-korean,SiegeLord/rust,l0kod/rust,philyoon/rust,KokaKiwi/rust,nwin/rust,ktossell/rust,victorvde/rust,dwillmer/rust,0x73/rust,waynenilsen/rand,fabricedesre/rust,vhbit/rust,jroesch/rust,andars/rust,ejjeong/rust,avdi/rust,j16r/rust,michaelballantyne/rust-gpu,michaelballantyne/rust-gpu,l0kod/rust,ebfull/rust,XMPPwocky/rust,mitsuhiko/rust,philyoon/rust,kwantam/rust,Ryman/rust,bombless/rust,gifnksm/rust,fabricedesre/rust,defuz/rust,XMPPwocky/rust,bombless/rust,philyoon/rust,zaeleus/rust,pshc/rust,krzysz00/rust,pelmers/rust,andars/rust,michaelballantyne/rust-gpu,mitsuhiko/rust,bhickey/rand,XMPPwocky/rust,robertg/rust,LeoTestard/rust,rohitjoshi/rust,ebfull/rust,P1start/rust,vhbit/rust,dwillmer/rust,l0kod/rust,aepsil0n/rust,barosl/rust,miniupnp/rust,zachwick/rust,zachwick/rust,aidancully/rust,jashank/rust,mahkoh/rust,aidancully/rust,krzysz00/rust,erickt/rust,pelmers/rust,kmcallister/rust,bombless/rust,victorvde/rust,victorvde/rust,reem/rust,emk/rust,ruud-v-a/rust,GBGamer/rust,kwantam/rust,stepancheg/rust-ide-rust,cllns/rust,vhbit/rust,LeoTestard/rust,l0kod/rust,zachwick/rust,avdi/rust,AerialX/rust-rt-minimal,dwillmer/rust,richo/rust,pshc/rust,j16r/rust,kwantam/rust,emk/rust,kimroen/rust,avdi/rust,aneeshusa/rust,carols10cents/rust,victorvde/rust,aturon/rust,avdi/rust,aidancully/rust,ebfull/rust,hauleth/rust,rohitjoshi/rust,philyoon/rust,untitaker/rust,GBGamer/rust,stepancheg/rust-ide-rust,barosl/rust,jroesch/rust,jbclements/rust,AerialX/rust-rt-minimal,erickt/rust,servo/rust,GBGamer/rust,zaeleus/rust,pythonesque/rust,TheNeikos/rust,richo/rust,aidancully/rust,emk/rust,jashank/rust,ktossell/rust,AerialX/rust-rt-minimal,bombless/rust,l0kod/rust,pythonesque/rust,LeoTestard/rust,kmcallister/rust,zubron/rust,seanrivera/rust,jbclements/rust,sarojaba/rust-doc-korean,TheNeikos/rust,KokaKiwi/rust,mahkoh/rust,cllns/rust,erickt/rust,pelmers/rust,aneeshusa/rust,sarojaba/rust-doc-korean,zubron/rust,erickt/rust,zubron/rust,KokaKiwi/rust,bluss/rand,hauleth/rust,defuz/rust,emk/rust,omasanori/rust,victorvde/rust,mihneadb/rust,barosl/rust,sarojaba/rust-doc-korean,ruud-v-a/rust,sae-bom/rust,zaeleus/rust,zachwick/rust,jashank/rust,krzysz00/rust,cllns/rust,aturon/rust,ruud-v-a/rust,ruud-v-a/rust,shepmaster/rand,TheNeikos/rust,gifnksm/rust,kimroen/rust,Ryman/rust,gifnksm/rust,graydon/rust,erickt/rust,servo/rust,aneeshusa/rust,GBGamer/rust,kimroen/rust,hauleth/rust,pshc/rust,jashank/rust,jbclements/rust,mitsuhiko/rust,jroesch/rust,kimroen/rust,miniupnp/rust,bombless/rust,kimroen/rust,pczarn/rust,LeoTestard/rust,untitaker/rust,sae-bom/rust,vhbit/rust,mahkoh/rust,jashank/rust,fabricedesre/rust,omasanori/rust,jbclements/rust,TheNeikos/rust,sarojaba/rust-doc-korean,XMPPwocky/rust,sae-bom/rust,emk/rust,jbclements/rust,quornian/rust,j16r/rust,victorvde/rust,richo/rust,andars/rust,KokaKiwi/rust,aneeshusa/rust,dinfuehr/rust,GBGamer/rust,jroesch/rust,pythonesque/rust,LeoTestard/rust,defuz/rust,mitsuhiko/rust,miniupnp/rust,mdinger/rust,SiegeLord/rust,mihneadb/rust,dwillmer/rust,robertg/rust,dwillmer/rust,AerialX/rust,erickt/rust,0x73/rust,defuz/rust,nwin/rust,Ryman/rust,jbclements/rust,zaeleus/rust,avdi/rust,nwin/rust,kmcallister/rust,erickt/rust,rohitjoshi/rust,aturon/rust,ruud-v-a/rust,dinfuehr/rust,nham/rust,dinfuehr/rust,mitsuhiko/rust,aepsil0n/rust,j16r/rust,quornian/rust,carols10cents/rust,j16r/rust,dinfuehr/rust,l0kod/rust,richo/rust,krzysz00/rust,gifnksm/rust,untitaker/rust,kimroen/rust,pczarn/rust,kmcallister/rust,fabricedesre/rust,ruud-v-a/rust,mvdnes/rust,pczarn/rust,Ryman/rust,mdinger/rust,l0kod/rust,reem/rust,michaelballantyne/rust-gpu,Ryman/rust,jbclements/rust,robertg/rust,jbclements/rust,zubron/rust,P1start/rust,XMPPwocky/rust,jroesch/rust,miniupnp/rust,rprichard/rust,rohitjoshi/rust,0x73/rust,sarojaba/rust-doc-korean,stepancheg/rust-ide-rust,michaelballantyne/rust-gpu,jashank/rust,ejjeong/rust,aepsil0n/rust,aidancully/rust,pythonesque/rust,philyoon/rust,kwantam/rust,dwillmer/rust,dwillmer/rust,achanda/rand,robertg/rust,j16r/rust,mahkoh/rust,pshc/rust,pythonesque/rust,ebfull/rust,omasanori/rust,quornian/rust,graydon/rust,barosl/rust,zachwick/rust,richo/rust,mvdnes/rust,untitaker/rust,P1start/rust,aneeshusa/rust,stepancheg/rust-ide-rust,pczarn/rust,reem/rust,zubron/rust,gifnksm/rust,servo/rust,servo/rust,nham/rust,GBGamer/rust,robertg/rust,SiegeLord/rust,0x73/rust,jashank/rust,rprichard/rust,rprichard/rust,michaelballantyne/rust-gpu,aepsil0n/rust,zubron/rust,reem/rust,pczarn/rust,fabricedesre/rust,andars/rust,zubron/rust,pshc/rust,kimroen/rust,AerialX/rust,mdinger/rust,kwantam/rust,jashank/rust,mahkoh/rust,reem/rust,seanrivera/rust,KokaKiwi/rust,SiegeLord/rust,richo/rust,P1start/rust,0x73/rust,retep998/rand,rprichard/rust,bombless/rust-docs-chinese,untitaker/rust,Ryman/rust,robertg/rust,pshc/rust,carols10cents/rust,quornian/rust,l0kod/rust,omasanori/rust,jroesch/rust,0x73/rust,mitsuhiko/rust,mvdnes/rust,KokaKiwi/rust,pythonesque/rust,aturon/rust,GrahamDennis/rand,AerialX/rust-rt-minimal,arthurprs/rand,ejjeong/rust,mdinger/rust,andars/rust,michaelballantyne/rust-gpu,graydon/rust,sae-bom/rust,emk/rust,fabricedesre/rust,bombless/rust,aneeshusa/rust,vhbit/rust,hauleth/rust,P1start/rust,philyoon/rust,XMPPwocky/rust,zubron/rust,seanrivera/rust,dwillmer/rust,servo/rust,kmcallister/rust,miniupnp/rust,AerialX/rust-rt-minimal,dinfuehr/rust,GBGamer/rust,nham/rust,nham/rust,rprichard/rust,carols10cents/rust,dinfuehr/rust,aepsil0n/rust,nwin/rust,ejjeong/rust,cllns/rust,reem/rust,pshc/rust,kmcallister/rust,pythonesque/rust,AerialX/rust,mvdnes/rust,carols10cents/rust,vhbit/rust,graydon/rust,hauleth/rust,mitsuhiko/rust,quornian/rust,mihneadb/rust,nwin/rust,graydon/rust,cllns/rust,defuz/rust,zachwick/rust,servo/rust,sae-bom/rust,LeoTestard/rust,mihneadb/rust,pelmers/rust,TheNeikos/rust,graydon/rust,avdi/rust,sae-bom/rust,GBGamer/rust,gifnksm/rust,barosl/rust,aturon/rust,ktossell/rust,rohitjoshi/rust,ejjeong/rust,untitaker/rust,mahkoh/rust,mvdnes/rust,jroesch/rust,kwantam/rust,mihneadb/rust,0x73/rust,defuz/rust,stepancheg/rust-ide-rust,P1start/rust,ebfull/rust,j16r/rust,sarojaba/rust-doc-korean,aturon/rust,AerialX/rust,pshc/rust,AerialX/rust-rt-minimal,rprichard/rust,kmcallister/rust,SiegeLord/rust,vhbit/rust,ktossell/rust,stepancheg/rust-ide-rust,mihneadb/rust,nwin/rust,zaeleus/rust,cllns/rust,zaeleus/rust,nwin/rust,miniupnp/rust,stepancheg/rust-ide-rust,aidancully/rust,nham/rust,LeoTestard/rust,seanrivera/rust,hauleth/rust,servo/rust,vhbit/rust,AerialX/rust,jroesch/rust,jbclements/rust,SiegeLord/rust,mdinger/rust,ktossell/rust,miniupnp/rust,aepsil0n/rust,emk/rust,ebfull/rand,pelmers/rust,pczarn/rust,pelmers/rust,nham/rust,nham/rust,miniupnp/rust,ktossell/rust,Ryman/rust,nwin/rust,fabricedesre/rust,seanrivera/rust,quornian/rust
|
src/etc/tidy.py
|
src/etc/tidy.py
|
#!/usr/bin/python
import sys, fileinput
err = 0
cols = 78

def report_err(s):
    global err
    print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s))
    err = 1

for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")):
    if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1:
        report_err("tab character")
    if line.find('\r') != -1:
        report_err("CR character")
    if len(line) - 1 > cols:
        report_err("line longer than %d chars" % cols)

sys.exit(err)
|
#!/usr/bin/python
import sys, fileinput, subprocess
err = 0
cols = 78
config_proc = subprocess.Popen(["git", "config", "core.autocrlf"],
                               stdout=subprocess.PIPE)
result = config_proc.communicate()[0]
autocrlf = result.strip() == b"true" if result is not None else False

def report_err(s):
    global err
    print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s))
    err = 1

for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")):
    if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1:
        report_err("tab character")
    if not autocrlf and line.find('\r') != -1:
        report_err("CR character")
    line_len = len(line) - 2 if autocrlf else len(line) - 1
    if line_len > cols:
        report_err("line longer than %d chars" % cols)

sys.exit(err)
|
apache-2.0
|
Python
|
9dcf5e0b30141641a0e182257b34720bcf07d730
|
Fix typo in S3_Bucket_With_Versioning_And_Lifecycle_Rules.py (#693)
|
ikben/troposphere,7digital/troposphere,cloudtools/troposphere,cloudtools/troposphere,johnctitus/troposphere,pas256/troposphere,7digital/troposphere,horacio3/troposphere,horacio3/troposphere,pas256/troposphere,johnctitus/troposphere,ikben/troposphere
|
examples/S3_Bucket_With_Versioning_And_Lifecycle_Rules.py
|
examples/S3_Bucket_With_Versioning_And_Lifecycle_Rules.py
|
# Converted from S3_Bucket.template located at:
# http://aws.amazon.com/cloudformation/aws-cloudformation-templates/
from troposphere import Output, Ref, Template
from troposphere.s3 import Bucket, PublicRead, VersioningConfiguration, \
LifecycleConfiguration, LifecycleRule, NoncurrentVersionTransition, \
LifecycleRuleTransition
t = Template()
t.add_description(
    "AWS CloudFormation Sample Template S3_Bucket: Sample template showing :"
    "How to create a publicly accessible S3 bucket. "
    "How to enable bucket object versions. "
    "How to archive and delete current objects. "
    "How to archive and delete non current (versioned) objects. "
    "**WARNING** This template creates an Amazon S3 Bucket. "
    "You will be billed for the AWS resources used if you create "
    "a stack from this template.")

s3bucket = t.add_resource(Bucket(
    "S3Bucket",
    # Make public Read
    AccessControl=PublicRead,
    # Turn on Versioning to the whole S3 Bucket
    VersioningConfiguration=VersioningConfiguration(
        Status="Enabled",
    ),
    # Attach a LifeCycle Configuration
    LifecycleConfiguration=LifecycleConfiguration(Rules=[
        # Add a rule to
        LifecycleRule(
            # Rule attributes
            Id="S3BucketRule001",
            Prefix="/only-this-sub-dir",
            Status="Enabled",
            # Applies to current objects
            ExpirationInDays=3650,
            Transitions=[
                LifecycleRuleTransition(
                    StorageClass="STANDARD_IA",
                    TransitionInDays=60,
                ),
            ],
            # Applies to Non Current objects
            NoncurrentVersionExpirationInDays=365,
            NoncurrentVersionTransitions=[
                NoncurrentVersionTransition(
                    StorageClass="STANDARD_IA",
                    TransitionInDays=30,
                ),
                NoncurrentVersionTransition(
                    StorageClass="GLACIER",
                    TransitionInDays=120,
                ),
            ],
        ),
    ]),
))

t.add_output(Output(
    "BucketName",
    Value=Ref(s3bucket),
    Description="Name of S3 bucket to hold website content"
))

print(t.to_json())
|
# Converted from S3_Bucket.template located at:
# http://aws.amazon.com/cloudformation/aws-cloudformation-templates/
from troposphere import Output, Ref, Template
from troposphere.s3 import Bucket, PublicRead, VersioningConfiguration, \
LifecycleConfiguration, LifecycleRule, NoncurrentVersionTransition, \
LifecycleRuleTransition
t = Template()
t.add_description(
    "AWS CloudFormation Sample Template S3_Bucket: Sample template showing :"
    "How to create a publicly accessible S3 bucket. "
    "How to enable bucket object versions. "
    "How to archive and delete current objects. "
    "How to archive and delete non current (versioned) objects. "
    "**WARNING** This template creates an Amazon S3 Bucket. "
    "You will be billed for the AWS resources used if you create "
    "a stack from this template.")

s3bucket = t.add_resource(Bucket(
    "S3Bucket",
    # Make public Read
    AccessControl=PublicRead,
    # Turn on Versioning to the whole S3 Bucket
    VersioningConfiguration=VersioningConfiguration(
        Status="Enabled",
    ),
    # Attach a LifeCycle Confiragtion
    LifecycleConfiguration=LifecycleConfiguration(Rules=[
        # Add a rule to
        LifecycleRule(
            # Rule attributes
            Id="S3BucketRule001",
            Prefix="/only-this-sub-dir",
            Status="Enabled",
            # Applies to current objects
            ExpirationInDays=3650,
            Transitions=[
                LifecycleRuleTransition(
                    StorageClass="STANDARD_IA",
                    TransitionInDays=60,
                ),
            ],
            # Applies to Non Current objects
            NoncurrentVersionExpirationInDays=365,
            NoncurrentVersionTransitions=[
                NoncurrentVersionTransition(
                    StorageClass="STANDARD_IA",
                    TransitionInDays=30,
                ),
                NoncurrentVersionTransition(
                    StorageClass="GLACIER",
                    TransitionInDays=120,
                ),
            ],
        ),
    ]),
))

t.add_output(Output(
    "BucketName",
    Value=Ref(s3bucket),
    Description="Name of S3 bucket to hold website content"
))

print(t.to_json())
|
bsd-2-clause
|
Python
|
a378649f85f0bc55060ad0238e426f587bc2ff1a
|
Send location only when printing exception (Avoid leaking ID/UUID)
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
core/exceptions.py
|
core/exceptions.py
|
"""
exceptions - Core exceptions
"""
class InvalidMembership(Exception):
    """
    The membership provided is not valid
    """
    pass


class SourceNotFound(Exception):
    """
    InstanceSource doesn't have an associated source.
    """
    pass


class RequestLimitExceeded(Exception):
    """
    A limit was exceeded for the specific request
    """
    pass


class ProviderLimitExceeded(Exception):
    """
    A limit was exceeded for the specific provider
    """
    pass


class ProviderNotActive(Exception):
    """
    The provider that was requested is not active
    """
    def __init__(self, provider, *args, **kwargs):
        self.message = "Cannot create driver on an inactive provider: %s" \
            % (provider.location,)
        pass
|
"""
exceptions - Core exceptions
"""
class InvalidMembership(Exception):
    """
    The membership provided is not valid
    """
    pass


class SourceNotFound(Exception):
    """
    InstanceSource doesn't have an associated source.
    """
    pass


class RequestLimitExceeded(Exception):
    """
    A limit was exceeded for the specific request
    """
    pass


class ProviderLimitExceeded(Exception):
    """
    A limit was exceeded for the specific provider
    """
    pass


class ProviderNotActive(Exception):
    """
    The provider that was requested is not active
    """
    def __init__(self, provider, *args, **kwargs):
        self.message = "Cannot create driver on an inactive provider:%s" \
            % (provider,)
        pass
|
apache-2.0
|
Python
|
93323426c22a08965544b19c818e53c8f2b29e8c
|
clean select_channel widget
|
Koheron/lase
|
ldk/gui/select_channel_widget.py
|
ldk/gui/select_channel_widget.py
|
# -*- coding: utf-8 -*-
from pyqtgraph.Qt import QtGui, QtCore
class SelectChannelWidget(QtGui.QWidget):
    def __init__(self, plot_widget):
        super(SelectChannelWidget, self).__init__()
        self.plot_widget = plot_widget
        self.layout = QtGui.QGridLayout()
        self.adc_checkbox = []
        self.add_checkbox(self.adc_checkbox, 0, 'ADC')
        self.dac_checkbox = []
        self.add_checkbox(self.dac_checkbox, 1, 'DAC')
        # Connections
        for i in range(2):
            # Bind i at definition time; a bare `lambda:` would capture the
            # loop variable and leave both checkboxes wired to index 1.
            self.adc_checkbox[i].stateChanged.connect(lambda state, i=i: self.show_adc(i))
            self.dac_checkbox[i].stateChanged.connect(lambda state, i=i: self.show_dac(i))

    def add_checkbox(self, checkbox, y_pos, text):
        for i in range(2):
            checkbox.append(QtGui.QCheckBox(text + ' ' + str(i + 1), self))
            checkbox[i].setCheckState(QtCore.Qt.Checked)
            self.layout.addWidget(checkbox[i], y_pos, i, QtCore.Qt.AlignCenter)

    def show_adc(self, index):
        self.plot_widget.show_adc[index] = self.adc_checkbox[index].isChecked()
        self.plot_widget.dataItem[index].setVisible(self.plot_widget.show_adc[index])
        self.plot_widget.enableAutoRange()

    def show_dac(self, index):
        self.plot_widget.show_dac[index] = self.dac_checkbox[index].isChecked()
        self.plot_widget.dataItem[2 + index].setVisible(self.plot_widget.show_dac[index])
        self.plot_widget.enableAutoRange()

    def uncheck_all(self):
        for i in range(2):
            self.adc_checkbox[i].setCheckState(QtCore.Qt.Unchecked)
            self.dac_checkbox[i].setCheckState(QtCore.Qt.Unchecked)
|
# -*- coding: utf-8 -*-
from pyqtgraph.Qt import QtGui, QtCore
class SelectChannelWidget(QtGui.QWidget):
    def __init__(self, plot_widget):
        super(SelectChannelWidget, self).__init__()
        self.plot_widget = plot_widget
        self.layout = QtGui.QGridLayout()
        self.adc_checkbox = []
        for i in range(2):
            self.adc_checkbox.append(QtGui.QCheckBox('ADC ' + str(i + 1), self))
            self.adc_checkbox[i].setCheckState(QtCore.Qt.Checked)
            self.layout.addWidget(self.adc_checkbox[i], 0, i, QtCore.Qt.AlignCenter)
        self.dac_checkbox = []
        for i in range(2):
            self.dac_checkbox.append(QtGui.QCheckBox('DAC ' + str(i + 1), self))
            self.dac_checkbox[i].setCheckState(QtCore.Qt.Unchecked)
            self.layout.addWidget(self.dac_checkbox[i], 1, i, QtCore.Qt.AlignCenter)
        # Connections
        self.adc_checkbox[0].stateChanged.connect(lambda: self.show_adc(0))
        self.adc_checkbox[1].stateChanged.connect(lambda: self.show_adc(1))
        self.dac_checkbox[0].stateChanged.connect(lambda: self.show_dac(0))
        self.dac_checkbox[1].stateChanged.connect(lambda: self.show_dac(1))

    def show_adc(self, index):
        if self.adc_checkbox[index].isChecked():
            self.plot_widget.show_adc[index] = True
        else:
            self.plot_widget.show_adc[index] = False
        self.plot_widget.dataItem[index].setVisible(self.plot_widget.show_adc[index])
        self.plot_widget.enableAutoRange()

    def show_dac(self, index):
        if self.dac_checkbox[index].isChecked():
            self.plot_widget.show_dac[index] = True
        else:
            self.plot_widget.show_dac[index] = False
        self.plot_widget.dataItem[2 + index].setVisible(self.plot_widget.show_dac[index])
        self.plot_widget.enableAutoRange()

    def uncheck_all(self):
        for i in range(2):
            self.adc_checkbox[i].setCheckState(QtCore.Qt.Unchecked)
            self.dac_checkbox[i].setCheckState(QtCore.Qt.Unchecked)
|
mit
|
Python
|
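The loop-based rewrite above originally hit a classic Python pitfall: a bare `lambda: self.show_adc(i)` captures the variable `i`, not its value, so both checkboxes end up wired to index 1; binding `i` as a default argument (as in the corrected connection loop) avoids it. A standalone sketch of the effect:

```python
callbacks = [lambda: i for i in range(2)]
print([f() for f in callbacks])        # [1, 1] -- late binding bites

callbacks = [lambda i=i: i for i in range(2)]
print([f() for f in callbacks])        # [0, 1] -- value bound at definition time
```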
0f4e977f18dc1e3b9bbe2f25c3c326ac769fecbd
|
order size to have thumbnail in first
|
novapost/insight
|
insight/api/async.py
|
insight/api/async.py
|
# -*- coding: utf-8 -*-
"""Async API view"""
from flask import abort, request
from redis import StrictRedis
import json
from insight.api.config import INSIGHT_ENGINES
try:
    import settings
except ImportError:
    settings = None

REDIS_QUEUE_KEY = getattr(settings, 'REDIS_QUEUE_KEY', 'insight')
REDIS_HOST = getattr(settings, 'REDIS_HOST', 'localhost')
REDIS_PORT = getattr(settings, 'REDIS_PORT', 6379)
REDIS_DB = getattr(settings, 'REDIS_DB', 0)
redis = StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)

def view(engine):
    """Get job parameters and add it to a redis queue"""
    params = {'url': request.args.get('url', None),
              'engine': engine,
              'callback': request.args.get('callback', None)}
    # Get URL
    if params['url']:
        if params['url'].startswith('/'):
            params['url'] = '%s%s' % (request.host_url, params['url'][1:])
    else:
        abort(404)
    # Get Engine
    if engine not in INSIGHT_ENGINES:
        abort(400, '%s engine is not installed on this server' % engine)
    # Process sizes
    widths = [int(x) for x in request.args.getlist('width')]
    heights = [int(y) for y in request.args.getlist('height')]
    nb_width = len(widths)
    nb_height = len(heights)
    if nb_width == 0 and nb_height == 0:
        abort(400, u'You must set either width or height')
    if nb_width == 0:
        widths = heights
        nb_width = nb_height
    if nb_height == 0:
        heights = widths
        nb_height = nb_width
    if nb_width == nb_height:
        sizes = zip(widths, heights)
    elif nb_width == 1:
        if nb_height > 1:
            sizes = zip(widths * nb_height, heights)
        else:
            sizes = zip(widths, heights)
    elif nb_height == 1:
        if nb_width > 1:
            sizes = zip(widths, heights * nb_width)
        else:
            sizes = zip(widths, heights)
    else:
        abort(400, u'Number of widths and heights should be the same')
    # Max number of pages to compile
    try:
        params['max_previews'] = int(request.args.get('pages', 20))
    except:
        params['max_previews'] = 20
    params['sizes'] = sorted(sizes)
    message = json.dumps(params)
    redis.rpush(REDIS_QUEUE_KEY, message)
    return "Job added to queue"
|
# -*- coding: utf-8 -*-
"""Async API view"""
from flask import abort, request
from redis import StrictRedis
import json
from insight.api.config import INSIGHT_ENGINES
try:
    import settings
except ImportError:
    settings = None

REDIS_QUEUE_KEY = getattr(settings, 'REDIS_QUEUE_KEY', 'insight')
REDIS_HOST = getattr(settings, 'REDIS_HOST', 'localhost')
REDIS_PORT = getattr(settings, 'REDIS_PORT', 6379)
REDIS_DB = getattr(settings, 'REDIS_DB', 0)
redis = StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)

def view(engine):
    """Get job parameters and add it to a redis queue"""
    params = {'url': request.args.get('url', None),
              'engine': engine,
              'callback': request.args.get('callback', None)}
    # Get URL
    if params['url']:
        if params['url'].startswith('/'):
            params['url'] = '%s%s' % (request.host_url, params['url'][1:])
    else:
        abort(404)
    # Get Engine
    if engine not in INSIGHT_ENGINES:
        abort(400, '%s engine is not installed on this server' % engine)
    # Process sizes
    widths = [int(x) for x in request.args.getlist('width')]
    heights = [int(y) for y in request.args.getlist('height')]
    nb_width = len(widths)
    nb_height = len(heights)
    if nb_width == 0 and nb_height == 0:
        abort(400, u'You must set either width or height')
    if nb_width == 0:
        widths = heights
        nb_width = nb_height
    if nb_height == 0:
        heights = widths
        nb_height = nb_width
    if nb_width == nb_height:
        sizes = zip(widths, heights)
    elif nb_width == 1:
        if nb_height > 1:
            sizes = zip(widths * nb_height, heights)
        else:
            sizes = zip(widths, heights)
    elif nb_height == 1:
        if nb_width > 1:
            sizes = zip(widths, heights * nb_width)
        else:
            sizes = zip(widths, heights)
    else:
        abort(400, u'Number of widths and heights should be the same')
    # Max number of pages to compile
    try:
        params['max_previews'] = int(request.args.get('pages', 20))
    except:
        params['max_previews'] = 20
    params['sizes'] = sizes
    message = json.dumps(params)
    redis.rpush(REDIS_QUEUE_KEY, message)
    return "Job added to queue"
|
mit
|
Python
|
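Why `sorted(sizes)` puts the thumbnail first: the sizes are `(width, height)` tuples, and tuples sort lexicographically, so the smallest width leads the list. A minimal sketch:

```python
sizes = [(800, 600), (100, 100), (320, 240)]
print(sorted(sizes))  # [(100, 100), (320, 240), (800, 600)] -- thumbnail first
```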
0116f38160c03939306470127f0489c98aeee954
|
Update nanomsg build file
|
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
|
shipyard/shipyard/nanomsg/build.py
|
shipyard/shipyard/nanomsg/build.py
|
"""Build nanomsg from source."""
from foreman import define_parameter, define_rule, decorate_rule
from shipyard import (
    ensure_directory,
    git_clone,
    run_commands,
    install_packages,
    copy_libraries,
)

(define_parameter('deps')
 .with_doc("""Build-time Debian packages.""")
 .with_type(list)
 .with_parse(lambda pkgs: pkgs.split(','))
 .with_default([
     'build-essential',
     'cmake',
 ])
)

(define_parameter('repo')
 .with_doc("""Location of source repo.""")
 .with_type(str)
 .with_default('https://github.com/nanomsg/nanomsg.git')
)

(define_parameter('version')
 .with_doc("""Version to build.""")
 .with_type(str)
 .with_default('1.0.0')
)

@decorate_rule('//base:build')
def build(parameters):
    """Build nanomsg from source."""
    install_packages(parameters['deps'])
    build_src = parameters['//base:build_src'] / 'nanomsg'
    git_clone(parameters['repo'], build_src, parameters['version'])
    build_dir = build_src / 'build'
    if not ensure_directory(build_dir):
        # Don't run `ctest .` at the moment.
        run_commands(path=build_dir, commands_str='''
            cmake ..
            cmake --build .
            sudo cmake --build . --target install
            sudo ldconfig
        ''')

(define_rule('tapeout')
 .with_doc("""Copy build artifacts.""")
 .with_build(
     lambda ps: copy_libraries(ps, '/usr/local/lib', ['libnanomsg']))
 .depend('build')
 .reverse_depend('//base:tapeout')
)
|
"""Build nanomsg from source."""
from foreman import define_parameter, define_rule, decorate_rule
from shipyard import (
    ensure_directory,
    git_clone,
    run_commands,
    install_packages,
    copy_libraries,
)

(define_parameter('deps')
 .with_doc("""Build-time Debian packages.""")
 .with_type(list)
 .with_parse(lambda pkgs: pkgs.split(','))
 .with_default([
     'build-essential',
     'cmake',
 ])
)

(define_parameter('repo')
 .with_doc("""Location of source repo.""")
 .with_type(str)
 .with_default('https://github.com/nanomsg/nanomsg.git')
)

(define_parameter('version')
 .with_doc("""Version to build.""")
 .with_type(str)
 .with_default('1.0.0')
)

@decorate_rule('//base:build')
def build(parameters):
    """Build nanomsg from source."""
    install_packages(parameters['deps'])
    build_src = parameters['//base:build_src'] / 'nanomsg'
    git_clone(parameters['repo'], build_src, parameters['version'])
    build_dir = build_src / 'build'
    if not ensure_directory(build_dir):
        # Don't run `ctest .` at the moment.
        run_commands(path=build_dir, commands_str='''
            cmake ..
            cmake --build .
            sudo make install
        ''')

(define_rule('tapeout')
 .with_doc("""Copy build artifacts.""")
 .with_build(
     lambda ps: copy_libraries(ps, '/usr/local/lib', ['libnanomsg']))
 .depend('build')
 .reverse_depend('//base:tapeout')
)
|
mit
|
Python
|
22f3b74fec790847c3e353aad84b51252637a90f
|
Revert "oe.path.relative: switch to a different appraoch"
|
xifengchuo/openembedded,sledz/oe,Martix/Eonos,bticino/openembedded,openpli-arm/openembedded,xifengchuo/openembedded,John-NY/overo-oe,openembedded/openembedded,SIFTeam/openembedded,sutajiokousagi/openembedded,libo/openembedded,xifengchuo/openembedded,nx111/openembeded_openpli2.1_nx111,JamesAng/oe,Martix/Eonos,dellysunnymtech/sakoman-oe,openembedded/openembedded,anguslees/openembedded-android,BlackPole/bp-openembedded,trini/openembedded,giobauermeister/openembedded,rascalmicro/openembedded-rascal,sentient-energy/emsw-oe-mirror,buglabs/oe-buglabs,trini/openembedded,libo/openembedded,sampov2/audio-openembedded,libo/openembedded,dave-billin/overo-ui-moos-auv,sentient-energy/emsw-oe-mirror,John-NY/overo-oe,bticino/openembedded,libo/openembedded,sampov2/audio-openembedded,xifengchuo/openembedded,nx111/openembeded_openpli2.1_nx111,trini/openembedded,sampov2/audio-openembedded,bticino/openembedded,nx111/openembeded_openpli2.1_nx111,sutajiokousagi/openembedded,nx111/openembeded_openpli2.1_nx111,hulifox008/openembedded,JamesAng/oe,buglabs/oe-buglabs,Martix/Eonos,dave-billin/overo-ui-moos-auv,crystalfontz/openembedded,nx111/openembeded_openpli2.1_nx111,sampov2/audio-openembedded,SIFTeam/openembedded,trini/openembedded,dave-billin/overo-ui-moos-auv,buglabs/oe-buglabs,xifengchuo/openembedded,scottellis/overo-oe,crystalfontz/openembedded,dellysunnymtech/sakoman-oe,openembedded/openembedded,scottellis/overo-oe,crystalfontz/openembedded,trini/openembedded,openembedded/openembedded,yyli/overo-oe,buglabs/oe-buglabs,hulifox008/openembedded,openpli-arm/openembedded,dellysunnymtech/sakoman-oe,openpli-arm/openembedded,BlackPole/bp-openembedded,libo/openembedded,giobauermeister/openembedded,scottellis/overo-oe,thebohemian/openembedded,mrchapp/arago-oe-dev,JamesAng/oe,anguslees/openembedded-android,BlackPole/bp-openembedded,JamesAng/goe,openpli-arm/openembedded,anguslees/openembedded-android,John-NY/overo-oe,sampov2/audio-openembedded,sutajiokousagi/openembedded,crystalfontz/openembedded,dave-billin/overo-ui-moos-auv,libo/openembedded,sledz/oe,JamesAng/goe,giobauermeister/openembedded,giobauermeister/openembedded,openembedded/openembedded,JamesAng/goe,sutajiokousagi/openembedded,xifengchuo/openembedded,scottellis/overo-oe,thebohemian/openembedded,giobauermeister/openembedded,sledz/oe,xifengchuo/openembedded,JamesAng/goe,SIFTeam/openembedded,openembedded/openembedded,sutajiokousagi/openembedded,libo/openembedded,BlackPole/bp-openembedded,rascalmicro/openembedded-rascal,mrchapp/arago-oe-dev,scottellis/overo-oe,yyli/overo-oe,dave-billin/overo-ui-moos-auv,nx111/openembeded_openpli2.1_nx111,hulifox008/openembedded,Martix/Eonos,openembedded/openembedded,yyli/overo-oe,thebohemian/openembedded,sentient-energy/emsw-oe-mirror,sentient-energy/emsw-oe-mirror,Martix/Eonos,dellysunnymtech/sakoman-oe,BlackPole/bp-openembedded,bticino/openembedded,rascalmicro/openembedded-rascal,JamesAng/oe,JamesAng/oe,giobauermeister/openembedded,mrchapp/arago-oe-dev,sledz/oe,bticino/openembedded,John-NY/overo-oe,anguslees/openembedded-android,Martix/Eonos,sledz/oe,crystalfontz/openembedded,anguslees/openembedded-android,SIFTeam/openembedded,yyli/overo-oe,yyli/overo-oe,scottellis/overo-oe,crystalfontz/openembedded,giobauermeister/openembedded,John-NY/overo-oe,hulifox008/openembedded,openembedded/openembedded,giobauermeister/openembedded,openembedded/openembedded,buglabs/oe-buglabs,trini/openembedded,JamesAng/goe,sampov2/audio-openembedded,thebohemian/openembedded,anguslees/openembedded-android,dellysunnymtech/sakoman-oe,BlackPole/bp-openembedded,yyli/overo-oe,hulifox008/openembedded,JamesAng/goe,dellysunnymtech/sakoman-oe,bticino/openembedded,thebohemian/openembedded,sledz/oe,mrchapp/arago-oe-dev,yyli/overo-oe,rascalmicro/openembedded-rascal,thebohemian/openembedded,sutajiokousagi/openembedded,hulifox008/openembedded,sentient-energy/emsw-oe-mirror,sampov2/audio-openembedded,giobauermeister/openembedded,crystalfontz/openembedded,buglabs/oe-buglabs,sentient-energy/emsw-oe-mirror,openpli-arm/openembedded,dellysunnymtech/sakoman-oe,dave-billin/overo-ui-moos-auv,nx111/openembeded_openpli2.1_nx111,openpli-arm/openembedded,sutajiokousagi/openembedded,yyli/overo-oe,scottellis/overo-oe,rascalmicro/openembedded-rascal,SIFTeam/openembedded,rascalmicro/openembedded-rascal,mrchapp/arago-oe-dev,hulifox008/openembedded,JamesAng/goe,JamesAng/oe,BlackPole/bp-openembedded,dave-billin/overo-ui-moos-auv,sentient-energy/emsw-oe-mirror,bticino/openembedded,rascalmicro/openembedded-rascal,nx111/openembeded_openpli2.1_nx111,rascalmicro/openembedded-rascal,JamesAng/oe,buglabs/oe-buglabs,anguslees/openembedded-android,buglabs/oe-buglabs,xifengchuo/openembedded,xifengchuo/openembedded,openembedded/openembedded,John-NY/overo-oe,openpli-arm/openembedded,mrchapp/arago-oe-dev
|
lib/oe/path.py
|
lib/oe/path.py
|
def join(*paths):
    """Like os.path.join but doesn't treat absolute RHS specially"""
    import os.path
    return os.path.normpath("/".join(paths))

def relative(src, dest):
    """ Return a relative path from src to dest.

    >>> relative("/usr/bin", "/tmp/foo/bar")
    ../../tmp/foo/bar
    >>> relative("/usr/bin", "/usr/lib")
    ../lib
    >>> relative("/tmp", "/tmp/foo/bar")
    foo/bar
    """
    import os.path

    if hasattr(os.path, "relpath"):
        return os.path.relpath(dest, src)
    else:
        destlist = os.path.normpath(dest).split(os.path.sep)
        srclist = os.path.normpath(src).split(os.path.sep)

        # Find common section of the path
        common = os.path.commonprefix([destlist, srclist])
        commonlen = len(common)

        # Climb back to the point where they differentiate
        relpath = [os.path.pardir] * (len(srclist) - commonlen)
        if commonlen < len(destlist):
            # Add remaining portion
            relpath += destlist[commonlen:]

        return os.path.sep.join(relpath)

def format_display(path, metadata):
    """ Prepare a path for display to the user. """
    rel = relative(metadata.getVar("TOPDIR", 1), path)
    if len(rel) > len(path):
        return path
    else:
        return rel
|
def join(*paths):
    """Like os.path.join but doesn't treat absolute RHS specially"""
    from os import sep
    from os.path import normpath
    return normpath(sep.join(paths))

def relative(src, dest=None):
    """ Return a relative path from src to dest(default=cwd).

    >>> relative("/usr/bin", "/tmp/foo/bar")
    ../../tmp/foo/bar
    >>> relative("/usr/bin", "/usr/lib")
    ../lib
    >>> relative("/tmp", "/tmp/foo/bar")
    foo/bar
    """
    if dest is None:
        dest = getcwd()
    if hasattr(os.path, "relpath"):
        return os.path.relpath(dest, src)
    else:
        from os import getcwd, sep
        from os.path import abspath, normpath
        srclist = abspath(src).split(sep)
        destlist = abspath(dest).split(sep)
        loc = [spath == dpath for spath, dpath in zip(srclist, destlist)].index(False)
        rellist = ([".."] * (len(srclist) - loc)) + destlist[loc:]
        return sep.join(rellist)

def format_display(path, metadata):
    """ Prepare a path for display to the user. """
    rel = relative(metadata.getVar("TOPDIR", 1), path)
    if len(rel) > len(path):
        return path
    else:
        return rel
|
mit
|
Python
|
0f004830bd220ad8da1d4b151897630431d2f195
|
tweak scoring functions, always
|
axtl/cryptools
|
cryptools/crack.py
|
cryptools/crack.py
|
# -*- coding: utf-8 -*-
import math
import string
from stringutils import convert, freq
def brute_xor(cyphertext, st_freqs):
    """Bruteforce a given single-character XOR-encrypted cyphertext.

    Statistical information is used to choose which character is the most
    likely key.

    :param cyphertext: the cyphertext to crack
    :param st_freqs: a Counter of standard frequencies in the target language
    :return: ``(key, message, score)``
    """
    # standard frequency counts
    st_keys = st_freqs.keys()
    st_len = len(st_keys)
    # store a map of each candidate and a simple frequency score
    topchoice = None
    lowdist = float('inf')
    key = None
    # bruteforce for each character
    for test in (string.letters + string.digits):
        dec = convert.xor(test, cyphertext)
        cand_freqs = freq.get_freqs(freq.char_count(dec.lower()))
        cand_keys = cand_freqs.keys()
        score = 0.0
        for c in cand_freqs:
            # scoring
            try:
                st_in = st_keys.index(c)
            except ValueError:
                st_in = st_len
            # find better scoring functions
            score += abs(cand_keys.index(c) - st_in)
            score += abs(st_freqs[c] - cand_freqs[c]) * 100
        if lowdist > score:
            lowdist = score
            topchoice = dec
            key = test
    return key, topchoice, lowdist
|
# -*- coding: utf-8 -*-
import string
from stringutils import convert, freq
def brute_xor(cyphertext, st_freqs):
    """Bruteforce a given single-character XOR-encrypted cyphertext.

    Statistical information is used to choose which character is the most
    likely key.

    :param cyphertext: the cyphertext to crack
    :param st_freqs: a Counter of standard frequencies in the target language
    :return: ``(key, message, distance)``
    """
    # standard frequency counts
    st_keys = st_freqs.keys()
    st_len = len(st_keys)
    # store a map of each candidate and a simple frequency score
    topchoice = None
    lowdist = float('inf')
    key = None
    # bruteforce for each character
    for test in (string.letters + string.digits):
        dec = convert.xor(test, cyphertext)
        cand_freqs = freq.get_freqs(freq.char_count(dec.lower()))
        cand_keys = cand_freqs.keys()
        distance = 0.0
        for c in cand_freqs:
            # use two classifiers, based on pos'n in std freq list & freq dist
            try:
                st_in = st_keys.index(c)
            except ValueError:
                st_in = st_len
            distance += abs(cand_keys.index(c) - st_in)
            distance += abs(st_freqs[c] - cand_freqs[c]) * 100
        if lowdist > distance:
            lowdist = distance
            topchoice = dec
            key = test
    return key, topchoice, lowdist
|
mit
|
Python
|
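The crack.py record scores each candidate key by comparing character-frequency statistics against a reference ranking; its stringutils helpers are repo-specific, but the core technique fits in a few self-contained lines. A sketch using only the standard library and a hard-coded English letter ordering (illustrative scoring, not the record's exact formula):
from collections import Counter

ENGLISH_RANK = "etaoinshrdlcumwfgypbvkjxqz"  # common-first letter ordering

def score(text):
    """Lower is better; non-printable output is rejected outright."""
    if not all(32 <= ord(c) < 127 for c in text):
        return float("inf")
    counts = Counter(c for c in text.lower() if c.isalpha())
    ranked = [c for c, _ in counts.most_common()]
    return sum(abs(i - ENGLISH_RANK.index(c)) for i, c in enumerate(ranked))

def brute_xor(cipher):
    candidates = ((k, bytes(b ^ k for b in cipher).decode("latin-1"))
                  for k in range(256))
    return min(candidates, key=lambda kv: score(kv[1]))

plain = b"frequency analysis recovers single byte xor keys"
key, msg = brute_xor(bytes(b ^ 0x41 for b in plain))
print(key, repr(msg))  # should recover key 65 for English-looking input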
c6a9fcfe817128d3e7b0f52625bcd2e6c1c92f76
|
fix #4491: auth1 test needs sapi for login (#4492)
|
radiasoft/sirepo,radiasoft/sirepo,radiasoft/sirepo,radiasoft/sirepo,radiasoft/sirepo
|
tests/auth1_test.py
|
tests/auth1_test.py
|
# -*- coding: utf-8 -*-
u"""Test sirepo.auth
:copyright: Copyright (c) 2019 RadiaSoft LLC. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
import pytest
from pykern import pkcollections
from sirepo import srunit
@srunit.wrap_in_request(sim_types='myapp', want_user=False)
def test_login():
from pykern import pkunit, pkcompat
from pykern.pkunit import pkeq, pkok, pkre, pkfail, pkexcept
from sirepo import auth
import flask
import sirepo.api
import sirepo.auth.guest
import sirepo.cookie
import sirepo.http_request
import sirepo.uri_router
import sirepo.util
r = sirepo.uri_router.call_api('authState')
pkre('LoggedIn": false.*Registration": false', pkcompat.from_bytes(r.data))
auth.process_request()
with pkunit.pkexcept('SRException.*routeName=login'):
auth.logged_in_user()
with pkexcept('SRException.*routeName=login'):
auth.require_user()
sirepo.cookie.set_sentinel()
# copying examples for new user takes time
try:
# TODO(rorour): get sapi from current request
r = auth.login(sirepo.auth.guest, sim_type='myapp', sapi=sirepo.api.Base())
pkfail('expecting sirepo.util.Response')
except sirepo.util.Response as e:
r = e.sr_args.response
pkre(r'LoggedIn":\s*true.*Registration":\s*false', pkcompat.from_bytes(r.data))
u = auth.logged_in_user()
pkok(u, 'user should exist')
# guests do not require completeRegistration
auth.require_user()
|
# -*- coding: utf-8 -*-
u"""Test sirepo.auth
:copyright: Copyright (c) 2019 RadiaSoft LLC. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
import pytest
from pykern import pkcollections
from sirepo import srunit
@srunit.wrap_in_request(sim_types='myapp', want_user=False)
def test_login():
from pykern import pkunit, pkcompat
from pykern.pkunit import pkeq, pkok, pkre, pkfail, pkexcept
from sirepo import auth
import flask
import sirepo.auth.guest
import sirepo.cookie
import sirepo.http_request
import sirepo.uri_router
import sirepo.util
r = sirepo.uri_router.call_api('authState')
pkre('LoggedIn": false.*Registration": false', pkcompat.from_bytes(r.data))
auth.process_request()
with pkunit.pkexcept('SRException.*routeName=login'):
auth.logged_in_user()
with pkexcept('SRException.*routeName=login'):
auth.require_user()
sirepo.cookie.set_sentinel()
# copying examples for new user takes time
try:
r = auth.login(sirepo.auth.guest, sim_type='myapp')
pkfail('expecting sirepo.util.Response')
except sirepo.util.Response as e:
r = e.sr_args.response
pkre(r'LoggedIn":\s*true.*Registration":\s*false', pkcompat.from_bytes(r.data))
u = auth.logged_in_user()
pkok(u, 'user should exist')
# guests do not require completeRegistration
auth.require_user()
|
apache-2.0
|
Python
|
646db72eca34f6006d189f0a143d0c00388d1955
|
Update viehicle.py
|
kantel/processingpy,kantel/processingpy,kantel/processingpy
|
sketches/ev_steering_1/viehicle.py
|
sketches/ev_steering_1/viehicle.py
|
class Viehicle():
def __init__(self, x, y):
self.acceleration = PVector(0, 0)
self.velocity = PVector(0, 0)
self.location = PVector(x, y)
self.r = 8.0
self.maxspeed = 5
self.maxforce = 0.1
self.d = 25
def update(self):
self.velocity.add(self.acceleration)
self.velocity.limit(self.maxspeed)
self.location.add(self.velocity)
self.acceleration.mult(0)
def applyForce(self, force):
self.acceleration.add(force)
def seek(self, target):
desired = PVector.sub(target, self.location)
# Check Boundaries
if self.location.x < self.d:
desired = PVector(self.maxspeed, self.velocity.y)
elif self.location.x > width - self.d:
desired = PVector(-self.maxspeed, self.velocity.y)
if self.location.y < self.d:
desired = PVector(self.velocity.x, self.maxspeed)
elif self.location.y > height - self.d:
desired = PVector(self.velocity.x, -self.maxspeed)
desired.normalize()
desired.mult(self.maxspeed)
steer = PVector.sub(desired, self.velocity)
steer.limit(self.maxforce)
self.applyForce(steer)
def display(self):
theta = self.velocity.heading() + PI/2
fill(color(98, 199, 119))
stroke(1)
strokeWeight(1)
with pushMatrix():
translate(self.location.x, self.location.y)
rotate(theta)
with beginShape():
vertex(0, -self.r*2)
vertex(-self.r, self.r*2)
vertex(self.r, self.r*2)
|
class Viehicle():
def __init__(self, x, y):
self.acceleration = PVector(0, 0)
self.velocity = PVector(0, 0)
self.location = PVector(x, y)
self.r = 8.0
self.maxspeed = 5
self.maxforce = 0.1
self.d = 25
def update(self):
self.velocity.add(self.acceleration)
self.velocity.limit(self.maxspeed)
self.location.add(self.velocity)
self.acceleration.mult(0)
def applyForce(self, force):
self.acceleration.add(force)
def seek(self, target):
desired = PVector.sub(target, self.location)
# Check Boundaries
if self.location.x < self.d:
desired = PVector(self.maxspeed, self.velocity.y)
elif self.location.x > width - self.d:
desired = PVector(-self.maxspeed, self.velocity.y)
if self.location.y < self.d:
desired = PVector(self.velocity.x, self.maxspeed)
elif self.location.y > height - self.d:
desired = PVector(self.velocity.x, -self.maxspeed)
desired.normalize()
desired.mult(self.maxspeed)
steer = PVector.sub(desired, self.velocity)
steer.limit(self.maxforce)
self.applyForce(steer)
def display(self):
theta = self.velocity.heading() + PI/2
fill(color(98, 199, 119))
stroke(1)
strokeWeight(1)
with pushMatrix():
translate(self.location.x, self.location.y)
rotate(theta)
with beginShape():
vertex(0, -self.r*2)
vertex(-self.r, self.r*2)
vertex(self.r, self.r*2)
|
mit
|
Python
|
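The Viehicle record implements Reynolds-style steering with Processing's PVector; the seek-then-limit arithmetic is easier to inspect with plain tuples. A dependency-free sketch of the same idea (numbers and names are illustrative):
import math

def limit(v, m):
    """Scale a 2-D vector down to magnitude m if it is longer than m."""
    mag = math.hypot(*v)
    return v if mag <= m else (v[0] * m / mag, v[1] * m / mag)

def seek(location, velocity, target, maxspeed=5.0, maxforce=0.1):
    desired = (target[0] - location[0], target[1] - location[1])
    mag = math.hypot(*desired) or 1.0
    # normalize, then scale to maxspeed, exactly as the record does
    desired = (desired[0] * maxspeed / mag, desired[1] * maxspeed / mag)
    steer = (desired[0] - velocity[0], desired[1] - velocity[1])
    return limit(steer, maxforce)  # steering force, clipped to maxforce

print(seek(location=(0.0, 0.0), velocity=(1.0, 0.0), target=(10.0, 10.0)))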
b58c8b4f9d049207b7e7e0e4de7058959df90b70
|
Use sendgrid's Subject type when sending email. (#1033)
|
google/clusterfuzz,google/clusterfuzz,google/clusterfuzz,google/clusterfuzz,google/clusterfuzz,google/clusterfuzz,google/clusterfuzz,google/clusterfuzz
|
src/appengine/libs/mail.py
|
src/appengine/libs/mail.py
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for sending mail."""
from builtins import str
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import From
from sendgrid.helpers.mail import HtmlContent
from sendgrid.helpers.mail import Mail
from sendgrid.helpers.mail import Subject
from sendgrid.helpers.mail import To
from config import db_config
from metrics import logs
def send(to_email, subject, html_content):
"""Send email."""
sendgrid_api_key = db_config.get_value('sendgrid_api_key')
if not sendgrid_api_key:
logs.log_warn('Skipping email as SendGrid API key is not set in config.')
return
from_email = db_config.get_value('sendgrid_sender')
if not from_email:
logs.log_warn('Skipping email as SendGrid sender is not set in config.')
return
message = Mail(
from_email=From(str(from_email)),
to_emails=To(str(to_email)),
subject=Subject(subject),
html_content=HtmlContent(str(html_content)))
try:
sg = SendGridAPIClient(sendgrid_api_key)
response = sg.send(message)
logs.log(
'Sent email to %s.' % to_email,
status_code=response.status_code,
body=response.body,
headers=response.headers)
except Exception:
logs.log_error('Failed to send email to %s.' % to_email)
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers for sending mail."""
from builtins import str
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import From
from sendgrid.helpers.mail import HtmlContent
from sendgrid.helpers.mail import Mail
from sendgrid.helpers.mail import To
from config import db_config
from metrics import logs
def send(to_email, subject, html_content):
"""Send email."""
sendgrid_api_key = db_config.get_value('sendgrid_api_key')
if not sendgrid_api_key:
logs.log_warn('Skipping email as SendGrid API key is not set in config.')
return
from_email = db_config.get_value('sendgrid_sender')
if not from_email:
logs.log_warn('Skipping email as SendGrid sender is not set in config.')
return
message = Mail(
from_email=From(str(from_email)),
to_emails=To(str(to_email)),
subject=subject,
html_content=HtmlContent(str(html_content)))
try:
sg = SendGridAPIClient(sendgrid_api_key)
response = sg.send(message)
logs.log(
'Sent email to %s.' % to_email,
status_code=response.status_code,
body=response.body,
headers=response.headers)
except Exception:
logs.log_error('Failed to send email to %s.' % to_email)
|
apache-2.0
|
Python
|
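The mail.py fix wraps the raw subject in sendgrid's Subject helper so every Mail field uses the library's typed wrappers. A minimal sketch of building (not sending) such a message, assuming sendgrid v6-style helpers as used in the record; the addresses are examples, not real configuration:
from sendgrid.helpers.mail import From, To, Subject, HtmlContent, Mail

message = Mail(
    from_email=From("noreply@example.com"),
    to_emails=To("dev@example.com"),
    subject=Subject("Fuzzer crash report"),
    html_content=HtmlContent("<b>details</b>"))
print(message.get())  # JSON-serializable dict the API client would post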
b393b432c4f24906e1919999402ed56bde49086e
|
Fix test case - found another trunk tunnel on layer 0.
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
integration-test/546-road-sort-keys-tunnel.py
|
integration-test/546-road-sort-keys-tunnel.py
|
# tunnels at level = 0
#https://www.openstreetmap.org/way/167952621
assert_has_feature(
16, 10475, 25324, "roads",
{"kind": "highway", "kind_detail": "motorway", "id": 167952621,
"name": "Presidio Pkwy.", "is_tunnel": True, "sort_rank": 333})
# http://www.openstreetmap.org/way/259492789
assert_has_feature(
16, 19266, 24635, "roads",
{"kind": "major_road", "kind_detail": "trunk", "id": 259492789,
"name": "McCarter Hwy.", "is_tunnel": True, "sort_rank": 331})
# http://www.openstreetmap.org/way/277441866
assert_has_feature(
16, 17563, 25792, "roads",
{"kind": "major_road", "kind_detail": "trunk", "id": 277441866,
"name": "Gatlinburg Spur Road (north)", "is_tunnel": True, "sort_rank": 331})
#https://www.openstreetmap.org/way/117837633
assert_has_feature(
16, 16808, 24434, "roads",
{"kind": "major_road", "kind_detail": "primary", "id": 117837633,
"name": "Dixie Hwy.", "is_tunnel": True, "sort_rank": 330})
#https://www.openstreetmap.org/way/57782075
assert_has_feature(
16, 16812, 24391, "roads",
{"kind": "major_road", "kind_detail": "secondary", "id": 57782075,
"name": "S Halsted St.", "is_tunnel": True, "sort_rank": 329})
#https://www.openstreetmap.org/way/57708079
assert_has_feature(
16, 16813, 24386, "roads",
{"kind": "major_road", "kind_detail": "tertiary", "id": 57708079,
"name": "W 74th St.", "is_tunnel": True, "sort_rank": 327})
#https://www.openstreetmap.org/way/56393654
assert_has_feature(
16, 16808, 24362, "roads",
{"kind": "minor_road", "kind_detail": "residential", "id": 56393654,
"name": "S Paulina St.", "is_tunnel": True, "sort_rank": 310})
#https://www.openstreetmap.org/way/190835369
assert_has_feature(
16, 16814, 24363, "roads",
{"kind": "minor_road", "kind_detail": "service", "id": 190835369,
"name": "S Wong Pkwy.", "is_tunnel": True, "sort_rank": 308})
|
# tunnels at level = 0
#https://www.openstreetmap.org/way/167952621
assert_has_feature(
16, 10475, 25324, "roads",
{"kind": "highway", "kind_detail": "motorway", "id": 167952621,
"name": "Presidio Pkwy.", "is_tunnel": True, "sort_rank": 333})
# http://www.openstreetmap.org/way/259492762
assert_has_feature(
16, 19267, 24634, "roads",
{"kind": "major_road", "kind_detail": "trunk", "id": 259492762,
"name": "Raymond Blvd.", "is_tunnel": True, "sort_rank": 331})
# http://www.openstreetmap.org/way/277441866
assert_has_feature(
16, 17563, 25792, "roads",
{"kind": "major_road", "kind_detail": "trunk", "id": 277441866,
"name": "Gatlinburg Spur Road (north)", "is_tunnel": True, "sort_rank": 331})
#https://www.openstreetmap.org/way/117837633
assert_has_feature(
16, 16808, 24434, "roads",
{"kind": "major_road", "kind_detail": "primary", "id": 117837633,
"name": "Dixie Hwy.", "is_tunnel": True, "sort_rank": 330})
#https://www.openstreetmap.org/way/57782075
assert_has_feature(
16, 16812, 24391, "roads",
{"kind": "major_road", "kind_detail": "secondary", "id": 57782075,
"name": "S Halsted St.", "is_tunnel": True, "sort_rank": 329})
#https://www.openstreetmap.org/way/57708079
assert_has_feature(
16, 16813, 24386, "roads",
{"kind": "major_road", "kind_detail": "tertiary", "id": 57708079,
"name": "W 74th St.", "is_tunnel": True, "sort_rank": 327})
#https://www.openstreetmap.org/way/56393654
assert_has_feature(
16, 16808, 24362, "roads",
{"kind": "minor_road", "kind_detail": "residential", "id": 56393654,
"name": "S Paulina St.", "is_tunnel": True, "sort_rank": 310})
#https://www.openstreetmap.org/way/190835369
assert_has_feature(
16, 16814, 24363, "roads",
{"kind": "minor_road", "kind_detail": "service", "id": 190835369,
"name": "S Wong Pkwy.", "is_tunnel": True, "sort_rank": 308})
|
mit
|
Python
|
90c42beafe4dc5168224fd96cf7891695c7cf346
|
fix save default values
|
haoNoQ/wztools2100,haoNoQ/wztools2100,haoNoQ/wztools2100
|
ini_tools/ini_file.py
|
ini_tools/ini_file.py
|
import os
from config_parser import WZConfigParser
from profile_loader import Profile, get_profiles_name_list
from generate_ini_header import get_header
class WZException(Exception):
pass
class IniFile(dict):
profiles = get_profiles_name_list()
def get_profile_for_ini(self):
name = os.path.basename(self.path)[:-4]
if name in self.profiles:
return Profile(name)
# hack for research
elif name[:-5] in self.profiles:
return Profile(name[:-5])
else:
raise WZException("Can't find profile for %s" % self.path)
def __init__(self, path, data_dict=None):
self.path = path
self.name = os.path.basename(path)[:-4]
self.profile = self.get_profile_for_ini()
if data_dict:
self.update(data_dict)
else:
config = WZConfigParser()
config.load(path)
for section_name in config.sections():
self[section_name] = dict(config.items(section_name))
def save(self, filename=None):
if filename is None:
filename = self.path
text_list = [get_header(self.profile)]
for section_name, section_items in self.items():
section_list = ['', '[%s]' % section_name]
for item in sorted(section_items.items(), key=lambda x: self.profile.field_order.index(x[0])):
prepared_value = self.prepare_value(item)
if prepared_value:
section_list.append(prepared_value)
text_list.extend(section_list)
with open(filename, 'w') as fd:
fd.write('\n'.join(text_list))
def prepare_value(self, item):
key, val = item
field = self.profile[key]
if str(field.get('default')) == str(val):
return None
if field['type'] == 'pie':
return "%s = %s" % (key, val.lower())
return "%s = %s" % item
#if __name__ == '__main__':
# ini_file = IniFile("G:/warzone2100/data/base/stats/propulsion.ini")
# with open('tmp.ini', 'w') as fd:
# ini_file.save(fd)
@classmethod
def from_dict(cls, data_dict, dest_file):
return IniFile()
|
import os
from config_parser import WZConfigParser
from profile_loader import Profile, get_profiles_name_list
from generate_ini_header import get_header
class WZException(Exception):
pass
class IniFile(dict):
profiles = get_profiles_name_list()
def get_profile_for_ini(self):
name = os.path.basename(self.path)[:-4]
if name in self.profiles:
return Profile(name)
# hack for research
elif name[:-5] in self.profiles:
return Profile(name[:-5])
else:
raise WZException("Can't find profile for %s" % self.path)
def __init__(self, path, data_dict=None):
self.path = path
self.name = os.path.basename(path)[:-4]
self.profile = self.get_profile_for_ini()
if data_dict:
self.update(data_dict)
else:
config = WZConfigParser()
config.load(path)
for section_name in config.sections():
self[section_name] = dict(config.items(section_name))
def save(self, filename=None):
if filename is None:
filename = self.path
text_list = [get_header(self.profile)]
for section_name, section_items in self.items():
section_list = ['', '[%s]' % section_name]
for item in sorted(section_items.items(), key=lambda x: self.profile.field_order.index(x[0])):
prepared_value = self.prepare_value(item)
if prepared_value:
section_list.append(prepared_value)
text_list.extend(section_list)
with open(filename, 'w') as fd:
fd.write('\n'.join(text_list))
def prepare_value(self, item):
key, val = item
field = self.profile[key]
if str(field.get('default')) == val:
return None
if field['type'] == 'pie':
return "%s = %s" % (key, val.lower())
return "%s = %s" % item
#if __name__ == '__main__':
# ini_file = IniFile("G:/warzone2100/data/base/stats/propulsion.ini")
# with open('tmp.ini', 'w') as fd:
# ini_file.save(fd)
@classmethod
def from_dict(cls, data_dict, dest_file):
return IniFile()
|
cc0-1.0
|
Python
|
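The ini_file fix applies str() to both sides of the default comparison, so a default matches whether the current value arrives as text from a parsed .ini file or as a native type via data_dict. The snippet below shows why the symmetric str() matters (names are illustrative):
default = 100                    # as declared in a profile definition
for val in ("100", 100):         # parsed text vs. native value
    print(str(default) == val, str(default) == str(val))
# prints: True True / False True -- before the fix only the parsed-text
# case matched; str() on both sides also catches native-typed values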
3640cb895bb93d144a615d4b745af135016d67af
|
order imports
|
plone/plone.server,plone/plone.server
|
src/plone.server/plone/server/__init__.py
|
src/plone.server/plone/server/__init__.py
|
# -*- encoding: utf-8 -*-
# load the patch before anything else.
from plone.server import patch # noqa
from plone.server import interfaces
from plone.server import languages
# load defined migrations
from plone.server.migrate import migrations # noqa
from zope.i18nmessageid import MessageFactory
import collections
import logging
# create logging
logger = logging.getLogger('plone.server')
_ = MessageFactory('plone')
app_settings = {
"databases": [],
"address": 8080,
"static": [],
"utilities": [],
"root_user": {
"password": ""
},
"auth_extractors": [
"plone.server.auth.extractors.BearerAuthPolicy",
"plone.server.auth.extractors.BasicAuthPolicy",
"plone.server.auth.extractors.WSTokenAuthPolicy",
],
"auth_user_identifiers": [],
"auth_token_validators": [
"plone.server.auth.validators.SaltedHashPasswordValidator",
"plone.server.auth.validators.JWTValidator"
],
"default_layers": [
interfaces.IDefaultLayer
],
"http_methods": {
"PUT": interfaces.IPUT,
"POST": interfaces.IPOST,
"PATCH": interfaces.IPATCH,
"DELETE": interfaces.IDELETE,
"GET": interfaces.IGET,
"OPTIONS": interfaces.IOPTIONS,
"HEAD": interfaces.IHEAD,
"CONNECT": interfaces.ICONNECT
},
"renderers": collections.OrderedDict({
"application/json": interfaces.IRendererFormatJson,
"text/html": interfaces.IRendererFormatHtml,
"*/*": interfaces.IRendererFormatRaw
}),
"languages": {
"en": languages.IEN,
"en-us": languages.IENUS,
"ca": languages.ICA
},
"default_permission": 'zope.Public',
"available_addons": {},
"api_definition": {},
"cors": {
"allow_origin": ["http://localhost:8080"],
"allow_methods": ["GET", "POST", "DELETE", "HEAD", "PATCH", "OPTIONS"],
"allow_headers": ["*"],
"expose_headers": ["*"],
"allow_credentials": True,
"max_age": 3660
},
"jwt": {
"secret": "foobar",
"algorithm": "HS256"
}
}
SCHEMA_CACHE = {}
PERMISSIONS_CACHE = {}
FACTORY_CACHE = {}
BEHAVIOR_CACHE = {}
|
# -*- encoding: utf-8 -*-
# create logging
import logging
logger = logging.getLogger('plone.server')
from zope.i18nmessageid import MessageFactory # noqa
_ = MessageFactory('plone')
# load the patch before anything else.
from plone.server import patch # noqa
# load defined migrations
from plone.server.migrate import migrations # noqa
from plone.server import interfaces
from plone.server import languages
import collections
app_settings = {
"databases": [],
"address": 8080,
"static": [],
"utilities": [],
"root_user": {
"password": ""
},
"auth_extractors": [
"plone.server.auth.extractors.BearerAuthPolicy",
"plone.server.auth.extractors.BasicAuthPolicy",
"plone.server.auth.extractors.WSTokenAuthPolicy",
],
"auth_user_identifiers": [],
"auth_token_validators": [
"plone.server.auth.validators.SaltedHashPasswordValidator",
"plone.server.auth.validators.JWTValidator"
],
"default_layers": [
interfaces.IDefaultLayer
],
"http_methods": {
"PUT": interfaces.IPUT,
"POST": interfaces.IPOST,
"PATCH": interfaces.IPATCH,
"DELETE": interfaces.IDELETE,
"GET": interfaces.IGET,
"OPTIONS": interfaces.IOPTIONS,
"HEAD": interfaces.IHEAD,
"CONNECT": interfaces.ICONNECT
},
"renderers": collections.OrderedDict({
"application/json": interfaces.IRendererFormatJson,
"text/html": interfaces.IRendererFormatHtml,
"*/*": interfaces.IRendererFormatRaw
}),
"languages": {
"en": languages.IEN,
"en-us": languages.IENUS,
"ca": languages.ICA
},
"default_permission": 'zope.Public',
"available_addons": {},
"api_definition": {},
"cors": {
"allow_origin": ["http://localhost:8080"],
"allow_methods": ["GET", "POST", "DELETE", "HEAD", "PATCH", "OPTIONS"],
"allow_headers": ["*"],
"expose_headers": ["*"],
"allow_credentials": True,
"max_age": 3660
},
"jwt": {
"secret": "foobar",
"algorithm": "HS256"
}
}
SCHEMA_CACHE = {}
PERMISSIONS_CACHE = {}
FACTORY_CACHE = {}
BEHAVIOR_CACHE = {}
|
bsd-2-clause
|
Python
|
63cdfe0de155ed32af0332310340b4d57dcef145
|
bump version for release
|
astraw/stdeb,astraw/stdeb,astraw/stdeb,benthomasson/stdeb,pombredanne/stdeb,LeMeteore/stdeb,sathieu/stdeb,sathieu/stdeb,pombredanne/stdeb,sathieu/stdeb,benthomasson/stdeb,LeMeteore/stdeb,pombredanne/stdeb,benthomasson/stdeb,LeMeteore/stdeb
|
stdeb/__init__.py
|
stdeb/__init__.py
|
# setuptools is required for distutils.commands plugin we use
import logging
import setuptools
__version__ = '0.4.3'
log = logging.getLogger('stdeb')
log.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
|
# setuptools is required for distutils.commands plugin we use
import logging
import setuptools
__version__ = '0.4.2.git'
log = logging.getLogger('stdeb')
log.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
|
mit
|
Python
|
cbe58b74f6d5fe5c96b197ced9c2269cf8886d24
|
make boolean functions in utils return real booleans
|
craigds/django-livesettings
|
livesettings/utils.py
|
livesettings/utils.py
|
import sys
import types
import os
def can_loop_over(maybe):
"""Test value to see if it is list like"""
try:
iter(maybe)
except TypeError:
return False
return True
def is_list_or_tuple(maybe):
return isinstance(maybe, (types.TupleType, types.ListType))
def is_scalar(maybe):
"""Test to see value is a string, an int, or some other scalar type"""
return is_string_like(maybe) or not can_loop_over(maybe)
def is_string_like(maybe):
"""Test value to see if it acts like a string"""
try:
maybe+""
except TypeError:
return False
return True
def flatten_list(sequence, scalarp=is_scalar, result=None):
"""flatten out a list by putting sublist entries in the main list"""
if result is None:
result = []
for item in sequence:
if scalarp(item):
result.append(item)
else:
flatten_list(item, scalarp, result)
def load_module(module):
"""Load a named python module."""
try:
module = sys.modules[module]
except KeyError:
__import__(module)
module = sys.modules[module]
return module
def get_flat_list(sequence):
"""flatten out a list and return the flat list"""
flat = []
flatten_list(sequence, result=flat)
return flat
def url_join(*args):
"""Join any arbitrary strings into a forward-slash delimited string.
Do not strip leading / from first element, nor trailing / from last element.
This function can take lists as arguments, flattening them appropriately.
example:
url_join('one','two',['three','four'],'five') => 'one/two/three/four/five'
"""
if len(args) == 0:
return ""
args = get_flat_list(args)
if len(args) == 1:
return str(args[0])
else:
args = [str(arg).replace("\\", "/") for arg in args]
work = [args[0]]
for arg in args[1:]:
if arg.startswith("/"):
work.append(arg[1:])
else:
work.append(arg)
joined = reduce(os.path.join, work)
return joined.replace("\\", "/")
|
import sys
import types
import os
def can_loop_over(maybe):
"""Test value to see if it is list like"""
try:
iter(maybe)
except:
return 0
else:
return 1
def is_list_or_tuple(maybe):
return isinstance(maybe, (types.TupleType, types.ListType))
def is_scalar(maybe):
"""Test to see value is a string, an int, or some other scalar type"""
return is_string_like(maybe) or not can_loop_over(maybe)
def is_string_like(maybe):
"""Test value to see if it acts like a string"""
try:
maybe+""
except TypeError:
return 0
else:
return 1
def flatten_list(sequence, scalarp=is_scalar, result=None):
"""flatten out a list by putting sublist entries in the main list"""
if result is None:
result = []
for item in sequence:
if scalarp(item):
result.append(item)
else:
flatten_list(item, scalarp, result)
def load_module(module):
"""Load a named python module."""
try:
module = sys.modules[module]
except KeyError:
__import__(module)
module = sys.modules[module]
return module
def get_flat_list(sequence):
"""flatten out a list and return the flat list"""
flat = []
flatten_list(sequence, result=flat)
return flat
def url_join(*args):
"""Join any arbitrary strings into a forward-slash delimited string.
Do not strip leading / from first element, nor trailing / from last element.
This function can take lists as arguments, flattening them appropriately.
example:
url_join('one','two',['three','four'],'five') => 'one/two/three/four/five'
"""
if len(args) == 0:
return ""
args = get_flat_list(args)
if len(args) == 1:
return str(args[0])
else:
args = [str(arg).replace("\\", "/") for arg in args]
work = [args[0]]
for arg in args[1:]:
if arg.startswith("/"):
work.append(arg[1:])
else:
work.append(arg)
joined = reduce(os.path.join, work)
return joined.replace("\\", "/")
|
bsd-3-clause
|
Python
|
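The livesettings change swaps 0/1 returns for real booleans; behaviour under if is identical, but the type is now honest for callers that introspect or serialize the result. A quick illustration mirroring the fixed helper:
def is_string_like(maybe):
    """Return a real bool, as in the fixed helper above."""
    try:
        maybe + ""
    except TypeError:
        return False
    return True

print(is_string_like("abc"), is_string_like(42))  # True False
print(isinstance(is_string_like("abc"), bool))    # True; a 0/1 return fails this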
a91ac10af21cf644bfc45ef729e465726491db7b
|
Enable android_test and friends as waf commands.
|
playedonline/flambe,aduros/flambe,markknol/flambe,Disar/flambe,mikedotalmond/flambe,markknol/flambe,weilitao/flambe,aduros/flambe,playedonline/flambe,Disar/flambe,mikedotalmond/flambe,weilitao/flambe,mikedotalmond/flambe,aduros/flambe,playedonline/flambe,markknol/flambe,Disar/flambe,aduros/flambe,weilitao/flambe,mikedotalmond/flambe,playedonline/flambe,Disar/flambe,weilitao/flambe
|
tools/flambe.py
|
tools/flambe.py
|
#!/usr/bin/env python
from waflib import *
from waflib.TaskGen import *
import os
# Waf hates absolute paths for some reason
FLAMBE_ROOT = os.path.dirname(__file__) + "/.."
def options(ctx):
ctx.add_option("--debug", action="store_true", default=False, help="Build a development version")
def configure(ctx):
ctx.load("haxe", tooldir=FLAMBE_ROOT+"/tools")
ctx.env.debug = ctx.options.debug
@feature("flambe")
def apply_flambe(ctx):
flags = ["-main", ctx.main]
hasBootstrap = ctx.path.find_dir("res/bootstrap")
if ctx.env.debug:
flags += "-debug --no-opt --no-inline".split()
else:
#flags += "--dead-code-elimination --no-traces".split()
flags += "--no-traces".split()
ctx.bld(features="haxe", classpath=["src", FLAMBE_ROOT+"/src"],
flags=flags,
swflib="bootstrap.swf" if hasBootstrap else None,
target="app.swf")
ctx.bld(features="haxe", classpath=["src", FLAMBE_ROOT+"/src"],
flags=flags + "-D amity --macro flambe.macro.AmityJSGenerator.use()".split(),
target="app.js")
res = ctx.path.find_dir("res")
if res is not None:
# Create asset swfs from the directories in /res
ctx.bld(features="haxe", classpath=FLAMBE_ROOT+"/tools",
flags="-main AssetPackager",
libs="format",
target="packager.n")
# -interp because neko JIT is unstable...
ctx.bld(rule="neko -interp ${SRC} " + res.abspath() + " .",
source="packager.n", target= "bootstrap.swf" if hasBootstrap else None, always=True)
def android_test(ctx):
os.system("adb push res /sdcard/amity-dev")
os.system("adb push build/app.js /sdcard/amity-dev")
os.system("adb shell am start -a android.intent.action.MAIN " +
"-c android.intent.category.HOME")
os.system("adb shell am start -a android.intent.action.MAIN " +
"-n com.threerings.amity/.AmityActivity")
Context.g_module.__dict__["android_test"] = android_test
def android_log(ctx):
os.system("adb logcat -v tag amity:V SDL:V *:W")
Context.g_module.__dict__["android_log"] = android_log
def flash_test(ctx):
os.system("flashplayer build/app.swf")
Context.g_module.__dict__["flash_test"] = flash_test
|
#!/usr/bin/env python
from waflib import *
from waflib.TaskGen import *
import os
# Waf hates absolute paths for some reason
FLAMBE_ROOT = os.path.dirname(__file__) + "/.."
def options(ctx):
ctx.add_option("--debug", action="store_true", default=False, help="Build a development version")
def configure(ctx):
ctx.load("haxe", tooldir=FLAMBE_ROOT+"/tools")
ctx.env.debug = ctx.options.debug
@feature("flambe")
def apply_flambe(ctx):
flags = ["-main", ctx.main]
hasBootstrap = ctx.path.find_dir("res/bootstrap")
if ctx.env.debug:
flags += "-debug --no-opt --no-inline".split()
else:
#flags += "--dead-code-elimination --no-traces".split()
flags += "--no-traces".split()
ctx.bld(features="haxe", classpath=["src", FLAMBE_ROOT+"/src"],
flags=flags,
swflib="bootstrap.swf" if hasBootstrap else None,
target="app.swf")
ctx.bld(features="haxe", classpath=["src", FLAMBE_ROOT+"/src"],
flags=flags + "-D amity --macro flambe.macro.AmityJSGenerator.use()".split(),
target="app.js")
res = ctx.path.find_dir("res")
if res is not None:
# Create asset swfs from the directories in /res
ctx.bld(features="haxe", classpath=FLAMBE_ROOT+"/tools",
flags="-main AssetPackager",
libs="format",
target="packager.n")
# -interp because neko JIT is unstable...
ctx.bld(rule="neko -interp ${SRC} " + res.abspath() + " .",
source="packager.n", target= "bootstrap.swf" if hasBootstrap else None, always=True)
# TODO: How can we expose these handy commands to the main wscript?
def android_test(ctx):
os.system("adb push res /sdcard/amity-dev")
os.system("adb push build/app.js /sdcard/amity-dev")
os.system("adb shell am start -a android.intent.action.MAIN " +
"-c android.intent.category.HOME")
os.system("adb shell am start -a android.intent.action.MAIN " +
"-n com.threerings.amity/.AmityActivity")
def flash_test(ctx):
os.system("flashplayer build/app.swf")
def android_log(ctx):
os.system("adb logcat -v tag amity:V SDL:V *:W")
|
mit
|
Python
|
d42b47f971675af4b12f59089326276b3b8ff9f4
|
Bump version to 0.14.0
|
dmulholland/syntex,dmulholland/syntex
|
syntex/pkgmeta.py
|
syntex/pkgmeta.py
|
# -------------------------------------------------------------------------
# Package meta data.
# -------------------------------------------------------------------------
# Package version number.
__version__ = "0.14.0"
|
# -------------------------------------------------------------------------
# Package meta data.
# -------------------------------------------------------------------------
# Package version number.
__version__ = "0.13.4"
|
unlicense
|
Python
|
dadd800384358356542ccc49bbdad1ae54006cfc
|
Fix test_Bucket.BucketDataTests to test `needed` attribute.
|
pagea/bridgedb,pagea/bridgedb
|
lib/bridgedb/test/test_Bucket.py
|
lib/bridgedb/test/test_Bucket.py
|
# -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :copyright: (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: 3-Clause BSD, see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.Bucket` module.
These tests are meant to ensure that the :mod:`bridgedb.Bucket` module is
functioning as expected.
"""
from __future__ import print_function
from io import StringIO
import sure
from sure import this
from sure import the
from sure import expect
from bridgedb import Bucket
from bridgedb import persistent
from twisted.trial import unittest
class BucketDataTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketData`."""
def test_alloc_some_of_the_bridges(self):
"""Set the needed number of bridges"""
needed = 10
distname = "test-distributor"
        bucket = Bucket.BucketData(distname, needed)
this(bucket.name).should.be.equal(distname)
this(bucket.needed).should.be.equal(needed)
def test_alloc_all_the_bridges(self):
"""Set the needed number of bridges to the default"""
needed = '*'
distname = "test-distributor"
        bucket = Bucket.BucketData(distname, needed)
this(bucket.name).should.be.equal(distname)
this(bucket.needed).should.be.equal(needed)
class BucketManagerTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketManager`."""
TEST_CONFIG_FILE = StringIO(unicode("""\
FILE_BUCKETS = { 'test1': 7, 'test2': 11 }
COLLECT_TIMESTAMPS = False
COUNTRY_BLOCK_FILE = []"""))
def setUp(self):
configuration = {}
        self.TEST_CONFIG_FILE.seek(0)
        compiled = compile(self.TEST_CONFIG_FILE.read(), '<string>', 'exec')
exec compiled in configuration
self.config = persistent.Conf(**configuration)
        self.state = persistent.State(**self.config.__dict__)
self.bucket = Bucket.BucketManager(self.config)
|
# -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :copyright: (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: 3-Clause BSD, see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.Bucket` module.
These tests are meant to ensure that the :mod:`bridgedb.Bucket` module is
functioning as expected.
"""
from __future__ import print_function
from io import StringIO
import sure
from sure import this
from sure import the
from sure import expect
from bridgedb import Bucket
from bridgedb import persistent
from twisted.trial import unittest
class BucketDataTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketData`."""
def test_alloc_some_of_the_bridges(self):
"""Set the needed number of bridges"""
alloc = 10
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
this(alloc).should.be.equal(bucket.needed)
def test_alloc_all_the_bridges(self):
"""Set the needed number of bridges to the default"""
alloc = '*'
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
this(alloc).should.be.equal(1000000)
class BucketManagerTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketManager`."""
TEST_CONFIG_FILE = StringIO(unicode("""\
FILE_BUCKETS = { 'test1': 7, 'test2': 11 }
COLLECT_TIMESTAMPS = False
COUNTRY_BLOCK_FILE = []"""))
def setUp(self):
configuration = {}
        self.TEST_CONFIG_FILE.seek(0)
        compiled = compile(self.TEST_CONFIG_FILE.read(), '<string>', 'exec')
exec compiled in configuration
self.config = persistent.Conf(**configuration)
        self.state = persistent.State(**self.config.__dict__)
self.bucket = Bucket.BucketManager(self.config)
|
bsd-3-clause
|
Python
|
c86c80854ac5ea60f43619610a21bfba9b1094f2
|
add ratio
|
John-Lin/pydcard
|
example/simple_male_female_ratio.py
|
example/simple_male_female_ratio.py
|
import pydcard
def main():
male = 0
female = 0
for page_num in range(1, 41):
print ('Sending request to page %d' % page_num)
page = pydcard.get_all_page(page_num)
for post_thread in range(0, len(page)):
if page[post_thread].get('member').get('gender') == 'M':
male = male + 1
elif page[post_thread].get('member').get('gender') == 'F':
female = female + 1
else:
print ('Unknown gender')
print (page[post_thread].get('member').get('gender'))
print ('Female posts: %d, Male posts: %d' % (female, male))
print ('Female to Male ratio: %f' % (female/male))
if __name__ == '__main__':
main()
|
import pydcard
def main():
male = 0
female = 0
for page_num in range(1, 41):
print ('Sending request to page %d' % page_num)
page = pydcard.getAllPage(page_num)
for post_thread in range(0, len(page)):
if page[post_thread].get('member').get('gender') == 'M':
male = male + 1
elif page[post_thread].get('member').get('gender') == 'F':
female = female + 1
else:
print ('Unknown gender')
print (page[post_thread].get('member').get('gender'))
print ('Female posts: %d, Male posts: %d' % (female, male))
if __name__ == '__main__':
main()
|
mit
|
Python
|
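The pydcard example prints female/male; under Python 2 that expression floor-divides two ints, and it raises ZeroDivisionError when no male posts were counted. A defensive variant (the function name is illustrative):
from __future__ import division  # no-op on Python 3, fixes int/int on Python 2

def ratio(female, male):
    if male == 0:
        return float("inf") if female else 0.0
    return female / male

print("Female to Male ratio: %f" % ratio(120, 80))  # 1.500000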
1bb4059a783fdbc8f397b596d5d5d5ed6d97a7b4
|
use radiasoft/beamsim-jupyter image
|
biviosoftware/salt-conf,radiasoft/salt-conf,radiasoft/salt-conf,biviosoftware/salt-conf
|
srv/salt/jupyterhub/jupyterhub_config.py
|
srv/salt/jupyterhub/jupyterhub_config.py
|
c.Authenticator.admin_users = {'{{ pillar.jupyterhub.admin_user }}',}
c.JupyterHub.confirm_no_ssl = True
c.JupyterHub.ip = '0.0.0.0'
import base64
c.JupyterHub.cookie_secret = base64.b64decode('{{ pillar.jupyterhub.cookie_secret }}')
c.JupyterHub.proxy_auth_token = '{{ pillar.jupyterhub.proxy_auth_token }}'
# Allow both local and GitHub users; Useful for bootstrap
c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator'
c.GitHubOAuthenticator.oauth_callback_url = 'https://jupyter.radiasoft.org/hub/oauth_callback'
c.GitHubOAuthenticator.client_id = '{{ pillar.jupyterhub.github_client_id }}'
c.GitHubOAuthenticator.client_secret = '{{ pillar.jupyterhub.github_client_secret }}'
c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner'
c.DockerSpawner.use_internal_ip = True
from IPython.utils.localinterfaces import public_ips
c.JupyterHub.hub_ip = public_ips()[0]
c.DockerSpawner.container_image = 'radiasoft/beamsim-jupyter'
|
c.Authenticator.admin_users = {'{{ pillar.jupyterhub.admin_user }}',}
c.JupyterHub.confirm_no_ssl = True
c.JupyterHub.ip = '0.0.0.0'
import base64
c.JupyterHub.cookie_secret = base64.b64decode('{{ pillar.jupyterhub.cookie_secret }}')
c.JupyterHub.proxy_auth_token = '{{ pillar.jupyterhub.proxy_auth_token }}'
# Allow both local and GitHub users; Useful for bootstrap
c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator'
c.GitHubOAuthenticator.oauth_callback_url = 'https://jupyter.radiasoft.org/hub/oauth_callback'
c.GitHubOAuthenticator.client_id = '{{ pillar.jupyterhub.github_client_id }}'
c.GitHubOAuthenticator.client_secret = '{{ pillar.jupyterhub.github_client_secret }}'
c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner'
c.DockerSpawner.use_internal_ip = True
from IPython.utils.localinterfaces import public_ips
c.JupyterHub.hub_ip = public_ips()[0]
# jupyter_client.localinterfaces
#container_image = radiasoft/jupyterhub-singleuser
|
apache-2.0
|
Python
|
16fc80f36fa0bade1f4e5e7bef5595b3617a42bc
|
fix bartlett to pass participant not participant uuid
|
jcpeterson/Dallinger,Dallinger/Dallinger,berkeley-cocosci/Wallace,Dallinger/Dallinger,jcpeterson/Dallinger,suchow/Wallace,Dallinger/Dallinger,suchow/Wallace,berkeley-cocosci/Wallace,jcpeterson/Dallinger,Dallinger/Dallinger,berkeley-cocosci/Wallace,Dallinger/Dallinger,jcpeterson/Dallinger,suchow/Wallace,jcpeterson/Dallinger
|
examples/bartlett1932/experiment.py
|
examples/bartlett1932/experiment.py
|
"""Bartlett's trasmission chain experiment from Remembering (1932)."""
from wallace.networks import Chain
from wallace.nodes import Source, ReplicatorAgent
from wallace import processes
from wallace.experiments import Experiment
import random
class Bartlett1932(Experiment):
"""Defines the experiment."""
def __init__(self, session):
"""Set up the initial networks."""
super(Bartlett1932, self).__init__(session)
self.practice_repeats = 0
self.experiment_repeats = 1
self.agent = ReplicatorAgent
self.network = lambda: Chain(max_size=3)
if not self.networks():
self.setup()
self.save()
def setup(self):
super(Bartlett1932, self).setup()
# Setup for first time experiment is accessed
for net in self.networks():
if not net.nodes(type=Source):
source = WarOfTheGhostsSource(network=net)
net.add_source(source)
def create_agent_trigger(self, agent, network):
"""When an agent is created, add it to the network and take a step."""
network.add_agent(agent)
processes.random_walk(network)
def recruit(self):
"""Recruit participants to the experiment as needed."""
if self.networks(full=False):
self.recruiter().recruit_participants(n=1)
else:
self.recruiter().close_recruitment()
def bonus(self, participant=None):
"""Compute the bonus for the given participant.
        This is called automatically when a participant finishes;
        it is called immediately prior to the participant_submission_trigger.
"""
return 1
class WarOfTheGhostsSource(Source):
"""Transmit a story from Bartlett (1932)."""
__mapper_args__ = {"polymorphic_identity": "war_of_the_ghosts_source"}
def _contents(self):
"""Read the markdown source of the story from a file."""
stories = [
"ghosts.md",
"cricket.md",
"moochi.md",
"outwit.md",
"raid.md",
"species.md",
"tennis.md",
"vagabond.md"
]
story = random.choice(stories)
with open("static/stimuli/{}".format(story), "r") as f:
return f.read()
|
"""Bartlett's trasmission chain experiment from Remembering (1932)."""
from wallace.networks import Chain
from wallace.nodes import Source, ReplicatorAgent
from wallace import processes
from wallace.experiments import Experiment
import random
class Bartlett1932(Experiment):
"""Defines the experiment."""
def __init__(self, session):
"""Set up the initial networks."""
super(Bartlett1932, self).__init__(session)
self.practice_repeats = 0
self.experiment_repeats = 1
self.agent = ReplicatorAgent
self.network = lambda: Chain(max_size=3)
if not self.networks():
self.setup()
self.save()
def setup(self):
super(Bartlett1932, self).setup()
# Setup for first time experiment is accessed
for net in self.networks():
if not net.nodes(type=Source):
source = WarOfTheGhostsSource(network=net)
net.add_source(source)
def create_agent_trigger(self, agent, network):
"""When an agent is created, add it to the network and take a step."""
network.add_agent(agent)
processes.random_walk(network)
def recruit(self):
"""Recruit participants to the experiment as needed."""
if self.networks(full=False):
self.recruiter().recruit_participants(n=1)
else:
self.recruiter().close_recruitment()
def bonus(self, participant_uuid=None):
"""Compute the bonus for the given participant.
        This is called automatically when a participant finishes;
        it is called immediately prior to the participant_submission_trigger.
"""
return 1
class WarOfTheGhostsSource(Source):
"""Transmit a story from Bartlett (1932)."""
__mapper_args__ = {"polymorphic_identity": "war_of_the_ghosts_source"}
def _contents(self):
"""Read the markdown source of the story from a file."""
stories = [
"ghosts.md",
"cricket.md",
"moochi.md",
"outwit.md",
"raid.md",
"species.md",
"tennis.md",
"vagabond.md"
]
story = random.choice(stories)
with open("static/stimuli/{}".format(story), "r") as f:
return f.read()
|
mit
|
Python
|
6073610cb08e03e142b80dc7b1196ce359a1f55a
|
fix pylint import error
|
commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot
|
selfdrive/debug/toyota_eps_factor.py
|
selfdrive/debug/toyota_eps_factor.py
|
#!/usr/bin/env python3
import sys
import numpy as np
import matplotlib.pyplot as plt
from sklearn import linear_model # pylint: disable=import-error
from tools.lib.route import Route
from tools.lib.logreader import MultiLogIterator
MIN_SAMPLES = 30*100
def to_signed(n, bits):
if n >= (1 << max((bits - 1), 0)):
n = n - (1 << max(bits, 0))
return n
def get_eps_factor(lr, plot=False):
engaged = False
torque_cmd, eps_torque = None, None
cmds, eps = [], []
for msg in lr:
if msg.which() != 'can':
continue
for m in msg.can:
if m.address == 0x2e4 and m.src == 128:
engaged = bool(m.dat[0] & 1)
torque_cmd = to_signed((m.dat[1] << 8) | m.dat[2], 16)
elif m.address == 0x260 and m.src == 0:
eps_torque = to_signed((m.dat[5] << 8) | m.dat[6], 16)
if engaged and torque_cmd is not None and eps_torque is not None:
cmds.append(torque_cmd)
eps.append(eps_torque)
else:
if len(cmds) > MIN_SAMPLES:
break
cmds, eps = [], []
if len(cmds) < MIN_SAMPLES:
raise Exception("too few samples found in route")
lm = linear_model.LinearRegression(fit_intercept=False)
lm.fit(np.array(cmds).reshape(-1, 1), eps)
scale_factor = 1./lm.coef_[0]
if plot:
plt.plot(np.array(eps)*scale_factor)
plt.plot(cmds)
plt.show()
return scale_factor
if __name__ == "__main__":
r = Route(sys.argv[1])
lr = MultiLogIterator(r.log_paths(), wraparound=False)
n = get_eps_factor(lr, plot="--plot" in sys.argv)
print("EPS torque factor: ", n)
|
#!/usr/bin/env python3
import sys
import numpy as np
import matplotlib.pyplot as plt
from sklearn import linear_model
from tools.lib.route import Route
from tools.lib.logreader import MultiLogIterator
MIN_SAMPLES = 30*100
def to_signed(n, bits):
if n >= (1 << max((bits - 1), 0)):
n = n - (1 << max(bits, 0))
return n
def get_eps_factor(lr, plot=False):
engaged = False
torque_cmd, eps_torque = None, None
cmds, eps = [], []
for msg in lr:
if msg.which() != 'can':
continue
for m in msg.can:
if m.address == 0x2e4 and m.src == 128:
engaged = bool(m.dat[0] & 1)
torque_cmd = to_signed((m.dat[1] << 8) | m.dat[2], 16)
elif m.address == 0x260 and m.src == 0:
eps_torque = to_signed((m.dat[5] << 8) | m.dat[6], 16)
if engaged and torque_cmd is not None and eps_torque is not None:
cmds.append(torque_cmd)
eps.append(eps_torque)
else:
if len(cmds) > MIN_SAMPLES:
break
cmds, eps = [], []
if len(cmds) < MIN_SAMPLES:
raise Exception("too few samples found in route")
lm = linear_model.LinearRegression(fit_intercept=False)
lm.fit(np.array(cmds).reshape(-1, 1), eps)
scale_factor = 1./lm.coef_[0]
if plot:
plt.plot(np.array(eps)*scale_factor)
plt.plot(cmds)
plt.show()
return scale_factor
if __name__ == "__main__":
r = Route(sys.argv[1])
lr = MultiLogIterator(r.log_paths(), wraparound=False)
n = get_eps_factor(lr, plot="--plot" in sys.argv)
print("EPS torque factor: ", n)
|
mit
|
Python
|
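The EPS script regresses measured torque on commanded torque with the intercept pinned at zero, then inverts the slope to get the scale factor. The same estimate on synthetic data (sklearn assumed available, as in the record; the 0.73 factor is made up):
import numpy as np
from sklearn import linear_model

true_factor = 0.73                        # pretend EPS reports cmd * 0.73
cmds = np.arange(-500, 500, dtype=float)
eps = cmds * true_factor + np.random.normal(0, 5, cmds.shape)

lm = linear_model.LinearRegression(fit_intercept=False)  # force line through origin
lm.fit(cmds.reshape(-1, 1), eps)
print(1.0 / lm.coef_[0])                  # ~1/0.73 = 1.37, the scale factor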
60ebebb4cc167a010904763c5a4ffed6347c029e
|
Fix license tab.
|
Livit/Livit.Learn.EdX,Livit/Livit.Learn.EdX,Livit/Livit.Learn.EdX,Livit/Livit.Learn.EdX
|
lms/djangoapps/labster_course_license/tabs.py
|
lms/djangoapps/labster_course_license/tabs.py
|
"""
Registers the Labster Course License for the edX platform.
"""
from django.conf import settings
from django.utils.translation import ugettext_noop
from xmodule.tabs import CourseTab
from student.roles import CourseCcxCoachRole
from courseware.access import has_access
class LicenseCourseTab(CourseTab):
"""
The representation of the LTI Passport course tab
"""
type = "course_license"
title = ugettext_noop("License")
view_name = "labster_license_handler"
is_dynamic = True
@classmethod
def is_enabled(cls, course, user=None):
"""
Returns true if CCX has been enabled and the specified user is a coach
"""
if not settings.FEATURES.get('CUSTOM_COURSES_EDX', False) or not course.enable_ccx:
            # If ccx is not enabled, do not show the License tab.
return False
if has_access(user, 'staff', course) or has_access(user, 'instructor', course):
# if user is staff or instructor then he can always see License tab.
return True
role = CourseCcxCoachRole(course.id)
return role.has_user(user)
|
"""
Registers the Labster Course License for the edX platform.
"""
from django.conf import settings
from django.utils.translation import ugettext_noop
from xmodule.tabs import CourseTab
from student.roles import CourseCcxCoachRole
class LicenseCourseTab(CourseTab):
"""
The representation of the LTI Passport course tab
"""
type = "course_license"
title = ugettext_noop("License")
view_name = "labster_license_handler"
is_dynamic = True
@classmethod
def is_enabled(cls, course, user=None):
"""
Returns true if CCX has been enabled and the specified user is a coach
"""
if not user:
return True
if not settings.FEATURES.get('CUSTOM_COURSES_EDX', False) or not course.enable_ccx:
            # If ccx is not enabled, do not show the License tab.
return False
if has_access(user, 'staff', course) or has_access(user, 'instructor', course):
# if user is staff or instructor then he can always see License tab.
return True
role = CourseCcxCoachRole(course.id)
return role.has_user(user)
|
agpl-3.0
|
Python
|
d8e872c3d2aa141c29d993c08c207c1b7994b055
|
Add missing filter decorators
|
thoas/django-sequere
|
sequere/templatetags/sequere_tags.py
|
sequere/templatetags/sequere_tags.py
|
from django import template
from sequere.registry import registry
from sequere.models import (get_followers_count, get_followings_count)
register = template.Library()
@register.filter
def identifier(instance, arg=None):
return registry.get_identifier(instance)
@register.filter
def followers_count(instance, identifier=None):
return get_followers_count(instance, identifier)
@register.filter
def followings_count(instance, identifier=None):
return get_followings_count(instance, identifier)
|
from django import template
from sequere.registry import registry
from sequere.models import (get_followers_count, get_followings_count)
register = template.Library()
def identifier(instance, arg=None):
return registry.get_identifier(instance)
def followers_count(instance, identifier=None):
return get_followers_count(instance, identifier)
def followings_count(instance, identifier=None):
return get_followings_count(instance, identifier)
|
mit
|
Python
|
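The sequere fix adds the missing @register.filter decorators: defining a function in a templatetags module does nothing until it is registered with the template library. A minimal sketch of the pattern (the filter name and template line are illustrative):
from django import template

register = template.Library()

@register.filter
def shout(value):
    return str(value).upper()

# in a template, after {% load my_tags %}:
#   {{ user.username|shout }}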
99c3eba0d6384cd42c90ef347823e6d66659d6e3
|
Fix typo in division operator
|
pdarragh/Viper
|
viper/interpreter/prelude/operators.py
|
viper/interpreter/prelude/operators.py
|
from ..value import ForeignCloVal
def plus(a: int, b: int) -> int:
return a + b
def minus(a: int, b: int) -> int:
return a - b
def times(a: int, b: int) -> int:
return a * b
def divide(a: int, b: int) -> float:
return a / b
env = {
'+': ForeignCloVal(plus, {}),
'-': ForeignCloVal(minus, {}),
'*': ForeignCloVal(times, {}),
'/': ForeignCloVal(divide, {}),
}
|
from ..value import ForeignCloVal
def plus(a: int, b: int) -> int:
return a + b
def minus(a: int, b: int) -> int:
return a - b
def times(a: int, b: int) -> int:
return a * b
def divide(a: int, b: int) -> float:
return a / b
env = {
'+': ForeignCloVal(plus, {}),
'-': ForeignCloVal(minus, {}),
'*': ForeignCloVal(times, {}),
'//': ForeignCloVal(divide, {}),
}
|
apache-2.0
|
Python
|
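The Viper fix maps divide to '/' instead of '//'; in Python those are distinct operators, so the old table exposed true division under the floor-division name. For contrast:
print(7 / 2)    # 3.5  -- true division, what divide() implements
print(7 // 2)   # 3    -- floor division, what the old '//' key implied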
340e872114363ddc041b2c5cdcc5769c9b793efe
|
Add test_select_with_seed_too_small_raise_Exception
|
Commonists/bingo
|
tests/test_bingo.py
|
tests/test_bingo.py
|
"""Unit tests for cat2cohort."""
import unittest
from bingo import bingo
class TestBingoGenerator(unittest.TestCase):
"""Test methods from bingo."""
def test_bingo_generator_has_default_size(self):
bingo_generator = bingo.BingoGenerator()
expected = pow(bingo.DEFAULT_SIZE, 2)
self.assertEquals(bingo_generator.size, expected)
def test_bingo_generator_has_given_size(self):
bingo_generator = bingo.BingoGenerator(4)
self.assertEquals(bingo_generator.size, 16)
def test_select_words_should_have_the_right_size(self):
test_size = 2
bingo_generator = bingo.BingoGenerator(size=test_size)
seed_list = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']
bingo_generator.words = seed_list
selection = bingo_generator.select_words()
self.assertEquals(len(selection), pow(test_size, 2))
def test_select_words_should_return_words_from_the_seed_list(self):
test_size = 2
bingo_generator = bingo.BingoGenerator(size=test_size)
seed_list = set(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'])
bingo_generator.words = seed_list
selection = set(bingo_generator.select_words())
        self.assertTrue(selection.issubset(seed_list))
def test_select_with_seed_too_small_raise_Exception(self):
with self.assertRaises(ValueError):
test_size = 2
bingo_generator = bingo.BingoGenerator(size=test_size)
seed_list = ['a', 'b', 'c']
bingo_generator.words = seed_list
bingo_generator.select_words()
|
"""Unit tests for cat2cohort."""
import unittest
from bingo import bingo
class TestBingoGenerator(unittest.TestCase):
"""Test methods from bingo."""
def test_bingo_generator_has_default_size(self):
bingo_generator = bingo.BingoGenerator()
expected = pow(bingo.DEFAULT_SIZE, 2)
self.assertEquals(bingo_generator.size, expected)
def test_bingo_generator_has_given_size(self):
bingo_generator = bingo.BingoGenerator(4)
self.assertEquals(bingo_generator.size, 16)
def test_select_words_should_have_the_right_size(self):
test_size = 2
bingo_generator = bingo.BingoGenerator(size=test_size)
seed_list = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']
bingo_generator.words = seed_list
selection = bingo_generator.select_words()
self.assertEquals(len(selection), pow(test_size, 2))
def test_select_words_should_return_words_from_the_seed_list(self):
test_size = 2
bingo_generator = bingo.BingoGenerator(size=test_size)
seed_list = set(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'])
bingo_generator.words = seed_list
selection = set(bingo_generator.select_words())
        self.assertTrue(selection.issubset(seed_list))
|
mit
|
Python
|
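The new bingo test expects select_words to raise ValueError when the seed list cannot fill a size-by-size grid; a guard of roughly this shape would satisfy it (a sketch under that assumption, the project's real implementation may differ):
import random

def select_words(words, size):
    needed = size ** 2
    if len(words) < needed:
        raise ValueError("need %d words, got %d" % (needed, len(words)))
    return random.sample(list(words), needed)

print(select_words(['a', 'b', 'c', 'd', 'e'], 2))  # 4 of the 5 seeds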
36f2376a2f23b295bba8cc2af16577efd3fe03ff
|
Add a couple of snippets.
|
sam33r/dotfiles,sam33r/dotfiles,sam33r/dotfiles,sam33r/dotfiles
|
utils/snippets.py
|
utils/snippets.py
|
#!/usr/bin/env python
# A hacky script to do dynamic snippets.
import sys
import os
import datetime
snippet_map = {
'date': datetime.datetime.now().strftime('%b %d %G %I:%M%p '),
'time': datetime.datetime.now().strftime('%I:%M%p '),
'best': 'Best,\nSameer',
'cheers': 'Cheers,\nSameer',
'thanks': 'Thanks,\nSameer',
}
keys = '\n'.join(snippet_map.keys())
result = os.popen('printf "%s" | rofi -dmenu ' % keys)
selected_key = result.read().strip()
os.system('sleep 0.1; xdotool type --clearmodifiers "$(printf "%s")"' % str(
snippet_map[selected_key]))
|
#!/usr/bin/env python
# A hacky script to do dynamic snippets.
import sys
import os
import datetime
snippet_map = {
'date' : datetime.datetime.now().strftime('%b %d %G %I:%M%p '),
'time' : datetime.datetime.now().strftime('%I:%M%p '),
'sign' : 'Best,\nSameer',
}
keys = '\n'.join(snippet_map.keys())
result = os.popen('printf "%s" | rofi -dmenu ' % keys)
selected_key = result.read().strip()
os.system('sleep 0.1; xdotool type --clearmodifiers "$(printf "%s")"' % str(snippet_map[selected_key]))
|
mit
|
Python
|
8887ac66a221b443215e7ab57a2f21b1521b167b
|
move docs to readme
|
obestwalter/obestwalter.github.io,obestwalter/obestwalter.github.io
|
utils/workflow.py
|
utils/workflow.py
|
from __future__ import print_function
import os
import subprocess
import sys
from datetime import datetime
from string import Template
from lektor.utils import slugify
HERE = os.path.dirname(__file__)
PROJECT_PATH = os.path.join(HERE, '..')
DRAFTS_PATH = os.path.join(PROJECT_PATH, 'drafts')
CONTENT_PATH = os.path.join(PROJECT_PATH, 'content')
def draft():
title = sys.argv[1]
with open(os.path.join(HERE, 'article-blueprint.md')) as f:
content = f.read()
rep = dict(title=title)
content = Template(content).safe_substitute(rep)
dst = os.path.join(DRAFTS_PATH, '%s.md' % slugify(title))
assert not os.path.exists(dst), dst
with open(dst, 'w') as f:
f.write(content)
def publish():
srcPath = sys.argv[1]
with open(srcPath) as f:
content = f.read()
rep = dict(date=datetime.now().strftime('%Y-%m-%d'))
content = Template(content).safe_substitute(rep)
slug = os.path.splitext(os.path.basename(srcPath))[0]
containerPath = os.path.join(CONTENT_PATH, slug)
assert not os.path.exists(containerPath), containerPath
os.mkdir(containerPath)
dst = os.path.join(containerPath, 'contents.lr')
with open(dst, 'w') as f:
f.write(content)
os.remove(srcPath)
def deploy():
if len(sys.argv) > 2 and sys.argv[2] == 'clean':
print(subprocess.check_output(['lektor', 'clean', '--yes']))
else:
print(subprocess.check_output(['lektor', 'build']))
print(subprocess.check_output(['lektor', 'deploy']))
|
"""
Helpers for my evolving workflow.
draft [art] "My super article"
creates a prepared md file with all the necessary settings to work on.
publish drafts/my-super-article.md
will make the necessary adjustments and publish it in the contents.
deploy [clean]
will create a [clean] build and push it online.
"""
from __future__ import print_function
import os
import subprocess
import sys
from datetime import datetime
from string import Template
from lektor.utils import slugify
HERE = os.path.dirname(__file__)
PROJECT_PATH = os.path.join(HERE, '..')
DRAFTS_PATH = os.path.join(PROJECT_PATH, 'drafts')
CONTENT_PATH = os.path.join(PROJECT_PATH, 'content')
def draft():
title = sys.argv[1]
with open(os.path.join(HERE, 'article-blueprint.md')) as f:
content = f.read()
rep = dict(title=title)
content = Template(content).safe_substitute(rep)
dst = os.path.join(DRAFTS_PATH, '%s.md' % slugify(title))
assert not os.path.exists(dst), dst
with open(dst, 'w') as f:
f.write(content)
def publish():
srcPath = sys.argv[1]
with open(srcPath) as f:
content = f.read()
rep = dict(date=datetime.now().strftime('%Y-%m-%d'))
content = Template(content).safe_substitute(rep)
slug = os.path.splitext(os.path.basename(srcPath))[0]
containerPath = os.path.join(CONTENT_PATH, slug)
assert not os.path.exists(containerPath), containerPath
os.mkdir(containerPath)
dst = os.path.join(containerPath, 'contents.lr')
with open(dst, 'w') as f:
f.write(content)
os.remove(srcPath)
def deploy():
if len(sys.argv) > 2 and sys.argv[2] == 'clean':
print(subprocess.check_output(['lektor', 'clean', '--yes']))
else:
print(subprocess.check_output(['lektor', 'build']))
print(subprocess.check_output(['lektor', 'deploy']))
|
mit
|
Python
|
377f2120b3474d131b02dab90b6e51c35deb0c74
|
Add comments
|
lnls-fac/mathphys
|
mathphys/constants.py
|
mathphys/constants.py
|
"""Constants module."""
import math as _math
from . import base_units as _u
# temporary auxiliary derived units
_volt = (_u.kilogram * _u.meter**2) / (_u.ampere * _u.second**2)
_coulomb = _u.second * _u.ampere
_joule = _u.kilogram * _u.meter**2 / _u.second**2
_pascal = _u.kilogram / (_u.meter * _u.second**2)
# physical constants
# ==================
# --- exact by definition ---
light_speed = 299792458 * (_u.meter / _u.second)
gas_constant = 8.314462618 * (_joule / _u.mole / _u.kelvin)
boltzmann_constant = 1.380649e-23 * (_joule / _u.kelvin)
avogadro_constant = 6.02214076e23 * (1 / _u.mole)
elementary_charge = 1.602176634e-19 * (_coulomb)
reduced_planck_constant = 1.054571817e-34 * (_joule * _u.second)
# --- measured ---
# 2021-04-15 - https://physics.nist.gov/cgi-bin/cuu/Value?me|search_for=electron+mass
electron_mass = 9.1093837015e-31 * (_u.kilogram)
# 2021-04-15 - https://physics.nist.gov/cgi-bin/cuu/Value?mu0|search_for=vacuum+permeability
vacuum_permeability = 1.25663706212e-6 * \
(_volt * _u.second / _u.ampere / _u.meter)
# --- derived ---
# [kg*m^2/s^2] - derived
electron_rest_energy = electron_mass * _math.pow(light_speed, 2)
# [A·s/(V·m)] - derived
vacuum_permitticity = 1.0/(vacuum_permeability * _math.pow(light_speed, 2))
# [T·m^2/(A·s)] - derived
vacuum_impedance = vacuum_permeability * light_speed
# [m] - derived
electron_radius = _math.pow(elementary_charge, 2) / \
(4*_math.pi*vacuum_permitticity*electron_rest_energy)
_joule_2_eV = _joule / elementary_charge
# [m]/[GeV]^3 - derived
rad_cgamma = 4*_math.pi*electron_radius / \
_math.pow(electron_rest_energy/elementary_charge/1.0e9, 3) / 3
# [m] - derived
Cq = (55.0/(32*_math.sqrt(3.0))) * (reduced_planck_constant) * \
light_speed / electron_rest_energy
# [m^2/(s·GeV^3)] - derived
Ca = electron_radius*light_speed / \
(3*_math.pow(electron_rest_energy*_joule_2_eV/1.0e9, 3))
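A quick sanity check of the first derived constant, assuming the _u base-unit factors are 1.0 (plain SI):

import math

light_speed = 299792458.0            # m/s, exact by definition
electron_mass = 9.1093837015e-31     # kg, measured
elementary_charge = 1.602176634e-19  # C, exact by definition
rest_energy = electron_mass * light_speed ** 2    # joule
print(rest_energy / elementary_charge / 1e6)      # ~0.511 MeV, as expected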
|
"""Constants module."""
import math as _math
from . import base_units as _u
# temporary auxiliary derived units
_volt = (_u.kilogram * _u.meter**2) / (_u.ampere * _u.second**2)
_coulomb = _u.second * _u.ampere
_joule = _u.kilogram * _u.meter**2 / _u.second**2
_pascal = _u.kilogram / (_u.meter * _u.second**2)
# physical constants
# ==================
# --- exact ---
light_speed = 299792458 * (_u.meter / _u.second)
gas_constant = 8.314462618 * (_joule / _u.mole / _u.kelvin)
boltzmann_constant = 1.380649e-23 * (_joule / _u.kelvin)
avogadro_constant = 6.02214076e23 * (1 / _u.mole)
elementary_charge = 1.602176634e-19 * (_coulomb)
reduced_planck_constant = 1.054571817e-34 * (_joule * _u.second)
# --- measured ---
# 2021-04-15 - https://physics.nist.gov/cgi-bin/cuu/Value?me|search_for=electron+mass
electron_mass = 9.1093837015e-31 * (_u.kilogram)
# 2021-04-15 - https://physics.nist.gov/cgi-bin/cuu/Value?mu0|search_for=vacuum+permeability
vacuum_permeability = 1.25663706212e-6 * \
(_volt * _u.second / _u.ampere / _u.meter)
# --- derived ---
# [kg*m^2/s^2] - derived
electron_rest_energy = electron_mass * _math.pow(light_speed, 2)
# [A·s/(V·m)] - derived
vacuum_permitticity = 1.0/(vacuum_permeability * _math.pow(light_speed, 2))
# [T·m^2/(A·s)] - derived
vacuum_impedance = vacuum_permeability * light_speed
# [m] - derived
electron_radius = _math.pow(elementary_charge, 2) / \
(4*_math.pi*vacuum_permitticity*electron_rest_energy)
_joule_2_eV = _joule / elementary_charge
# [m]/[GeV]^3 - derived
rad_cgamma = 4*_math.pi*electron_radius / \
_math.pow(electron_rest_energy/elementary_charge/1.0e9, 3) / 3
# [m] - derived
Cq = (55.0/(32*_math.sqrt(3.0))) * (reduced_planck_constant) * \
light_speed / electron_rest_energy
# [m^2/(s·GeV^3)] - derived
Ca = electron_radius*light_speed / \
(3*_math.pow(electron_rest_energy*_joule_2_eV/1.0e9, 3))
|
mit
|
Python
|
a26f04bddcdb92af050c2d8237ccb6c2ef1406e5
|
Fix identation
|
rzhilkibaev/jst,rzhilkibaev/jst
|
jst/common/context.py
|
jst/common/context.py
|
'''
Created on Jan 18, 2015
@author: rz
'''
import configparser
import os
from os.path import expanduser
def load():
global_cfg_file = expanduser("~") + '/.jst/jst.properties'
if (not os.path.isfile(global_cfg_file)):
raise FileNotFoundError(global_cfg_file)
cwd = os.getcwd()
ctx_file = cwd + '/jstcontext.properties'
if (not os.path.isfile(ctx_file)):
raise FileNotFoundError(ctx_file)
global_cfg = configparser.ConfigParser()
global_cfg.read(global_cfg_file)
ctx = configparser.ConfigParser()
ctx.read(ctx_file)
ctx['src']['url_ce'] = 'svn+ssh://' + global_cfg['src']['user'] + '@' + global_cfg['src']['url_ce'] + '/' + ctx['src']['branch_ce']
ctx['src']['url_pro'] = 'svn+ssh://' + global_cfg['src']['user'] + '@' + global_cfg['src']['url_pro'] + '/' + ctx['src']['branch_pro']
ctx['src']['working_copy_ce'] = cwd + '/ce'
ctx['src']['working_copy_pro'] = cwd + '/pro'
ctx['tc']['distribution'] = global_cfg['tc']['distribution']
ctx['tc']['home'] = cwd + '/tc'
return ctx
def show(ctx):
print('src.url_ce = ' + ctx['src']['url_ce'])
print('src.url_pro = ' + ctx['src']['url_pro'])
print('src.working_copy_ce = ' + ctx['src']['working_copy_ce'])
print('src.working_copy_pro = ' + ctx['src']['working_copy_pro'])
print('tc.home = ' + ctx['tc']['home'])
print('tc.distribution = ' + ctx['tc']['distribution'])
print('tc.catalina_opts = ' + ctx['tc']['catalina_opts'])
print('tc.java_opts = ' + ctx['tc']['java_opts'])
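A hypothetical sketch of the two properties files load() expects, inlined via configparser.read_string so it runs standalone (all keys taken from the code above; values are made up):

import configparser

global_cfg = configparser.ConfigParser()
global_cfg.read_string("""
[src]
user = rz
url_ce = svn.example.org/repos/ce
url_pro = svn.example.org/repos/pro
[tc]
distribution = apache-tomcat-8.0.zip
""")
ctx = configparser.ConfigParser()
ctx.read_string("""
[src]
branch_ce = trunk
branch_pro = trunk
""")
# same concatenation as load():
print('svn+ssh://' + global_cfg['src']['user'] + '@' +
      global_cfg['src']['url_ce'] + '/' + ctx['src']['branch_ce'])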
|
'''
Created on Jan 18, 2015
@author: rz
'''
import configparser
import os
from os.path import expanduser
def load():
global_cfg_file = expanduser("~") + '/.jst/jst.properties'
if (not os.path.isfile(global_cfg_file)):
raise FileNotFoundError(global_cfg_file)
cwd = os.getcwd()
ctx_file = cwd + '/jstcontext.properties'
if (not os.path.isfile(ctx_file)):
raise FileNotFoundError(ctx_file)
global_cfg = configparser.ConfigParser()
global_cfg.read(global_cfg_file)
ctx = configparser.ConfigParser()
ctx.read(ctx_file)
ctx['src']['url_ce'] = 'svn+ssh://' + global_cfg['src']['user'] + '@' + global_cfg['src']['url_ce'] + '/' + ctx['src']['branch_ce']
ctx['src']['url_pro'] = 'svn+ssh://' + global_cfg['src']['user'] + '@' + global_cfg['src']['url_pro'] + '/' + ctx['src']['branch_pro']
ctx['src']['working_copy_ce'] = cwd + '/ce'
ctx['src']['working_copy_pro'] = cwd + '/pro'
ctx['tc']['distribution'] = global_cfg['tc']['distribution']
ctx['tc']['home'] = cwd + '/tc'
return ctx
def show(ctx):
print('src.url_ce = ' + ctx['src']['url_ce'])
print('src.url_pro = ' + ctx['src']['url_pro'])
print('src.working_copy_ce = ' + ctx['src']['working_copy_ce'])
print('src.working_copy_pro = ' + ctx['src']['working_copy_pro'])
print('tc.home = ' + ctx['tc']['home'])
print('tc.distribution = ' + ctx['tc']['distribution'])
print('tc.catalina_opts = ' + ctx['tc']['catalina_opts'])
print('tc.java_opts = ' + ctx['tc']['java_opts'])
|
mit
|
Python
|
99bc38b7d33eef76fd99d7ce362b00080edf5067
|
Change dependencies
|
xpansa/stock-logistics-transport,open-synergy/stock-logistics-transport,Antiun/stock-logistics-transport,Endika/stock-logistics-transport
|
stock_shipment_management/__openerp__.py
|
stock_shipment_management/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Joël Grand-Guillaume
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more description.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{"name": "Transportation Plan",
"version": "0.1",
"author": "Camptocamp",
"category": "Transportation",
"license": 'AGPL-3',
'complexity': "normal",
"images" : [],
"website": "http://www.camptocamp.com",
"depends" : ["delivery",
"stock_route_transit",
],
"demo": [],
"data": ["data/tranport_plan_sequence.xml",
"data/tranport_mode_data.xml",
"view/transport_plan.xml",
"view/transport_mode.xml",
"security/ir.model.access.csv",
],
"auto_install": False,
"test": [],
'installable': True,
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Joël Grand-Guillaume
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more description.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{"name": "Transportation Plan",
"version": "0.1",
"author": "Camptocamp",
"category": "Transportation",
"license": 'AGPL-3',
'complexity': "normal",
"images" : [],
"website": "http://www.camptocamp.com",
"depends" : ["sale",
"purchase",
"stock",
],
"demo": [],
"data": ["data/tranport_plan_sequence.xml",
"data/tranport_mode_data.xml",
"view/transport_plan.xml",
"view/transport_mode.xml",
"security/ir.model.access.csv",
],
"auto_install": False,
"test": [],
'installable': True,
}
|
agpl-3.0
|
Python
|
aad19b0373f2b331ffbada431385173d2bf3e43e
|
Update cronjob.py
|
fiaas/k8s
|
k8s/models/cronjob.py
|
k8s/models/cronjob.py
|
#!/usr/bin/env python
# -*- coding: utf-8
# Copyright 2017-2019 The FIAAS Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import six
from .common import ObjectMeta, ObjectReference, Time, ListMeta
from .job import JobTemplateSpec
from ..base import Model
from ..fields import Field, ListField
class CronJobSpec(Model):
concurrencyPolicy = Field(six.text_type)
failedJobsHistoryLimit = Field(int)
jobTemplate = Field(JobTemplateSpec)
schedule = Field(six.text_type)
startingDeadlineSeconds = Field(int)
successfulJobsHistoryLimit = Field(int)
suspend = Field(bool)
class CronJobStatus(Model):
active = ListField(ObjectReference)
lastScheduleTime = Field(Time)
class CronJob(Model):
class Meta:
list_url = "/apis/batch/v1beta1/cronjobs"
url_template = "/apis/batch/v1beta1/namespaces/{namespace}/cronjobs/{name}"
metadata = Field(ObjectMeta)
spec = Field(CronJobSpec)
status = Field(CronJobStatus)
|
#!/usr/bin/env python
# -*- coding: utf-8
# Copyright 2017-2019 The FIAAS Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import six
from .common import ObjectMeta, ObjectReference, Time, ListMeta
from .job import JobTemplateSpec
from ..base import Model
from ..fields import Field, ListField
class CronJobSpec(Model):
concurrencyPolicy = Field(six.text_type)
failedJobsHistoryLimit = Field(int)
jobTemplate = Field(JobTemplateSpec)
schedule = Field(six.text_type)
startingDeadlineSeconds = Field(int)
successfulJobsHistoryLimit = Field(int)
suspend = Field(bool)
class CronJobStatus(Model):
active = ListField(ObjectReference)
lastScheduleTime = Field(Time)
class CronJob(Model):
class Meta:
list_url = "/apis/batch/v1beta1/cronjobs"
url_template = "/apis/batch/v1beta1/namespaces/{namespace}/cronjobs/{name}"
metadata = Field(ObjectMeta)
spec = Field(CronJobSpec)
status = Field(CronJobStatus)
class CronJobList(Model):
apiVersion = Field(six.text_type)
items = ListField(CronJob)
kind = Field(six.text_type)
metadata = Field(ListMeta)
|
apache-2.0
|
Python
|
e7dca1dae8300dd702ecfc36110518b16c9c5231
|
change directory back to previous location (prevents following tests from pointing into the forest)
|
ohumbel/rtc2git,jacobilsoe/rtc2git,cwill747/rtc2git,rtcTo/rtc2git,WtfJoke/rtc2git,akchinSTC/rtc2git
|
tests/testhelper.py
|
tests/testhelper.py
|
from contextlib import contextmanager
import tempfile
import os
import shutil
from configuration import Builder
from gitFunctions import Initializer
import configuration
@contextmanager
def mkchdir(subfolder, folderprefix="rtc2test_case"):
tempfolder = tempfile.mkdtemp(prefix=folderprefix + subfolder)
previousdir = os.getcwd()
os.chdir(tempfolder)
try:
yield tempfolder
finally:
os.chdir(previousdir)
shutil.rmtree(tempfolder, ignore_errors=True) # on windows folder remains in temp, git process locks it
@contextmanager
def createrepo(reponame="test.git", folderprefix="rtc2test_case"):
repodir = tempfile.mkdtemp(prefix=folderprefix)
configuration.config = Builder().setworkdirectory(repodir).setgitreponame(reponame).build()
initializer = Initializer()
previousdir = os.getcwd()
os.chdir(repodir)
initializer.initalize()
try:
yield
finally:
os.chdir(previousdir)
shutil.rmtree(repodir, ignore_errors=True) # on windows folder remains in temp, git process locks it
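The point of the change is that the previous working directory is restored even when the test body raises. A self-contained re-implementation sketch of the same pattern:

import os
import shutil
import tempfile
from contextlib import contextmanager

@contextmanager
def mkchdir_demo():
    tempfolder = tempfile.mkdtemp()
    previousdir = os.getcwd()
    os.chdir(tempfolder)
    try:
        yield tempfolder
    finally:
        os.chdir(previousdir)
        shutil.rmtree(tempfolder, ignore_errors=True)

before = os.getcwd()
try:
    with mkchdir_demo():
        raise RuntimeError('test body failed')
except RuntimeError:
    pass
print(os.getcwd() == before)  # True: later tests start from the right place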
|
from contextlib import contextmanager
import tempfile
import os
import shutil
from configuration import Builder
from gitFunctions import Initializer
import configuration
@contextmanager
def mkchdir(subfolder, folderprefix="rtc2test_case"):
tempfolder = tempfile.mkdtemp(prefix=folderprefix + subfolder)
os.chdir(tempfolder)
try:
yield tempfolder
finally:
shutil.rmtree(tempfolder, ignore_errors=True) # on windows folder remains in temp, git process locks it
@contextmanager
def createrepo(reponame="test.git", folderprefix="rtc2test_case"):
repodir = tempfile.mkdtemp(prefix=folderprefix)
configuration.config = Builder().setworkdirectory(repodir).setgitreponame(reponame).build()
initializer = Initializer()
os.chdir(repodir)
initializer.initalize()
try:
yield
finally:
shutil.rmtree(repodir, ignore_errors=True) # on windows folder remains in temp, git process locks it
|
mit
|
Python
|
5a8199744bf658d491721b16fea7639303e47d3f
|
Edit view pre-populates with data from user object
|
ChimeraCoder/GOctober,ChimeraCoder/GOctober,julython/julython.org,ChimeraCoder/GOctober,julython/julython.org,julython/julython.org,julython/julython.org
|
july/people/views.py
|
july/people/views.py
|
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.template.context import RequestContext
#from google.appengine.ext import db
from july.people.models import Commit
from gae_django.auth.models import User
from django.http import Http404, HttpResponseRedirect
from django.core.urlresolvers import reverse
def user_profile(request, username):
user = User.all().filter("username", username).get()
if user == None:
raise Http404("User not found")
commits = Commit.all().ancestor(request.user.key())
return render_to_response('people/profile.html',
{"commits":commits},
RequestContext(request))
@login_required
def edit_profile(request, username, template_name='people/edit.html'):
from forms import EditUserForm
user = request.user
form = EditUserForm(request.POST or None, user=request.user)
if form.is_valid():
for key in form.cleaned_data:
setattr(user, key, form.cleaned_data.get(key))
user.put()
return HttpResponseRedirect(
reverse('member-profile', kwargs={'username':request.user.username})
)
if user == None:
raise Http404("User not found")
return render_to_response(template_name,
{'form':form},
RequestContext(request))
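The view now passes user=request.user into the form. The real EditUserForm lives in forms.py (not shown); a hypothetical sketch of an __init__ that uses the user object to pre-populate initial values:

from django import forms

class EditUserForm(forms.Form):
    about_me = forms.CharField(required=False)

    def __init__(self, data=None, user=None, **kwargs):
        # hypothetical: seed the form's initial data from the user object
        initial = {'about_me': getattr(user, 'about_me', '')} if user else None
        super(EditUserForm, self).__init__(data, initial=initial, **kwargs)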
|
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.template.context import RequestContext
#from google.appengine.ext import db
from july.people.models import Commit
from gae_django.auth.models import User
from django.http import Http404, HttpResponseRedirect
from django.core.urlresolvers import reverse
def user_profile(request, username):
user = User.all().filter("username", username).get()
if user == None:
raise Http404("User not found")
commits = Commit.all().ancestor(request.user.key())
return render_to_response('people/profile.html',
{"commits":commits},
RequestContext(request))
@login_required
def edit_profile(request, username, template_name='people/edit.html'):
from forms import EditUserForm
user = request.user
#CONSIDER FILES with no POST? Can that happen?
form = EditUserForm(request.POST or None, request.FILES or None)
if form.is_valid():
for key in form.cleaned_data:
setattr(user,key,form.cleaned_data.get(key))
user.put()
return HttpResponseRedirect(
reverse('member-profile', kwargs={'username': request.user.username})
)
if user == None:
raise Http404("User not found")
return render_to_response(template_name,
{'form':form,},
RequestContext(request))
|
mit
|
Python
|
8e9edf002368df0cd4bfa33975271b75af191ef0
|
fix cache expiring
|
googleinterns/userjourneytool,googleinterns/userjourneytool
|
ujt/dash_app.py
|
ujt/dash_app.py
|
""" Configuration for Dash app.
Exposes app and cache so that other modules (namely the callbacks) can register callbacks and update the cache.
App is actually started by ujt.py
"""
import dash
import dash_bootstrap_components as dbc
import dash_cytoscape as cyto
from flask_caching import Cache
# Initialize Dash app and Flask-Cache
cyto.load_extra_layouts()
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])
cache = Cache()
cache.init_app(
app.server,
config={
"CACHE_TYPE": "filesystem",
"CACHE_DIR": "cache_dir",
"CACHE_DEFAULT_TIMEOUT": 0,
"CACHE_THRESHOLD": 0,
},
)
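With Flask-Caching's filesystem backend, CACHE_DEFAULT_TIMEOUT = 0 means entries never time out and CACHE_THRESHOLD = 0 disables item-count eviction, which is why the fix adds both. A standalone demo (names here are illustrative, not from ujt):

from flask import Flask
from flask_caching import Cache

demo_app = Flask(__name__)
demo_cache = Cache(demo_app, config={
    "CACHE_TYPE": "filesystem",
    "CACHE_DIR": "cache_dir",
    "CACHE_DEFAULT_TIMEOUT": 0,  # 0 = entries never expire
    "CACHE_THRESHOLD": 0,        # 0 = no eviction threshold
})

@demo_cache.memoize()
def slow_lookup(key):
    print("computing", key)  # printed only on the first call per key
    return key.upper()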
|
""" Configuration for Dash app.
Exposes app and cache so that other modules (namely the callbacks) can register callbacks and update the cache.
App is actually started by ujt.py
"""
import dash
import dash_bootstrap_components as dbc
import dash_cytoscape as cyto
from flask_caching import Cache
# Initialize Dash app and Flask-Cache
cyto.load_extra_layouts()
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])
cache = Cache()
cache.init_app(
app.server,
config={"CACHE_TYPE": "filesystem", "CACHE_DIR": "cache_dir"},
)
|
apache-2.0
|
Python
|
144cd7bb846c5bcbadf1676d5455a065de52419a
|
Leverage fixtures for sample project and user_overrides.
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
setuptools/tests/test_upload_docs.py
|
setuptools/tests/test_upload_docs.py
|
import os
import shutil
import tempfile
import zipfile
import pytest
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
@pytest.fixture
def sample_project(tmpdir_cwd):
# setup.py
with open('setup.py', 'wt') as f:
f.write(SETUP_PY)
os.mkdir('build')
# A test document.
with open('build/index.html', 'w') as f:
f.write("Hello world.")
# An empty folder.
os.mkdir('build/empty')
@pytest.mark.usefixtures('sample_project')
@pytest.mark.usefixtures('user_override')
class TestUploadDocsTest:
def test_create_zipfile(self):
# Test to make sure zipfile creation handles common cases.
# This explicitly includes a folder containing an empty folder.
dist = Distribution()
cmd = upload_docs(dist)
cmd.target_dir = cmd.upload_dir = 'build'
tmp_dir = tempfile.mkdtemp()
tmp_file = os.path.join(tmp_dir, 'foo.zip')
try:
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
zip_file = zipfile.ZipFile(tmp_file) # woh...
assert zip_file.namelist() == ['index.html']
zip_file.close()
finally:
shutil.rmtree(tmp_dir)
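The tmpdir_cwd fixture used above comes from setuptools' conftest, which is not shown here. A minimal sketch of what such a fixture can look like (an assumption, not the actual implementation):

import pytest

@pytest.fixture
def tmpdir_cwd(tmpdir):
    # run the test with the temp dir as cwd, restoring it afterwards
    with tmpdir.as_cwd():
        yield tmpdir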
|
import sys
import os
import shutil
import tempfile
import site
import zipfile
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
from .textwrap import DALS
SETUP_PY = DALS(
"""
from setuptools import setup
setup(name='foo')
""")
class TestUploadDocsTest:
def setup_method(self, method):
self.dir = tempfile.mkdtemp()
setup = os.path.join(self.dir, 'setup.py')
f = open(setup, 'w')
f.write(SETUP_PY)
f.close()
self.old_cwd = os.getcwd()
os.chdir(self.dir)
self.upload_dir = os.path.join(self.dir, 'build')
os.mkdir(self.upload_dir)
# A test document.
f = open(os.path.join(self.upload_dir, 'index.html'), 'w')
f.write("Hello world.")
f.close()
# An empty folder.
os.mkdir(os.path.join(self.upload_dir, 'empty'))
if sys.version >= "2.6":
self.old_base = site.USER_BASE
site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
self.old_site = site.USER_SITE
site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()
def teardown_method(self, method):
os.chdir(self.old_cwd)
shutil.rmtree(self.dir)
if sys.version >= "2.6":
shutil.rmtree(site.USER_BASE)
shutil.rmtree(site.USER_SITE)
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
def test_create_zipfile(self):
# Test to make sure zipfile creation handles common cases.
# This explicitly includes a folder containing an empty folder.
dist = Distribution()
cmd = upload_docs(dist)
cmd.upload_dir = self.upload_dir
cmd.target_dir = self.upload_dir
tmp_dir = tempfile.mkdtemp()
tmp_file = os.path.join(tmp_dir, 'foo.zip')
try:
zip_file = cmd.create_zipfile(tmp_file)
assert zipfile.is_zipfile(tmp_file)
zip_file = zipfile.ZipFile(tmp_file) # woh...
assert zip_file.namelist() == ['index.html']
zip_file.close()
finally:
shutil.rmtree(tmp_dir)
|
mit
|
Python
|
a8e43dcdbdd00de9d4336385b3f3def1ae5c2515
|
Update UserX, with back compatibility
|
vanessa-bell/hd-kiosk-v2,carylF/lab5,gmist/fix-5studio,lipis/the-smallest-creature,NeftaliYagua/gae-init,gmist/my-gae-init-auth,jakedotio/gae-init,carylF/lab5,lipis/gae-init,lipis/gae-init,lovesoft/gae-init,gae-init/gae-init-docs,mdxs/gae-init,tonyin/optionstg,gmist/my-gae-init,gae-init/gae-init-babel,terradigital/gae-init,lipis/life-line,gae-init/gae-init-upload,gmist/nashi-5studio,CLOUGH/info3180-lab5,topless/gae-init,tiberiucorbu/av-website,Kingclove/lab5info3180,gmist/alice-box,gmist/five-studio2,chineyting/lab5-Info3180,gae-init/phonebook,lipis/meet-notes,lipis/hurry-app,tkstman/lab5,lipis/gae-init,tonyin/optionstg,gae-init/gae-init-docs,gae-init/gae-init-babel,vanessa-bell/hd-kiosk-v2,jakedotio/gae-init,gae-init/gae-init-debug,wodore/wodore-gae,gae-init/gae-init-upload,mdxs/gae-init,d4rr3ll/gae-init-docker,vanessa-bell/hd-kiosk-v2,terradigital/gae-init,jaja14/lab5,wilfriedE/gae-init,gmist/nashi-5studio,tiberiucorbu/av-website,mdxs/gae-init,jaja14/lab5,georgekis/salary,gmist/fix-5studio,JoeyCodinja/INFO3180LAB3,gae-init/gae-init-babel,mdxs/gae-init-babel,gae-init/gae-init,gmist/1businka2,topless/gae-init-upload,gmist/1businka2,dhstack/gae-init,gmist/nashi-5studio,gmist/ctm-5studio,lovesoft/gae-init,lipis/gae-init,JoeyCodinja/INFO3180LAB3,wodore/wodore-gae,gae-init/gae-init,topless/gae-init-upload,gmist/fix-5studio,lipis/hurry-app,mdxs/gae-init-babel,wilfriedE/gae-init,lipis/the-smallest-creature,topless/gae-init-upload,lipis/life-line,lipis/the-smallest-creature,gmist/my-gae-init,NeftaliYagua/gae-init,gae-init/gae-init-docs,d4rr3ll/gae-init-docker,gae-init/gae-init-babel,jakedotio/gae-init,d4rr3ll/gae-init-docker,gmist/ctm-5studio,lovesoft/gae-init,CLOUGH/info3180-lab5,lipis/guestbook,lipis/github-stats,gae-init/gae-init,gmist/fix-5studio,JoeyCodinja/INFO3180LAB3,gae-init/gae-init-debug,d4rr3ll/gae-init-docker,lipis/electron-crash-reporter,lipis/github-stats,topless/gae-init,michals/hurry-app,gae-init/gae-init-docs,wodore/wodore-gae,michals/hurry-app,tkstman/lab5,NeftaliYagua/gae-init,gmist/ctm-5studio,jakedotio/gae-init,gmist/1businka2,lipis/electron-crash-reporter,gae-init/gae-init-upload,mdxs/gae-init-babel,georgekis/salary,michals/hurry-app,gae-init/gae-init-debug,lipis/github-stats,gmist/five-studio2,gmist/ctm-5studio,gae-init/gae-init-debug,JoeyCodinja/INFO3180LAB3,lipis/hurry-app,antotodd/lab5,gmist/alice-box,gae-init/gae-init,dhstack/gae-init,gmist/five-studio2,wilfriedE/gae-init,lipis/life-line,antotodd/lab5,topless/gae-init,gmist/my-gae-init,gae-init/gae-init-upload,gmist/five-studio2,Kingclove/lab5info3180,gmist/my-gae-init,lipis/electron-crash-reporter,chineyting/lab5-Info3180,lipis/meet-notes,mdxs/gae-init-docs,lipis/github-stats,dhstack/gae-init,topless/gae-init,terradigital/gae-init,lipis/meet-notes,tiberiucorbu/av-website,mdxs/gae-init,wodore/wodore-gae,vanessa-bell/hd-kiosk-v2,georgekis/salary
|
main/modelx.py
|
main/modelx.py
|
# -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
def avatar_url_size(self, size=None):
return '//gravatar.com/avatar/%(hash)s?d=identicon&r=x%(size)s' % {
'hash': hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower(),
'size': '&s=%d' % size if size > 0 else '',
}
avatar_url = property(avatar_url_size)
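The back-compat trick is that property(avatar_url_size) keeps the old attribute-style access working while the renamed method accepts an explicit size. A tiny standalone illustration of the same pattern (hypothetical class):

class Demo(object):
    def url_size(self, size=None):
        return '//example.com/avatar' + ('?s=%d' % size if size else '')
    url = property(url_size)

d = Demo()
print(d.url)            # attribute access, the old API
print(d.url_size(128))  # explicit size, the new API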
|
# -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
def avatar_url(self, size=None):
return '//gravatar.com/avatar/%(hash)s?d=identicon&r=x%(size)s' % {
'hash': hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower(),
'size': '&s=%d' % size if size > 0 else '',
}
|
mit
|
Python
|
6ba3fc5c9fade3609695aa7f5b0498b77a8c18fa
|
revert to 0.2.7 tag
|
keras-team/keras-cv,keras-team/keras-cv,keras-team/keras-cv
|
keras_cv/__init__.py
|
keras_cv/__init__.py
|
# Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.7"
|
# Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keras_cv import layers
from keras_cv import metrics
from keras_cv import utils
from keras_cv import version_check
from keras_cv.core import ConstantFactorSampler
from keras_cv.core import FactorSampler
from keras_cv.core import NormalFactorSampler
from keras_cv.core import UniformFactorSampler
version_check.check_tf_version()
__version__ = "0.2.8dev"
|
apache-2.0
|
Python
|
55a3b3a845014d0e4c4c4d057bbe088d7791d43d
|
Prepare for v1.10.0
|
antidot/Pyckson
|
src/pyckson/__init__.py
|
src/pyckson/__init__.py
|
from pyckson.decorators import *
from pyckson.json import *
from pyckson.parser import parse
from pyckson.parsers.base import Parser
from pyckson.serializer import serialize
from pyckson.serializers.base import Serializer
from pyckson.dates.helpers import configure_date_formatter, configure_explicit_nulls
from pyckson.defaults import set_defaults
__version__ = '1.10.0'
|
from pyckson.decorators import *
from pyckson.json import *
from pyckson.parser import parse
from pyckson.parsers.base import Parser
from pyckson.serializer import serialize
from pyckson.serializers.base import Serializer
from pyckson.dates.helpers import configure_date_formatter, configure_explicit_nulls
from pyckson.defaults import set_defaults
__version__ = '1.9.0'
|
lgpl-2.1
|
Python
|
6d63ab2ef50512a794948c86cf1ce834b59acd90
|
Add str method for map area
|
sevazhidkov/greenland,sevazhidkov/greenland
|
maps/models.py
|
maps/models.py
|
import json
from django.conf import settings
# from django.contrib.postgres.fields import JSONField
from django.db import models
JSONTextField = models.TextField
# See
# https://developers.google.com/maps/documentation/javascript/reference?hl=en#LatLngBoundsLiteral
class LatLngBounds(models.Model):
east = models.FloatField()
north = models.FloatField()
south = models.FloatField()
west = models.FloatField()
class MapArea(models.Model):
title = models.CharField(max_length=100, blank=True)
display_area = models.ForeignKey(LatLngBounds, related_name='+')
    # This data should obviously be moved to storage suitable
    # for blobs. Keeping it here eases deployment; migration is
    # an exercise for the happy future developer.
    # The stored data is an image in PNG or JPEG format.
contour_map_image = models.BinaryField()
    # Which part of the Earth the rectangle stored
    # in contour_map_image represents.
contour_map_reference = models.ForeignKey(LatLngBounds, related_name='+')
def __str__(self):
return self.title or 'Map area #{}'.format(self.id)
class Question(models.Model):
map_area = models.ForeignKey(MapArea)
max_duration = models.DurationField()
creator = models.ForeignKey(settings.AUTH_USER_MODEL)
# See "JSON Objects per Question Type" for more details.
type = models.TextField()
statement_data = JSONTextField()
reference_data = JSONTextField()
class QuestionSet(models.Model):
title = models.CharField(max_length=100)
creator = models.ForeignKey(settings.AUTH_USER_MODEL)
max_duration = models.DurationField()
question_ids = JSONTextField(default=json.dumps(None))
def get_questions(self):
questions = []
for question_id in json.loads(self.question_ids):
questions.append(Question.objects.get(id=question_id))
return questions
class AnswerSet(models.Model):
student = models.ForeignKey(settings.AUTH_USER_MODEL, db_index=True)
question_set = models.ForeignKey(QuestionSet, null=True)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Answer(models.Model):
answer_set = models.ForeignKey(AnswerSet, db_index=True)
question_set = models.ForeignKey(QuestionSet, null=True)
question = models.ForeignKey(Question, null=True)
answer_data = JSONTextField(default=json.dumps(None))
scoring_data = JSONTextField(default=json.dumps(None)) # May be recalculated
duration = models.DurationField()
submission_time = models.DateTimeField()
|
import json
from django.conf import settings
# from django.contrib.postgres.fields import JSONField
from django.db import models
JSONTextField = models.TextField
# See
# https://developers.google.com/maps/documentation/javascript/reference?hl=en#LatLngBoundsLiteral
class LatLngBounds(models.Model):
east = models.FloatField()
north = models.FloatField()
south = models.FloatField()
west = models.FloatField()
class MapArea(models.Model):
title = models.CharField(max_length=100, blank=True)
display_area = models.ForeignKey(LatLngBounds, related_name='+')
    # This data should obviously be moved to storage suitable
    # for blobs. Keeping it here eases deployment; migration is
    # an exercise for the happy future developer.
    # The stored data is an image in PNG or JPEG format.
contour_map_image = models.BinaryField()
    # Which part of the Earth the rectangle stored
    # in contour_map_image represents.
contour_map_reference = models.ForeignKey(LatLngBounds, related_name='+')
class Question(models.Model):
map_area = models.ForeignKey(MapArea)
max_duration = models.DurationField()
creator = models.ForeignKey(settings.AUTH_USER_MODEL)
# See "JSON Objects per Question Type" for more details.
type = models.TextField()
statement_data = JSONTextField()
reference_data = JSONTextField()
class QuestionSet(models.Model):
title = models.CharField(max_length=100)
creator = models.ForeignKey(settings.AUTH_USER_MODEL)
max_duration = models.DurationField()
question_ids = JSONTextField(default=json.dumps(None))
def get_questions(self):
questions = []
for question_id in json.loads(self.question_ids):
questions.append(Question.objects.get(id=question_id))
return questions
class AnswerSet(models.Model):
student = models.ForeignKey(settings.AUTH_USER_MODEL, db_index=True)
question_set = models.ForeignKey(QuestionSet, null=True)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Answer(models.Model):
answer_set = models.ForeignKey(AnswerSet, db_index=True)
question_set = models.ForeignKey(QuestionSet, null=True)
question = models.ForeignKey(Question, null=True)
answer_data = JSONTextField(default=json.dumps(None))
scoring_data = JSONTextField(default=json.dumps(None)) # May be recalculated
duration = models.DurationField()
submission_time = models.DateTimeField()
|
mit
|
Python
|
f8fde8fd984242f75e36644d2e54c1d306c1b785
|
Remove --population=default
|
dmtucker/keysmith
|
keysmith/__main__.py
|
keysmith/__main__.py
|
"""Keysmith Default Interface"""
import argparse
import math
import string
import pkg_resources
import keysmith
def cli(parser=None):
"""Parse CLI arguments and options."""
if parser is None:
parser = argparse.ArgumentParser(prog=keysmith.CONSOLE_SCRIPT)
parser.add_argument(
'-d', '--delimiter',
help='a delimiter for the samples (teeth) in the key',
default=' ',
)
parser.add_argument(
'-n', '--nsamples',
help='the number of random samples to take',
type=int,
default=3,
dest='nteeth',
)
parser.add_argument(
'-p', '--population',
help='alphanumeric, printable, or a path',
default=pkg_resources.resource_filename('keysmith', 'words.txt'),
)
parser.add_argument(
'--stats',
help='statistics for the key',
default=False,
action='store_true',
)
parser.add_argument(
'--version',
action='version',
version='%(prog)s {0}'.format(keysmith.__version__),
)
return parser
def main(args=None):
"""Execute CLI commands."""
if args is None:
args = cli().parse_args()
words = {
'alphanumeric': string.ascii_letters + string.digits,
'printable': string.printable,
}.get(args.population)
if words is None:
with open(args.population, 'r') as f:
words = f.read().splitlines()
key = keysmith.key(
seq=words,
nteeth=args.nteeth,
delimiter=args.delimiter,
)
print(key)
if args.stats:
print('=' * len(key))
print('characters = {characters}'.format(characters=len(key)))
print(' samples = {nteeth}'.format(nteeth=args.nteeth))
print('population = {pop}'.format(pop=len(words)))
print(' entropy {sign} {bits}b'.format(
sign='<' if len(args.delimiter) < 1 else '~',
bits=round(math.log(len(words), 2) * args.nteeth, 2),
))
if __name__ == '__main__':
main()
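The --stats entropy line is just log2(population size) per sample times the number of samples. A worked check with a hypothetical 7776-word (Diceware-sized) list and the default of 3 samples:

import math

population = 7776   # hypothetical word-list size
nteeth = 3          # the default number of samples
print(round(math.log(population, 2) * nteeth, 2))  # 38.77 bits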
|
"""Keysmith Default Interface"""
import argparse
import math
import string
import pkg_resources
import keysmith
def cli(parser=None):
"""Parse CLI arguments and options."""
if parser is None:
parser = argparse.ArgumentParser(prog=keysmith.CONSOLE_SCRIPT)
parser.add_argument(
'-d', '--delimiter',
help='a delimiter for the samples (teeth) in the key',
default=' ',
)
parser.add_argument(
'-n', '--nsamples',
help='the number of random samples to take',
type=int,
default=3,
dest='nteeth',
)
parser.add_argument(
'-p', '--population',
help='alphanumeric, default, printable, or a path',
default='default',
)
parser.add_argument(
'--stats',
help='statistics for the key',
default=False,
action='store_true',
)
parser.add_argument(
'--version',
action='version',
version='%(prog)s {0}'.format(keysmith.__version__),
)
return parser
def main(args=None):
"""Execute CLI commands."""
if args is None:
args = cli().parse_args()
words = {
'alphanumeric': string.ascii_letters + string.digits,
'printable': string.printable,
}.get(args.population)
if words is None:
if args.population == 'default':
args.population = pkg_resources.resource_filename('keysmith', 'words.txt')
with open(args.population, 'r') as f:
words = f.read().splitlines()
key = keysmith.key(
seq=words,
nteeth=args.nteeth,
delimiter=args.delimiter,
)
print(key)
if args.stats:
print('=' * len(key))
print('characters = {characters}'.format(characters=len(key)))
print(' samples = {nteeth}'.format(nteeth=args.nteeth))
print('population = {pop}'.format(pop=len(words)))
print(' entropy {sign} {bits}b'.format(
sign='<' if len(args.delimiter) < 1 else '~',
bits=round(math.log(len(words), 2) * args.nteeth, 2),
))
if __name__ == '__main__':
main()
|
bsd-3-clause
|
Python
|
646548dff38ea476a35462cf51ba028e3275748a
|
Fix some undefined reference and attribute errors in the deallocate simprocedure
|
chubbymaggie/simuvex,iamahuman/angr,chubbymaggie/angr,angr/angr,iamahuman/angr,chubbymaggie/simuvex,axt/angr,f-prettyland/angr,tyb0807/angr,axt/angr,angr/simuvex,chubbymaggie/simuvex,schieb/angr,tyb0807/angr,angr/angr,chubbymaggie/angr,angr/angr,tyb0807/angr,f-prettyland/angr,iamahuman/angr,f-prettyland/angr,schieb/angr,axt/angr,chubbymaggie/angr,schieb/angr
|
simuvex/procedures/cgc/deallocate.py
|
simuvex/procedures/cgc/deallocate.py
|
import simuvex
import logging
l = logging.getLogger("simuvex.procedures.cgc.deallocate")
class deallocate(simuvex.SimProcedure):
#pylint:disable=arguments-differ
def run(self, addr, length): #pylint:disable=unused-argument
# return code (see deallocate() docs)
r = self.state.se.ite_cases((
(addr % 0x1000 != 0, self.state.cgc.EINVAL),
(length == 0, self.state.cgc.EINVAL),
(self.state.cgc.addr_invalid(addr), self.state.cgc.EINVAL),
(self.state.cgc.addr_invalid(addr + length), self.state.cgc.EINVAL),
), self.state.se.BVV(0, self.state.arch.bits))
aligned_length = ((length + 0xfff) / 0x1000) * 0x1000
# TODO: not sure if this is valuable until we actually model CGC
# allocations accurately
# self.state.memory.unmap_region(addr, aligned_length)
return r
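aligned_length rounds the requested length up to the 0x1000 page size. The same arithmetic standalone (// is used here for Python 3; the module above is Python 2, where / on ints already floors):

length = 0x1234
aligned_length = ((length + 0xfff) // 0x1000) * 0x1000
print(hex(aligned_length))  # 0x2000: 0x1234 needs two full pages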
|
import simuvex
class deallocate(simuvex.SimProcedure):
#pylint:disable=arguments-differ
def run(self, addr, length): #pylint:disable=unused-argument
# return code (see deallocate() docs)
r = self.state.se.ite_cases((
(addr % 0x1000 != 0, self.state.cgc.EINVAL),
(length == 0, self.state.cgc.EINVAL),
(self.state.cgc.addr_invalid(addr), self.state.cgc.EINVAL),
(self.state.cgc.addr_invalid(addr + length), self.state.cgc.EINVAL),
), self.state.se.BVV(0, self.state.arch.bits))
return r
|
bsd-2-clause
|
Python
|
70c520d3ff882b499febfe021d02108f79171773
|
Fix ST2(python26) compatibility.
|
Lyleo/OmniMarkupPreviewer,Lyleo/OmniMarkupPreviewer,timonwong/OmniMarkupPreviewer,timonwong/OmniMarkupPreviewer,Lyleo/OmniMarkupPreviewer,timonwong/OmniMarkupPreviewer,timonwong/OmniMarkupPreviewer,Lyleo/OmniMarkupPreviewer
|
OmniMarkupLib/Renderers/MarkdownRenderer.py
|
OmniMarkupLib/Renderers/MarkdownRenderer.py
|
from .base_renderer import *
import re
import markdown
@renderer
class MarkdownRenderer(MarkupRenderer):
FILENAME_PATTERN_RE = re.compile(r'\.(md|mkdn?|mdwn|mdown|markdown|litcoffee)$')
YAML_FRONTMATTER_RE = re.compile(r'\A---\s*\n.*?\n?^---\s*$\n?', re.DOTALL | re.MULTILINE)
def load_settings(self, renderer_options, global_setting):
super(MarkdownRenderer, self).load_settings(renderer_options, global_setting)
if 'extensions' in renderer_options:
extensions = renderer_options['extensions']
else:
# Fallback to the default GFM style
extensions = ['tables', 'strikeout', 'fenced_code', 'codehilite']
extensions = set(extensions)
if global_setting.mathjax_enabled:
if 'mathjax' not in extensions:
extensions.add('mathjax')
if 'smartypants' in extensions:
extensions.remove('smartypants')
extensions.add('smarty')
self.extensions = list(extensions)
@classmethod
def is_enabled(cls, filename, syntax):
if syntax == "text.html.markdown":
return True
return cls.FILENAME_PATTERN_RE.search(filename) is not None
def render(self, text, **kwargs):
text = self.YAML_FRONTMATTER_RE.sub('', text)
return markdown.markdown(text, output_format='html5',
extensions=self.extensions)
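The ST2 fix replaces set literals, which Python 2.6 cannot parse (they arrived in 2.7), with set() calls. The 2.6-safe spelling side by side:

# Python 2.7+ only:  exts = {'tables', 'fenced_code'}
# Works on 2.6 too:
exts = set(['tables', 'fenced_code'])
exts.add('mathjax')
print(sorted(exts))  # ['fenced_code', 'mathjax', 'tables']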
|
from .base_renderer import *
import re
import markdown
@renderer
class MarkdownRenderer(MarkupRenderer):
FILENAME_PATTERN_RE = re.compile(r'\.(md|mkdn?|mdwn|mdown|markdown|litcoffee)$')
YAML_FRONTMATTER_RE = re.compile(r'\A---\s*\n.*?\n?^---\s*$\n?', re.DOTALL | re.MULTILINE)
def load_settings(self, renderer_options, global_setting):
super(MarkdownRenderer, self).load_settings(renderer_options, global_setting)
if 'extensions' in renderer_options:
extensions = set(renderer_options['extensions'])
else:
# Fallback to the default GFM style
extensions = {'tables', 'strikeout', 'fenced_code', 'codehilite'}
if global_setting.mathjax_enabled:
if 'mathjax' not in extensions:
extensions.add('mathjax')
if 'smartypants' in extensions:
extensions.remove('smartypants')
extensions.add('smarty')
self.extensions = list(extensions)
@classmethod
def is_enabled(cls, filename, syntax):
if syntax == "text.html.markdown":
return True
return cls.FILENAME_PATTERN_RE.search(filename) is not None
def render(self, text, **kwargs):
text = self.YAML_FRONTMATTER_RE.sub('', text)
return markdown.markdown(text, output_format='html5',
extensions=self.extensions)
|
mit
|
Python
|
b7523a8bbac9fdce7d97afda32b9a7982f00a6d0
|
Update Exp 7_2
|
MrYsLab/pymata-aio
|
examples/sparkfun_redbot/sparkfun_experiments/Exp7_2_DriveDistance.py
|
examples/sparkfun_redbot/sparkfun_experiments/Exp7_2_DriveDistance.py
|
"""
Exp7_2_DriveDistance -- RedBot Experiment 7.2
In an earlier experiment, we used a combination of speed and time to
drive a certain distance. Using the encoders, we can be much more accurate.
In this example, we will show you how to set up your robot to drive a certain
distance regardless of the motorPower.
This sketch was written by SparkFun Electronics, with lots of help from
the Arduino community. This code is completely free for any use.
8 Oct 2013 M. Hord
Revised, 31 Oct 2014 B. Huang
"""
from pymata_aio.pymata3 import PyMata3
from pymata_aio.constants import Constants
from library.redbot import RedBotMotors, RedBotEncoder
import math
COM_PORT = None # Use automatic com port detection (the default)
#COM_PORT = "COM10" # Manually specify the com port (optional)
board = PyMata3(com_port=COM_PORT)
motors = RedBotMotors(board)
encoders = RedBotEncoder(board)
BUTTON_PIN = 12
COUNT_PER_REV = 192 # 4 pairs of N-S x 48:1 gearbox = 192 ticks per wheel rev
WHEEL_DIAM = 2.56 # diam = 65mm / 25.4 mm/in
WHEEL_CIRC = math.pi * WHEEL_DIAM
print(WHEEL_CIRC)
ENCODER_PIN_LEFT = 16
ENCODER_PIN_RIGHT = 10
def setup():
board.set_pin_mode(BUTTON_PIN, Constants.INPUT)
board.digital_write(BUTTON_PIN, 1) # writing pin high sets the pull-up resistor
def loop():
# wait for a button press to start driving.
if board.digital_read(BUTTON_PIN) == 0:
driveDistance(12, 150) # drive 12 inches at motor_power = 150
def driveDistance(distance, motor_power):
left_count= 0
right_count = 0
num_rev = distance / WHEEL_CIRC
# debug
print("drive_distance() {} inches at {} power for {:.2f} revolutions".format(distance, motor_power, num_rev))
encoders.clear_enc() # clear the encoder count
motors.drive(motor_power)
while right_count < num_rev * COUNT_PER_REV:
left_count = encoders.get_ticks(ENCODER_PIN_LEFT)
right_count = encoders.get_ticks(ENCODER_PIN_RIGHT)
print("{} {} stop once over {:.0f} ticks".format(left_count, right_count, num_rev * COUNT_PER_REV))
board.sleep(0.1)
motors.brake()
if __name__ == "__main__":
setup()
while True:
loop()
board.sleep(.01)
# print("Encoder Read: {}".format(board.encoder_read(encoder_pin_right)))
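num_rev and the loop's tick target follow directly from the wheel geometry. A worked check of the 12-inch drive:

import math

COUNT_PER_REV = 192
WHEEL_CIRC = math.pi * 2.56          # inches per wheel revolution
num_rev = 12 / WHEEL_CIRC            # ~1.49 revolutions for 12 inches
print(int(num_rev * COUNT_PER_REV))  # ~286 encoder ticks before braking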
|
"""
Exp7_2_DriveDistance -- RedBot Experiment 7.2
In an earlier experiment, we used a combination of speed and time to
drive a certain distance. Using the encoders, we can be much more accurate.
In this example, we will show you how to set up your robot to drive a certain
distance regardless of the motorPower.
This sketch was written by SparkFun Electronics, with lots of help from
the Arduino community. This code is completely free for any use.
8 Oct 2013 M. Hord
Revised, 31 Oct 2014 B. Huang
"""
from pymata_aio.pymata3 import PyMata3
from pymata_aio.constants import Constants
from library.redbot import RedBotMotors, RedBotEncoder
import math
# This line "includes" the RedBot library into your sketch.
# Provides special objects, methods, and functions for the RedBot.
board = PyMata3()
encoders = RedBotEncoder(board)
motors = RedBotMotors(board)
encoder_pin_left = 16
encoder_pin_right = 10
BUTTON_PIN = 12
counts_per_rev = 192 # 4 pairs of N-S x 48:1 gearbox = 192 ticks per wheel rev
wheel_diam = 2.56 # diam = 65mm / 25.4 mm/in
wheel_circ = math.pi * wheel_diam
# variables used to store the left and right encoder counts.
left_count = 0
right_count = 0
def setup():
board.set_pin_mode(BUTTON_PIN, Constants.INPUT)
board.digital_write(BUTTON_PIN, 1) # writing pin high sets the pull-up resistor
def loop():
# wait for a button press to start driving.
if board.digital_read(BUTTON_PIN) == 0:
board.sleep(0.05)
if board.digital_read(BUTTON_PIN) == 0:
driveDistance(12, 150) # drive 12 inches at motor_power = 150
def driveDistance(distance, motor_power):
global left_count
global right_count
left_count= 0
right_count = 0
numRev = float(distance/wheel_circ)
# debug
print("drive_distance() {} inches at {} power".format(distance,motor_power))
print(numRev)
encoders.clear_enc() # clear the encoder count
motors.drive(motor_power)
# TODO: Find the 'proper' way to access these variables
iteration = 0
while right_count< numRev*counts_per_rev:
left_count = encoders.get_ticks(encoder_pin_left)
right_count = encoders.get_ticks(encoder_pin_right)
print("{} {}".format(left_count,right_count)) # stores the encoder count to a variable
# print(numRev*counts_per_rev)
board.sleep(0.01)
# if either left or right motor are more than 5 revolutions, stop
motors.brake()
if __name__ == "__main__":
setup()
while True:
loop()
board.sleep(.01)
# print("Encoder Read: {}".format(board.encoder_read(encoder_pin_right)))
|
agpl-3.0
|
Python
|
168c80e3bf024f74fbb49184ceffbc2a09abe6c1
|
Allow empty labels
|
vikoivun/kerrokantasi,stephawe/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,vikoivun/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,vikoivun/kerrokantasi
|
kk/models/hearing.py
|
kk/models/hearing.py
|
from django.conf import settings
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from .base import ModifiableModel
class Label(ModifiableModel):
label = models.CharField(verbose_name=_('Label'), default='', max_length=200)
def __str__(self):
return self.label
class Hearing(ModifiableModel):
COMMENT_OPTION_DISALLOW = '1'
COMMENT_OPTION_REGISTERED = '2'
COMMENT_OPTION_ANONYMOUS = '3'
COMMENT_OPTION = (
(COMMENT_OPTION_DISALLOW, 'Disallow'),
(COMMENT_OPTION_REGISTERED, 'Registered'),
(COMMENT_OPTION_ANONYMOUS, 'Anonymous')
)
close_at = models.DateTimeField(verbose_name=_('Closing time'), default=timezone.now)
n_comments = models.IntegerField(verbose_name=_('Number of comments'), blank=True, default=0)
closed = models.BooleanField(verbose_name=_('Whether hearing is closed'), default=False)
heading = models.TextField(verbose_name=_('Heading'), blank=True, default='')
abstract = models.TextField(verbose_name=_('Abstract'), blank=True, default='')
content = models.TextField(verbose_name=_('Content'), blank=True, default='')
borough = models.CharField(verbose_name=_('Borough to which hearing concerns'), blank=True, default='', max_length=200)
comment_option = models.CharField(verbose_name=_('Commenting option'), max_length=1, choices=COMMENT_OPTION, default='1')
servicemap_url = models.CharField(verbose_name=_('Servicemap url'), default='', max_length=255, blank=True)
latitude = models.CharField(verbose_name=_('Latitude'), max_length=20, default='', blank=True)
longitude = models.CharField(verbose_name=_('Longitude'), max_length=20, default='', blank=True)
labels = models.ManyToManyField(Label, blank=True)
|
from django.conf import settings
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from .base import ModifiableModel
class Label(ModifiableModel):
label = models.CharField(verbose_name=_('Label'), default='', max_length=200)
def __str__(self):
return self.label
class Hearing(ModifiableModel):
COMMENT_OPTION_DISALLOW = '1'
COMMENT_OPTION_REGISTERED = '2'
COMMENT_OPTION_ANONYMOUS = '3'
COMMENT_OPTION = (
(COMMENT_OPTION_DISALLOW, 'Disallow'),
(COMMENT_OPTION_REGISTERED, 'Registered'),
(COMMENT_OPTION_ANONYMOUS, 'Anonymous')
)
close_at = models.DateTimeField(verbose_name=_('Closing time'), default=timezone.now)
n_comments = models.IntegerField(verbose_name=_('Number of comments'), blank=True, default=0)
closed = models.BooleanField(verbose_name=_('Whether hearing is closed'), default=False)
heading = models.TextField(verbose_name=_('Heading'), blank=True, default='')
abstract = models.TextField(verbose_name=_('Abstract'), blank=True, default='')
content = models.TextField(verbose_name=_('Content'), blank=True, default='')
borough = models.CharField(verbose_name=_('Borough to which hearing concerns'), blank=True, default='', max_length=200)
comment_option = models.CharField(verbose_name=_('Commenting option'), max_length=1, choices=COMMENT_OPTION, default='1')
servicemap_url = models.CharField(verbose_name=_('Servicemap url'), default='', max_length=255, blank=True)
latitude = models.CharField(verbose_name=_('Latitude'), max_length=20, default='', blank=True)
longitude = models.CharField(verbose_name=_('Longitude'), max_length=20, default='', blank=True)
labels = models.ManyToManyField(Label)
|
mit
|
Python
|
d58b82997d9e5d616da2f517c19c5191c43cd823
|
make membship optional, on which we revert to matching_dissim; speed improvement
|
nicodv/kmodes
|
kmodes/util/dissim.py
|
kmodes/util/dissim.py
|
"""
Dissimilarity measures for clustering
"""
import numpy as np
def matching_dissim(a, b, **_):
"""Simple matching dissimilarity function"""
return np.sum(a != b, axis=1)
def euclidean_dissim(a, b, **_):
"""Euclidean distance dissimilarity function"""
if np.isnan(a).any() or np.isnan(b).any():
raise ValueError("Missing values detected in numerical columns.")
return np.sum((a - b) ** 2, axis=1)
def ng_dissim(a, b, X, membship=None):
"""Ng et al.'s dissimilarity measure, as presented in
Michael K. Ng, Mark Junjie Li, Joshua Zhexue Huang, and Zengyou He, "On the
Impact of Dissimilarity Measure in k-Modes Clustering Algorithm", IEEE
Transactions on Pattern Analysis and Machine Intelligence, Vol. 29, No. 3,
January, 2007
Note that membship must be a rectangular array such that the
len(membship) = len(a) and len(membship[i]) = X.shape[1]
If membship is not given, this function falls back to
matching dissimilarity.
"""
# Without membership, revert to matching dissimilarity
if membship is None:
return matching_dissim(a, b)
def calc_cjr(b, X, memj, idr):
"""Num objects w/ category value x_{i,r} for rth attr in jth cluster"""
xcids = np.where(memj == 1)
return float((np.take(X, xcids, axis=0)[0][:, idr] == b[idr]).sum(0))
def calc_dissim(b, X, memj, idr):
# Size of jth cluster
cj = float(np.sum(memj))
return (1.0 - (calc_cjr(b, X, memj, idr) / cj)) if cj != 0.0 else 0.0
if len(membship) != a.shape[0] and len(membship[0]) != X.shape[1]:
raise ValueError("'membship' must be a rectangular array where "
"the number of rows in 'membship' equals the "
"number of rows in 'a' and the number of "
"columns in 'membship' equals the number of rows in 'X'.")
return np.array([np.array([calc_dissim(b, X, membship[idj], idr)
if b[idr] == t else 1.0
for idr, t in enumerate(val_a)]).sum(0)
for idj, val_a in enumerate(a)])
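The fallback path is plain matching dissimilarity: a per-row count of mismatched attributes. A self-contained check of that behavior:

import numpy as np

def matching_dissim(a, b):
    return np.sum(a != b, axis=1)

a = np.array([['a', 'b', 'c'],
              ['a', 'x', 'c']])
b = np.array(['a', 'b', 'c'])
print(matching_dissim(a, b))  # [0 1]: row two differs in one attribute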
|
"""
Dissimilarity measures for clustering
"""
import numpy as np
def matching_dissim(a, b, **_):
"""Simple matching dissimilarity function"""
return np.sum(a != b, axis=1)
def euclidean_dissim(a, b, **_):
"""Euclidean distance dissimilarity function"""
if np.isnan(a).any() or np.isnan(b).any():
raise ValueError("Missing values detected in numerical columns.")
return np.sum((a - b) ** 2, axis=1)
def ng_dissim(a, b, X, membship):
"""Ng et al.'s dissimilarity measure, as presented in
Michael K. Ng, Mark Junjie Li, Joshua Zhexue Huang, and Zengyou He, "On the
Impact of Dissimilarity Measure in k-Modes Clustering Algorithm", IEEE
Transactions on Pattern Analysis and Machine Intelligence, Vol. 29, No. 3,
January, 2007
Note that membship must be a rectangular array such that the
len(membship) = len(a) and len(membship[i]) = X.shape[1]
"""
def calcCJR(b, X, memj, idr):
"""Num objects w/ category value x_{i,r} for rth attr in jth cluster"""
xcids = np.where(np.in1d(memj.ravel(), [1]).reshape(memj.shape))
return float((np.take(X, xcids, axis=0)[0][:, idr] == b[idr]).sum(0))
def calc_dissim(b, X, memj, idr):
# Size of jth cluster
CJ = float(np.sum(memj))
return (1.0 - (calcCJR(b, X, memj, idr) / CJ)) if CJ != 0.0 else 0.0
if len(membship) != a.shape[0] and len(membship[0]) != X.shape[1]:
raise ValueError("'membship' must be a rectangular array where "
"the number of rows in 'membship' equals the "
"number of rows in 'a' and the number of "
"columns in 'membship' equals the number of rows in 'X'.")
return np.array([np.array([calc_dissim(b, X, membship[idj], idr)
if b[idr] == t else 1.0
for idr, t in enumerate(val_a)]).sum(0)
for idj, val_a in enumerate(a)])
|
mit
|
Python
|
1b972c4ab088fd6566dd144992167f4a4ae62356
|
rebuild LevelRenderData after saving changed_geometries
|
c3nav/c3nav,c3nav/c3nav,c3nav/c3nav,c3nav/c3nav
|
src/c3nav/mapdata/models/update.py
|
src/c3nav/mapdata/models/update.py
|
from contextlib import contextmanager
from django.conf import settings
from django.core.cache import cache
from django.db import models, transaction
from django.utils.http import int_to_base36
from django.utils.timezone import make_naive
from django.utils.translation import ugettext_lazy as _
from c3nav.mapdata.tasks import delete_old_cached_tiles
class MapUpdate(models.Model):
"""
A map update. created whenever mapdata is changed.
"""
datetime = models.DateTimeField(auto_now_add=True, db_index=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete=models.PROTECT)
type = models.CharField(max_length=32)
class Meta:
verbose_name = _('Map update')
verbose_name_plural = _('Map updates')
default_related_name = 'mapupdates'
get_latest_by = 'datetime'
@classmethod
def last_update(cls):
last_update = cache.get('mapdata:last_update', None)
if last_update is not None:
return last_update
with cls.lock():
last_update = cls.objects.latest()
cache.set('mapdata:last_update', (last_update.pk, last_update.datetime), 900)
return last_update.pk, last_update.datetime
@property
def cache_key(self):
return int_to_base36(self.pk)+'_'+int_to_base36(int(make_naive(self.datetime).timestamp()))
@classmethod
def current_cache_key(cls):
pk, dt = cls.last_update()
return int_to_base36(pk)+'_'+int_to_base36(int(make_naive(dt).timestamp()))
@classmethod
@contextmanager
def lock(cls):
with transaction.atomic():
yield cls.objects.select_for_update().earliest()
def save(self, **kwargs):
if self.pk is not None:
raise TypeError
old_cache_key = MapUpdate.current_cache_key()
from c3nav.mapdata.models import AltitudeArea
AltitudeArea.recalculate()
super().save(**kwargs)
from c3nav.mapdata.cache import changed_geometries
changed_geometries.save(old_cache_key, self.cache_key)
from c3nav.mapdata.render.base import LevelRenderData
LevelRenderData.rebuild()
cache.set('mapdata:last_update', (self.pk, self.datetime), 900)
delete_old_cached_tiles.apply_async(countdown=5)
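# Usage sketch (editor's note; the calling code below is hypothetical):
# MapUpdate.objects.create(user=request.user, type='changeset')
# save() recalculates altitude areas, persists the changed geometries under the
# new cache key, rebuilds the LevelRenderData and finally refreshes the cache.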
|
from contextlib import contextmanager
from django.conf import settings
from django.core.cache import cache
from django.db import models, transaction
from django.utils.http import int_to_base36
from django.utils.timezone import make_naive
from django.utils.translation import ugettext_lazy as _
from c3nav.mapdata.tasks import delete_old_cached_tiles
class MapUpdate(models.Model):
"""
A map update, created whenever mapdata is changed.
"""
datetime = models.DateTimeField(auto_now_add=True, db_index=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete=models.PROTECT)
type = models.CharField(max_length=32)
class Meta:
verbose_name = _('Map update')
verbose_name_plural = _('Map updates')
default_related_name = 'mapupdates'
get_latest_by = 'datetime'
@classmethod
def last_update(cls):
last_update = cache.get('mapdata:last_update', None)
if last_update is not None:
return last_update
with cls.lock():
last_update = cls.objects.latest()
cache.set('mapdata:last_update', (last_update.pk, last_update.datetime), 900)
return last_update.pk, last_update.datetime
@property
def cache_key(self):
return int_to_base36(self.pk)+'_'+int_to_base36(int(make_naive(self.datetime).timestamp()))
@classmethod
def current_cache_key(cls):
pk, dt = cls.last_update()
return int_to_base36(pk)+'_'+int_to_base36(int(make_naive(dt).timestamp()))
@classmethod
@contextmanager
def lock(cls):
with transaction.atomic():
yield cls.objects.select_for_update().earliest()
def save(self, **kwargs):
if self.pk is not None:
raise TypeError
old_cache_key = MapUpdate.current_cache_key()
from c3nav.mapdata.models import AltitudeArea
AltitudeArea.recalculate()
from c3nav.mapdata.render.base import LevelRenderData
LevelRenderData.rebuild()
super().save(**kwargs)
from c3nav.mapdata.cache import changed_geometries
changed_geometries.save(old_cache_key, self.cache_key)
cache.set('mapdata:last_update', (self.pk, self.datetime), 900)
delete_old_cached_tiles.apply_async(countdown=5)
|
apache-2.0
|
Python
|
73b67a30495e7a6d638421ba8b9544a5e2dc4185
|
Fix task full resource
|
cgwire/zou
|
zou/app/resources/project/task_full.py
|
zou/app/resources/project/task_full.py
|
from flask import abort
from flask_login import login_required
from zou.app.models.task import Task
from zou.app.models.project import Project
from zou.app.models.person import Person
from zou.app.models.entity import Entity
from zou.app.models.entity_type import EntityType
from zou.app.models.task_status import TaskStatus
from zou.app.models.task_type import TaskType
from zou.app.resources.data.base import BaseModelResource
from zou.app.project import task_info
from zou.app.project.exception import TaskNotFoundException
class TaskFullResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, Task)
@login_required
def get(self, instance_id):
try:
task = task_info.get_task(instance_id)
except TaskNotFoundException:
abort(404)
result = task.serialize()
task_type = TaskType.get(task.task_type_id)
result["task_type"] = task_type.serialize()
assigner = Person.get(task.assigner_id)
result["assigner"] = assigner.serialize()
project = Project.get(task.project_id)
result["project"] = project.serialize()
task_status = TaskStatus.get(task.task_status_id)
result["task_status"] = task_status.serialize()
entity = Entity.get(task.entity_id)
result["entity"] = entity.serialize()
if entity.parent_id is not None:
parent = Entity.get(entity.parent_id)
result["entity_parent"] = parent.serialize()
entity_type = EntityType.get(entity.entity_type_id)
result["entity_type"] = entity_type.serialize()
assignees = []
for assignee in task.assignees:
assignees.append(assignee.serialize())
result["persons"] = assignees
return result, 200
|
from flask import abort
from flask_login import login_required
from zou.app.models.task import Task
from zou.app.models.project import Project
from zou.app.models.person import Person
from zou.app.models.entity import Entity
from zou.app.models.entity_type import EntityType
from zou.app.models.task_status import TaskStatus
from zou.app.models.task_type import TaskType
from zou.app.resources.data.base import BaseModelResource
from zou.app.project import task_info
from zou.app.project.exception import TaskNotFoundException
class TaskFullResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, Task)
@login_required
def get(self, instance_id):
try:
task = task_info.get_task(instance_id)
except TaskNotFoundException:
abort(404)
result = task.serialize()
task_type = TaskType.get(task.task_type_id)
result["task_type"] = task_type.serialize()
assigner = Person.get(task.assigner_id)
result["assigner"] = assigner.serialize()
project = Project.get(task.project_id)
result["project"] = project.serialize()
task_status = TaskStatus.get(task.task_status_id)
result["task_status"] = task_status.serialize()
entity = Entity.get(task.entity_id)
result["entity"] = entity.serialize()
parent = Entity.get(entity.parent_id)
if parent is not None:
result["entity_parent"] = parent.serialize()
entity_type = EntityType.get(entity.entity_type_id)
result["entity_type"] = entity_type.serialize()
assignees = []
for assignee in task.assignees:
assignees.append(assignee.serialize())
result["persons"] = assignees
return result, 200
|
agpl-3.0
|
Python
|
d4563fe6991ee644350528a469884f697f02308d
|
Add production of very high S/N model images
|
MegaMorph/galfitm-illustrations,MegaMorph/galfitm-illustrations
|
models/make_images.py
|
models/make_images.py
|
#!/usr/bin/env python
from glob import glob
import pyfits
import sys, os
import numpy
shape = (100,100)
bands = ['u', 'g', 'r', 'i', 'z', 'Y', 'J', 'H', 'K']
zp = numpy.array([16.75,15.957,15.0,14.563,14.259,14.162,13.955,13.636,13.525])
def make_images(model='A', noiselevel=5,
bandsel=['u', 'g', 'r', 'i', 'z', 'Y', 'J', 'H', 'K']):
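# editor's note: the zeropoint scaling converts the requested noise level into
# per-band flux units, e.g. zp = 15.0 gives noiselevel/2, while zp = 16.75
# (u band) gives 10**(-0.7) ~ 0.2 of that.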
noisebands = 10**(-0.4*(zp-15.0)) * noiselevel/2.0
noise = []
for n in noisebands:
noise.append(numpy.random.normal(0.0, n, shape))
gals = glob('model%s.galfit'%model)
for g in gals:
os.system('nice galfit %s > %s.out'%(g,g))
imgname = g.replace('.galfit', '')
img = pyfits.open(imgname+'.fits')
for j, b in enumerate(bands):
if b in bandsel:
ext = img['MODEL_'+b]
print g, b, j, ext.name, noisebands[j]
ext.data += noise[j]
pyfits.writeto(imgname+'_%i%s_n%i.fits'%(j+1, b, noiselevel), ext.data, clobber=True)
if __name__ =='__main__':
make_images('A', 0.01)
make_images('A', 5)
make_images('A', 50, ['H'])
make_images('B', 0.01)
make_images('B', 5)
make_images('B', 50, ['H'])
for x in 'abcdefghi':
make_images('C'+x, 0, ['r'])
make_images('C'+x, 5, ['r'])
make_images('D', 0.01)
make_images('D', 5)
make_images('E', 0.01)
make_images('E', 5)
|
#!/usr/bin/env python
from glob import glob
import pyfits
import sys, os
import numpy
shape = (100,100)
bands = ['u', 'g', 'r', 'i', 'z', 'Y', 'J', 'H', 'K']
zp = numpy.array([16.75,15.957,15.0,14.563,14.259,14.162,13.955,13.636,13.525])
def make_images(model='A', noiselevel=5,
bandsel=['u', 'g', 'r', 'i', 'z', 'Y', 'J', 'H', 'K']):
noisebands = 10**(-0.4*(zp-15.0)) * noiselevel/2.0
noise = []
for n in noisebands:
noise.append(numpy.random.normal(0.0, n, shape))
gals = glob('model%s.galfit'%model)
for g in gals:
os.system('nice galfit %s > %s.out'%(g,g))
imgname = g.replace('.galfit', '')
img = pyfits.open(imgname+'.fits')
for j, b in enumerate(bands):
if b in bandsel:
ext = img['MODEL_'+b]
print g, b, j, ext.name, noisebands[j]
ext.data += noise[j]
pyfits.writeto(imgname+'_%i%s_n%i.fits'%(j+1, b, noiselevel), ext.data, clobber=True)
if __name__ =='__main__':
make_images('A', 5)
make_images('A', 50, ['H'])
make_images('B', 5)
make_images('B', 50, ['H'])
for x in 'abcdefghi':
make_images('C'+x, 5, ['r'])
make_images('D', 5)
make_images('E', 5)
|
mit
|
Python
|
2417f7e3c445c7f369c9eb8cb48c83ebb4c2e43d
|
Change blueprints to be more container-like.
|
SunDwarf/Kyoukai
|
kyokai/blueprints.py
|
kyokai/blueprints.py
|
"""
Kyōkai blueprints are simply groups of routes.
They're a simpler way of grouping your routes together instead of having to import your app object manually all of
the time.
"""
from kyokai.route import Route
class Blueprint(object):
"""
A Blueprint is a container for routes.
"""
def __init__(self, name: str, parent: 'Blueprint',
url_prefix: str=""):
self._prefix = url_prefix
self._name = name
self.routes = []
self.errhandlers = {}
self._parent = parent
@property
def parent(self) -> 'Blueprint':
"""
Returns the parent Blueprint of the current Blueprint.
"""
return self._parent
def route(self, regex, methods: list = None, hard_match: bool = False):
"""
Create an incoming route for a function.
Parameters:
regex:
The regular expression to match the path to.
In standard Python `re` form.
Group matches are automatically extracted from the regex, and passed as arguments.
methods:
The list of allowed methods, e.g ["GET", "POST"].
You can check the method with `request.method`.
hard_match:
Should we match based on equality, rather than regex?
This prevents index or lower level paths from matching 404s at higher levels.
"""
if not methods:
methods = ["GET"]
# Override hard match if it's a `/` route.
if regex == "/":
hard_match = True
regex = self._prefix + regex
r = Route(self, regex, methods, hard_match)
self.routes.append(r)
return r
def errorhandler(self, code: int):
"""
Create an error handler for the specified code.
This will wrap the function in a Route.
"""
r = Route(self, "", [])
self.errhandlers[code] = r
return r
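# Usage sketch (editor's note; root_bp and the handler are hypothetical):
# bp = Blueprint("api", parent=root_bp, url_prefix="/api")
#
# @bp.route("/ping")
# async def ping(request):
#     return "pong"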
|
"""
Kyōkai blueprints are simply groups of routes.
They're a simpler way of grouping your routes together instead of having to import your app object manually all of
the time.
"""
from kyokai.route import Route
class Blueprint(object):
"""
A Blueprint contains one public method: `bp.route`. It acts exactly the same as a normal route method.
If you set a `url_prefix` in the constructor, this prefix will be added onto your routes.
"""
def __init__(self, name: str, url_prefix: str=""):
self._prefix = url_prefix
self._name = name
self.routes = []
self.errhandlers = {}
def _bp_get_errhandler(self, code: int):
return self.errhandlers.get(code)
def route(self, regex, methods: list = None, hard_match: bool = False):
"""
Create an incoming route for a function.
Parameters:
regex:
The regular expression to match the path to.
In standard Python `re` form.
Group matches are automatically extracted from the regex, and passed as arguments.
methods:
The list of allowed methods, e.g ["GET", "POST"].
You can check the method with `request.method`.
hard_match:
Should we match based on equality, rather than regex?
This prevents index or lower level paths from matching 404s at higher levels.
"""
if not methods:
methods = ["GET"]
# Override hard match if it's a `/` route.
if regex == "/":
hard_match = True
regex = self._prefix + regex
r = Route(regex, methods, hard_match)
r.set_errorhandler_factory(self._bp_get_errhandler)
self.routes.append(r)
return r
def errorhandler(self, code: int):
"""
Create an error handler for the specified code.
This will wrap the function in a Route.
"""
r = Route("", [])
self.errhandlers[code] = r
return r
def _init_bp(self):
return self.routes
|
mit
|
Python
|
a43ada7785db136f3a5d7d96c6b64b0a686d052e
|
fix total_force missing
|
adengz/nano266,adengz/nano266
|
labs/lab2/analyze.py
|
labs/lab2/analyze.py
|
#!/usr/bin/env python
import re
import sys
import csv
import argparse
# This defines the patterns for extracting relevant data from the output
# files.
patterns = {
"energy": re.compile("total energy\s+=\s+([\d\.\-]+)\sRy"),
"ecut": re.compile("kinetic\-energy cutoff\s+=\s+([\d\.\-]+)\s+Ry"),
"alat": re.compile("celldm\(1\)=\s+([\d\.]+)\s"),
"nkpts": re.compile("number of k points=\s+([\d]+)"),
"total_force": re.compile("Total force =\s+([\d\.]+)")
}
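# e.g. an output line such as (illustrative, not from a real run)
#   total energy = -22.83587667 Ry
# fills data["energy"] with -22.83587667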
def get_results(filename):
data = {}
with open(filename) as f:
for l in f:
for k, p in patterns.items():
m = p.search(l)
if m:
data[k] = float(m.group(1))
break  # a line matches at most one pattern; stop after the first hit
return data
def analyze(filenames):
fieldnames = ['filename', 'ecut', 'nkpts', 'alat', 'energy','total_force']
with open('results.csv', 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
for f in filenames:
r = get_results(f)
r["filename"] = f
writer.writerow(r)
print("Results written to results.csv!")
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='''Tool for analysis of PWSCF calculations.''')
parser.add_argument(
'filenames', metavar='filenames', type=str, nargs="+",
help='Files to process. You may use wildcards, e.g., "python analyze.py *.out".')
args = parser.parse_args()
analyze(args.filenames)
|
#!/usr/bin/env python
import re
import sys
import csv
import argparse
# This defines the patterns for extracting relevant data from the output
# files.
patterns = {
"energy": re.compile("total energy\s+=\s+([\d\.\-]+)\sRy"),
"ecut": re.compile("kinetic\-energy cutoff\s+=\s+([\d\.\-]+)\s+Ry"),
"alat": re.compile("celldm\(1\)=\s+([\d\.]+)\s"),
"nkpts": re.compile("number of k points=\s+([\d]+)"),
"total_force": re.compile("Total force =\s+([\d\.]+)")
}
def get_results(filename):
data = {}
with open(filename) as f:
for l in f:
for k, p in patterns.items():
m = p.search(l)
if m:
data[k] = float(m.group(1))
break  # a line matches at most one pattern; stop after the first hit
return data
def analyze(filenames):
fieldnames = ['filename', 'ecut', 'nkpts', 'alat', 'energy']
with open('results.csv', 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
for f in filenames:
r = get_results(f)
r["filename"] = f
writer.writerow(r)
print("Results written to results.csv!")
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='''Tool for analysis of PWSCF calculations.''')
parser.add_argument(
'filenames', metavar='filenames', type=str, nargs="+",
help='Files to process. You may use wildcards, e.g., "python analyze.py *.out".')
args = parser.parse_args()
analyze(args.filenames)
|
bsd-3-clause
|
Python
|
0ce8050b797b3e2c2a9b0e74cbc67fd8e31736b3
|
Remove working distros to focus on non-working ones
|
FOGProject/fog-community-scripts,FOGProject/fog-community-scripts,FOGProject/fog-community-scripts
|
fog-aws-testing/scripts/settings.py
|
fog-aws-testing/scripts/settings.py
|
# The list of OSs.
#OSs = ["debian9","centos7","rhel7","fedora29","arch","ubuntu18_04"]
OSs = ["rhel7","fedora29","arch","ubuntu18_04"]
#dnsAddresses = ["debian9.fogtesting.cloud","centos7.fogtesting.cloud","rhel7.fogtesting.cloud","fedora29.fogtesting.cloud","arch.fogtesting.cloud","ubuntu18_04.fogtesting.cloud"]
dnsAddresses = ["rhel7.fogtesting.cloud","fedora29.fogtesting.cloud","arch.fogtesting.cloud","ubuntu18_04.fogtesting.cloud"]
# The list of branches to process.
branches = ["master"]
branches = ["master","dev-branch"]
# The region we operate in, dictated by terraform.
theRegion = "us-east-2"
# The availability zone; we use just one zone.
zone = theRegion + 'a'
# How long to wait between loop iterations when polling for something to finish.
wait = 1
scriptDir = "/home/admin/fog-community-scripts/fog-aws-testing/scripts"
webdir = '/tmp/webdir'
statusDir = '/tmp/statuses'
indexHtml = 'index.html'
green = "green.png"
orange = "orange.png"
red = "red.png"
s3bucket = "fogtesting2.theworkmans.us"
http = "http://"
port = ""
netdir = ""
remoteResult = "/root/result"
ssh = "/usr/bin/ssh"
scp = "/usr/bin/scp"
timeout = "/usr/bin/timeout"
s3cmd = "/usr/bin/s3cmd"
ssh_keyscan = "/usr/bin/ssh-keyscan"
sshTimeout = "15"
fogTimeout= "15m" #Time to wait for FOG installation to complete. Must end with a unit of time. s for seconds, m for minutes.
sshTime="15s" #Time to wait for small SSH commands to complete. Must end with a unit of time. s for seconds, m for minutes.
codes = {
"-1":{
"reason":"Installer did not complete within alotted time.",
"status":orange
},
"0":{
"reason":"Success.",
"status":green
},
"1":{
"reason":"Failed to call script properly.",
"status":orange
},
"2":{
"reason":"Failed to reset git.",
"status":orange
},
"3":{
"reason":"Failed to pull git.",
"status":orange
},
"4":{
"reason":"Failed to checkout git.",
"status":orange
},
"5":{
"reason":"Failed to change directory.",
"status":orange
},
"6":{
"reason":"Installation failed.",
"status":red
}
}
|
# The list of OSs.
OSs = ["debian9","centos7","rhel7","fedora29","arch","ubuntu18_04"]
dnsAddresses = ["debian9.fogtesting.cloud","centos7.fogtesting.cloud","rhel7.fogtesting.cloud","fedora29.fogtesting.cloud","arch.fogtesting.cloud","ubuntu18_04.fogtesting.cloud"]
# The list of branches to process.
branches = ["master","dev-branch"]
# The region we operate in, dictated by terraform.
theRegion = "us-east-2"
# The availability zone; we use just one zone.
zone = theRegion + 'a'
# How long to wait between loop iterations when polling for something to finish.
wait = 1
scriptDir = "/home/admin/fog-community-scripts/fog-aws-testing/scripts"
webdir = '/tmp/webdir'
statusDir = '/tmp/statuses'
indexHtml = 'index.html'
green = "green.png"
orange = "orange.png"
red = "red.png"
s3bucket = "fogtesting2.theworkmans.us"
http = "http://"
port = ""
netdir = ""
remoteResult = "/root/result"
ssh = "/usr/bin/ssh"
scp = "/usr/bin/scp"
timeout = "/usr/bin/timeout"
s3cmd = "/usr/bin/s3cmd"
ssh_keyscan = "/usr/bin/ssh-keyscan"
sshTimeout = "15"
fogTimeout= "15m" #Time to wait for FOG installation to complete. Must end with a unit of time. s for seconds, m for minutes.
sshTime="15s" #Time to wait for small SSH commands to complete. Must end with a unit of time. s for seconds, m for minutes.
codes = {
"-1":{
"reason":"Installer did not complete within alotted time.",
"status":orange
},
"0":{
"reason":"Success.",
"status":green
},
"1":{
"reason":"Failed to call script properly.",
"status":orange
},
"2":{
"reason":"Failed to reset git.",
"status":orange
},
"3":{
"reason":"Failed to pull git.",
"status":orange
},
"4":{
"reason":"Failed to checkout git.",
"status":orange
},
"5":{
"reason":"Failed to change directory.",
"status":orange
},
"6":{
"reason":"Installation failed.",
"status":red
}
}
|
mit
|
Python
|
b72ab35056ca6ec1e48db963d61c31d89ec80161
|
fix on winsock2
|
tuttleofx/sconsProject
|
autoconf/winsock2.py
|
autoconf/winsock2.py
|
from _external import *
winsock2 = LibWithHeaderChecker( 'ws2_32',
['winsock2.h'],
'c',
name='winsock2' )
|
from _external import *
winsock2 = LibWithHeaderChecker( 'winsock2',
['winsock2.h'],
'c',
name='ws2_32' )
|
mit
|
Python
|
0b499f01d517775fb03294c1c785318ca6224874
|
Bump to v0.0.5
|
cogniteev/backache
|
backache/__init__.py
|
backache/__init__.py
|
from . core import *
from . antioxidant import celerize # flake8: noqa
from . errors import *
__version__ = (0, 0, 5)
|
from . core import *
from . antioxidant import celerize # flake8: noqa
from . errors import *
__version__ = (0, 0, 4)
|
apache-2.0
|
Python
|
ce2cf07d9fa9dc3bdd229b1cbb56745784e3049d
|
Fix stray char.
|
riga/law,riga/law
|
law/sandbox/docker.py
|
law/sandbox/docker.py
|
# -*- coding: utf-8 -*-
"""
Docker sandbox implementation.
"""
__all__ = ["DockerSandbox"]
from law.sandbox.base import Sandbox
class DockerSandbox(Sandbox):
sandbox_type = "docker"
@property
def image(self):
return self.name
def cmd(self, task, task_cmd):
# get args for the docker command as configured in the task
docker_args = getattr(task, "docker_args", ["--rm"])
if isinstance(docker_args, (list, tuple)):
docker_args = " ".join(str(arg) for arg in docker_args)
cmd = "docker run {docker_args} {image} \"{task_cmd}\""
cmd = cmd.format(docker_args=docker_args, image=self.image, task_cmd=task_cmd)
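# e.g. docker_args = ["--rm"] and image "ubuntu:18.04" (illustrative) yield:
#   docker run --rm ubuntu:18.04 "my_task_cmd"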
return cmd
|
# -*- coding: utf-8 -*-
"""
Docker sandbox implementation.
"""
__all__ = ["DockerSandbox"]
from law.sandbox.base import Sandbox
class DockerSandbox(Sandbox):
sandbox_type = "docker"
@property
def image(self):
return self.name
def cmd(self, task, task_cmd):
# get args for the docker command as configured in the task
docker_args = getattr(task, "docker_args", ["--rm"]):
if isinstance(docker_args, (list, tuple)):
docker_args = " ".join(str(arg) for arg in docker_args)
cmd = "docker run {docker_args} {image} \"{task_cmd}\""
cmd = cmd.format(docker_args=docker_args, image=self.image, task_cmd=task_cmd)
return cmd
|
bsd-3-clause
|
Python
|
699085edd1db5aa7a827a16ffffcbcc9a69cbf52
|
Add forgotten imports for bucketlist endpoints
|
brayoh/bucket-list-api
|
app/endpoints.py
|
app/endpoints.py
|
from flask import request, Blueprint
from flask_restful import Api
from controllers.accounts_manager import LoginResource, RegisterResource
from controllers.bucketlist import BucketListsResource, BucketListResource
from controllers.bucketlist_items import BucketListItems
bucketlist_blueprint = Blueprint('bucket_list', __name__)
api = Api(bucketlist_blueprint)
# login routes
api.add_resource(RegisterResource, '/auth/register')
api.add_resource(LoginResource, '/auth/login')
# bucketlist routes
api.add_resource(BucketListsResource, '/bucketlists')
api.add_resource(BucketListResource, '/bucketlists/<int:id>')
# bucketlist items routes
api.add_resource(BucketListItems,
'/bucketlists/<int:bucketlist_id>/items',
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
|
from flask import request, Blueprint
from flask_restful import Api
from controllers.accounts_manager import LoginResource, RegisterResource
from controllers.bucketlist import GetAllBucketLists, GetBucketList
from controllers.bucketlist_items import BucketListItems
bucketlist_blueprint = Blueprint('bucket_list', __name__)
api = Api(bucketlist_blueprint)
# login routes
api.add_resource(RegisterResource, '/auth/register')
api.add_resource(LoginResource, '/auth/login')
# bucketlist routes
api.add_resource(BucketListsResource, '/bucketlists')
api.add_resource(BucketListResource, '/bucketlists/<int:id>')
# bucketlist items routes
api.add_resource(BucketListItems,
'/bucketlists/<int:bucketlist_id>/items',
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
|
mit
|
Python
|
6908060af5b872e54d42f63e580591931b7ff230
|
Check empty string
|
DrDos0016/z2,DrDos0016/z2,DrDos0016/z2
|
museum_site/scroll.py
|
museum_site/scroll.py
|
from django.db import models
class Scroll(models.Model):
# Constants
SCROLL_TOP = """```
╞╤═════════════════════════════════════════════╤╡
│ Scroll ### │
╞═════════════════════════════════════════════╡
│ • • • • • • • • •│"""
SCROLL_BOTTOM = """\n │ • • • • • • • • •│
╞╧═════════════════════════════════════════════╧╡```"""
# Fields
identifier = models.IntegerField()
content = models.TextField(
default="",
help_text="Lines starting with @ will be skipped. Initial whitespace is trimmed by DB, so an extra @ line is a fix."
)
source = models.CharField(max_length=160)
published = models.BooleanField(default=False)
suggestion = models.CharField(max_length=500, blank=True, default="")
class Meta:
ordering = ["-id"]
def __str__(self):
return "Scroll #{} ID:{} Pub:{}".format(self.identifier, self.id, self.published)
def lines(self):
return self.content.split("\n")
def render_for_discord(self):
lines = self.lines()
output = self.SCROLL_TOP.replace("###", ("000"+str(self.identifier))[-3:])
for line in lines:
line = line.replace("\r", "")
line = line.replace("\n", "")
if line and line[0] == "@":
continue
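# pad with spaces and clip so every rendered row is exactly 42 characters wide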
output += "\n │ " + (line + " " * 42)[:42] + " │ "
output += self.SCROLL_BOTTOM
return output
|
from django.db import models
class Scroll(models.Model):
# Constants
SCROLL_TOP = """```
╞╤═════════════════════════════════════════════╤╡
│ Scroll ### │
╞═════════════════════════════════════════════╡
│ • • • • • • • • •│"""
SCROLL_BOTTOM = """\n │ • • • • • • • • •│
╞╧═════════════════════════════════════════════╧╡```"""
# Fields
identifier = models.IntegerField()
content = models.TextField(
default="",
help_text="Lines starting with @ will be skipped. Initial whitespace is trimmed by DB, so an extra @ line is a fix."
)
source = models.CharField(max_length=160)
published = models.BooleanField(default=False)
suggestion = models.CharField(max_length=500, blank=True, default="")
class Meta:
ordering = ["-id"]
def __str__(self):
return "Scroll #{} ID:{} Pub:{}".format(self.identifier, self.id, self.published)
def lines(self):
return self.content.split("\n")
def render_for_discord(self):
lines = self.lines()
output = self.SCROLL_TOP.replace("###", ("000"+str(self.identifier))[-3:])
for line in lines:
line = line.replace("\r", "")
line = line.replace("\n", "")
if line[0] == "@":
continue
output += "\n │ " + (line + " " * 42)[:42] + " │ "
output += self.SCROLL_BOTTOM
return output
|
mit
|
Python
|
e00b7c612f34c938a3d42dada006874ffea021c8
|
complete localizer
|
minggli/fisheries-convnet,minggli/fisheries-convnet
|
app/localizer.py
|
app/localizer.py
|
# -*- coding: utf-8 -*-
"""
localizer
localize bounding boxes and pad the rest of the image with zeros (0, 0, 0)
"""
import os
import cv2
import numpy as np
import multiprocessing as mp
from app.pipeline import generate_data_skeleton
from app.cv.serializer import deserialize_json
from app.settings import BOUNDINGBOX, IMAGE_PATH
class Localizer(object):
def __init__(self, path_to_image):
# cv2 loads image in BGR channel order
self.path = path_to_image
self.image = cv2.imread(path_to_image, -1)
self.fname = os.path.split(path_to_image)[1]
try:
self.bboxes = \
deserialize_json(BOUNDINGBOX)[self.fname]['annotations']
except (KeyError, IndexError):  # no bounding boxes recorded for this image
self.bboxes = None
self.output_image = None
@property
def coordinates_factory(self):
"""yield bounding boxes"""
for bbox in self.bboxes:
x = int(bbox['x'])
y = int(bbox['y'])
height = int(bbox['height'])
width = int(bbox['width'])
yield x, x + width, y, y + height
def declutter(self):
filter_layer = np.zeros(shape=self.image.shape)
# highlight image with (1, 1, 1) on background of zeros
if self.bboxes:
for x, x_end, y, y_end in self.coordinates_factory:
filter_layer[y: y_end, x: x_end, :] = (1., 1., 1.)
# elementwise multiplication of filter layer and original image
self.output_image = cv2.convertScaleAbs(self.image * filter_layer)
else:
self.output_image = self.image
return self
def show(self):
cv2.imshow("output", self.output_image)
cv2.waitKey(0)
def write(self):
print('writing {}'.format(self.path))
cv2.imwrite(self.path, self.output_image)
def localize(path_to_image):
Localizer(path_to_image).declutter().write()
paths_to_images = generate_data_skeleton(IMAGE_PATH)[0]
with mp.Pool(10) as p:
p.map(localize, paths_to_images)
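# editor's note: this pool runs at import time; under a spawn start method
# (Windows/macOS) it should live beneath an "if __name__ == '__main__':" guard.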
|
# -*- coding: utf-8 -*-
"""
localizer
localize bounding boxes and pad the rest of the image with zeros (0, 0, 0)
"""
import os
import cv2
import numpy as np
from app.cv.serializer import deserialize_json
from app.settings import CV_SAMPLE_PATH, BOUNDINGBOX
test_image = CV_SAMPLE_PATH + 'pos/img_00003.jpg'
class Localizer(object):
def __init__(self, path_to_image):
self.image = cv2.imread(path_to_image, -1)
self.fname = os.path.split(path_to_image)[1]
self.bboxes = \
deserialize_json(BOUNDINGBOX)[self.fname]['annotations']
@property
def factory(self):
"""yield bounding boxes"""
for bbox in self.bboxes:
x = int(bbox['x'])
y = int(bbox['y'])
height = int(bbox['height'])
width = int(bbox['width'])
yield x, x + width, y, y + height
def new_image(self):
background = np.zeros(shape=self.image.shape)
# highlight image with (1, 1, 1) on background of zeros
for x, x_end, y, y_end in self.factory:
background[x: x_end, y: y_end] = [1, 1, 1]
# mirrir original image's bounding boxes into new
self.output_image = np.mutiply(self.image, background)
def show(self):
cv2.imshow("Display window", self.output_image)
cv2.waitKey(0)
# # image read as it is in as BGR
# image = cv2.imread(test_image, -1)
# b = image[2: 10, 3: 11, :]
# print(b)
# c = np.zeros(shape=(8, 8, 3))
# c[3, 3] = (1, 1, 1)
# d = np.multiply(b, c)
# print(d)
|
mit
|
Python
|
73b9246164994049d291d5b482d4dbf2ca41a124
|
Rename master branch to main
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
tests/app/test_accessibility_statement.py
|
tests/app/test_accessibility_statement.py
|
import re
import subprocess
from datetime import datetime
def test_last_review_date():
statement_file_path = "app/templates/views/accessibility_statement.html"
# test local changes against main for a full diff of what will be merged
statement_diff = subprocess.run(
[f"git diff --exit-code origin/main -- {statement_file_path}"], stdout=subprocess.PIPE, shell=True
)
# if statement has changed, test the review date was part of those changes
if statement_diff.returncode == 1:
raw_diff = statement_diff.stdout.decode("utf-8")
today = datetime.now().strftime("%d %B %Y")
with open(statement_file_path, "r") as statement_file:
current_review_date = re.search(
(r'"Last updated": "(\d{1,2} [A-Z]{1}[a-z]+ \d{4})"'), statement_file.read()
).group(1)
# guard against changes that don't need to update the review date
if current_review_date != today:
assert '"Last updated": "' in raw_diff
|
import re
import subprocess
from datetime import datetime
def test_last_review_date():
statement_file_path = "app/templates/views/accessibility_statement.html"
# test local changes against master for a full diff of what will be merged
statement_diff = subprocess.run(
[f"git diff --exit-code origin/master -- {statement_file_path}"], stdout=subprocess.PIPE, shell=True
)
# if statement has changed, test the review date was part of those changes
if statement_diff.returncode == 1:
raw_diff = statement_diff.stdout.decode("utf-8")
today = datetime.now().strftime("%d %B %Y")
with open(statement_file_path, "r") as statement_file:
current_review_date = re.search(
(r'"Last updated": "(\d{1,2} [A-Z]{1}[a-z]+ \d{4})"'), statement_file.read()
).group(1)
# guard against changes that don't need to update the review date
if current_review_date != today:
assert '"Last updated": "' in raw_diff
|
mit
|
Python
|
5f5bdcf5c6b6fb70dc94945d463c5200a46699d6
|
revert unfinished task test
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
tests/integration/unfinished_task_test.py
|
tests/integration/unfinished_task_test.py
|
# stdlib
import time
# third party
import pytest
# syft absolute
import syft as sy
from syft.core.node.common.action.save_object_action import SaveObjectAction
from syft.core.store.storeable_object import StorableObject
@pytest.mark.general
def test_unfinished_task(get_clients) -> None:
print("running test_unfinished_task")
client = get_clients(1)[0]
list_pointer = sy.lib.python.List().send(client)
int_pointer = sy.lib.python.Int(1).send(client)
time.sleep(5)
int_obj = int_pointer.get()
list_pointer.append(int_pointer)
storeable_object = StorableObject(id=int_pointer.id_at_location, data=int_obj)
save_object_action = SaveObjectAction(obj=storeable_object, address=client.address)
client.send_immediate_msg_without_reply(msg=save_object_action)
time.sleep(5)
assert list_pointer.get() == [1]
|
# third party
import pytest
# syft absolute
import syft as sy
from syft.core.node.common.action.save_object_action import SaveObjectAction
from syft.core.store.storeable_object import StorableObject
@pytest.mark.general
def test_unfinished_task(get_clients) -> None:
print("running test_unfinished_task")
client = get_clients(1)[0]
list_pointer = sy.lib.python.List().send(client)
int_pointer = sy.lib.python.Int(1).send(client)
int_pointer.block_with_timeout(secs=10)
int_obj = int_pointer.get()
list_pointer.append(int_pointer)
storeable_object = StorableObject(id=int_pointer.id_at_location, data=int_obj)
save_object_action = SaveObjectAction(obj=storeable_object, address=client.address)
client.send_immediate_msg_without_reply(msg=save_object_action)
list_pointer.block_with_timeout(secs=10)
assert list_pointer.get() == [1]
|
apache-2.0
|
Python
|
da3a4e8036a5933a9ce00f42795c8ca398925c38
|
Update geogig_init_repo.py
|
state-hiu/cybergis-scripts,state-hiu/cybergis-scripts
|
lib/rogue/geogig_init_repo.py
|
lib/rogue/geogig_init_repo.py
|
from base64 import b64encode
from optparse import make_option
import json
import urllib
import urllib2
import argparse
import time
import os
import subprocess
#==#
import _geogig_init_repo
#==#
parser = argparse.ArgumentParser(description='Initialize GeoGig repository and optionally add to GeoServer instance. If you want to add the GeoGig repo include the optional parameters.')
parser.add_argument("--path", help="The location in the filesystem of the Geogig repository.")
parser.add_argument("--name", help="The name of the GeoGig repo and data store in GeoServer.")
parser.add_argument('-gs', '--geoserver', help="The url of the GeoServer servicing the GeoGig repository.")
parser.add_argument('-ws', '--workspace', help="The GeoServer workspace to use for the data store.")
#parser.add_argument("--path", help="The location in the filesystem of the Geogig repository.")
parser.add_argument("--username", help="The username to use for basic auth requests.")
parser.add_argument("--password", help="The password to use for basic auth requests.")
parser.add_argument('--verbose', '-v', default=0, action='count', help="Print out intermediate status messages.")
parser.add_argument("--publish_datastore", default=0, action='count', help="Publish datastore in GeoServer for GeoGig repository")
parser.add_argument('--publish_layers', default=0, action='count', help="Publish layers from GeoGig data store")
args = parser.parse_args()
#==#
_geogig_init_repo.run(args)
|
from base64 import b64encode
from optparse import make_option
import json
import urllib
import urllib2
import argparse
import time
import os
import subprocess
#==#
import _geogig_init_repo
#==#
parser = argparse.ArgumentParser(description='Initialize GeoGig repository and optionally add to GeoServer instance. If you want to add the GeoGig repo include the optional parameters.')
parser.add_argument("--path", help="The location in the filesystem of the Geogig repository.")
parser.add_argument("--name", help="The name of the GeoGig repo and data store in GeoServer.")
parser.add_argument("--geoserver", help="The url of the GeoServer servicing the GeoGig repository.")
parser.add_argument("--workspace", help="The GeoServer workspace to use for the data store.")
#parser.add_argument("--path", help="The location in the filesystem of the Geogig repository.")
parser.add_argument("--username", help="The username to use for basic auth requests.")
parser.add_argument("--password", help="The password to use for basic auth requests.")
parser.add_argument('--verbose', '-v', default=0, action='count', help="Print out intermediate status messages.")
parser.add_argument("--publish_datastore", default=0, action='count', help="Publish datastore in GeoServer for GeoGig repository")
parser.add_argument('--publish_layers', default=0, action='count', help="Publish layers from GeoGig data store")
args = parser.parse_args()
#==#
_geogig_init_repo.run(args)
|
mit
|
Python
|
a19a52a42486eaa8e849d2f0a175f9a76497029d
|
bump version number
|
Kagiso-Future-Media/django-intercom,kencochrane/django-intercom,kencochrane/django-intercom,mikesdnielsen/django-intercom,mikesdnielsen/django-intercom,Kagiso-Future-Media/django-intercom
|
intercom/__init__.py
|
intercom/__init__.py
|
__version__ = "0.0.7"
|
__version__ = "0.0.6"
|
bsd-3-clause
|
Python
|
d23a53f5c97a3939952ecb8f39d24603fe0d4bab
|
bump `datadog-checks-base` version (#9718)
|
DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core
|
mysql/setup.py
|
mysql/setup.py
|
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from codecs import open
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
# Get version info
ABOUT = {}
with open(path.join(HERE, "datadog_checks", "mysql", "__about__.py")) as f:
exec(f.read(), ABOUT)
# Get the long description from the README file
with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
def get_dependencies():
dep_file = path.join(HERE, 'requirements.in')
if not path.isfile(dep_file):
return []
with open(dep_file, encoding='utf-8') as f:
return f.readlines()
CHECKS_BASE_REQ = 'datadog-checks-base>=20.2.0'
setup(
name='datadog-mysql',
version=ABOUT['__version__'],
description='The MySQL check',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='datadog agent mysql check',
# The project's main homepage.
url='https://github.com/DataDog/integrations-core',
# Author details
author='Datadog',
author_email='[email protected]',
# License
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: System :: Monitoring',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
# The package we're going to ship
packages=['datadog_checks.mysql'],
# Run-time dependencies
install_requires=[CHECKS_BASE_REQ],
extras_require={'deps': get_dependencies()},
# Extra files to ship with the wheel package
include_package_data=True,
)
|
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from codecs import open
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
# Get version info
ABOUT = {}
with open(path.join(HERE, "datadog_checks", "mysql", "__about__.py")) as f:
exec(f.read(), ABOUT)
# Get the long description from the README file
with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
def get_dependencies():
dep_file = path.join(HERE, 'requirements.in')
if not path.isfile(dep_file):
return []
with open(dep_file, encoding='utf-8') as f:
return f.readlines()
CHECKS_BASE_REQ = 'datadog-checks-base>=20.1.0'
setup(
name='datadog-mysql',
version=ABOUT['__version__'],
description='The MySQL check',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='datadog agent mysql check',
# The project's main homepage.
url='https://github.com/DataDog/integrations-core',
# Author details
author='Datadog',
author_email='[email protected]',
# License
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: System :: Monitoring',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
# The package we're going to ship
packages=['datadog_checks.mysql'],
# Run-time dependencies
install_requires=[CHECKS_BASE_REQ],
extras_require={'deps': get_dependencies()},
# Extra files to ship with the wheel package
include_package_data=True,
)
|
bsd-3-clause
|
Python
|
1ecf42f474b17e01de12d235a29b08e7f18d0726
|
bump version to v1.10.3
|
simomarsili/ndd
|
ndd/package.py
|
ndd/package.py
|
# -*- coding: utf-8 -*-
"""Template package file"""
__title__ = 'ndd'
__version__ = '1.10.3'
__author__ = 'Simone Marsili'
__summary__ = ''
__url__ = 'https://github.com/simomarsili/ndd'
__email__ = '[email protected]'
__license__ = 'BSD 3-Clause'
__copyright__ = 'Copyright (c) 2020, Simone Marsili'
__classifiers__ = [
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
]
|
# -*- coding: utf-8 -*-
"""Template package file"""
__title__ = 'ndd'
__version__ = '1.10.2'
__author__ = 'Simone Marsili'
__summary__ = ''
__url__ = 'https://github.com/simomarsili/ndd'
__email__ = '[email protected]'
__license__ = 'BSD 3-Clause'
__copyright__ = 'Copyright (c) 2020, Simone Marsili'
__classifiers__ = [
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
]
|
bsd-3-clause
|
Python
|
527ccd5790aa08d33387b43fd25beb2ed20335c7
|
remove defaults, use self.asserts
|
karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,yongtang/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model
|
tensorflow/python/ops/script_ops_test.py
|
tensorflow/python/ops/script_ops_test.py
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for script operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.eager import def_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import script_ops
from tensorflow.python.ops.script_ops import numpy_function
from tensorflow.python.platform import test
class NumpyFunctionTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def test_numpy_arguments(self):
def plus(a, b):
return a + b
actual_result = script_ops.numpy_function(plus, [1, 2], dtypes.int32)
expect_result = constant_op.constant(3, dtypes.int32)
self.assertAllEqual(actual_result, expect_result)
def test_stateless_flag(self):
call_count = 0
def plus(a, b):
nonlocal call_count  # rebind the test-local counter; a bare global would miss it
call_count += 1
return a + b
@def_function.function
def tensor_plus_stateful(a, b):
return numpy_function(plus, [a, b], dtypes.int32, stateful=True)
@def_function.function
def tensor_plus_stateless(a, b):
return numpy_function(plus, [a, b], dtypes.int32, stateful=False)
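# editor's note: stateless ops with identical inputs may be de-duplicated
# (common subexpression elimination), so the wrapped function runs once per
# unique argument tuple; stateful ops are never merged away.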
@def_function.function(autograph=False)
def tensor_double_plus_stateless(a, b):
sum1 = tensor_plus_stateless(a, b)
sum2 = tensor_plus_stateless(a, b)
return sum1 + sum2
# different argument
tensor_double_plus_stateless(
constant_op.constant(1),
constant_op.constant(2),
)
self.assertEqual(call_count, 1) # +1 as only the first one was executed
@def_function.function(autograph=False)
def tensor_double_plus_stateful(a, b):
sum1 = tensor_plus_stateful(a, b)
sum2 = tensor_plus_stateful(a, b)
return sum1 + sum2
tensor_double_plus_stateful(
constant_op.constant(3),
constant_op.constant(4),
)
self.assertEqual(call_count, 3) # +2 as it is stateful, both were executed
if __name__ == "__main__":
test.main()
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for script operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.eager import def_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import script_ops
from tensorflow.python.ops.script_ops import numpy_function
from tensorflow.python.platform import test
class NumpyFunctionTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def test_numpy_arguments(self):
def plus(a, b):
return a + b
actual_result = script_ops.numpy_function(plus, [1, 2], dtypes.int32)
expect_result = constant_op.constant(3, dtypes.int32)
self.assertAllEqual(actual_result, expect_result)
def test_stateless_flag(self):
call_count = 0
def plus(a, b):
nonlocal call_count  # rebind the test-local counter; a bare global would miss it
call_count += 1
return a + b
@def_function.function
def tensor_plus_stateful(a, b):
return numpy_function(plus, [a, b], dtypes.int32, stateful=True)
@def_function.function
def tensor_plus_stateless(a, b):
return numpy_function(plus, [a, b], dtypes.int32, stateful=False)
@def_function.function(autograph=False)
def tensor_double_plus_stateless(a, b):
sum1 = tensor_plus_stateless(a, b)
sum2 = tensor_plus_stateless(a, b)
return sum1 + sum2
# different argument
tensor_double_plus_stateless(
constant_op.constant(1, dtype=dtypes.int32),
constant_op.constant(2, dtype=dtypes.int32),
)
assert call_count == 1 # +1 as only the first one was executed
@def_function.function(autograph=False)
def tensor_double_plus_stateful(a, b):
sum1 = tensor_plus_stateful(a, b)
sum2 = tensor_plus_stateful(a, b)
return sum1 + sum2
tensor_double_plus_stateful(
constant_op.constant(3, dtype=dtypes.int32),
constant_op.constant(4, dtype=dtypes.int32),
)
assert call_count == 3 # +2 as it is stateful, both were executed
if __name__ == "__main__":
test.main()
|
apache-2.0
|
Python
|
10f0807b9ab85bfa6f6bbb4ed533e1a8af642571
|
fix bug in raw service
|
spaam/svtplay-dl,spaam/svtplay-dl,olof/svtplay-dl,olof/svtplay-dl
|
lib/svtplay_dl/service/raw.py
|
lib/svtplay_dl/service/raw.py
|
from __future__ import absolute_import
import os
import re
from svtplay_dl.service import Service
from svtplay_dl.fetcher.hds import hdsparse
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.fetcher.dash import dashparse
class Raw(Service):
def get(self):
if self.exclude():
return
extention = False
filename = os.path.basename(self.url[:self.url.rfind("/")])
if self.options.output and os.path.isdir(self.options.output):
self.options.output = os.path.join(os.path.dirname(self.options.output), filename)
extention = True
elif self.options.output is None:
self.options.output = filename
extention = True
streams = []
if re.search(".f4m", self.url):
if extention:
self.options.output = "{0}.flv".format(self.options.output)
streams.append(hdsparse(self.options, self.http.request("get", self.url, params={"hdcore": "3.7.0"}), self.url))
if re.search(".m3u8", self.url):
streams.append(hlsparse(self.options, self.http.request("get", self.url), self.url))
if re.search(".mpd", self.url):
streams.append(dashparse(self.options, self.http.request("get", self.url), self.url))
for stream in streams:
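# the parsers may return None when a manifest cannot be fetched or parsed; skip falsy entries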
if stream:
for n in list(stream.keys()):
yield stream[n]
|
from __future__ import absolute_import
import os
import re
from svtplay_dl.service import Service
from svtplay_dl.fetcher.hds import hdsparse
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.fetcher.dash import dashparse
class Raw(Service):
def get(self):
if self.exclude():
return
extention = False
filename = os.path.basename(self.url[:self.url.rfind("/")])
if self.options.output and os.path.isdir(self.options.output):
self.options.output = os.path.join(os.path.dirname(self.options.output), filename)
extention = True
elif self.options.output is None:
self.options.output = filename
extention = True
streams = []
if re.search(".f4m", self.url):
if extention:
self.options.output = "{0}.flv".format(self.options.output)
streams.append(hdsparse(self.options, self.http.request("get", self.url, params={"hdcore": "3.7.0"}), self.url))
if re.search(".m3u8", self.url):
streams.append(hlsparse(self.options, self.http.request("get", self.url), self.url))
if re.search(".mpd", self.url):
streams.append(dashparse(self.options, self.http.request("get", self.url), self.url))
for stream in streams:
for n in list(stream.keys()):
yield stream[n]
|
mit
|
Python
|
6a2aa6051c7922d1b2b37824d92634a4880e9ff2
|
Correct semantic version format.
|
tensorflow/probability,tensorflow/probability
|
tensorflow_probability/python/version.py
|
tensorflow_probability/python/version.py
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Define TensorFlow Probability version information."""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '5'
_PATCH_VERSION = '0'
# When building releases, we can update this value on the release branch to
# reflect the current release candidate ('rc0', 'rc1') or, finally, the official
# stable release (indicated by `_VERSION_SUFFIX = ''`). Outside the context of a
# release branch, the current version is by default assumed to be a
# 'development' version, labeled 'dev'.
_VERSION_SUFFIX = 'dev'
# Example, '0.4.0-dev'
__version__ = '.'.join([
_MAJOR_VERSION,
_MINOR_VERSION,
_PATCH_VERSION,
])
if _VERSION_SUFFIX:
__version__ = '{}-{}'.format(__version__, _VERSION_SUFFIX)
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Define TensorFlow Probability version information."""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '5'
_PATCH_VERSION = '0'
# When building releases, we can update this value on the release branch to
# reflect the current release candidate ('rc0', 'rc1') or, finally, the official
# stable release (indicated by `_VERSION_SUFFIX = ''`). Outside the context of a
# release branch, the current version is by default assumed to be a
# 'development' version, labeled 'dev'.
_VERSION_SUFFIX = 'dev'
# Example, '0.4.0.dev'
__version__ = '.'.join(s for s in [
_MAJOR_VERSION,
_MINOR_VERSION,
_PATCH_VERSION,
_VERSION_SUFFIX,
] if s) # Prevent trailing dot when version suffix is empty.
|
apache-2.0
|
Python
|
82665b999fb07e3ebc41de8132ba9d22dc04140c
|
Change version number back to 0.8.0.dev
|
samuelgarcia/python-neo,apdavison/python-neo,NeuralEnsemble/python-neo,rgerkin/python-neo,JuliaSprenger/python-neo,INM-6/python-neo
|
neo/version.py
|
neo/version.py
|
# -*- coding: utf-8 -*-
version = '0.8.0.dev'
|
# -*- coding: utf-8 -*-
version = '0.7.1'
|
bsd-3-clause
|
Python
|
b3761729b156367229b5cd8895d225cb13d3267a
|
Fix example `Set-Based Column Map Expectation` template import (#6134)
|
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
|
examples/expectations/set_based_column_map_expectation_template.py
|
examples/expectations/set_based_column_map_expectation_template.py
|
"""
This is a template for creating custom SetBasedColumnMapExpectations.
For detailed instructions on how to use it, please see:
https://docs.greatexpectations.io/docs/guides/expectations/creating_custom_expectations/how_to_create_custom_set_based_column_map_expectations
"""
from great_expectations.expectations.set_based_column_map_expectation import (
SetBasedColumnMapExpectation,
)
# <snippet>
# This class defines the Expectation itself
class ExpectColumnValuesToBeInSomeSet(SetBasedColumnMapExpectation):
"""TODO: Add a docstring here"""
# These values will be used to configure the metric created by your expectation
set_ = []
set_camel_name = "SetName"
set_semantic_name = None
# These examples will be shown in the public gallery.
# They will also be executed as unit tests for your Expectation.
examples = []
# Here your regex is used to create a custom metric for this expectation
map_metric = SetBasedColumnMapExpectation.register_metric(
set_camel_name=set_camel_name,
set_=set_,
)
# This object contains metadata for display in the public Gallery
library_metadata = {
"tags": ["set-based"], # Tags for this Expectation in the Gallery
"contributors": [ # Github handles for all contributors to this Expectation.
"@your_name_here", # Don't forget to add your github handle here!
],
}
# </snippet>
if __name__ == "__main__":
ExpectColumnValuesToBeInSomeSet().print_diagnostic_checklist()
|
"""
This is a template for creating custom SetBasedColumnMapExpectations.
For detailed instructions on how to use it, please see:
https://docs.greatexpectations.io/docs/guides/expectations/creating_custom_expectations/how_to_create_custom_set_based_column_map_expectations
"""
from great_expectations.expectations.regex_based_column_map_expectation import (
SetBasedColumnMapExpectation,
)
# <snippet>
# This class defines the Expectation itself
class ExpectColumnValuesToBeInSomeSet(SetBasedColumnMapExpectation):
"""TODO: Add a docstring here"""
# These values will be used to configure the metric created by your expectation
set_ = []
set_camel_name = "SetName"
set_semantic_name = None
# These examples will be shown in the public gallery.
# They will also be executed as unit tests for your Expectation.
examples = []
    # Here your set is used to create a custom metric for this expectation
map_metric = SetBasedColumnMapExpectation.register_metric(
set_camel_name=set_camel_name,
set_=set_,
)
# This object contains metadata for display in the public Gallery
library_metadata = {
"tags": ["set-based"], # Tags for this Expectation in the Gallery
"contributors": [ # Github handles for all contributors to this Expectation.
"@your_name_here", # Don't forget to add your github handle here!
],
}
# </snippet>
if __name__ == "__main__":
ExpectColumnValuesToBeInSomeSet().print_diagnostic_checklist()
|
apache-2.0
|
Python
|
99e9ef79178d6e2dffd8ec7ed12b3edbd8b7d0f1
|
Add basket total to context
|
JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw
|
longclaw/longclawbasket/views.py
|
longclaw/longclawbasket/views.py
|
from django.shortcuts import render
from django.views.generic import ListView
from longclaw.longclawbasket.models import BasketItem
from longclaw.longclawbasket import utils
class BasketView(ListView):
model = BasketItem
template_name = "longclawbasket/basket.html"
def get_context_data(self, **kwargs):
items, _ = utils.get_basket_items(self.request)
total_price = sum(item.total() for item in items)
return {"basket": items, "total_price": total_price}
|
from django.shortcuts import render
from django.views.generic import ListView
from longclaw.longclawbasket.models import BasketItem
from longclaw.longclawbasket import utils
class BasketView(ListView):
model = BasketItem
template_name = "longclawbasket/basket.html"
def get_context_data(self, **kwargs):
items, _ = utils.get_basket_items(self.request)
return {"basket": items}
|
mit
|
Python
|
1d07732e0fae0dca9eae1d89de913a1e124e32fc
|
Disable some prod optimisations
|
Turupawn/website,lutris/website,lutris/website,Turupawn/website,lutris/website,lutris/website,Turupawn/website,Turupawn/website
|
lutrisweb/settings/production.py
|
lutrisweb/settings/production.py
|
import os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
#'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
|
import os
from base import * # noqa
DEBUG = False
MEDIA_URL = '//lutris.net/media/'
FILES_ROOT = '/srv/files'
ALLOWED_HOSTS = ['.lutris.net', '.lutris.net.', ]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'lutris',
'USER': 'lutris',
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': 'localhost',
'CONN_MAX_AGE': 600,
}
}
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
STEAM_API_KEY = os.environ['STEAM_API_KEY']
|
agpl-3.0
|
Python
|
362312ad1a26dbecf0c4942c9a6e7042cbaab3bd
|
Test the rest of Roman masters
|
khaledhosny/psautohint,khaledhosny/psautohint
|
test-mm.py
|
test-mm.py
|
from psautohint import autohint
from psautohint import psautohint
def getFonts(masters, baseDir):
options = autohint.ACOptions()
options.quiet = True
fonts = []
infos = []
for master in masters:
path = "%s/%s/font.ufo" % (baseDir, master)
font = autohint.openUFOFile(path, None, False, options)
font.useProcessedLayer = False
names = font.getGlyphList()
_, fontDictList = font.getfdInfo(font.getPSName(), path, False, False, [], [], names)
info = fontDictList[0].getFontInfo()
fonts.append(font)
infos.append(info)
return fonts, infos
def getGlyphList(fonts):
glyphList = fonts[0].getGlyphList()
assert all([font.getGlyphList() == glyphList for font in fonts])
return glyphList
def mmHint(masters, fonts, infos, glyphList):
hinted = []
for name in glyphList:
glyphs = []
print("Hinting %s" % name)
for i, (font, info) in enumerate(zip(fonts, infos)):
glyph = font.convertToBez(name, False, True)[0]
if not glyph:
glyph = "%%%s\n" % name
if i == 0:
glyph = psautohint.autohint(info, [glyph], False, False, False, False)[0]
glyphs.append(glyph)
try:
            glyphs = psautohint.autohintmm(infos[0], [glyphs], masters, True)
except:
for i, glyph in enumerate(glyphs):
print(masters[i])
print(glyph)
raise
hinted.append(glyphs)
return hinted
def main():
masters = ["Regular", "Light", "ExtraLight", "Medium", "Semibold", "Bold", "Black"]
fonts, infos = getFonts(masters, "tests/data/source-code-pro")
glyphList = getGlyphList(fonts)
hinted = mmHint(masters, fonts, infos, glyphList)
if __name__ == "__main__":
main()
|
from psautohint import autohint
from psautohint import psautohint
def getFonts(masters, baseDir):
options = autohint.ACOptions()
options.quiet = True
fonts = []
infos = []
for master in masters:
path = "%s/%s/font.ufo" % (baseDir, master)
font = autohint.openUFOFile(path, None, False, options)
font.useProcessedLayer = False
names = font.getGlyphList()
_, fontDictList = font.getfdInfo(font.getPSName(), path, False, False, [], [], names)
info = fontDictList[0].getFontInfo()
fonts.append(font)
infos.append(info)
return fonts, infos
def getGlyphList(fonts):
glyphList = fonts[0].getGlyphList()
assert all([font.getGlyphList() == glyphList for font in fonts])
return glyphList
def mmHint(masters, fonts, infos, glyphList):
hinted = []
for name in glyphList:
glyphs = []
print("Hinting %s" % name)
for i, (font, info) in enumerate(zip(fonts, infos)):
glyph = font.convertToBez(name, False, True)[0]
if not glyph:
glyph = "%%%s\n" % name
if i == 0:
glyph = psautohint.autohint(info, [glyph], False, False, False, False)[0]
glyphs.append(glyph)
try:
            glyphs = psautohint.autohintmm(infos[0], [glyphs], masters, True)
except:
for i, glyph in enumerate(glyphs):
print(masters[i])
print(glyph)
raise
hinted.append(glyphs)
return hinted
def main():
masters = ["Black", "ExtraLight"]
fonts, infos = getFonts(masters, "tests/data/source-code-pro")
glyphList = getGlyphList(fonts)
hinted = mmHint(masters, fonts, infos, glyphList)
if __name__ == "__main__":
main()
|
apache-2.0
|
Python
|
6aa5e2c95c0f529aa2803395779ca7274d5795b1
|
Bump version to 1.0.1-machtfit-67
|
machtfit/django-oscar,machtfit/django-oscar,machtfit/django-oscar
|
src/oscar/__init__.py
|
src/oscar/__init__.py
|
import os
# Use 'dev', 'beta', or 'final' as the 4th element to indicate release type.
VERSION = (1, 0, 1, 'machtfit', 67)
def get_short_version():
return '%s.%s' % (VERSION[0], VERSION[1])
def get_version():
return '{}.{}.{}-{}-{}'.format(*VERSION)
# Cheeky setting that allows each template to be accessible by two paths.
# Eg: the template 'oscar/templates/oscar/base.html' can be accessed via both
# 'base.html' and 'oscar/base.html'. This allows Oscar's templates to be
# extended by templates with the same filename
OSCAR_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'templates/oscar')
OSCAR_CORE_APPS = [
'oscar',
'oscar.apps.checkout',
'oscar.apps.address',
'oscar.apps.shipping',
'oscar.apps.catalogue',
'oscar.apps.partner',
'oscar.apps.basket',
'oscar.apps.payment',
'oscar.apps.offer',
'oscar.apps.order',
'oscar.apps.customer',
'oscar.apps.dashboard',
# 3rd-party apps that oscar depends on
'treebeard',
'sorl.thumbnail',
'django_tables2',
]
def get_core_apps(overrides=None):
"""
Return a list of oscar's apps amended with any passed overrides
"""
if not overrides:
return OSCAR_CORE_APPS
def get_app_label(app_label, overrides):
pattern = app_label.replace('oscar.apps.', '')
for override in overrides:
if override.endswith(pattern):
if 'dashboard' in override and 'dashboard' not in pattern:
continue
return override
return app_label
apps = []
for app_label in OSCAR_CORE_APPS:
apps.append(get_app_label(app_label, overrides))
return apps
|
import os
# Use 'dev', 'beta', or 'final' as the 4th element to indicate release type.
VERSION = (1, 0, 1, 'machtfit', 66)
def get_short_version():
return '%s.%s' % (VERSION[0], VERSION[1])
def get_version():
return '{}.{}.{}-{}-{}'.format(*VERSION)
# Cheeky setting that allows each template to be accessible by two paths.
# Eg: the template 'oscar/templates/oscar/base.html' can be accessed via both
# 'base.html' and 'oscar/base.html'. This allows Oscar's templates to be
# extended by templates with the same filename
OSCAR_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'templates/oscar')
OSCAR_CORE_APPS = [
'oscar',
'oscar.apps.checkout',
'oscar.apps.address',
'oscar.apps.shipping',
'oscar.apps.catalogue',
'oscar.apps.partner',
'oscar.apps.basket',
'oscar.apps.payment',
'oscar.apps.offer',
'oscar.apps.order',
'oscar.apps.customer',
'oscar.apps.dashboard',
# 3rd-party apps that oscar depends on
'treebeard',
'sorl.thumbnail',
'django_tables2',
]
def get_core_apps(overrides=None):
"""
Return a list of oscar's apps amended with any passed overrides
"""
if not overrides:
return OSCAR_CORE_APPS
def get_app_label(app_label, overrides):
pattern = app_label.replace('oscar.apps.', '')
for override in overrides:
if override.endswith(pattern):
if 'dashboard' in override and 'dashboard' not in pattern:
continue
return override
return app_label
apps = []
for app_label in OSCAR_CORE_APPS:
apps.append(get_app_label(app_label, overrides))
return apps
|
bsd-3-clause
|
Python
|
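For illustration, a hypothetical settings fragment using get_core_apps() as defined above; 'myproject.basket' is an invented app label, and the oscar package is assumed importable.
# Hypothetical settings.py fragment using the override mechanism above.
from oscar import get_core_apps  # assumes the oscar package above is on the path

INSTALLED_APPS = [
    'django.contrib.contenttypes',
    # ... project apps ...
] + get_core_apps(overrides=['myproject.basket'])
# 'oscar.apps.basket' is replaced by 'myproject.basket'; every other entry
# of OSCAR_CORE_APPS is returned unchanged.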
d9c2bb2de79db80bc94509cb6a23de7f85e6e899
|
update tests
|
dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy
|
tests/test_pecanstreet_dataset_adapter.py
|
tests/test_pecanstreet_dataset_adapter.py
|
import sys
sys.path.append('../')
from disaggregator import PecanStreetDatasetAdapter
import unittest
class PecanStreetDatasetAdapterTestCase(unittest.TestCase):
def setUp(self):
db_url = "postgresql://USERNAME:[email protected]:5432/postgres"
self.psda = PecanStreetDatasetAdapter(db_url)
def test_get_table_names(self):
s_tables = self.psda.get_table_names('shared')
c_tables = self.psda.get_table_names('curated')
r_tables = self.psda.get_table_names('raw')
self.assertIn('group1_disaggregated_2012_12', c_tables,
'curated schema has correct tables')
self.assertIn('egauge_15min_2013', r_tables,
'raw schema has correct tables')
self.assertIn('validated_01_2014', s_tables,
'shared schema has correct tables')
def test_table_metadata(self):
ids,cols = self.psda.get_table_metadata('shared','validated_01_2014')
self.assertIn(744,ids,'shared table 01 2014 has dataid 744')
self.assertIn('use',cols,'shared table 01 2014 has column "use"')
self.assertIn('air1',cols,'shared table 01 2014 has column "air1"')
pass
def test_get_month_traces(self):
# traces = self.pdsa.get_month_traces('shared','validated_01_2014')
# trace = p.get_month_traces_wo_time_align('shared',str(tables[0]),i[0])
pass
fast = unittest.TestSuite()
if __name__ == '__main__':
unittest.main()
|
import sys
sys.path.append('../')
from disaggregator import PecanStreetDatasetAdapter
import unittest
class PecanStreetDatasetAdapterTestCase(unittest.TestCase):
def setUp(self):
db_url = "postgresql://USERNAME:[email protected]:5432/postgres"
self.psda = PecanStreetDatasetAdapter(db_url)
def test_get_table_names(self):
s_tables = self.psda.get_table_names('shared')
c_tables = self.psda.get_table_names('curated')
r_tables = self.psda.get_table_names('raw')
self.assertIn('group1_disaggregated_2012_12', c_tables,
'curated schema has correct tables')
self.assertIn('egauge_15min_2013', r_tables,
'raw schema has correct tables')
self.assertIn('validated_01_2014', s_tables,
'shared schema has correct tables')
def test_table_metadata(self):
ids,cols = self.psda.get_table_metadata('shared','validated_01_2014')
self.assertIn(744,ids,'shared table 01 2014 has dataid 744')
self.assertIn('use',cols,'shared table 01 2014 has column "use"')
self.assertIn('air1',cols,'shared table 01 2014 has column "air1"')
pass
def test_get_month_traces(self):
# traces = self.pdsa.get_month_traces('shared','validated_01_2014')
# trace = p.get_month_traces_wo_time_align('shared',str(tables[0]),i[0])
pass
if __name__ == '__main__':
unittest.main()
|
mit
|
Python
|
52ebe157585019c9be01b22638fff924ba328892
|
Increase delay (to fix tests that are failing randomly on travis but are always passing on my local machine)
|
pyQode/pyqode.python,zwadar/pyqode.python,mmolero/pyqode.python,pyQode/pyqode.python
|
test/test_modes/test_goto_assignments.py
|
test/test_modes/test_goto_assignments.py
|
"""
Test the autocomplete mode
"""
from pyqode.core.api import TextHelper
from pyqode.qt import QtCore, QtWidgets
from pyqode.qt.QtTest import QTest
from pyqode.python import modes as pymodes
from test.helpers import editor_open
def get_mode(editor):
return editor.modes.get(pymodes.GoToAssignmentsMode)
@editor_open(__file__)
def test_enabled(editor):
mode = get_mode(editor)
assert mode.enabled
mode.enabled = False
mode.enabled = True
@editor_open(__file__)
def test_goto_variable(editor):
editor.clear()
code = "a = 15\nprint(a)"
editor.setPlainText(code)
mode = get_mode(editor)
TextHelper(editor).goto_line(2, len('print(a)') - 2)
mode.request_goto()
QTest.qWait(5000)
assert TextHelper(editor).current_line_nbr() == 0
out = False
def _on_out_of_doc(*args):
global out
out = True
@editor_open(__file__)
def test_goto_out_of_doc(editor):
global out
out = False
editor.clear()
code = "import logging\nlogging.basicConfig()"
editor.setPlainText(code)
mode = get_mode(editor)
TextHelper(editor).goto_line(1, len('logging.basicConfig()') - 4)
mode.out_of_doc.connect(_on_out_of_doc)
assert out is False
mode.request_goto()
QTest.qWait(5000)
assert out is True
flg_multi = False
def accept_dlg():
global flg_multi
flg_multi = True
widgets = QtWidgets.QApplication.instance().topLevelWidgets()
for w in widgets:
if isinstance(w, QtWidgets.QDialog):
QTest.keyPress(w, QtCore.Qt.Key_Tab)
QTest.keyPress(w, QtCore.Qt.Key_Tab)
QTest.keyPress(w, QtCore.Qt.Key_Return)
@editor_open(__file__)
def test_multiple_results(editor):
global flg_multi
editor.clear()
code = "import os\nos.path.abspath('..')"
editor.setPlainText(code)
mode = get_mode(editor)
TextHelper(editor).goto_line(1, 4)
QTest.qWait(1000)
mode.request_goto()
assert flg_multi is False
QtCore.QTimer.singleShot(1000, accept_dlg)
QTest.qWait(1000)
assert flg_multi is True
@editor_open(__file__)
def test_make_unique(editor):
seq = ['a', 'b', 'c', 'a']
mode = get_mode(editor)
new_seq = mode._unique(seq)
assert len(new_seq) == len(seq) - 1
|
"""
Test the autocomplete mode
"""
from pyqode.core.api import TextHelper
from pyqode.qt import QtCore, QtWidgets
from pyqode.qt.QtTest import QTest
from pyqode.python import modes as pymodes
from test.helpers import editor_open
def get_mode(editor):
return editor.modes.get(pymodes.GoToAssignmentsMode)
@editor_open(__file__)
def test_enabled(editor):
mode = get_mode(editor)
assert mode.enabled
mode.enabled = False
mode.enabled = True
@editor_open(__file__)
def test_goto_variable(editor):
editor.clear()
code = "a = 15\nprint(a)"
editor.setPlainText(code)
mode = get_mode(editor)
TextHelper(editor).goto_line(2, len('print(a)') - 2)
mode.request_goto()
QTest.qWait(1000)
assert TextHelper(editor).current_line_nbr() == 0
out = False
def _on_out_of_doc(*args):
global out
out = True
@editor_open(__file__)
def test_goto_out_of_doc(editor):
global out
out = False
editor.clear()
code = "import logging\nlogging.basicConfig()"
editor.setPlainText(code)
mode = get_mode(editor)
TextHelper(editor).goto_line(1, len('logging.basicConfig()') - 4)
mode.out_of_doc.connect(_on_out_of_doc)
assert out is False
mode.request_goto()
QTest.qWait(1000)
assert out is True
flg_multi = False
def accept_dlg():
global flg_multi
flg_multi = True
widgets = QtWidgets.QApplication.instance().topLevelWidgets()
for w in widgets:
if isinstance(w, QtWidgets.QDialog):
QTest.keyPress(w, QtCore.Qt.Key_Tab)
QTest.keyPress(w, QtCore.Qt.Key_Tab)
QTest.keyPress(w, QtCore.Qt.Key_Return)
@editor_open(__file__)
def test_multiple_results(editor):
global flg_multi
editor.clear()
code = "import os\nos.path.abspath('..')"
editor.setPlainText(code)
mode = get_mode(editor)
TextHelper(editor).goto_line(1, 4)
QTest.qWait(1000)
mode.request_goto()
assert flg_multi is False
QtCore.QTimer.singleShot(1000, accept_dlg)
QTest.qWait(1000)
assert flg_multi is True
@editor_open(__file__)
def test_make_unique(editor):
seq = ['a', 'b', 'c', 'a']
mode = get_mode(editor)
new_seq = mode._unique(seq)
assert len(new_seq) == len(seq) - 1
|
mit
|
Python
|
48e280177123902001e4ff6fb3e178190b435054
|
fix test for Exscript.workqueue.MainLoop.
|
maximumG/exscript,knipknap/exscript,knipknap/exscript,maximumG/exscript
|
tests/Exscript/workqueue/MainLoopTest.py
|
tests/Exscript/workqueue/MainLoopTest.py
|
import sys, unittest, re, os.path, threading
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..', 'src'))
from Exscript.workqueue import MainLoop
from Exscript.workqueue.Job import ProcessJob
class MainLoopTest(unittest.TestCase):
CORRELATE = MainLoop
def setUp(self):
pass
def testMainLoop(self):
lock = threading.Lock()
data = {'sum': 0, 'randsum': 0}
ml = MainLoop.MainLoop(ProcessJob)
nop = lambda x: None
for i in range(12345):
ml.enqueue(nop, name = 'test', times = 1, data = None)
self.assertEqual(0, data['sum'])
# Note: Further testing is done in WorkQueueTest.py
def suite():
return unittest.TestLoader().loadTestsFromTestCase(MainLoopTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity = 2).run(suite())
|
import sys, unittest, re, os.path, threading
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..', 'src'))
from Exscript.workqueue import MainLoop
class MainLoopTest(unittest.TestCase):
CORRELATE = MainLoop
def setUp(self):
pass
def testMainLoop(self):
lock = threading.Lock()
data = {'sum': 0, 'randsum': 0}
ml = MainLoop.MainLoop()
nop = lambda x: None
for i in range(12345):
ml.enqueue(nop, name = 'test', times = 1, data = None)
self.assertEqual(0, data['sum'])
# Note: Further testing is done in WorkQueueTest.py
def suite():
return unittest.TestLoader().loadTestsFromTestCase(MainLoopTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity = 2).run(suite())
|
mit
|
Python
|
523216bbf6f21757651e41ac307bc296041b7963
|
load nonlinux_config if the platform is not linux
|
simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote
|
tests/docker/test_async_docker_client.py
|
tests/docker/test_async_docker_client.py
|
import os
import sys
import warnings
from tornado.testing import AsyncTestCase, gen_test
from remoteappmanager.docker.async_docker_client import AsyncDockerClient
from tests.docker.config import nonlinux_config
from tests import utils
class TestAsyncDockerClient(AsyncTestCase):
def setUp(self):
super().setUp()
# Due to a python requests design choice, we receive a warning about
        # leaking connections. This is expected and pretty much outside our
        # control, but it can be annoying in tests, hence we suppress the
# warning. See issue simphony-remote/10
warnings.filterwarnings(action="ignore",
message="unclosed",
category=ResourceWarning)
def tearDown(self):
super().tearDown()
warnings.filterwarnings(action="default",
message="unclosed",
category=ResourceWarning)
@gen_test
def test_info(self):
client = AsyncDockerClient()
client.client = utils.mock_docker_client()
response = yield client.info()
# Test contents of response
self.assertIsInstance(response, dict)
self.assertIn("ID", response)
@gen_test
def test_real_connection(self):
config = None
if "DOCKER_HOST" not in os.environ and sys.platform != 'linux':
config = nonlinux_config()
if not os.path.exists(config.tls_cert):
self.skipTest("Certificates are not available. Skipping.")
client = AsyncDockerClient(config=config)
response = yield client.info()
# Test contents of response
self.assertIsInstance(response, dict)
self.assertIn("ID", response)
|
import os
import warnings
from tornado.testing import AsyncTestCase, gen_test
from remoteappmanager.docker.async_docker_client import AsyncDockerClient
from tests.docker.config import nonlinux_config
from tests import utils
class TestAsyncDockerClient(AsyncTestCase):
def setUp(self):
super().setUp()
# Due to a python requests design choice, we receive a warning about
        # leaking connections. This is expected and pretty much outside our
        # control, but it can be annoying in tests, hence we suppress the
# warning. See issue simphony-remote/10
warnings.filterwarnings(action="ignore",
message="unclosed",
category=ResourceWarning)
def tearDown(self):
super().tearDown()
warnings.filterwarnings(action="default",
message="unclosed",
category=ResourceWarning)
@gen_test
def test_info(self):
client = AsyncDockerClient()
client.client = utils.mock_docker_client()
response = yield client.info()
# Test contents of response
self.assertIsInstance(response, dict)
self.assertIn("ID", response)
@gen_test
def test_real_connection(self):
config = None
if "DOCKER_HOST" not in os.environ:
config = nonlinux_config()
if not os.path.exists(config.tls_cert):
self.skipTest("Certificates are not available. Skipping.")
client = AsyncDockerClient(config=config)
response = yield client.info()
# Test contents of response
self.assertIsInstance(response, dict)
self.assertIn("ID", response)
|
bsd-3-clause
|
Python
|
6bec22cd51288c94dff40cf0c973b975538040d5
|
Increase timeout for test_long_running_job test
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
tests/integration/minion/test_timeout.py
|
tests/integration/minion/test_timeout.py
|
# -*- coding: utf-8 -*-
'''
Tests for various minion timeouts
'''
# Import Python libs
from __future__ import absolute_import
import os
import sys
import salt.utils.platform
# Import Salt Testing libs
from tests.support.case import ShellCase
class MinionTimeoutTestCase(ShellCase):
'''
Test minion timing functions
'''
def test_long_running_job(self):
'''
Test that we will wait longer than the job timeout for a minion to
return.
'''
# Launch the command
sleep_length = 30
if salt.utils.platform.is_windows():
popen_kwargs = {'env': dict(os.environ, PYTHONPATH=';'.join(sys.path))}
else:
popen_kwargs = None
ret = self.run_salt(
'minion test.sleep {0}'.format(sleep_length),
timeout=90,
catch_stderr=True,
popen_kwargs=popen_kwargs,
)
self.assertTrue(isinstance(ret[0], list), 'Return is not a list. Minion'
' may have returned error: {0}'.format(ret))
self.assertEqual(len(ret[0]), 2, 'Standard out wrong length {}'.format(ret))
self.assertTrue('True' in ret[0][1], 'Minion did not return True after '
'{0} seconds. ret={1}'.format(sleep_length, ret))
|
# -*- coding: utf-8 -*-
'''
Tests for various minion timeouts
'''
# Import Python libs
from __future__ import absolute_import
import os
import sys
import salt.utils.platform
# Import Salt Testing libs
from tests.support.case import ShellCase
class MinionTimeoutTestCase(ShellCase):
'''
Test minion timing functions
'''
def test_long_running_job(self):
'''
Test that we will wait longer than the job timeout for a minion to
return.
'''
# Launch the command
sleep_length = 30
if salt.utils.platform.is_windows():
popen_kwargs = {'env': dict(os.environ, PYTHONPATH=';'.join(sys.path))}
else:
popen_kwargs = None
ret = self.run_salt(
'minion test.sleep {0}'.format(sleep_length),
timeout=45,
catch_stderr=True,
popen_kwargs=popen_kwargs,
)
self.assertTrue(isinstance(ret[0], list), 'Return is not a list. Minion'
' may have returned error: {0}'.format(ret))
self.assertEqual(len(ret[0]), 2, 'Standard out wrong length {}'.format(ret))
self.assertTrue('True' in ret[0][1], 'Minion did not return True after '
'{0} seconds. ret={1}'.format(sleep_length, ret))
|
apache-2.0
|
Python
|
0bb2ebc52e720a3d693ca14f3621fd710ea36d4b
|
use make_result_iq
|
community-ssu/telepathy-gabble,community-ssu/telepathy-gabble,jku/telepathy-gabble,community-ssu/telepathy-gabble,mlundblad/telepathy-gabble,community-ssu/telepathy-gabble,jku/telepathy-gabble,Ziemin/telepathy-gabble,Ziemin/telepathy-gabble,jku/telepathy-gabble,mlundblad/telepathy-gabble,Ziemin/telepathy-gabble,Ziemin/telepathy-gabble,mlundblad/telepathy-gabble
|
tests/twisted/vcard/test-avatar-async.py
|
tests/twisted/vcard/test-avatar-async.py
|
"""
Test support for retrieving avatars asynchronously using RequestAvatars.
"""
import base64
import hashlib
from servicetest import EventPattern
from gabbletest import exec_test, acknowledge_iq, make_result_iq
def test(q, bus, conn, stream):
conn.Connect()
_, iq_event = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged', args=[0, 1]),
EventPattern('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard'))
acknowledge_iq(stream, iq_event.stanza)
handle = conn.RequestHandles(1, ['[email protected]'])[0]
conn.Avatars.RequestAvatars([handle])
iq_event = q.expect('stream-iq', to='[email protected]', query_ns='vcard-temp',
query_name='vCard')
iq = make_result_iq(stream, iq_event.stanza)
vcard = iq.firstChildElement()
photo = vcard.addElement('PHOTO')
photo.addElement('TYPE', content='image/png')
photo.addElement('BINVAL', content=base64.b64encode('hello'))
stream.send(iq)
event = q.expect('dbus-signal', signal='AvatarRetrieved')
assert event.args[0] == handle
assert event.args[1] == hashlib.sha1('hello').hexdigest()
assert event.args[2] == 'hello'
assert event.args[3] == 'image/png'
# Request again; this request should be satisfied from the avatar cache.
conn.Avatars.RequestAvatars([handle])
event = q.demand('dbus-signal', signal='AvatarRetrieved')
assert event.args[0] == handle
assert event.args[1] == hashlib.sha1('hello').hexdigest()
assert event.args[2] == 'hello'
assert event.args[3] == 'image/png'
conn.Disconnect()
q.expect('dbus-signal', signal='StatusChanged', args=[2, 1])
if __name__ == '__main__':
exec_test(test)
|
"""
Test support for retrieving avatars asynchronously using RequestAvatars.
"""
import base64
import hashlib
from servicetest import EventPattern
from gabbletest import exec_test, acknowledge_iq
def test(q, bus, conn, stream):
conn.Connect()
_, iq_event = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged', args=[0, 1]),
EventPattern('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard'))
acknowledge_iq(stream, iq_event.stanza)
handle = conn.RequestHandles(1, ['[email protected]'])[0]
conn.Avatars.RequestAvatars([handle])
iq_event = q.expect('stream-iq', to='[email protected]', query_ns='vcard-temp',
query_name='vCard')
iq = iq_event.stanza
vcard = iq_event.query
photo = vcard.addElement('PHOTO')
photo.addElement('TYPE', content='image/png')
photo.addElement('BINVAL', content=base64.b64encode('hello'))
iq['type'] = 'result'
stream.send(iq)
event = q.expect('dbus-signal', signal='AvatarRetrieved')
assert event.args[0] == handle
assert event.args[1] == hashlib.sha1('hello').hexdigest()
assert event.args[2] == 'hello'
assert event.args[3] == 'image/png'
# Request again; this request should be satisfied from the avatar cache.
conn.Avatars.RequestAvatars([handle])
event = q.demand('dbus-signal', signal='AvatarRetrieved')
assert event.args[0] == handle
assert event.args[1] == hashlib.sha1('hello').hexdigest()
assert event.args[2] == 'hello'
assert event.args[3] == 'image/png'
conn.Disconnect()
q.expect('dbus-signal', signal='StatusChanged', args=[2, 1])
if __name__ == '__main__':
exec_test(test)
|
lgpl-2.1
|
Python
|
cf4d8318557d971cee1869fe8cbac82cc6316020
|
Change expected exception
|
plotly/python-api,plotly/plotly.py,plotly/plotly.py,plotly/python-api,plotly/plotly.py,plotly/python-api
|
plotly/tests/test_core/test_file/test_file.py
|
plotly/tests/test_core/test_file/test_file.py
|
"""
test_meta:
==========
A module intended for use with Nose.
"""
import random
import string
import requests
from unittest import TestCase
from nose.plugins.attrib import attr
import plotly.plotly as py
from plotly.exceptions import PlotlyRequestError
@attr('slow')
class FolderAPITestCase(TestCase):
def setUp(self):
py.sign_in('PythonTest', '9v9f20pext')
def _random_filename(self):
choice_chars = string.ascii_letters + string.digits
random_chars = [random.choice(choice_chars) for _ in range(10)]
unique_filename = 'Valid Folder ' + ''.join(random_chars)
return unique_filename
def test_create_folder(self):
try:
py.file_ops.mkdirs(self._random_filename())
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_create_nested_folders(self):
first_folder = self._random_filename()
nested_folder = '{0}/{1}'.format(first_folder, self._random_filename())
try:
py.file_ops.mkdirs(nested_folder)
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_duplicate_folders(self):
first_folder = self._random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except PlotlyRequestError as e:
self.assertTrue(400 <= e.status_code < 500)
else:
self.fail('Expected this to fail!')
|
"""
test_meta:
==========
A module intended for use with Nose.
"""
import random
import string
import requests
from unittest import TestCase
from nose.plugins.attrib import attr
import plotly.plotly as py
from plotly.exceptions import PlotlyRequestError
@attr('slow')
class FolderAPITestCase(TestCase):
def setUp(self):
py.sign_in('PythonTest', '9v9f20pext')
def _random_filename(self):
choice_chars = string.ascii_letters + string.digits
random_chars = [random.choice(choice_chars) for _ in range(10)]
unique_filename = 'Valid Folder ' + ''.join(random_chars)
return unique_filename
def test_create_folder(self):
try:
py.file_ops.mkdirs(self._random_filename())
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_create_nested_folders(self):
first_folder = self._random_filename()
nested_folder = '{0}/{1}'.format(first_folder, self._random_filename())
try:
py.file_ops.mkdirs(nested_folder)
except PlotlyRequestError as e:
self.fail('Expected this *not* to fail! Status: {}'
.format(e.status_code))
def test_duplicate_folders(self):
first_folder = self._random_filename()
py.file_ops.mkdirs(first_folder)
try:
py.file_ops.mkdirs(first_folder)
except requests.exceptions.RequestException as e:
self.assertTrue(400 <= e.response.status_code < 500)
else:
self.fail('Expected this to fail!')
|
mit
|
Python
|
6cfc94d8a03439c55808090aa5e3a4f35c288887
|
Use assert_allclose so we can see the appveyor failure
|
yuxiang-zhou/menpodetect,jabooth/menpodetect,yuxiang-zhou/menpodetect,jabooth/menpodetect
|
menpodetect/tests/opencv_test.py
|
menpodetect/tests/opencv_test.py
|
from numpy.testing import assert_allclose
from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
assert_allclose(len(pcs), 1)
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
|
from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
|
bsd-3-clause
|
Python
|
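For illustration, a standalone demonstration (independent of menpodetect) of why assert_allclose gives more useful failures than a bare assert.
# Standalone illustration: a bare assert only raises "AssertionError", while
# assert_allclose reports the actual and desired values in its message.
from numpy.testing import assert_allclose

assert_allclose(1, 1)      # passes silently, like `assert 1 == 1`
try:
    assert_allclose(2, 1)  # fails, printing actual (2) vs desired (1)
except AssertionError as exc:
    print(exc)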
b8d0a7cbac6ab2415a1d059a1f68428e9312f3cb
|
Make our error page handlers work on Django 2.0 (#969)
|
DMOJ/site,DMOJ/site,DMOJ/site,DMOJ/site
|
judge/views/error.py
|
judge/views/error.py
|
import traceback
from django.shortcuts import render
from django.utils.translation import gettext as _
def error(request, context, status):
return render(request, 'error.html', context=context, status=status)
def error404(request, exception=None):
# TODO: "panic: go back"
return render(request, 'generic-message.html', {
'title': _('404 error'),
'message': _('Could not find page "%s"') % request.path
}, status=404)
def error403(request, exception=None):
return error(request, {'id': 'unauthorized_access',
'description': _('no permission for %s') % request.path,
'code': 403}, 403)
def error500(request):
return error(request, {'id': 'invalid_state',
'description': _('corrupt page %s') % request.path,
'traceback': traceback.format_exc(),
'code': 500}, 500)
|
import traceback
from django.shortcuts import render
from django.utils.translation import gettext as _
def error(request, context, status):
return render(request, 'error.html', context=context, status=status)
def error404(request):
# TODO: "panic: go back"
return render(request, 'generic-message.html', {
'title': _('404 error'),
'message': _('Could not find page "%s"') % request.path
}, status=404)
def error403(request):
return error(request, {'id': 'unauthorized_access',
'description': _('no permission for %s') % request.path,
'code': 403}, 403)
def error500(request):
return error(request, {'id': 'invalid_state',
'description': _('corrupt page %s') % request.path,
'traceback': traceback.format_exc(),
'code': 500}, 500)
|
agpl-3.0
|
Python
|
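For context, handlers like the ones above are registered through module-level names in the root URLconf; the fragment below is a hypothetical sketch, not DMOJ's actual urls.py. Since Django 2.0 the 404/403 handlers are called with an exception argument, which is why the signatures above gained exception=None.
# Hypothetical root urls.py fragment registering the error views above.
handler404 = 'judge.views.error.error404'
handler403 = 'judge.views.error.error403'
handler500 = 'judge.views.error.error500'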
4e92dabe65416a3a751a0b38e75512b6daa1ba38
|
Remove useless imports
|
gdetrez/fscons-ticketshop,gdetrez/fscons-ticketshop,gdetrez/fscons-ticketshop
|
ticketshop/ticketapp/tests/test_views.py
|
ticketshop/ticketapp/tests/test_views.py
|
from django.test import Client
from django.contrib.auth.models import User
from django.test import TestCase
from ..models import TicketType, TicketPurchase
class TicketPurchaseViewTest(TestCase):
def test_getForm(self):
"""
Test that we can get the purchase form
"""
self.assertContains(self.client.get("/"), "name")
class TestConfirmationView(TestCase):
def setUp(self):
        # It appears that client.session only works
        # for non-anonymous users: set up a test user
User.objects.create_user('user', '[email protected]', 'password')
# Login
self.client.login(username='user', password='password')
# Create data
tt = TicketType.objects.create( name = "Standard ticket", price = 100 )
self.purchase = TicketPurchase.objects.create(
name = "Bruce Wayne",
email = "[email protected]" )
self.purchase.ticket_set.create( name = "Batman", ticket_type = tt )
self.purchase.ticket_set.create( name = "Catwoman", ticket_type = tt )
self.invoice_id = self.purchase.invoice_id
def test_itRedirectToTheHomePageWhenThereIsNoSessionData(self):
"""
        Test that /confirm/ redirects to / when the session doesn't
contain any purchase data
"""
self.assertRedirects(self.client.get('/confirm/'), '/')
def test_itDisplaysTheContactName(self):
"""
Test that the view displays the contact name
"""
session = self.client.session
session['invoice_id'] = self.invoice_id
session.save()
self.assertContains(self.client.get('/confirm/'), "Bruce Wayne" )
self.assertContains(self.client.get('/confirm/'), "[email protected]" )
self.assertContains(self.client.get('/confirm/'), "[email protected]" )
def test_itDisplaysTheTotal(self):
"""
Test that the view displays the total amount
"""
session = self.client.session
session['invoice_id'] = self.invoice_id
session.save()
self.assertContains(self.client.get('/confirm/'), "<b>Total:</b> 200 SEK" )
class TestPaypalView(TestCase):
def test_2(self):
self.client.get("/paypal/")
|
from django.test import Client
from django.contrib.auth.models import User
from django.contrib.messages.storage.base import Message
from django.contrib.messages.constants import ERROR
from django.test import TestCase
from ..models import TicketType, Ticket, TicketPurchase, Coupon
class TicketPurchaseViewTest(TestCase):
def test_getForm(self):
"""
Test that we can get the purchase form
"""
self.assertContains(self.client.get("/"), "name")
class TestConfirmationView(TestCase):
def setUp(self):
        # It appears that client.session only works
        # for non-anonymous users: set up a test user
User.objects.create_user('user', '[email protected]', 'password')
# Login
self.client.login(username='user', password='password')
# Create data
tt = TicketType.objects.create( name = "Standard ticket", price = 100 )
self.purchase = TicketPurchase.objects.create(
name = "Bruce Wayne",
email = "[email protected]" )
self.purchase.ticket_set.create( name = "Batman", ticket_type = tt )
self.purchase.ticket_set.create( name = "Catwoman", ticket_type = tt )
self.invoice_id = self.purchase.invoice_id
def test_itRedirectToTheHomePageWhenThereIsNoSessionData(self):
"""
        Test that /confirm/ redirects to / when the session doesn't
contain any purchase data
"""
self.assertRedirects(self.client.get('/confirm/'), '/')
def test_itDisplaysTheContactName(self):
"""
Test that the view displays the contact name
"""
session = self.client.session
session['invoice_id'] = self.invoice_id
session.save()
self.assertContains(self.client.get('/confirm/'), "Bruce Wayne" )
self.assertContains(self.client.get('/confirm/'), "[email protected]" )
self.assertContains(self.client.get('/confirm/'), "[email protected]" )
def test_itDisplaysTheTotal(self):
"""
Test that the view displays the total amount
"""
session = self.client.session
session['invoice_id'] = self.invoice_id
session.save()
self.assertContains(self.client.get('/confirm/'), "<b>Total:</b> 200 SEK" )
class TestPaypalView(TestCase):
def test_2(self):
self.client.get("/paypal/")
|
mit
|
Python
|
1f98e497136ce3d9da7e63a6dc7c3f67fedf50b5
|
Save the observation if the form was valid.
|
zsiciarz/variablestars.net,zsiciarz/variablestars.net,zsiciarz/variablestars.net
|
observations/views.py
|
observations/views.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import messages
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import FormView
from braces.views import LoginRequiredMixin
from .forms import ObservationForm, BatchUploadForm
class AddObservationView(FormView):
"""
Add a single observation.
"""
form_class = ObservationForm
template_name = "observations/add_observation.html"
success_url = reverse_lazy('observations:add_observation')
def form_valid(self, form):
observation = form.save(commit=False)
observation.observer = self.request.observer
observation.save()
return super(AddObservationView, self).form_valid(form)
class UploadObservationsView(LoginRequiredMixin, FormView):
"""
Upload a file of observations.
"""
form_class = BatchUploadForm
template_name = "observations/upload_observations.html"
success_url = reverse_lazy('observations:upload_observations')
def form_valid(self, form):
form.process_file()
messages.success(self.request, _("File uploaded successfully!"))
return super(UploadObservationsView, self).form_valid(form)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import messages
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import FormView
from braces.views import LoginRequiredMixin
from .forms import ObservationForm, BatchUploadForm
class AddObservationView(FormView):
"""
Add a single observation.
"""
form_class = ObservationForm
template_name = "observations/add_observation.html"
success_url = reverse_lazy('observations:add_observation')
class UploadObservationsView(LoginRequiredMixin, FormView):
"""
Upload a file of observations.
"""
form_class = BatchUploadForm
template_name = "observations/upload_observations.html"
success_url = reverse_lazy('observations:upload_observations')
def form_valid(self, form):
form.process_file()
messages.success(self.request, _("File uploaded successfully!"))
return super(UploadObservationsView, self).form_valid(form)
|
mit
|
Python
|
6353dd8caa3656b8c37280bcccd56cfaa78ff67a
|
Add API for making authenticated API requests
|
valohai/valohai-cli
|
valohai_cli/api.py
|
valohai_cli/api.py
|
import platform
from urllib.parse import urljoin, urlparse
import requests
from click.globals import get_current_context
from requests.auth import AuthBase
from valohai_cli import __version__ as VERSION
from valohai_cli.exceptions import APIError, ConfigurationError
from valohai_cli.settings import settings
from valohai_cli.utils import force_text
class TokenAuth(AuthBase):
def __init__(self, netloc, token):
super(TokenAuth, self).__init__()
self.netloc = netloc
self.token = token
def __call__(self, request):
if not request.headers.get('Authorization') and urlparse(request.url).netloc == self.netloc:
if self.token:
request.headers['Authorization'] = 'Token %s' % self.token
return request
class APISession(requests.Session):
def __init__(self, base_url, token=None):
super(APISession, self).__init__()
self.base_url = base_url
self.base_netloc = urlparse(self.base_url).netloc
self.auth = TokenAuth(self.base_netloc, token)
self.headers['Accept'] = 'application/json'
self.headers['User-Agent'] = 'valohai-cli/%s (%s)' % (
VERSION,
';'.join(platform.uname()),
)
def prepare_request(self, request):
url_netloc = urlparse(request.url).netloc
if not url_netloc:
request.url = urljoin(self.base_url, request.url)
return super(APISession, self).prepare_request(request)
def request(self, method, url, **kwargs):
handle_errors = bool(kwargs.pop('handle_errors', True))
resp = super(APISession, self).request(method, url, **kwargs)
if handle_errors and resp.status_code >= 400:
raise APIError(resp)
return resp
def _get_current_api_session():
"""
Get an API session, either from the Click context cache, or a new one from the config.
:return: API session
:rtype: APISession
"""
host = settings.get('host')
token = settings.get('token')
if not (host and token):
raise ConfigurationError('You\'re not logged in; try `vh login` first.')
ctx = get_current_context(silent=True) or object()
cache_key = force_text('_api_session_%s_%s' % (host, token))
session = getattr(ctx, cache_key, None)
if not session:
session = APISession(host, token)
setattr(ctx, cache_key, session)
return session
def request(method, url, **kwargs):
"""
Make an authenticated API request.
See the documentation for `requests.Session.request()`.
:param method: HTTP Method
:param url: URL
:param kwargs: Other kwargs, see `requests.Session.request()`
:return: requests.Response
:rtype: requests.Response
"""
session = _get_current_api_session()
return session.request(method, url, **kwargs)
|
import platform
from urllib.parse import urljoin, urlparse
import requests
from requests.auth import AuthBase
from valohai_cli import __version__ as VERSION
from valohai_cli.exceptions import APIError, ConfigurationError
from valohai_cli.settings import settings
class TokenAuth(AuthBase):
def __init__(self, netloc, token):
super(TokenAuth, self).__init__()
self.netloc = netloc
self.token = token
def __call__(self, request):
if not request.headers.get('Authorization') and urlparse(request.url).netloc == self.netloc:
if self.token:
request.headers['Authorization'] = 'Token %s' % self.token
return request
class APISession(requests.Session):
def __init__(self, base_url, token=None):
super(APISession, self).__init__()
self.base_url = base_url
self.base_netloc = urlparse(self.base_url).netloc
self.auth = TokenAuth(self.base_netloc, token)
self.headers['Accept'] = 'application/json'
self.headers['User-Agent'] = 'valohai-cli/%s (%s)' % (
VERSION,
';'.join(platform.uname()),
)
def prepare_request(self, request):
url_netloc = urlparse(request.url).netloc
if not url_netloc:
request.url = urljoin(self.base_url, request.url)
return super(APISession, self).prepare_request(request)
def request(self, method, url, **kwargs):
handle_errors = bool(kwargs.pop('handle_errors', True))
resp = super(APISession, self).request(method, url, **kwargs)
if handle_errors and resp.status_code >= 400:
raise APIError(resp)
return resp
@classmethod
def from_settings(cls):
host = settings.get('host')
token = settings.get('token')
if not (host and token):
raise ConfigurationError('You\'re not logged in; try `vh login` first.')
return APISession(host, token)
|
mit
|
Python
|
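For illustration, a usage sketch for the request() helper above; the endpoint path is invented, and stored credentials from `vh login` are assumed.
# Hypothetical caller of request() from valohai_cli.api as defined above.
from valohai_cli.api import request

resp = request('get', '/api/v0/projects/')  # relative URL, joined onto the host
print(resp.json())
# Responses with status >= 400 raise APIError by default; pass
# handle_errors=False to inspect the raw response yourself.
raw = request('get', '/api/v0/projects/', handle_errors=False)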
1b2a1bb5f4c99f80c3664a40796939732e9fe91c
|
bump dev version
|
bndl/bndl,bndl/bndl
|
bndl/__init__.py
|
bndl/__init__.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging.config
import os.path
from bndl.util.conf import Config, String
from bndl.util.log import install_trace_logging
from bndl.util.objects import LazyObject
# Expose a global BNDL configuration
conf = LazyObject(Config)
# Configure Logging
logging_conf = String('logging.conf')
install_trace_logging()
logging.captureWarnings(True)
if os.path.exists(conf['bndl.logging_conf']):
logging.config.fileConfig(conf['bndl.logging_conf'], disable_existing_loggers=False)
# BNDL version info
__version_info__ = (0, 7, 0, 'dev2')
__version__ = '.'.join(map(str, __version_info__))
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging.config
import os.path
from bndl.util.conf import Config, String
from bndl.util.log import install_trace_logging
from bndl.util.objects import LazyObject
# Expose a global BNDL configuration
conf = LazyObject(Config)
# Configure Logging
logging_conf = String('logging.conf')
install_trace_logging()
logging.captureWarnings(True)
if os.path.exists(conf['bndl.logging_conf']):
logging.config.fileConfig(conf['bndl.logging_conf'], disable_existing_loggers=False)
# BNDL version info
__version_info__ = (0, 6, 0)
__version__ = '.'.join(map(str, __version_info__))
|
apache-2.0
|
Python
|
b159d28dc965e60843f2617b4ae40d6c04cd2604
|
Optimize sensitive areas API
|
makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek
|
geotrek/api/v2/views/sensitivity.py
|
geotrek/api/v2/views/sensitivity.py
|
from __future__ import unicode_literals
from django.conf import settings
from django.db.models import F, Case, When
from django_filters.rest_framework.backends import DjangoFilterBackend
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from geotrek.api.v2 import serializers as api_serializers, \
viewsets as api_viewsets
from geotrek.api.v2.functions import Transform, Buffer, GeometryType
from geotrek.sensitivity import models as sensitivity_models
from ..filters import GeotrekQueryParamsFilter, GeotrekInBBoxFilter, GeotrekSensitiveAreaFilter
class SensitiveAreaViewSet(api_viewsets.GeotrekViewset):
filter_backends = (
DjangoFilterBackend,
GeotrekQueryParamsFilter,
GeotrekInBBoxFilter,
GeotrekSensitiveAreaFilter,
)
serializer_class = api_serializers.SensitiveAreaListSerializer
serializer_detail_class = api_serializers.SensitiveAreaListSerializer
permission_classes = [IsAuthenticatedOrReadOnly]
authentication_classes = []
queryset = sensitivity_models.SensitiveArea.objects.existing() \
.filter(published=True) \
.select_related('species', 'structure') \
.prefetch_related('species__practices') \
.annotate(geom_type=GeometryType(F('geom'))) \
.annotate(geom2d_transformed=Case(
When(geom_type='POINT', then=Transform(Buffer(F('geom'), F('species__radius'), 4), settings.API_SRID)),
When(geom_type='POLYGON', then=Transform(F('geom'), settings.API_SRID))
))
|
from __future__ import unicode_literals
from django.conf import settings
from django.db.models import F, Case, When
from django_filters.rest_framework.backends import DjangoFilterBackend
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from geotrek.api.v2 import serializers as api_serializers, \
viewsets as api_viewsets
from geotrek.api.v2.functions import Transform, Buffer, GeometryType
from geotrek.sensitivity import models as sensitivity_models
from ..filters import GeotrekQueryParamsFilter, GeotrekInBBoxFilter, GeotrekSensitiveAreaFilter
class SensitiveAreaViewSet(api_viewsets.GeotrekViewset):
filter_backends = (
DjangoFilterBackend,
GeotrekQueryParamsFilter,
GeotrekInBBoxFilter,
GeotrekSensitiveAreaFilter,
)
serializer_class = api_serializers.SensitiveAreaListSerializer
serializer_detail_class = api_serializers.SensitiveAreaListSerializer
permission_classes = [IsAuthenticatedOrReadOnly]
authentication_classes = []
queryset = sensitivity_models.SensitiveArea.objects.existing() \
.filter(published=True) \
.prefetch_related('species') \
.annotate(geom_type=GeometryType(F('geom'))) \
.annotate(geom2d_transformed=Case(
When(geom_type='POINT', then=Transform(Buffer(F('geom'), F('species__radius'), 4), settings.API_SRID)),
When(geom_type='POLYGON', then=Transform(F('geom'), settings.API_SRID))
))
|
bsd-2-clause
|
Python
|
9433fa8970341cb2d024bceb0e23e93fbfb71393
|
Update python test
|
enjin/contracts
|
solidity/python/FormulaTestSale.py
|
solidity/python/FormulaTestSale.py
|
from sys import argv
from decimal import Decimal
from random import randrange
from Formula import calculateSaleReturn
def formulaTest(supply,reserve,ratio,amount):
fixed = Decimal(calculateSaleReturn(supply,reserve,ratio,amount))
real = Decimal(reserve)*(1-(1-Decimal(amount)/Decimal(supply))**(100/Decimal(ratio)))
if fixed > real:
error = []
error.append('error occurred on:')
error.append('supply = {}'.format(supply))
error.append('reserve = {}'.format(reserve))
error.append('ratio = {}'.format(ratio))
error.append('amount = {}'.format(amount))
error.append('fixed = {}'.format(fixed))
error.append('real = {}'.format(real))
raise BaseException('\n'.join(error))
return fixed/real
size = int(argv[1]) if len(argv) > 1 else 0
if size == 0:
size = input('How many test-cases would you like to execute? ')
worstAccuracy = 1
numOfFailures = 0
for n in xrange(size):
supply = randrange(2,10**26)
reserve = randrange(1,10**23)
ratio = randrange(1,99)
amount = randrange(1,supply)
try:
accuracy = formulaTest(supply,reserve,ratio,amount)
worstAccuracy = min(worstAccuracy,accuracy)
except Exception,error:
accuracy = 0
numOfFailures += 1
except BaseException,error:
print error
break
print 'Test #{}: accuracy = {:.12f}, worst accuracy = {:.12f}, num of failures = {}'.format(n,accuracy,worstAccuracy,numOfFailures)
|
from sys import argv
from decimal import Decimal
from random import randrange
from Formula import calculateSaleReturn
def formulaTest(supply,reserve,ratio,amount):
fixed = Decimal(calculateSaleReturn(supply,reserve,ratio,amount))
real = Decimal(reserve)*(1-(1-Decimal(amount)/Decimal(supply))**(100/Decimal(ratio)))
if fixed > real:
error = []
error.append('error occurred on:')
error.append('supply = {}'.format(supply))
error.append('reserve = {}'.format(reserve))
error.append('ratio = {}'.format(ratio))
error.append('amount = {}'.format(amount))
error.append('fixed = {}'.format(fixed))
error.append('real = {}'.format(real))
raise BaseException('\n'.join(error))
return fixed/real
size = int(argv[1]) if len(argv) > 1 else 0
if size == 0:
size = input('How many test-cases would you like to execute? ')
n = 0
worstAccuracy = 1
numOfFailures = 0
while n < size: # avoid creating a large range in memory
supply = randrange(2,10**26)
reserve = randrange(1,10**23)
ratio = randrange(1,99)
amount = randrange(1,supply)
try:
accuracy = formulaTest(supply,reserve,ratio,amount)
worstAccuracy = min(worstAccuracy,accuracy)
except Exception,error:
accuracy = 0
numOfFailures += 1
except BaseException,error:
print error
break
print 'Test #{}: accuracy = {:.12f}, worst accuracy = {:.12f}, num of failures = {}'.format(n,accuracy,worstAccuracy,numOfFailures)
n += 1
|
apache-2.0
|
Python
|
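For illustration, a worked instance of the closed-form sale return that formulaTest checks above; written in Python 3 for clarity, whereas the test itself is Python 2.
# Worked example of: return = reserve * (1 - (1 - amount/supply) ** (100/ratio))
from decimal import Decimal

supply, reserve, ratio, amount = 1000, 250, 50, 100
real = Decimal(reserve) * (1 - (1 - Decimal(amount) / Decimal(supply)) ** (100 / Decimal(ratio)))
# (1 - 0.1) ** (100/50) = 0.9 ** 2 = 0.81, so the return is 250 * 0.19 = 47.5
print(real)  # Decimal('47.50')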
e364bdf7723ca45ac1000eda13a76cf1b19f0ad8
|
Remove a debug print
|
ipa-led/airbus_coop,ipa-led/airbus_coop,ipa-led/airbus_coop,ipa-led/airbus_coop
|
plugins/plugin_node_manager/src/plugin_node_manager/launch_item.py
|
plugins/plugin_node_manager/src/plugin_node_manager/launch_item.py
|
#!/usr/bin/env python
################################################################################
#
# Copyright Airbus Group SAS 2015
# All rights reserved.
#
# File Name : setup.py
# Authors : Martin Matignon
#
# If you find any bug or have any questions, please contact
# Adolfo Suarez Roos <[email protected]>
# Martin Matignon <[email protected]>
#
#
################################################################################
import rospy
import time
import os
import roslaunch
import subprocess
from roslib.packages import get_pkg_dir
from python_qt_binding.QtGui import *
from python_qt_binding.QtCore import *
def widget_creator(obj_ui):
widget = QWidget()
layout = QHBoxLayout(widget)
layout.setSpacing(6)
layout.setContentsMargins(0, 0, 0, 0)
spacer_left = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
spacer_right = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
layout.addItem(spacer_left)
layout.addWidget(obj_ui)
layout.addItem(spacer_right)
return widget
class LaunchItem:
def __init__(self, launch, machine):
self.launch_name = QLabel(launch)
self.launch_name.setContentsMargins(0,0,10,0)
self.launch_name.setMinimumHeight(40)
self.combo_machines = QComboBox()
self.combo_machines.setMinimumHeight(40)
self.combo_machines.addItem('cobotgui-dev:127.0.0.1')
self.combo_machines.addItem('cobot:192.168.0.1')
rsc = os.path.join(get_pkg_dir('plugin_node_manager'),'resources')
icon_launch = QIcon(rsc+'/launch.png')
self.button_launch = QPushButton()
self.button_launch.setIcon(icon_launch)
self.button_launch.setIconSize(QSize(30,30))
self.button_launch.setFixedSize(QSize(100,40))
self.button_launch.clicked.connect(self._launch_node_slot)
self.button_launch_widget = widget_creator(self.button_launch)
def _launch_node_slot(self):
rospy.loginfo('%s::_launch_node()'%self.launch_name.text())
subprocess.Popen(['roslaunch',
'node_launchers',
self.launch_name.text()])
#End of file
|
#!/usr/bin/env python
################################################################################
#
# Copyright Airbus Group SAS 2015
# All rights reserved.
#
# File Name : launch_item.py
# Authors : Martin Matignon
#
# If you find any bug or if you have any question please contact
# Adolfo Suarez Roos <[email protected]>
# Martin Matignon <[email protected]>
#
#
################################################################################
import rospy
import time
import os
import roslaunch
import subprocess
from roslib.packages import get_pkg_dir
from python_qt_binding.QtGui import *
from python_qt_binding.QtCore import *
def widget_creator(obj_ui):
widget = QWidget()
layout = QHBoxLayout(widget)
layout.setSpacing(6)
layout.setContentsMargins(0, 0, 0, 0)
spacer_left = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
spacer_right = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
layout.addItem(spacer_left)
layout.addWidget(obj_ui)
layout.addItem(spacer_right)
return widget
class LaunchItem:
def __init__(self, launch, machine):
self.launch_name = QLabel(launch)
self.launch_name.setContentsMargins(0,0,10,0)
self.launch_name.setMinimumHeight(40)
self.combo_machines = QComboBox()
self.combo_machines.setMinimumHeight(40)
self.combo_machines.addItem('cobotgui-dev:127.0.0.1')
self.combo_machines.addItem('cobot:192.168.0.1')
rsc = os.path.join(get_pkg_dir('plugin_node_manager'),'resources')
icon_launch = QIcon(rsc+'/launch.png')
self.button_launch = QPushButton()
self.button_launch.setIcon(icon_launch)
self.button_launch.setIconSize(QSize(30,30))
self.button_launch.setFixedSize(QSize(100,40))
self.button_launch.clicked.connect(self._launch_node_slot)
self.button_launch_widget = widget_creator(self.button_launch)
def _launch_node_slot(self):
print 'coucou'
rospy.loginfo('%s::_launch_node()'%self.launch_name.text())
subprocess.Popen(['roslaunch',
'node_launchers',
self.launch_name.text()])
#End of file
|
apache-2.0
|
Python
|
31caceefaa2f6b6dc7d2601d8537e613ce600743
|
Use account's static groups instead of a conversation's groups for dialogue group state
|
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
|
go/apps/dialogue/view_definition.py
|
go/apps/dialogue/view_definition.py
|
import json
from django.http import HttpResponse
from django.forms import Form
from go.api.go_api import client
from go.api.go_api.client import GoApiError
from go.conversation.view_definition import (
ConversationViewDefinitionBase, ConversationTemplateView)
class DialogueEditView(ConversationTemplateView):
"""This app is a unique and special snowflake, so it gets special views.
"""
view_name = 'edit'
path_suffix = 'edit/'
template_base = 'dialogue'
def get(self, request, conversation):
r = client.rpc(
request.session.session_key, 'conversation.dialogue.get_poll',
[request.user_api.user_account_key,
conversation.key])
if r.status_code != 200:
raise GoApiError(
"Failed to load dialogue from Go API:"
" (%r) %r." % (r.status_code, r.text))
contact_store = conversation.user_api.contact_store
groups = contact_store.list_static_groups()
model_data = {
'campaign_id': request.user_api.user_account_key,
'conversation_key': conversation.key,
'groups': [g.get_data() for g in groups],
'urls': {
'show': self.get_view_url(
'show',
conversation_key=conversation.key)
}
}
model_data.update(r.json['result']['poll'])
return self.render_to_response({
'conversation': conversation,
'session_id': request.session.session_key,
'model_data': json.dumps(model_data),
})
class UserDataView(ConversationTemplateView):
view_name = 'user_data'
path_suffix = 'users.csv'
def get(self, request, conversation):
# TODO: write new CSV data export
csv_data = "TODO: write data export."
return HttpResponse(csv_data, content_type='application/csv')
class SendDialogueForm(Form):
# TODO: Something better than this?
pass
class ConversationViewDefinition(ConversationViewDefinitionBase):
edit_view = DialogueEditView
extra_views = (
UserDataView,
)
action_forms = {
'send_jsbox': SendDialogueForm,
}
|
import json
from django.http import HttpResponse
from django.forms import Form
from go.api.go_api import client
from go.api.go_api.client import GoApiError
from go.conversation.view_definition import (
ConversationViewDefinitionBase, ConversationTemplateView)
class DialogueEditView(ConversationTemplateView):
"""This app is a unique and special snowflake, so it gets special views.
"""
view_name = 'edit'
path_suffix = 'edit/'
template_base = 'dialogue'
def get(self, request, conversation):
r = client.rpc(
request.session.session_key, 'conversation.dialogue.get_poll',
[request.user_api.user_account_key,
conversation.key])
if r.status_code != 200:
raise GoApiError(
"Failed to load dialogue from Go API:"
" (%r) %r." % (r.status_code, r.text))
model_data = {
'campaign_id': request.user_api.user_account_key,
'conversation_key': conversation.key,
'groups': [g.get_data() for g in conversation.get_groups()],
'urls': {
'show': self.get_view_url(
'show',
conversation_key=conversation.key)
}
}
model_data.update(r.json['result']['poll'])
return self.render_to_response({
'conversation': conversation,
'session_id': request.session.session_key,
'model_data': json.dumps(model_data),
})
class UserDataView(ConversationTemplateView):
view_name = 'user_data'
path_suffix = 'users.csv'
def get(self, request, conversation):
# TODO: write new CSV data export
csv_data = "TODO: write data export."
return HttpResponse(csv_data, content_type='application/csv')
class SendDialogueForm(Form):
# TODO: Something better than this?
pass
class ConversationViewDefinition(ConversationViewDefinitionBase):
edit_view = DialogueEditView
extra_views = (
UserDataView,
)
action_forms = {
'send_jsbox': SendDialogueForm,
}
|
bsd-3-clause
|
Python
|