commit | subject | repos | old_file | new_file | new_contents | old_contents | license | lang
---|---|---|---|---|---|---|---|---|
edebc05d3df68faadc6c0547de7cc06f1469915e
|
make click example do stuff
|
tylerdave/FOSSCON-2015-Example
|
fosscon2015/cli_click.py
|
fosscon2015/cli_click.py
|
#!/usr/bin/env python
import click
import json
try:
from collections import Counter
except ImportError:
# backport_collections needed for python 2.6 compatibility
from backport_collections import Counter
@click.command()
@click.argument('infile', type=click.File('r'), default='-')
@click.argument('outfile', type=click.File('w'), default='-')
@click.option('--verbose', '-v')
def cli(infile, outfile, verbose):
""" Count the occurances of characters in INFILE and save results in OUTFILE. """
click.echo("Hi!")
click.secho("infile: {0}".format(infile))
click.secho("outfile: {0}".format(outfile))
text = infile.read()
char_counts = Counter(text)
click.secho(json.dumps(dict(char_counts.most_common())), file=outfile,
fg='green')
if __name__ == '__main__':
cli()
|
import click
@click.command()
def cli():
click.echo("I'm a click CLI.")
if __name__ == '__main__':
cli()
|
mit
|
Python
|
514c6c145e6e2f2c327fb89cfe780eb196508f79
|
change absolute site url
|
3-strand-code/3sc-blog,3-strand-code/3sc-blog,3-strand-code/3sc-blog,3-strand-code/3sc-blog
|
publishconf.py
|
publishconf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://blog.3strandcode.com/'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = "3strandcode"
#GOOGLE_ANALYTICS = ""
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://3-strand-code.github.io/3sc-blog/'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = "3strandcode"
#GOOGLE_ANALYTICS = ""
|
mit
|
Python
|
ca863134d20cda67c6e7f4abf1df595d5d549952
|
Fix agent changelog command (#3233)
|
DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core
|
datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/common.py
|
datadog_checks_dev/datadog_checks/dev/tooling/commands/agent/common.py
|
# (C) Datadog, Inc. 2019
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from semver import parse_version_info
from ...git import git_tag_list
def get_agent_tags(since, to):
"""
Return a list of tags from integrations-core representing an Agent release,
    sorted by most recent first.
"""
agent_tags = sorted(parse_version_info(t) for t in git_tag_list(r'^\d+\.\d+\.\d+$'))
# default value for `to` is the latest tag
if to:
to = parse_version_info(to)
else:
to = agent_tags[-1]
since = parse_version_info(since)
# filter out versions according to the interval [since, to]
agent_tags = [t for t in agent_tags if since <= t <= to]
    # reverse so we have descending order
return [str(t) for t in reversed(agent_tags)]
|
# (C) Datadog, Inc. 2019
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from ...git import git_tag_list
def get_agent_tags(since, to):
"""
Return a list of tags from integrations-core representing an Agent release,
    sorted by most recent first.
"""
agent_tags = git_tag_list(r'^\d+\.\d+\.\d+$')
# default value for `to` is the latest tag
if not to:
to = agent_tags[-1]
# filter out versions according to the interval [since, to]
agent_tags = [t for t in agent_tags if since <= t <= to]
    # reverse so we have descending order
return agent_tags[::-1]
|
bsd-3-clause
|
Python
|
07e12dd0942329aadc8fb3ed47b6f088779800b9
|
fix logcollector
|
sch3m4/intelmq,robcza/intelmq,robcza/intelmq,certtools/intelmq,robcza/intelmq,aaronkaplan/intelmq,sch3m4/intelmq,certtools/intelmq,pkug/intelmq,robcza/intelmq,sch3m4/intelmq,pkug/intelmq,pkug/intelmq,aaronkaplan/intelmq,sch3m4/intelmq,aaronkaplan/intelmq,pkug/intelmq,certtools/intelmq
|
src/bots/outputs/logcollector/logcollector.py
|
src/bots/outputs/logcollector/logcollector.py
|
import sys
import time
import socket
from lib.bot import *
from lib.utils import *
from lib.event import *
class LogCollectorBot(Bot):
def process(self):
event = self.receive_message()
if event:
data = ''
for key, value in event.items():
data += key.replace(' ','_') + '="' + value + '" '
data += "\n"
self.send_data(data)
self.acknowledge_message()
def connect(self):
address = (self.parameters.ip, int(self.parameters.port))
self.con = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
while True:
try:
self.con.connect(address)
break
except socket.error, e:
self.logger.error(e.args[1] + ". Retrying in 10 seconds.")
time.sleep(10)
self.logger.info("Connected successfully to %s:%i", address[0], address[1])
def send_data(self, data):
while True:
try:
self.con.send(unicode(data).encode("utf-8"))
self.con.sendall("")
break
except socket.error, e:
self.logger.error(e.args[1] + ". Reconnecting..")
self.con.close()
self.connect()
except AttributeError:
self.connect()
if __name__ == "__main__":
bot = LogCollectorBot(sys.argv[1])
bot.start()
|
import sys
import time
import socket
from lib.bot import *
from lib.utils import *
from lib.event import *
try:
import simplejson as json
except ImportError:
import json
class LogCollectorBot(Bot):
def process(self):
event = self.receive_message()
if event:
data = ''
for key, value in event.items():
data += key.replace(' ','_') + '=' + json.dumps(value) + ' '
data += "\n"
self.send_data(data)
self.acknowledge_message()
def connect(self):
address = (self.parameters.ip, int(self.parameters.port))
self.con = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
while True:
try:
self.con.connect(address)
break
except socket.error, e:
self.logger.error(e.args[1] + ". Retrying in 10 seconds.")
time.sleep(10)
self.logger.info("Connected successfully to %s:%i", address[0], address[1])
def send_data(self, data):
while True:
try:
self.con.send(unicode(data).encode("utf-8"))
self.con.sendall("")
break
except socket.error, e:
self.logger.error(e.args[1] + ". Reconnecting..")
self.con.close()
self.connect()
except AttributeError:
self.connect()
if __name__ == "__main__":
bot = LogCollectorBot(sys.argv[1])
bot.start()
|
agpl-3.0
|
Python
|
d451814584318ac45cdefa9702c72eb5c15fe690
|
Fix Flakes Errors: openspending/ui/controllers/home.py
|
openspending/spendb,johnjohndoe/spendb,spendb/spendb,openspending/spendb,pudo/spendb,spendb/spendb,CivicVision/datahub,openspending/spendb,pudo/spendb,nathanhilbert/FPA_Core,pudo/spendb,johnjohndoe/spendb,spendb/spendb,CivicVision/datahub,USStateDept/FPA_Core,nathanhilbert/FPA_Core,USStateDept/FPA_Core,USStateDept/FPA_Core,nathanhilbert/FPA_Core,CivicVision/datahub,johnjohndoe/spendb
|
openspending/ui/controllers/home.py
|
openspending/ui/controllers/home.py
|
import logging
import os
import subprocess
from pylons import request, tmpl_context as c
from pylons.controllers.util import redirect
from pylons.i18n import _
from openspending.model import Dataset
from openspending.lib.solr_util import dataset_entries
from openspending.ui.i18n import set_session_locale
from openspending.ui.lib.base import BaseController
from openspending.ui.lib.helpers import flash_success
from openspending.ui.alttemplates import templating
log = logging.getLogger(__name__)
class HomeController(BaseController):
def index(self):
# Get all of the datasets available to the account of the logged in
# or an anonymous user (if c.account is None)
c.datasets = Dataset.all_by_account(c.account)
c.num_entries = dataset_entries(None)
return templating.render('home/index.html')
def set_locale(self):
locale = request.params.get('locale')
if locale is not None:
set_session_locale(locale)
def version(self):
cwd = os.path.dirname(__file__)
process = subprocess.Popen('git rev-parse --verify HEAD'.split(' '),
cwd=cwd,
stdout=subprocess.PIPE)
output = process.communicate()[0]
if process.returncode == 0:
return output
else:
import openspending.version
return openspending.version.__version__
def favicon(self):
return redirect('/static/img/favicon.ico', code=301)
def ping(self):
from openspending.tasks import ping
ping.delay()
flash_success(_("Sent ping!"))
redirect('/')
|
import logging
import os
import random
import subprocess
from datetime import datetime
from pylons import request, response, tmpl_context as c, url, config
from pylons.controllers.util import redirect
from pylons.decorators.cache import beaker_cache
from pylons.i18n import _
from openspending.model import Dataset
from openspending.lib.solr_util import dataset_entries
from openspending.ui.i18n import set_session_locale
from openspending.ui.lib import views
from openspending.ui.lib.base import BaseController, require
from openspending.ui.lib.helpers import flash_success, flash_error
from openspending.ui.lib import helpers as h
from openspending.ui.alttemplates import templating
log = logging.getLogger(__name__)
class HomeController(BaseController):
def index(self):
# Get all of the datasets available to the account of the logged in
# or an anonymous user (if c.account is None)
c.datasets = Dataset.all_by_account(c.account)
c.num_entries = dataset_entries(None)
return templating.render('home/index.html')
def set_locale(self):
locale = request.params.get('locale')
if locale is not None:
set_session_locale(locale)
def version(self):
cwd = os.path.dirname(__file__)
process = subprocess.Popen('git rev-parse --verify HEAD'.split(' '),
cwd=cwd,
stdout=subprocess.PIPE)
output = process.communicate()[0]
if process.returncode == 0:
return output
else:
import openspending.version
return openspending.version.__version__
def favicon(self):
return redirect('/static/img/favicon.ico', code=301)
def ping(self):
from openspending.tasks import ping
ping.delay()
flash_success(_("Sent ping!"))
redirect('/')
|
agpl-3.0
|
Python
|
7db11fa7aad4b53a1f50988e83de2abfbae61dde
|
Fix the senddeletionnotices command to take into account the new default SMS limit.
|
iphoting/healthchecks,healthchecks/healthchecks,iphoting/healthchecks,healthchecks/healthchecks,iphoting/healthchecks,healthchecks/healthchecks,iphoting/healthchecks,healthchecks/healthchecks
|
hc/accounts/management/commands/senddeletionnotices.py
|
hc/accounts/management/commands/senddeletionnotices.py
|
from datetime import timedelta
import time
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.timezone import now
from hc.accounts.models import Profile, Member
from hc.api.models import Ping
from hc.lib import emails
class Command(BaseCommand):
help = """Send deletion notices to inactive user accounts.
Conditions for sending the notice:
- deletion notice has not been sent recently
- last login more than a year ago
- none of the owned projects has invited team members
"""
def handle(self, *args, **options):
year_ago = now() - timedelta(days=365)
q = Profile.objects.order_by("id")
# Exclude accounts with logins in the last year_ago
q = q.exclude(user__last_login__gt=year_ago)
# Exclude accounts less than a year_ago old
q = q.exclude(user__date_joined__gt=year_ago)
# Exclude accounts with the deletion notice already sent
q = q.exclude(deletion_notice_date__gt=year_ago)
# Exclude paid accounts
q = q.exclude(sms_limit__gt=5)
sent = 0
for profile in q:
members = Member.objects.filter(project__owner_id=profile.user_id)
if members.exists():
print("Skipping %s, has team members" % profile)
continue
pings = Ping.objects
pings = pings.filter(owner__project__owner_id=profile.user_id)
pings = pings.filter(created__gt=year_ago)
if pings.exists():
print("Skipping %s, has pings in last year" % profile)
continue
self.stdout.write("Sending notice to %s" % profile.user.email)
profile.deletion_notice_date = now()
profile.save()
ctx = {"email": profile.user.email, "support_email": settings.SUPPORT_EMAIL}
emails.deletion_notice(profile.user.email, ctx)
# Throttle so we don't send too many emails at once:
time.sleep(1)
sent += 1
return "Done! Sent %d notices" % sent
|
from datetime import timedelta
import time
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.timezone import now
from hc.accounts.models import Profile, Member
from hc.api.models import Ping
from hc.lib import emails
class Command(BaseCommand):
help = """Send deletion notices to inactive user accounts.
Conditions for sending the notice:
- deletion notice has not been sent recently
- last login more than a year ago
- none of the owned projects has invited team members
"""
def handle(self, *args, **options):
year_ago = now() - timedelta(days=365)
q = Profile.objects.order_by("id")
# Exclude accounts with logins in the last year_ago
q = q.exclude(user__last_login__gt=year_ago)
# Exclude accounts less than a year_ago old
q = q.exclude(user__date_joined__gt=year_ago)
# Exclude accounts with the deletion notice already sent
q = q.exclude(deletion_notice_date__gt=year_ago)
# Exclude paid accounts
q = q.exclude(sms_limit__gt=0)
sent = 0
for profile in q:
members = Member.objects.filter(project__owner_id=profile.user_id)
if members.exists():
print("Skipping %s, has team members" % profile)
continue
pings = Ping.objects
pings = pings.filter(owner__project__owner_id=profile.user_id)
pings = pings.filter(created__gt=year_ago)
if pings.exists():
print("Skipping %s, has pings in last year" % profile)
continue
self.stdout.write("Sending notice to %s" % profile.user.email)
profile.deletion_notice_date = now()
profile.save()
ctx = {"email": profile.user.email, "support_email": settings.SUPPORT_EMAIL}
emails.deletion_notice(profile.user.email, ctx)
# Throttle so we don't send too many emails at once:
time.sleep(1)
sent += 1
return "Done! Sent %d notices" % sent
|
bsd-3-clause
|
Python
|
43ab753c4a9892c55f115a4dd5345e94c4bb5d41
|
Fix auth initialization logging
|
ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver
|
opwen_email_server/services/auth.py
|
opwen_email_server/services/auth.py
|
from ast import literal_eval
from os import environ
from typing import Callable
from typing import Mapping
from opwen_email_server.utils.log import LogMixin
class EnvironmentAuth(LogMixin):
def __init__(self, client_to_domain: Mapping[str, str]=None,
envgetter: Callable[[str, str], str]=environ.get,
envkey: str='LOKOLE_CLIENTS') -> None:
self.__client_to_domain = dict(client_to_domain or {})
self._envgetter = envgetter
self._envkey = envkey
@property
def _client_to_domain(self):
if not self.__client_to_domain:
self.__client_to_domain = self._create_client_to_domain()
self.log_debug('initialized auth to %r', self.__client_to_domain)
return self.__client_to_domain
def _create_client_to_domain(self) -> Mapping[str, str]:
client_to_domain = literal_eval(self._envgetter(self._envkey, '{}'))
if not client_to_domain:
raise ValueError('environment key {} not set'.format(self._envkey))
return client_to_domain
def __contains__(self, client: str) -> bool:
return client in self._client_to_domain
def domain_for(self, client: str) -> str:
return self._client_to_domain[client]
|
from ast import literal_eval
from os import environ
from typing import Callable
from typing import Mapping
from opwen_email_server.utils.log import LogMixin
class EnvironmentAuth(LogMixin):
def __init__(self, client_to_domain: Mapping[str, str]=None,
envgetter: Callable[[str, str], str]=environ.get,
envkey: str='LOKOLE_CLIENTS') -> None:
self.__client_to_domain = dict(client_to_domain or {})
self._envgetter = envgetter
self._envkey = envkey
@property
def _client_to_domain(self):
if not self.__client_to_domain:
self.log_debug('initialized auth to %r', self.__client_to_domain)
self.__client_to_domain = self._create_client_to_domain()
return self.__client_to_domain
def _create_client_to_domain(self) -> Mapping[str, str]:
client_to_domain = literal_eval(self._envgetter(self._envkey, '{}'))
if not client_to_domain:
raise ValueError('environment key {} not set'.format(self._envkey))
return client_to_domain
def __contains__(self, client: str) -> bool:
return client in self._client_to_domain
def domain_for(self, client: str) -> str:
return self._client_to_domain[client]
|
apache-2.0
|
Python
|
5118104dad921128e4dec0cd1ea00aa7d854c0a9
|
fix token create params to match new keystone - this change will need to be ported to novaclient
|
ntt-pf-lab/backup_openstackx
|
openstackx/auth/tokens.py
|
openstackx/auth/tokens.py
|
from openstackx.api import base
class Tenant(base.Resource):
def __repr__(self):
return "<Tenant %s>" % self._info
@property
def id(self):
return self._info['id']
@property
def description(self):
return self._info['description']
@property
def enabled(self):
return self._info['enabled']
class Token(base.Resource):
def __repr__(self):
return "<Token %s>" % self._info
@property
def id(self):
return self._info['token']['id']
@property
def username(self):
try:
return self._info['user']['username']
except:
return "?"
@property
def tenant_id(self):
try:
return self._info['user']['tenantId']
except:
return "?"
def delete(self):
self.manager.delete(self)
class TokenManager(base.ManagerWithFind):
resource_class = Token
def create(self, tenant, username, password):
params = {"auth": {"passwordCredentials": {"username": username,
"password": password},
"tenantId": tenant}}
return self._create('tokens', params, "access")
class TenantManager(base.ManagerWithFind):
resource_class = Tenant
def for_token(self, token):
# FIXME(ja): now that tenants & tokens are separate managers we shouldn't
        # need the ugliness of setting token this way?
orig = self.api.connection.auth_token
self.api.connection.auth_token = token
rval = self._list('tenants', "tenants")
self.api.connection.auth_token = orig
return rval
|
from openstackx.api import base
class Tenant(base.Resource):
def __repr__(self):
return "<Tenant %s>" % self._info
@property
def id(self):
return self._info['id']
@property
def description(self):
return self._info['description']
@property
def enabled(self):
return self._info['enabled']
class Token(base.Resource):
def __repr__(self):
return "<Token %s>" % self._info
@property
def id(self):
return self._info['token']['id']
@property
def username(self):
try:
return self._info['user']['username']
except:
return "?"
@property
def tenant_id(self):
try:
return self._info['user']['tenantId']
except:
return "?"
def delete(self):
self.manager.delete(self)
class TokenManager(base.ManagerWithFind):
resource_class = Token
def create(self, tenant, username, password):
params = {"passwordCredentials": {"username": username,
"password": password,
"tenantId": tenant}}
return self._create('tokens', params, "auth")
class TenantManager(base.ManagerWithFind):
resource_class = Tenant
def for_token(self, token):
# FIXME(ja): now that tenants & tokens are separate managers we shouldn't
        # need the ugliness of setting token this way?
orig = self.api.connection.auth_token
self.api.connection.auth_token = token
rval = self._list('tenants', "tenants")
self.api.connection.auth_token = orig
return rval
|
bsd-3-clause
|
Python
|
180b7810cdc9a2a17a13c184fc2143f9a5f10cd6
|
change var name article to container (opps 0.2) in sitemap generator class
|
williamroot/opps,opps/opps,YACOWS/opps,opps/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,opps/opps
|
opps/sitemaps/sitemaps.py
|
opps/sitemaps/sitemaps.py
|
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
container = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
container = container[:1000]
return {
'queryset': container,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.containers.models import Container
def InfoDisct(googlenews=False):
article = Container.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
article = article[:1000]
return {
'queryset': article,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Container.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
mit
|
Python
|
ba92d4b5854f31b97255c98b83d8dfb8874c8668
|
Fix arg list for thumbnailpath override
|
findvid/main,findvid/main,findvid/main,findvid/main,findvid/main
|
frontend/src/indexing.py
|
frontend/src/indexing.py
|
#!/usr/bin/env python
from pymongo import MongoClient
#from subprocess import Popen, PIPE
import FindVid as fv
from sys import argv, exit
import hashlib
import os
def hashFile(filename, blocksize):
hash = hashlib.sha1()
with open(filename, 'rb') as f:
buffer = f.read(blocksize)
while len(buffer) > 0:
hash.update(buffer)
buffer = f.read(blocksize)
return hash.hexdigest()
# returns the configuration dictionary
def config(db="findvid", collection="videos", config={"_id": "config"}):
client = MongoClient()
db = client[db]
videos = db[collection]
return videos.find(config).next()
CONFIG = config() # abs, thumbnail, video
VIDEOPATH = CONFIG["abspath"] + CONFIG["videopath"]
# path to shotbounds program
SHOTBOUNDS = "{0}main/impl/shotbounds".format(CONFIG["abspath"])
THUMBNAILER = "{0}main/impl/thumbnails".format(CONFIG["abspath"])
#Index the given videofile (abs. path), create thumbnails in the
def index_video(videofile, searchable=False, uploaded=True, thumbpath = None):
#Get PyMongo client
client = MongoClient()
db = client["findvid"]
videos = db["videos"]
#Get Hash
fileHash = str(hashFile(videofile, 65536))
#Check if this exact video exists already
video = videos.find_one({'_id': fileHash})
if (video):
return False
#Use C-Lib to get cuts in the video
cuts = fv.getCuts(videofile)
#Heuristic approach: Suitable keyframe between 2 cuts
keyframes = [(cuts[i-1] + cuts[i])/2 for i in range(1, len(cuts))]
#extract features from videofile given the keyframes array, use the middle keyframe as videothumb and save to default folder
if (thumbpath == None):
features = fv.getFeatures(videofile, keyframes[len(keyframes)/2], keyframes)
else:
features = fv.getFeatures(videofile, keyframes[len(keyframes)/2], keyframes, thumbpath)
prev = 0
scenes = [] # scenes collection
for i, c in enumerate(cuts[1:]):
scene = {} # scene document
scene["_id"] = str(i)
scene["startframe"] = prev
scene["endframe"] = c
# save features
scene["colorhist"] = []
for v in features[i][0]:
scene["colorhist"].append(v)
scene["edges"] = []
for v in features[i][1]:
scene["edges"].append(v)
# TinyIMG
# scene["tinyimg"]
# for v in features[i][2]:
# scene["tinyimg"].append(v)
# GIST
# scene["gist"]
# for v in features[i][2]:
# scene["gist"].append(v)
scenes.append(scene)
prev = c
video = {}
# TODO sequence counter
video["_id"] = fileHash
video["filename"] = videofile
fps = fv.getFramerate(videofile)
video["fps"] = fps
video["framecount"] = cuts[-1:][0] # last entry
video["scenes"] = scenes
video["upload"] = uploaded
video["searchable"] = searchable
videos.insert(video)
return True
if __name__ == "__main__":
if len(argv) < 2:
print "ERROR: file missing!"
exit(1)
videofile = argv[1]
index_video(videofile)
|
#!/usr/bin/env python
from pymongo import MongoClient
#from subprocess import Popen, PIPE
import FindVid as fv
from sys import argv, exit
import hashlib
import os
def hashFile(filename, blocksize):
hash = hashlib.sha1()
with open(filename, 'rb') as f:
buffer = f.read(blocksize)
while len(buffer) > 0:
hash.update(buffer)
buffer = f.read(blocksize)
return hash.hexdigest()
# returns the configuration dictionary
def config(db="findvid", collection="videos", config={"_id": "config"}):
client = MongoClient()
db = client[db]
videos = db[collection]
return videos.find(config).next()
CONFIG = config() # abs, thumbnail, video
VIDEOPATH = CONFIG["abspath"] + CONFIG["videopath"]
# path to shotbounds program
SHOTBOUNDS = "{0}main/impl/shotbounds".format(CONFIG["abspath"])
THUMBNAILER = "{0}main/impl/thumbnails".format(CONFIG["abspath"])
#Index the given videofile (abs. path), create thumbnails in the
def index_video(videofile, searchable=False, uploaded=True, thumbpath = None):
#Get PyMongo client
client = MongoClient()
db = client["findvid"]
videos = db["videos"]
#Get Hash
fileHash = str(hashFile(videofile, 65536))
#Check if this exact video exists already
video = videos.find_one({'_id': fileHash})
if (video):
return False
#Use C-Lib to get cuts in the video
cuts = fv.getCuts(videofile)
#Heuristic approach: Suitable keyframe between 2 cuts
keyframes = [(cuts[i-1] + cuts[i])/2 for i in range(1, len(cuts))]
#extract features from videofile given the keyframes array, use the middle keyframe as videothumb and save to default folder
features = fv.getFeatures(videofile, keyframes[len(keyframes)/2], keyframes, thumbpath)
prev = 0
scenes = [] # scenes collection
for i, c in enumerate(cuts[1:]):
scene = {} # scene document
scene["_id"] = str(i)
scene["startframe"] = prev
scene["endframe"] = c
# save features
scene["colorhist"] = []
for v in features[i][0]:
scene["colorhist"].append(v)
scene["edges"] = []
for v in features[i][1]:
scene["edges"].append(v)
# TinyIMG
# scene["tinyimg"]
# for v in features[i][2]:
# scene["tinyimg"].append(v)
# GIST
# scene["gist"]
# for v in features[i][2]:
# scene["gist"].append(v)
scenes.append(scene)
prev = c
video = {}
# TODO sequence counter
video["_id"] = fileHash
video["filename"] = videofile
fps = fv.getFramerate(videofile)
video["fps"] = fps
video["framecount"] = cuts[-1:][0] # last entry
video["scenes"] = scenes
video["upload"] = uploaded
video["searchable"] = searchable
videos.insert(video)
return True
if __name__ == "__main__":
if len(argv) < 2:
print "ERROR: file missing!"
exit(1)
videofile = argv[1]
index_video(videofile)
|
mit
|
Python
|
f127f0e9bb0b8778feafbdbc1fa68e79a923d639
|
Update product listing test to use product ids rather than index
|
osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api
|
whats_fresh/whats_fresh_api/tests/views/entry/test_list_products.py
|
whats_fresh/whats_fresh_api/tests/views/entry/test_list_products.py
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListProductTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-products')
self.assertEqual(url, '/entry/products')
def test_list_items(self):
"""
Tests to see if the list of products contains the proper products and
proper product data
"""
response = self.client.get(reverse('entry-list-products'))
items = response.context['item_list']
product_dict = {}
for product in items:
product_id = product['link'].split('/')[-1]
product_dict[str(product_id)] = product
for product in Product.objects.all():
self.assertEqual(
product_dict[str(product.id)]['description'],
product.description)
self.assertEqual(
product_dict[str(product.id)]['name'], product.name)
self.assertEqual(
product_dict[str(product.id)]['link'],
reverse('edit-product', kwargs={'id': product.id}))
self.assertEqual(
product_dict[str(product.id)]['modified'],
product.modified.strftime("%I:%M %P, %d %b %Y"))
self.assertEqual(
sort(product_dict[str(product.id)]['preparations']),
sort([prep.name for prep in product.preparations.all()]))
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListProductTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-products')
self.assertEqual(url, '/entry/products')
def test_list_items(self):
"""
Tests to see if the list of products contains the proper productss and
proper product data
"""
response = self.client.get(reverse('entry-list-products'))
items = response.context['item_list']
for product in Product.objects.all():
self.assertEqual(
items[product.id-1]['description'], product.description)
self.assertEqual(
items[product.id-1]['name'], product.name)
self.assertEqual(
items[product.id-1]['link'],
reverse('edit-product', kwargs={'id': product.id}))
self.assertEqual(
items[product.id-1]['modified'],
product.modified.strftime("%I:%M %P, %d %b %Y"))
self.assertEqual(
sort(items[product.id-1]['preparations']),
sort([prep.name for prep in product.preparations.all()]))
|
apache-2.0
|
Python
|
266ee5a6798c1fe09e9c6b36ee5831a1060b4624
|
Fix lust.py
|
juricapapak/git-tutorial-code,juricapapak/git-tutorial-code
|
python/lust.py
|
python/lust.py
|
#!/usr/bin/env python
class LustObject(object):
# executes the command
def handle(self, arguments): pass
def print_help(self): pass
class FactorialCommand(LustObject):
def handle(self, arguments):
try: argument = int(arguments[0])
except (ValueError, IndexError):
print("fact: could not read integer argument.")
return
if argument < 0:
print("fact: argument has to be non-negative!")
return
print(self.__calculate_factorial(argument))
def print_help(self):
print(" fact <integer>")
print(" Calculates the factorial of <integer>.")
def __calculate_factorial(self, argument):
# Hmmm...
result = 1
for i in range(1, argument+1):
result *= i
return result
class QuitCommand(LustObject):
def handle(self, arguments = None):
print("Bye!")
exit()
def print_help(self):
print(" quit")
print(" Quits.")
class HelpCommand(LustObject):
def __init__(self, commands):
self.commands = commands
def handle(self, arguments = None):
print("List of all commands")
print("--------------------")
for command in sorted(self.commands):
self.commands[command].print_help()
def print_help(self):
print(" help")
print(" Prints help for all commands.")
print("Hello! Welcome to the LARICS Universal Shell Terminal (LUST)!")
print("Enter 'help' for a list of commands. Press Ctrl-D or enter 'quit' to quit.")
# dictionary for storing all commands
commands = { }
commands["fact"] = FactorialCommand()
commands["quit"] = QuitCommand()
# help command needs a reference to the parent dictionary in order to call each
# command's print_help() function
commands["help"] = HelpCommand(commands)
# input from Python 3 is raw_input in Python 2
try: input = raw_input
except NameError: pass
while True:
# read current line and try to extract command name
try:
cmd_line = input(">> ")
except (EOFError):
break
arguments = cmd_line.split()
try: cmd_name = arguments[0].lower()
except IndexError: continue
# look up the appropriate command in commands dictionary
if cmd_name not in commands:
print("lust: no such command '{}'.".format(cmd_name))
continue
else:
# command found, pass its handler the rest of the read arguments
commands[cmd_name].handle(arguments[1:])
print
commands["quit"].handle()
|
#!/usr/bin/env python
class LustObject(object):
# executes the command
def handle(self, arguments): pass
def print_help(self): pass
class FactorialCommand(LustObject):
def handle(self, arguments):
try: argument = int(arguments[0])
except (ValueError, IndexError):
print("fact: could not read integer argument.")
return
if argument < 0:
print("fact: argument has to be non-negative!")
return
print(self.__calculate_factorial(argument))
def print_help(self):
print(" fact <integer>")
print(" Calculates the factorial of <integer>.")
def __calculate_factorial(self, argument):
# Hmmm...
result = 0
for i in range(1, argument+1):
result *= i
return result
class QuitCommand(LustObject):
def handle(self, arguments = None):
print("Bye!")
exit()
def print_help(self):
print(" quit")
print(" Quits.")
class HelpCommand(LustObject):
def __init__(self, commands):
self.commands = commands
def handle(self, arguments = None):
print("List of all commands")
print("--------------------")
for command in sorted(self.commands):
self.commands[command].print_help()
def print_help(self):
print(" help")
print(" Prints help for all commands.")
print("Hello! Welcome to the LARICS Universal Shell Terminal (LUST)!")
print("Enter 'help' for a list of commands. Press Ctrl-D or enter 'quit' to quit.")
# dictionary for storing all commands
commands = { }
commands["fact"] = FactorialCommand()
commands["quit"] = QuitCommand()
# help command needs a reference to the parent dictionary in order to call each
# command's print_help() function
commands["help"] = HelpCommand(commands)
# input from Python 3 is raw_input in Python 2
try: input = raw_input
except NameError: pass
while True:
# read current line and try to extract command name
try:
cmd_line = input(">> ")
except (EOFError):
break
arguments = cmd_line.split()
try: cmd_name = arguments[0].lower()
except IndexError: continue
# look up the appropriate command in commands dictionary
if cmd_name not in commands:
print("lust: no such command '{}'.".format(cmd_name))
continue
else:
# command found, pass its handler the rest of the read arguments
commands[cmd_name].handle(arguments[1:])
print
commands["quit"].handle()
|
apache-2.0
|
Python
|
12b34fc09baa5060495e25e57680d1f6170559c5
|
Enable estimation reports for FPBŻ
|
KamilWo/bestja,EE/bestja,ludwiktrammer/bestja,KrzysiekJ/bestja,ludwiktrammer/bestja,KrzysiekJ/bestja,KrzysiekJ/bestja,ludwiktrammer/bestja,EE/bestja,KamilWo/bestja,EE/bestja,KamilWo/bestja
|
addons/bestja_configuration_fpbz/__openerp__.py
|
addons/bestja_configuration_fpbz/__openerp__.py
|
# -*- coding: utf-8 -*-
{
'name': "Bestja: FBŻ",
'summary': "Installation configuration for FPBŻ",
'description': "Installation configuration for Federacja Polskich Banków Żywności",
'author': "Laboratorium EE",
'website': "http://www.laboratorium.ee",
'version': '0.1',
'category': 'Specific Industry Applications',
'depends': [
'base',
'bestja_base',
'bestja_volunteer',
'bestja_volunteer_notes',
'bestja_account_deletion',
'bestja_organization',
'bestja_organization_hierarchy',
'bestja_project',
'bestja_project_hierarchy',
'bestja_stores',
'bestja_requests',
'bestja_detailed_reports',
'bestja_estimation_reports',
'bestja_offers',
'bestja_offers_by_org',
'bestja_files',
'quizzes',
'bestja_organization_warehouse',
'bestja_age_verification',
'bestja_frontend_fpbz',
'bestja_page_fixtures_fpbz',
],
'data': [
'data.xml',
],
'application': True,
}
|
# -*- coding: utf-8 -*-
{
'name': "Bestja: FBŻ",
'summary': "Installation configuration for FPBŻ",
'description': "Installation configuration for Federacja Polskich Banków Żywności",
'author': "Laboratorium EE",
'website': "http://www.laboratorium.ee",
'version': '0.1',
'category': 'Specific Industry Applications',
'depends': [
'base',
'bestja_base',
'bestja_volunteer',
'bestja_volunteer_notes',
'bestja_account_deletion',
'bestja_organization',
'bestja_organization_hierarchy',
'bestja_project',
'bestja_project_hierarchy',
'bestja_stores',
'bestja_requests',
'bestja_detailed_reports',
'bestja_offers',
'bestja_offers_by_org',
'bestja_files',
'quizzes',
'bestja_organization_warehouse',
'bestja_age_verification',
'bestja_frontend_fpbz',
'bestja_page_fixtures_fpbz',
],
'data': [
'data.xml',
],
'application': True,
}
|
agpl-3.0
|
Python
|
2dc3e7eb3e6e5b32347d24d5353f9a5f0f6915c2
|
Create app.py
|
rajeshrao04/news-api
|
app.py
|
app.py
|
#!/usr/bin/env python
import urllib
import json
import os
import time
from flask import Flask
from flask import request
from flask import make_response
from datetime import datetime
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = makeWebhookResult(req)
res = json.dumps(res, indent=4)
print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def makeWebhookResult(req):
if req.get("result").get("action") != "time.get":
return {}
result = req.get("result")
parameters = result.get("parameters")
zone = parameters.get("sys.location")
cost = {'Europe':100, 'North America':200, 'South America':300, 'Asia':400, 'Africa':500}
speech = "The cost of shipping to " + zone + " is " + str(cost[zone]) + " euros."
localtime = time.localtime(time.time())
print "Local current time :", localtime
print("Response:")
print(speech)
return {
"speech": speech,
"displayText": speech,
#"data": {},
# "contextOut": [],
"source": "apiai-onlinestore-shipping"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print "Starting app on port %d" % port
app.run(debug=True, port=port, host='0.0.0.0')
|
#!/usr/bin/env python
import urllib
import json
import os
import time
from flask import Flask
from flask import request
from flask import make_response
from datetime import datetime
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = makeWebhookResult(req)
res = json.dumps(res, indent=4)
print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def makeWebhookResult(req):
if req.get("result").get("action") != "time.get":
return {}
result = req.get("result")
parameters = result.get("parameters")
zone = parameters.get("sys.location")
// cost = {'Europe':100, 'North America':200, 'South America':300, 'Asia':400, 'Africa':500}
// speech = "The cost of shipping to " + zone + " is " + str(cost[zone]) + " euros."
localtime = time.localtime(time.time())
print "Local current time :", localtime
// print("Response:")
// print(speech)
return {
"speech": speech,
"displayText": speech,
#"data": {},
# "contextOut": [],
"source": "apiai-onlinestore-shipping"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print "Starting app on port %d" % port
app.run(debug=True, port=port, host='0.0.0.0')
|
apache-2.0
|
Python
|
cb5aa965254c6abe3a865747e823fbe90f894a2c
|
return addresses as 'items'
|
colorado-code-for-communities/denver_streets,colorado-code-for-communities/denver_streets
|
app.py
|
app.py
|
from flask import Flask, Response, url_for, request, json
import os
import urllib2
import datetime
app = Flask(__name__)
@app.route('/hello')
def hello():
return 'Hello world'
@app.route('/')
def index():
response_data = json.dumps({ 'closures_href': url_for('closures') })
response = Response(response_data, status=200, mimetype='application/json')
return response
@app.route('/closures')
def closures():
d = datetime.datetime.today().strftime('%Y-%m-%d')
scraperwiki_query = "https://api.scraperwiki.com/api/1.0/datastore/sqlite?format=jsondict&name=denver_streets_and_sidewalks&query=select%20*%20from%20%60swdata%60%20where%20start_date%20%3C%20date('"+ d +"')%20and%20end_date%20%3E%20date('" + d + "')"
scraperwiki_response = { 'items': json.loads(urllib2.urlopen(scraperwiki_query).read()) }
response = Response(json.dumps(scraperwiki_response), status=200, mimetype='application/json')
return response
@app.route('/closures/<int:closure_id>')
def closure_id():
return ""
# find closure with closure_id
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
from flask import Flask, Response, url_for, request, json
import os
import urllib2
import datetime
app = Flask(__name__)
@app.route('/hello')
def hello():
return 'Hello world'
@app.route('/')
def index():
response_data = json.dumps({ 'closures_href': url_for('closures') })
response = Response(response_data, status=200, mimetype='application/json')
return response
@app.route('/closures')
def closures():
d = datetime.datetime.today().strftime('%Y-%m-%d')
scraperwiki_query = "https://api.scraperwiki.com/api/1.0/datastore/sqlite?format=jsondict&name=denver_streets_and_sidewalks&query=select%20*%20from%20%60swdata%60%20where%20start_date%20%3C%20date('"+ d +"')%20and%20end_date%20%3E%20date('" + d + "')"
scraperwiki_response = urllib2.urlopen(scraperwiki_query).read()
response = Response(scraperwiki_response, status=200, mimetype='application/json')
#return url_for('closure_id', closure_id=1)
return response
@app.route('/closures/<int:closure_id>')
def closure_id():
return ""
# find closure with closure_id
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
apache-2.0
|
Python
|
070589ee7dba86dd3d0a8928ebdd331d4faaa0c6
|
Remove pdf_file field from plugin
|
n0m4dz/odoo,slevenhagen/odoo,funkring/fdoo,BT-rmartin/odoo,Grirrane/odoo,hassoon3/odoo,srimai/odoo,stephen144/odoo,alexcuellar/odoo,aviciimaxwell/odoo,OpenUpgrade-dev/OpenUpgrade,florian-dacosta/OpenUpgrade,sv-dev1/odoo,Ernesto99/odoo,slevenhagen/odoo,patmcb/odoo,havt/odoo,OSSESAC/odoopubarquiluz,apanju/GMIO_Odoo,incaser/odoo-odoo,abdellatifkarroum/odoo,Grirrane/odoo,hassoon3/odoo,srimai/odoo,stephen144/odoo,alexcuellar/odoo,aviciimaxwell/odoo,OpenUpgrade-dev/OpenUpgrade,florian-dacosta/OpenUpgrade,sv-dev1/odoo,Ernesto99/odoo,slevenhagen/odoo,patmcb/odoo,havt/odoo,OSSESAC/odoopubarquiluz,apanju/GMIO_Odoo,incaser/odoo-odoo,abdellatifkarroum/odoo,ojengwa/odoo,bguillot/OpenUpgrade,oasiswork/odoo,provaleks/o8,Noviat/odoo,CubicERP/odoo,pedrobaeza/odoo,steedos/odoo,damdam-s/OpenUpgrade,klunwebale/odoo,mszewczy/odoo,agrista/odoo-saas,FlorianLudwig/odoo,thanhacun/odoo,odootr/odoo,sadleader/odoo,draugiskisprendimai/odoo,lsinfo/odoo,fdvarela/odoo8,idncom/odoo,oasiswork/odoo,guerrerocarlos/odoo,synconics/odoo,hassoon3/odoo,srsman/odoo,joshuajan/odoo,gorjuce/odoo,KontorConsulting/odoo,CubicERP/odoo,pedrobaeza/odoo,pedrobaeza/OpenUpgrade,lgscofield/odoo,frouty/odoo_oph,VielSoft/odoo,Endika/odoo,zchking/odoo,FlorianLudwig/odoo,x111ong/odoo,Danisan/odoo-1,Drooids/odoo,n0m4dz/odoo,rahuldhote/odoo,pplatek/odoo,dfang/odoo,rubencabrera/odoo,oliverhr/odoo,fjbatresv/odoo,slevenhagen/odoo,abdellatifkarroum/odoo,ThinkOpen-Solutions/odoo,camptocamp/ngo-addons-backport,lsinfo/odoo,hassoon3/odoo,elmerdpadilla/iv,jusdng/odoo,mszewczy/odoo,synconics/odoo,hopeall/odoo,odoo-turkiye/odoo,hanicker/odoo,BT-ojossen/odoo,prospwro/odoo,dkubiak789/odoo,kifcaliph/odoo,xzYue/odoo,grap/OCB,OpenPymeMx/OCB,bkirui/odoo,gdgellatly/OCB1,acshan/odoo,ecosoft-odoo/odoo,frouty/odoogoeen,bobisme/odoo,Codefans-fan/odoo,alexcuellar/odoo,abenzbiria/clients_odoo,gorjuce/odoo,ojengwa/odoo,fosscon2015,joariasl/odoo,shingonoide/odoo,BT-ojossen/odoo,bobisme/odoo,pedrobaeza/odoo,stephen144/odoo,odoousers2014/odoo,papouso/odoo,oliverhr/odoo,vrenaville/ngo-addons-backport,BT-fgarbely/odoo,takis/odoo,highco-groupe/odoo,datenbetrieb/odoo,tinkerthaler/odoo,Codefans-fan/odoo,jaxkodex/odoo,bguillot/OpenUpgrade,odoousers2014/odoo,papouso/odoo,oliverhr/odoo,vnsofthe/odoo,srsman/odoo,joariasl/odoo,sv-dev1/odoo,gorjuce/odoo,ojengwa/odoo,joariasl/odoo,sv-dev1/odoo,gdgellatly/OCB1,Gitlab11/odoo,mkieszek/odoo,prospwro/odoo,leoliujie/odoo,kifcaliph/odoo,ramitalat/odoo,abdellatifkarroum/odoo,doomsterinc/odoo,odoousers2014/odoo,leorochael/odoo,poljeff/odoo,hbrunn/OpenUpgrade,andreparames/odoo,Nowheresly/odoo,synconics/odoo,bplancher/odoo,inspyration/odoo,matrixise/odoo,dllsf/odootest,luistorresm/odoo,christophlsa/odoo,jiangzhixiao/odoo,oasiswork/odoo,0k/OpenUpgrade,VielSoft/odoo,stonegithubs/odoo,MarcosCommunity/odoo,alexcuellar/odoo,rdeheele/odoo,sinbazhou/odoo,leorochael/odoo,poljeff/odoo,Gitlab11/odoo,frouty/odoo_oph,massot/odoo,janocat/odoo,markeTIC/OCB,windedge/odoo,PongPi/isl-odoo,BT-ojossen/odoo,nhomar/odoo-mirror,VielSoft/odoo,pedrobaeza/odoo,synconics/odoo,FlorianLudwig/odoo,BT-rmartin/odoo,hbrunn/OpenUpgrade,jolevq/odoopub,ujjwalwahi/odoo,cedk/odoo,mszewczy/odoo,NL66278/OCB,apocalypsebg/odoo,idncom/odoo,diagramsoftware/odoo,hip-odoo/odoo,draugiskisprendimai/odoo,ojengwa/odoo,fuhongliang/odoo,xujb/odoo,syci/OCB,erkrishna9/odoo,KontorConsulting/odoo,goliveirab/odoo,oliverhr/odoo,VielSoft/odoo,ChanduERP/odoo,0k/OpenUpgrade,havt/odoo,OpenPymeMx/OCB,grap/OpenUpgrade,syci/OCB,SAM-IT-SA/odoo,OpenPymeMx/OCB,slevenhagen/odoo,klunwebale/odoo,frouty/odoogoeen,numerigraphe/odoo,Codefans-fan/odoo,brijeshkesariya/odoo,guerrerocarlos/odoo,jusdng/odoo,credativUK/OCB,jeasoft/odoo,ubic135/odoo-design,provaleks/o8,Bachaco-ve/odoo,fossoult/odoo,BT-astauder/odoo,JonathanStein/odoo,SerpentCS/odoo,chiragjogi/odoo,kybriainfotech/iSocioCRM,jeasoft/odoo,cloud9UG/odoo,grap/OpenUpgrade,brijeshkesariya/odoo,ThinkOpen-Solutions/odoo,Ernesto99/odoo,avoinsystems/odoo,abstract-open-solutions/OCB,FlorianLudwig/odoo,xujb/odoo,windedge/odoo,juanalfonsopr/odoo,joshuajan/odoo,0k/odoo,blaggacao/OpenUpgrade,thanhacun/odoo,mmbtba/odoo,ingadhoc/odoo,SerpentCS/odoo,sadleader/odoo,naousse/odoo,diagramsoftware/odoo,demon-ru/iml-crm,shingonoide/odoo,d
sfsdgsbngfggb/odoo,christophlsa/odoo,mkieszek/odoo,frouty/odoogoeen,glovebx/odoo,Danisan/odoo-1,CopeX/odoo,odoo-turkiye/odoo,stephen144/odoo,JonathanStein/odoo,stephen144/odoo,sinbazhou/odoo,ygol/odoo,cpyou/odoo,Endika/odoo,Ichag/odoo,nitinitprof/odoo,RafaelTorrealba/odoo,Codefans-fan/odoo,idncom/odoo,pedrobaeza/OpenUpgrade,OpenPymeMx/OCB,shivam1111/odoo,ApuliaSoftware/odoo,havt/odoo,pedrobaeza/OpenUpgrade,Ernesto99/odoo,mmbtba/odoo,Maspear/odoo,pedrobaeza/OpenUpgrade,jusdng/odoo,addition-it-solutions/project-all,provaleks/o8,dezynetechnologies/odoo,Kilhog/odoo,acshan/odoo,sadleader/odoo,pplatek/odoo,fevxie/odoo,sve-odoo/odoo,SerpentCS/odoo,apocalypsebg/odoo,gavin-feng/odoo,csrocha/OpenUpgrade,mustafat/odoo-1,sysadminmatmoz/OCB,laslabs/odoo,Endika/OpenUpgrade,JonathanStein/odoo,dezynetechnologies/odoo,fuselock/odoo,Antiun/odoo,alexcuellar/odoo,jpshort/odoo,steedos/odoo,wangjun/odoo,bakhtout/odoo-educ,rowemoore/odoo,aviciimaxwell/odoo,cedk/odoo,Drooids/odoo,papouso/odoo,luiseduardohdbackup/odoo,grap/OCB,poljeff/odoo,Bachaco-ve/odoo,sergio-incaser/odoo,fossoult/odoo,omprakasha/odoo,simongoffin/website_version,NL66278/OCB,fuselock/odoo,hip-odoo/odoo,Eric-Zhong/odoo,ccomb/OpenUpgrade,ehirt/odoo,odootr/odoo,optima-ict/odoo,christophlsa/odoo,Eric-Zhong/odoo,elmerdpadilla/iv,xzYue/odoo,dgzurita/odoo,abstract-open-solutions/OCB,slevenhagen/odoo-npg,dezynetechnologies/odoo,jeasoft/odoo,dariemp/odoo,odooindia/odoo,gavin-feng/odoo,cdrooom/odoo,Nowheresly/odoo,nuuuboo/odoo,alqfahad/odoo,kirca/OpenUpgrade,OpenPymeMx/OCB,oihane/odoo,hopeall/odoo,Gitlab11/odoo,ehirt/odoo,virgree/odoo,dfang/odoo,hip-odoo/odoo,ihsanudin/odoo,mlaitinen/odoo,sinbazhou/odoo,deKupini/erp,ovnicraft/odoo,nhomar/odoo-mirror,codekaki/odoo,apanju/GMIO_Odoo,fevxie/odoo,abdellatifkarroum/odoo,laslabs/odoo,numerigraphe/odoo,jiachenning/odoo,erkrishna9/odoo,NeovaHealth/odoo,alhashash/odoo,FlorianLudwig/odoo,nuncjo/odoo,csrocha/OpenUpgrade,agrista/odoo-saas,hopeall/odoo,rowemoore/odoo,ApuliaSoftware/odoo,rgeleta/odoo,gvb/odoo,storm-computers/odoo,feroda/odoo,vnsofthe/odoo,osvalr/odoo,codekaki/odoo,grap/OCB,luistorresm/odoo,dsfsdgsbngfggb/odoo,lombritz/odoo,avoinsystems/odoo,ujjwalwahi/odoo,JonathanStein/odoo,hoatle/odoo,hassoon3/odoo,addition-it-solutions/project-all,Adel-Magebinary/odoo,nexiles/odoo,osvalr/odoo,prospwro/odoo,kittiu/odoo,joshuajan/odoo,waytai/odoo,fuselock/odoo,fossoult/odoo,lsinfo/odoo,frouty/odoo_oph,Ernesto99/odoo,microcom/odoo,blaggacao/OpenUpgrade,hbrunn/OpenUpgrade,idncom/odoo,numerigraphe/odoo,incaser/odoo-odoo,gdgellatly/OCB1,bplancher/odoo,nuuuboo/odoo,waytai/odoo,QianBIG/odoo,cysnake4713/odoo,storm-computers/odoo,fjbatresv/odoo,abstract-open-solutions/OCB,sinbazhou/odoo,colinnewell/odoo,ramitalat/odoo,feroda/odoo,alexteodor/odoo,tinkhaven-organization/odoo,nuuuboo/odoo,kirca/OpenUpgrade,cdrooom/odoo,grap/OCB,addition-it-solutions/project-all,QianBIG/odoo,vrenaville/ngo-addons-backport,Endika/odoo,guerrerocarlos/odoo,bealdav/OpenUpgrade,slevenhagen/odoo-npg,hoatle/odoo,takis/odoo,tvibliani/odoo,tarzan0820/odoo,Bachaco-ve/odoo,MarcosCommunity/odoo,ingadhoc/odoo,hoatle/odoo,nitinitprof/odoo,grap/OpenUpgrade,joariasl/odoo,minhtuancn/odoo,goliveirab/odoo,jfpla/odoo,OpusVL/odoo,hubsaysnuaa/odoo,janocat/odoo,lgscofield/odoo,dalegregory/odoo,Maspear/odoo,Antiun/odoo,apanju/GMIO_Odoo,luiseduardohdbackup/odoo,shaufi/odoo,fuhongliang/odoo,alexteodor/odoo,savoirfairelinux/odoo,glovebx/odoo,zchking/odoo,NL66278/OCB,jolevq/odoopub,tangyiyong/odoo,javierTerry/odoo,kybriainfotech/iSocioCRM,OSSESAC/odoopubarquiluz,VielSoft/odoo
,Kilhog/odoo,papouso/odoo,cdrooom/odoo,ThinkOpen-Solutions/odoo,credativUK/OCB,cysnake4713/odoo,OpenUpgrade-dev/OpenUpgrade,savoirfairelinux/OpenUpgrade,tarzan0820/odoo,sadleader/odoo,CatsAndDogsbvba/odoo,brijeshkesariya/odoo,collex100/odoo,dalegregory/odoo,acshan/odoo,Bachaco-ve/odoo,ubic135/odoo-design,Kilhog/odoo,christophlsa/odoo,windedge/odoo,BT-ojossen/odoo,diagramsoftware/odoo,fuhongliang/odoo,charbeljc/OCB,fjbatresv/odoo,deKupini/erp,brijeshkesariya/odoo,prospwro/odoo,rschnapka/odoo,gavin-feng/odoo,srsman/odoo,markeTIC/OCB,alqfahad/odoo,rubencabrera/odoo,jiachenning/odoo,glovebx/odoo,hubsaysnuaa/odoo,abenzbiria/clients_odoo,OSSESAC/odoopubarquiluz,slevenhagen/odoo-npg,cysnake4713/odoo,dllsf/odootest,stonegithubs/odoo,tarzan0820/odoo,bkirui/odoo,srsman/odoo,cedk/odoo,Maspear/odoo,optima-ict/odoo,bwrsandman/OpenUpgrade,ujjwalwahi/odoo,virgree/odoo,bwrsandman/OpenUpgrade,JCA-Developpement/Odoo,JGarcia-Panach/odoo,hifly/OpenUpgrade,shaufi10/odoo,gvb/odoo,jfpla/odoo,alhashash/odoo,sebalix/OpenUpgrade,vnsofthe/odoo,takis/odoo,jpshort/odoo,apanju/odoo,TRESCLOUD/odoopub,gsmartway/odoo,goliveirab/odoo,fdvarela/odoo8,vnsofthe/odoo,bwrsandman/OpenUpgrade,oihane/odoo,sysadminmatmoz/OCB,jesramirez/odoo,Gitlab11/odoo,BT-ojossen/odoo,lombritz/odoo,ecosoft-odoo/odoo,BT-ojossen/odoo,eino-makitalo/odoo,Antiun/odoo,slevenhagen/odoo,thanhacun/odoo,factorlibre/OCB,christophlsa/odoo,jiangzhixiao/odoo,Maspear/odoo,AuyaJackie/odoo,n0m4dz/odoo,fuhongliang/odoo,realsaiko/odoo,damdam-s/OpenUpgrade,sebalix/OpenUpgrade,mlaitinen/odoo,alexcuellar/odoo,SerpentCS/odoo,nuncjo/odoo,SerpentCS/odoo,collex100/odoo,fgesora/odoo,fgesora/odoo,colinnewell/odoo,nuuuboo/odoo,CatsAndDogsbvba/odoo,Nick-OpusVL/odoo,dfang/odoo,jolevq/odoopub,savoirfairelinux/odoo,ihsanudin/odoo,incaser/odoo-odoo,grap/OCB,gorjuce/odoo,jfpla/odoo,Codefans-fan/odoo,ujjwalwahi/odoo,fevxie/odoo,odootr/odoo,xzYue/odoo,VielSoft/odoo,spadae22/odoo,kittiu/odoo,hoatle/odoo,naousse/odoo,sv-dev1/odoo,PongPi/isl-odoo,markeTIC/OCB,Drooids/odoo,CatsAndDogsbvba/odoo,GauravSahu/odoo,joariasl/odoo,JGarcia-Panach/odoo,gvb/odoo,avoinsystems/odoo,fuhongliang/odoo,jolevq/odoopub,pedrobaeza/odoo,hassoon3/odoo,SAM-IT-SA/odoo,wangjun/odoo,CopeX/odoo,fossoult/odoo,factorlibre/OCB,QianBIG/odoo,tangyiyong/odoo,hopeall/odoo,ovnicraft/odoo,lightcn/odoo,Eric-Zhong/odoo,ccomb/OpenUpgrade,Danisan/odoo-1,bplancher/odoo,dgzurita/odoo,ClearCorp-dev/odoo,spadae22/odoo,shaufi/odoo,ClearCorp-dev/odoo,rgeleta/odoo,datenbetrieb/odoo,omprakasha/odoo,jaxkodex/odoo,OpenUpgrade-dev/OpenUpgrade,Maspear/odoo,bakhtout/odoo-educ,sve-odoo/odoo,fuhongliang/odoo,brijeshkesariya/odoo,MarcosCommunity/odoo,slevenhagen/odoo-npg,doomsterinc/odoo,poljeff/odoo,salaria/odoo,naousse/odoo,odootr/odoo,hoatle/odoo,sysadminmatmoz/OCB,shingonoide/odoo,dariemp/odoo,patmcb/odoo,tinkhaven-organization/odoo,joariasl/odoo,kirca/OpenUpgrade,cpyou/odoo,abdellatifkarroum/odoo,apanju/odoo,patmcb/odoo,apanju/GMIO_Odoo,csrocha/OpenUpgrade,eino-makitalo/odoo,grap/OCB,shingonoide/odoo,makinacorpus/odoo,guerrerocarlos/odoo,rahuldhote/odoo,sv-dev1/odoo,nuuuboo/odoo,NL66278/OCB,cedk/odoo,demon-ru/iml-crm,rowemoore/odoo,draugiskisprendimai/odoo,gsmartway/odoo,dsfsdgsbngfggb/odoo,sadleader/odoo,OSSESAC/odoopubarquiluz,florian-dacosta/OpenUpgrade,nhomar/odoo-mirror,rschnapka/odoo,gorjuce/odoo,slevenhagen/odoo-npg,prospwro/odoo,shivam1111/odoo,andreparames/odoo,tvibliani/odoo,JCA-Developpement/Odoo,dalegregory/odoo,ovnicraft/odoo,BT-ojossen/odoo,lombritz/odoo,luistorresm/odoo,oihane/odoo,naousse/odoo,AuyaJackie/odoo,salaria/odoo,
Endika/OpenUpgrade,ygol/odoo,diagramsoftware/odoo,x111ong/odoo,tarzan0820/odoo,Elico-Corp/odoo_OCB,odoo-turkiye/odoo,Nick-OpusVL/odoo,ccomb/OpenUpgrade,bguillot/OpenUpgrade,nitinitprof/odoo,florian-dacosta/OpenUpgrade,jpshort/odoo,thanhacun/odoo,wangjun/odoo,sinbazhou/odoo,ihsanudin/odoo,ehirt/odoo,cpyou/odoo,dariemp/odoo,ingadhoc/odoo,mszewczy/odoo,glovebx/odoo,jpshort/odoo,bplancher/odoo,havt/odoo,doomsterinc/odoo,syci/OCB,wangjun/odoo,funkring/fdoo,shingonoide/odoo,hoatle/odoo,makinacorpus/odoo,makinacorpus/odoo,ovnicraft/odoo,apocalypsebg/odoo,patmcb/odoo,x111ong/odoo,NeovaHealth/odoo,kybriainfotech/iSocioCRM,klunwebale/odoo,bakhtout/odoo-educ,Nick-OpusVL/odoo,bwrsandman/OpenUpgrade,credativUK/OCB,apocalypsebg/odoo,waytai/odoo,srimai/odoo,numerigraphe/odoo,srsman/odoo,cloud9UG/odoo,fevxie/odoo,camptocamp/ngo-addons-backport,christophlsa/odoo,rahuldhote/odoo,provaleks/o8,bguillot/OpenUpgrade,fuhongliang/odoo,aviciimaxwell/odoo,bobisme/odoo,gavin-feng/odoo,apanju/odoo,abdellatifkarroum/odoo,simongoffin/website_version,ramadhane/odoo,Endika/odoo,zchking/odoo,eino-makitalo/odoo,fossoult/odoo,sebalix/OpenUpgrade,xzYue/odoo,pedrobaeza/OpenUpgrade,CopeX/odoo,ingadhoc/odoo,jfpla/odoo,SAM-IT-SA/odoo,shaufi10/odoo,rdeheele/odoo,credativUK/OCB,fevxie/odoo,lsinfo/odoo,ApuliaSoftware/odoo,dariemp/odoo,shaufi/odoo,lightcn/odoo,patmcb/odoo,realsaiko/odoo,Ichag/odoo,gvb/odoo,jusdng/odoo,alqfahad/odoo,rgeleta/odoo,x111ong/odoo,windedge/odoo,minhtuancn/odoo,fgesora/odoo,optima-ict/odoo,javierTerry/odoo,ChanduERP/odoo,alqfahad/odoo,gsmartway/odoo,andreparames/odoo,highco-groupe/odoo,luiseduardohdbackup/odoo,odoo-turkiye/odoo,JonathanStein/odoo,klunwebale/odoo,mkieszek/odoo,goliveirab/odoo,AuyaJackie/odoo,guewen/OpenUpgrade,dariemp/odoo,QianBIG/odoo,Adel-Magebinary/odoo,ecosoft-odoo/odoo,eino-makitalo/odoo,stephen144/odoo,acshan/odoo,codekaki/odoo,virgree/odoo,dkubiak789/odoo,mvaled/OpenUpgrade,bwrsandman/OpenUpgrade,hip-odoo/odoo,ThinkOpen-Solutions/odoo,rgeleta/odoo,salaria/odoo,lombritz/odoo,jolevq/odoopub,jesramirez/odoo,abdellatifkarroum/odoo,oasiswork/odoo,kittiu/odoo,dsfsdgsbngfggb/odoo,Nowheresly/odoo,apanju/odoo,bkirui/odoo
|
addons/plugin_thunderbird/plugin_thunderbird.py
|
addons/plugin_thunderbird/plugin_thunderbird.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields
from osv import osv
class plugin_thunderbird_installer(osv.osv_memory):
_name = 'plugin_thunderbird.installer'
_inherit = 'res.config.installer'
_columns = {
'thunderbird': fields.boolean('Thunderbird Plug-in', help="Allows you to select an object that you would like to add to your email and its attachments."),
'plugin_name': fields.char('File name', size=64),
'plugin_file': fields.char('Thunderbird Plug-in', size=256, readonly=True, help="Thunderbird plug-in file. Save this file and install it in Thunderbird."),
}
_defaults = {
'thunderbird': True,
'plugin_name': 'openerp_plugin.xpi',
}
def default_get(self, cr, uid, fields, context=None):
res = super(plugin_thunderbird_installer, self).default_get(cr, uid, fields, context)
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
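        # expose the download link for the XPI bundled under this module's static files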
res['plugin_file'] = base_url + '/plugin_thunderbird/static/openerp_plugin.xpi'
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields
from osv import osv
class plugin_thunderbird_installer(osv.osv_memory):
_name = 'plugin_thunderbird.installer'
_inherit = 'res.config.installer'
_columns = {
'thunderbird': fields.boolean('Thunderbird Plug-in', help="Allows you to select an object that you would like to add to your email and its attachments."),
'plugin_name': fields.char('File name', size=64),
'plugin_file': fields.char('Thunderbird Plug-in', size=256, readonly=True, help="Thunderbird plug-in file. Save this file and install it in Thunderbird."),
}
_defaults = {
'thunderbird': True,
'plugin_name': 'openerp_plugin.xpi',
'pdf_file' : 'http://doc.openerp.com/v6.1/book/2/3_CRM_Contacts/communicate.html#managing-your-crm-from-mozilla-thunderbird',
}
def default_get(self, cr, uid, fields, context=None):
res = super(plugin_thunderbird_installer, self).default_get(cr, uid, fields, context)
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
res['plugin_file'] = base_url + '/plugin_thunderbird/static/openerp_plugin.xpi'
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
Python
|
052dc22a82267d381636f5f5fbbf4b5149ffb518
|
check if the values are empty before adding the data
|
rrpg/world-editor,rrpg/world-editor
|
gui/specieslistdialog.py
|
gui/specieslistdialog.py
|
# -*- coding: utf8 -*-
from PyQt4 import QtGui, QtCore
class speciesListDialog(QtGui.QDialog):
_tableview = None
def __init__(self, parent, app):
QtGui.QDialog.__init__(self, parent)
self._app = app
self._parent = parent
self.initUI()
self.setWindowTitle('List species')
self.show()
def initUI(self):
layout = QtGui.QVBoxLayout(self)
tablemodel = SpeciesTableModel(self._app.map.species, self)
self._tableview = QtGui.QTableView()
self._tableview.setModel(tablemodel)
form = QtGui.QGridLayout()
nameLabel = QtGui.QLabel("Species name")
self._nameField = QtGui.QLineEdit()
descriptionLabel = QtGui.QLabel("Species Description")
self._descriptionField = QtGui.QTextEdit()
self._saveButton = QtGui.QPushButton("Create")
self._saveButton.clicked.connect(self.createSpecies)
closeButton = QtGui.QPushButton("Close")
closeButton.clicked.connect(self.close)
form.addWidget(nameLabel, 0, 0)
form.addWidget(self._nameField, 0, 1)
form.addWidget(descriptionLabel, 1, 0)
form.addWidget(self._descriptionField, 1, 1)
form.addWidget(self._saveButton, 2, 1)
layout.addWidget(self._tableview)
layout.addLayout(form)
layout.addWidget(closeButton)
self.setLayout(layout)
def createSpecies(self):
name = str(self._nameField.text())
description = str(self._descriptionField.toPlainText())
        if name == "" or description == "":
return False
self._app.addSpecies(name, description)
tablemodel = SpeciesTableModel(self._app.map.species, self)
self._tableview.setModel(tablemodel)
class SpeciesTableModel(QtCore.QAbstractTableModel):
def __init__(self, datain, parent = None, *args):
QtCore.QAbstractTableModel.__init__(self, parent, *args)
self.dataChanged.connect(self.saveChange)
self.arraydata = datain
def rowCount(self, parent):
return len(self.arraydata)
def columnCount(self, parent):
if len(self.arraydata) == 0:
return 0
return len(self.arraydata[0])
def data(self, index, role):
if not index.isValid():
return None
elif role != QtCore.Qt.DisplayRole:
return None
return (self.arraydata[index.row()][index.column()])
def saveChange(self, x, y):
print x, y
def flags(self, index):
return QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
|
# -*- coding: utf8 -*-
from PyQt4 import QtGui, QtCore
class speciesListDialog(QtGui.QDialog):
_tableview = None
def __init__(self, parent, app):
QtGui.QDialog.__init__(self, parent)
self._app = app
self._parent = parent
self.initUI()
self.setWindowTitle('List species')
self.show()
def initUI(self):
layout = QtGui.QVBoxLayout(self)
tablemodel = SpeciesTableModel(self._app.map.species, self)
self._tableview = QtGui.QTableView()
self._tableview.setModel(tablemodel)
form = QtGui.QGridLayout()
nameLabel = QtGui.QLabel("Species name")
self._nameField = QtGui.QLineEdit()
descriptionLabel = QtGui.QLabel("Species Description")
self._descriptionField = QtGui.QTextEdit()
self._saveButton = QtGui.QPushButton("Create")
self._saveButton.clicked.connect(self.createSpecies)
closeButton = QtGui.QPushButton("Close")
closeButton.clicked.connect(self.close)
form.addWidget(nameLabel, 0, 0)
form.addWidget(self._nameField, 0, 1)
form.addWidget(descriptionLabel, 1, 0)
form.addWidget(self._descriptionField, 1, 1)
form.addWidget(self._saveButton, 2, 1)
layout.addWidget(self._tableview)
layout.addLayout(form)
layout.addWidget(closeButton)
self.setLayout(layout)
def createSpecies(self):
self._app.addSpecies(self._nameField.text(), self._descriptionField.toPlainText())
tablemodel = SpeciesTableModel(self._app.map.species, self)
self._tableview.setModel(tablemodel)
class SpeciesTableModel(QtCore.QAbstractTableModel):
def __init__(self, datain, parent = None, *args):
QtCore.QAbstractTableModel.__init__(self, parent, *args)
self.dataChanged.connect(self.saveChange)
self.arraydata = datain
def rowCount(self, parent):
return len(self.arraydata)
def columnCount(self, parent):
if len(self.arraydata) == 0:
return 0
return len(self.arraydata[0])
def data(self, index, role):
if not index.isValid():
return None
elif role != QtCore.Qt.DisplayRole:
return None
return (self.arraydata[index.row()][index.column()])
def saveChange(self, x, y):
print x, y
def flags(self, index):
return QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
|
mit
|
Python
|
ffe23433056e0a710f81bb22a9161cdaf3ff2c12
|
fix format call for python 2.6
|
chfw/moban,chfw/moban
|
moban/filters/repr.py
|
moban/filters/repr.py
|
from moban.extensions import JinjaFilter
@JinjaFilter('repr')
def repr_function(string):
if isinstance(string, list):
return ["'{0}'".format(str(element)) for element in string]
else:
return "'{0}'".format(str(string))
|
from moban.extensions import JinjaFilter
@JinjaFilter('repr')
def repr_function(string):
if isinstance(string, list):
return ["'{}'".format(str(element)) for element in string]
else:
return "'{}'".format(str(string))
|
mit
|
Python
|
9255fd2c34a403b14b423628f47b7a7419c0d526
|
update language lexer translations
|
AppVentus/AvTime-client,sachintaware/sublime-wakatime,sachintaware/sublime-wakatime,jinie/sublime-wakatime,jinie/sublime-wakatime,sachintaware/sublime-wakatime,AppVentus/AvTime-client,wakatime/sublime-wakatime,jinie/sublime-wakatime
|
packages/wakatime/wakatime/stats.py
|
packages/wakatime/wakatime/stats.py
|
# -*- coding: utf-8 -*-
"""
wakatime.stats
~~~~~~~~~~~~~~
Stats about files
:copyright: (c) 2013 Alan Hamlett.
:license: BSD, see LICENSE for more details.
"""
import logging
import os
import sys
if sys.version_info[0] == 2:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments2'))
else:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments3'))
from pygments.lexers import guess_lexer_for_filename
log = logging.getLogger(__name__)
# force file name extensions to be recognized as a certain language
EXTENSIONS = {
'md': 'Markdown',
}
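# normalize combined/templated lexer names reported by Pygments to a base language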
TRANSLATIONS = {
'CSS+Genshi Text': 'CSS',
'CSS+Lasso': 'CSS',
'HTML+Django/Jinja': 'HTML',
'HTML+Lasso': 'HTML',
'JavaScript+Genshi Text': 'JavaScript',
'JavaScript+Lasso': 'JavaScript',
'Perl6': 'Perl',
}
def guess_language(file_name):
if file_name:
language = guess_language_from_extension(file_name.rsplit('.', 1)[-1])
if language:
return language
lexer = None
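    # fall back to Pygments: sniff the language from the file's leading content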
try:
with open(file_name) as f:
lexer = guess_lexer_for_filename(file_name, f.read(512000))
except:
pass
if lexer:
return translate_language(str(lexer.name))
else:
return None
def guess_language_from_extension(extension):
if extension:
if extension in EXTENSIONS:
return EXTENSIONS[extension]
if extension.lower() in EXTENSIONS:
            return EXTENSIONS[extension.lower()]
return None
def translate_language(language):
if language in TRANSLATIONS:
language = TRANSLATIONS[language]
return language
def number_lines_in_file(file_name):
lines = 0
try:
with open(file_name) as f:
for line in f:
lines += 1
except IOError:
return None
return lines
def get_file_stats(file_name):
stats = {
'language': guess_language(file_name),
'lines': number_lines_in_file(file_name),
}
return stats
|
# -*- coding: utf-8 -*-
"""
wakatime.stats
~~~~~~~~~~~~~~
Stats about files
:copyright: (c) 2013 Alan Hamlett.
:license: BSD, see LICENSE for more details.
"""
import logging
import os
import sys
if sys.version_info[0] == 2:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments2'))
else:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments3'))
from pygments.lexers import guess_lexer_for_filename
log = logging.getLogger(__name__)
# force file name extensions to be recognized as a certain language
EXTENSIONS = {
'md': 'Markdown',
}
def guess_language(file_name):
if file_name:
language = guess_language_from_extension(file_name.rsplit('.', 1)[-1])
if language:
return language
lexer = None
try:
with open(file_name) as f:
lexer = guess_lexer_for_filename(file_name, f.read(512000))
except:
pass
if lexer:
return str(lexer.name)
else:
return None
def guess_language_from_extension(extension):
if extension:
if extension in EXTENSIONS:
return EXTENSIONS[extension]
if extension.lower() in EXTENSIONS:
            return EXTENSIONS[extension.lower()]
return None
def number_lines_in_file(file_name):
lines = 0
try:
with open(file_name) as f:
for line in f:
lines += 1
except IOError:
return None
return lines
def get_file_stats(file_name):
stats = {
'language': guess_language(file_name),
'lines': number_lines_in_file(file_name),
}
return stats
|
bsd-3-clause
|
Python
|
324cae7bc3d8d758205a0760dffd8d78ce611d48
|
Increase tweet length
|
julians/zcraper,julians/zcraper,julians/zcraper
|
bot.py
|
bot.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import tweepy
import datetime
import os
import os.path
import requests
import json
from models import Aufmacher, Author, Image, TweetJob
from config import db
from playhouse.shortcuts import model_to_dict
from secrets import twitter_secrets
def tweet(tweetjob):
href = tweetjob.aufmacher.unique_id.replace("http://xml", "http://www")
tweet_text = """
{supertitle}: {title}
{subtitle}
""".format(**model_to_dict(tweetjob.aufmacher)).strip()
if len(tweet_text) > 250:
tweet_text = "{:.250}…".format(tweet_text)
tweet = """
{tweet_text}
{href}
""".format(tweet_text=tweet_text,
href=href).strip()
auth = tweepy.OAuthHandler(twitter_secrets["CONSUMER_KEY"], twitter_secrets["CONSUMER_SECRET"])
auth.set_access_token(twitter_secrets["ACCESS_TOKEN"], twitter_secrets["ACCESS_TOKEN_SECRET"])
api = tweepy.API(auth)
api.update_status(status=tweet)
tweetjob.tweeted_at = datetime.datetime.now()
tweetjob.save()
def go():
tweetjobs = TweetJob.select().where(TweetJob.tweeted_at == None)
for tweetjob in tweetjobs:
tweet(tweetjob)
if __name__ == "__main__":
go()
# #media_upload_response = api.media_upload(image_filename)
# #print(media_upload_response.media_id_string)
# #api.update_status(status="test with image", media_ids=[media_upload_response.media_id_string])
# with open("last_tweeted", 'w') as file:
# file.write(todays_date)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import tweepy
import datetime
import os
import os.path
import requests
import json
from models import Aufmacher, Author, Image, TweetJob
from config import db
from playhouse.shortcuts import model_to_dict
from secrets import twitter_secrets
def tweet(tweetjob):
href = tweetjob.aufmacher.unique_id.replace("http://xml", "http://www")
tweet_text = """
{supertitle}: {title}
{subtitle}
""".format(**model_to_dict(tweetjob.aufmacher)).strip()
if len(tweet_text) > 115:
tweet_text = "{:.115}…".format(tweet_text)
tweet = """
{tweet_text}
{href}
""".format(tweet_text=tweet_text,
href=href).strip()
auth = tweepy.OAuthHandler(twitter_secrets["CONSUMER_KEY"], twitter_secrets["CONSUMER_SECRET"])
auth.set_access_token(twitter_secrets["ACCESS_TOKEN"], twitter_secrets["ACCESS_TOKEN_SECRET"])
api = tweepy.API(auth)
api.update_status(status=tweet)
tweetjob.tweeted_at = datetime.datetime.now()
tweetjob.save()
def go():
tweetjobs = TweetJob.select().where(TweetJob.tweeted_at == None)
for tweetjob in tweetjobs:
tweet(tweetjob)
if __name__ == "__main__":
go()
# #media_upload_response = api.media_upload(image_filename)
# #print(media_upload_response.media_id_string)
# #api.update_status(status="test with image", media_ids=[media_upload_response.media_id_string])
# with open("last_tweeted", 'w') as file:
# file.write(todays_date)
|
apache-2.0
|
Python
|
3d52eca5b9a7cddcd1d2b67547c22c28847aa085
|
fix print format for python3
|
donnadionne/grpc,ctiller/grpc,jtattermusch/grpc,sreecha/grpc,jtattermusch/grpc,pszemus/grpc,grpc/grpc,jboeuf/grpc,pszemus/grpc,nicolasnoble/grpc,grpc/grpc,donnadionne/grpc,jtattermusch/grpc,grpc/grpc,jtattermusch/grpc,firebase/grpc,grpc/grpc,stanley-cheung/grpc,nicolasnoble/grpc,firebase/grpc,grpc/grpc,stanley-cheung/grpc,ejona86/grpc,sreecha/grpc,nicolasnoble/grpc,ctiller/grpc,vjpai/grpc,donnadionne/grpc,jboeuf/grpc,firebase/grpc,vjpai/grpc,stanley-cheung/grpc,jboeuf/grpc,vjpai/grpc,donnadionne/grpc,stanley-cheung/grpc,grpc/grpc,pszemus/grpc,ejona86/grpc,muxi/grpc,sreecha/grpc,jtattermusch/grpc,ejona86/grpc,ctiller/grpc,sreecha/grpc,vjpai/grpc,nicolasnoble/grpc,ejona86/grpc,donnadionne/grpc,muxi/grpc,muxi/grpc,grpc/grpc,sreecha/grpc,pszemus/grpc,jboeuf/grpc,muxi/grpc,muxi/grpc,sreecha/grpc,vjpai/grpc,sreecha/grpc,ejona86/grpc,donnadionne/grpc,muxi/grpc,donnadionne/grpc,jboeuf/grpc,ctiller/grpc,grpc/grpc,stanley-cheung/grpc,sreecha/grpc,pszemus/grpc,grpc/grpc,jboeuf/grpc,jtattermusch/grpc,muxi/grpc,ctiller/grpc,jtattermusch/grpc,ejona86/grpc,jtattermusch/grpc,donnadionne/grpc,pszemus/grpc,grpc/grpc,ejona86/grpc,jboeuf/grpc,jtattermusch/grpc,grpc/grpc,ejona86/grpc,jboeuf/grpc,ejona86/grpc,pszemus/grpc,sreecha/grpc,firebase/grpc,firebase/grpc,firebase/grpc,jboeuf/grpc,pszemus/grpc,ctiller/grpc,muxi/grpc,sreecha/grpc,stanley-cheung/grpc,donnadionne/grpc,nicolasnoble/grpc,vjpai/grpc,vjpai/grpc,firebase/grpc,firebase/grpc,ejona86/grpc,firebase/grpc,donnadionne/grpc,muxi/grpc,jtattermusch/grpc,nicolasnoble/grpc,pszemus/grpc,muxi/grpc,nicolasnoble/grpc,nicolasnoble/grpc,vjpai/grpc,nicolasnoble/grpc,ctiller/grpc,stanley-cheung/grpc,donnadionne/grpc,vjpai/grpc,jboeuf/grpc,stanley-cheung/grpc,ctiller/grpc,firebase/grpc,ctiller/grpc,vjpai/grpc,firebase/grpc,ctiller/grpc,ejona86/grpc,stanley-cheung/grpc,jtattermusch/grpc,ctiller/grpc,nicolasnoble/grpc,jboeuf/grpc,donnadionne/grpc,ctiller/grpc,sreecha/grpc,grpc/grpc,muxi/grpc,nicolasnoble/grpc,stanley-cheung/grpc,pszemus/grpc,ejona86/grpc,pszemus/grpc,nicolasnoble/grpc,vjpai/grpc,stanley-cheung/grpc,stanley-cheung/grpc,firebase/grpc,jboeuf/grpc,vjpai/grpc,sreecha/grpc,pszemus/grpc,jtattermusch/grpc,muxi/grpc
|
tools/run_tests/start_port_server.py
|
tools/run_tests/start_port_server.py
|
#!/usr/bin/env python2.7
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Wrapper around port server starting code.
Used by developers who wish to run individual C/C++ tests outside of the
run_tests.py infrastructure.
The path to this file is called out in test/core/util/port.c, and printed as
an error message to users.
"""
import python_utils.start_port_server as start_port_server
start_port_server.start_port_server()
print("Port server started successfully")
|
#!/usr/bin/env python2.7
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Wrapper around port server starting code.
Used by developers who wish to run individual C/C++ tests outside of the
run_tests.py infrastructure.
The path to this file is called out in test/core/util/port.c, and printed as
an error message to users.
"""
import python_utils.start_port_server as start_port_server
start_port_server.start_port_server()
print "Port server started successfully"
|
apache-2.0
|
Python
|
6561ea0b329b2f42126dc23eab59676de305dd73
|
remove unused imports
|
simonz05/flask-wtf,simonz05/flask-wtf
|
flask_wtf/forms.py
|
flask_wtf/forms.py
|
from __future__ import absolute_import
import jinja2
from flask import request, session, current_app
from wtforms.ext.csrf.session import SessionSecureForm
from wtforms.fields import HiddenField
class Form(SessionSecureForm):
"Implements a SessionSecureForm using app.SECRET_KEY and flask.session obj"
def __init__(self, formdata=None, obj=None, prefix='', csrf_enabled=None, **kwargs):
self.csrf_enabled = csrf_enabled
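        # fall back to the Flask app's CSRF_ENABLED setting when not passed explicitly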
if csrf_enabled is None:
self.csrf_enabled = current_app.config.get('CSRF_ENABLED', True)
self.SECRET_KEY = current_app.config.get('CSRF_SESSION_KEY', '_csrf_token')
super(Form, self).__init__(formdata, obj, prefix, session, **kwargs)
def is_submitted(self):
"Check if request method is either PUT or POST"
return request and request.method in ("PUT", "POST")
def validate_on_submit(self):
"Call `form.validate()` if request method was either PUT or POST"
return self.is_submitted() and self.validate()
def validate_csrf_token(self, field):
if not self.csrf_enabled:
return True
return super(Form, self).validate_csrf_token(field)
def hidden_fields(self, *fields):
"hidden fields in a hidden DIV tag, in order to keep XHTML compliance."
if not fields:
fields = [f for f in self if isinstance(f, HiddenField)]
rv = [u'<div style="display:none;">']
for field in fields:
if isinstance(field, basestring):
field = getattr(self, field)
rv.append(unicode(field))
rv.append(u"</div>")
return jinja2.Markup(u"".join(rv))
def process(self, formdata=None, obj=None, **kwargs):
try:
if formdata is None:
formdata = request.form
except AttributeError:
pass
super(Form, self).process(formdata, obj, **kwargs)
|
from __future__ import absolute_import
import jinja2
import wtforms
from flask import request, session, current_app
from wtforms.ext.csrf.session import SessionSecureForm
from wtforms.fields import HiddenField
class Form(SessionSecureForm):
"Implements a SessionSecureForm using app.SECRET_KEY and flask.session obj"
def __init__(self, formdata=None, obj=None, prefix='', csrf_enabled=None, **kwargs):
self.csrf_enabled = csrf_enabled
if csrf_enabled is None:
self.csrf_enabled = current_app.config.get('CSRF_ENABLED', True)
self.SECRET_KEY = current_app.config.get('CSRF_SESSION_KEY', '_csrf_token')
super(Form, self).__init__(formdata, obj, prefix, session, **kwargs)
def is_submitted(self):
"Check if request method is either PUT or POST"
return request and request.method in ("PUT", "POST")
def validate_on_submit(self):
"Call `form.validate()` if request method was either PUT or POST"
return self.is_submitted() and self.validate()
def validate_csrf_token(self, field):
if not self.csrf_enabled:
return True
return super(Form, self).validate_csrf_token(field)
def hidden_fields(self, *fields):
"hidden fields in a hidden DIV tag, in order to keep XHTML compliance."
if not fields:
fields = [f for f in self if isinstance(f, HiddenField)]
rv = [u'<div style="display:none;">']
for field in fields:
if isinstance(field, basestring):
field = getattr(self, field)
rv.append(unicode(field))
rv.append(u"</div>")
return jinja2.Markup(u"".join(rv))
def process(self, formdata=None, obj=None, **kwargs):
try:
if formdata is None:
formdata = request.form
except AttributeError:
pass
super(Form, self).process(formdata, obj, **kwargs)
|
bsd-3-clause
|
Python
|
413057374d55d851fa4717a66a0975f29b131f4f
|
Fix bytes output
|
martindurant/libhdfs3-ctypes,martindurant/libhdfs3-ctypes,blaze/hdfs3,blaze/hdfs3
|
cli.py
|
cli.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 30 13:58:24 2015
@author: mdurant
"""
import argparse
import hdfs
import sys
import inspect
parser = argparse.ArgumentParser(description='HDFS commands')
parser.add_argument("command", help="filesystem command to run")
parser.add_argument("par1", help="filesystem command to run", nargs="?", default=None)
parser.add_argument("par2", help="filesystem command to run", nargs="?", default=None)
parser.add_argument('--port', type=int,
help='Name node port')
parser.add_argument('--host', type=str,
help='Name node address')
parser.add_argument('--verbose', type=int, default=0,
help='Verbosity')
args = parser.parse_args()
par1, par2 = args.par1, args.par2
if args.verbose > 0:
print(args)
commands = ['ls', 'cat', 'info', 'mkdir', 'rmdir', 'rm', 'mv', 'exists',
'chmod', 'chmown', 'set_replication', 'get_block_locations',
'to_local', 'to_hdfs']
if __name__ == "__main__":
if args.command not in commands:
print("Available commands:", list(sorted(commands)))
sys.exit(1)
kwargs = {}
if args.host:
kwargs['host'] = args.host
if args.port:
kwargs['port'] = args.port
fs = hdfs.HDFileSystem(**kwargs)
cmd = getattr(fs, args.command)
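    # inspect the method signature (minus self) to know how many positional CLI args to forward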
nargs = len(inspect.getargspec(cmd).args) - 1
args = (par1, par2)[:nargs]
out = cmd(*args)
if isinstance(out, list):
for l in out:
print(l)
elif hasattr(out, 'decode'):
print(out.decode())
elif out is not None:
print(out)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 30 13:58:24 2015
@author: mdurant
"""
import argparse
import hdfs
import sys
import inspect
parser = argparse.ArgumentParser(description='HDFS commands')
parser.add_argument("command", help="filesystem command to run")
parser.add_argument("par1", help="filesystem command to run", nargs="?", default=None)
parser.add_argument("par2", help="filesystem command to run", nargs="?", default=None)
parser.add_argument('--port', type=int,
help='Name node port')
parser.add_argument('--host', type=str,
help='Name node address')
parser.add_argument('--verbose', type=int, default=0,
help='Verbosity')
args = parser.parse_args()
par1, par2 = args.par1, args.par2
if args.verbose > 0:
print(args)
commands = ['ls', 'cat', 'info', 'mkdir', 'rmdir', 'rm', 'mv', 'exists',
'chmod', 'chmown', 'set_replication', 'get_block_locations',
'to_local', 'to_hdfs']
if __name__ == "__main__":
if args.command not in commands:
print("Available commands:", list(sorted(commands)))
sys.exit(1)
kwargs = {}
if args.host:
kwargs['host'] = args.host
if args.port:
kwargs['port'] = args.port
fs = hdfs.HDFileSystem(**kwargs)
cmd = getattr(fs, args.command)
nargs = len(inspect.getargspec(cmd).args) - 1
args = (par1, par2)[:nargs]
out = cmd(*args)
if isinstance(out, list):
for l in out:
print(l)
elif out is not None:
print(out)
|
unknown
|
Python
|
72fa091716e1e0d40a8219701da94bee6d49c58b
|
remove debugging
|
kevinpdavies/pycsw,ricardogsilva/pycsw,ingenieroariel/pycsw,ckan-fcd/pycsw-fcd,bukun/pycsw,tomkralidis/pycsw,tomkralidis/pycsw,benhowell/pycsw,benhowell/pycsw,rouault/pycsw,ricardogsilva/pycsw,kevinpdavies/pycsw,PublicaMundi/pycsw,rouault/pycsw,ocefpaf/pycsw,kalxas/pycsw,geopython/pycsw,mwengren/pycsw,kalxas/pycsw,geopython/pycsw,kalxas/pycsw,ricardogsilva/pycsw,PublicaMundi/pycsw,ckan-fcd/pycsw-fcd,bukun/pycsw,ingenieroariel/pycsw,tomkralidis/pycsw,geopython/pycsw,ocefpaf/pycsw,bukun/pycsw,mwengren/pycsw
|
csw.py
|
csw.py
|
#!/usr/bin/python -u
# -*- coding: iso-8859-15 -*-
# =================================================================
#
# $Id$
#
# Authors: Tom Kralidis <[email protected]>
#
# Copyright (c) 2010 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
# CGI wrapper for pycsw
import os, sys
from StringIO import StringIO
from server import server
CONFIG = 'default.cfg'
GZIP = False
if os.environ.has_key('PYCSW_CONFIG'):
CONFIG = os.environ['PYCSW_CONFIG']
if os.environ['QUERY_STRING'].lower().find('config') != -1:
for kvp in os.environ['QUERY_STRING'].split('&'):
if kvp.lower().find('config') != -1:
CONFIG = kvp.split('=')[1]
if (os.environ.has_key('HTTP_ACCEPT_ENCODING') and
os.environ['HTTP_ACCEPT_ENCODING'].find('gzip') != -1):
# set for gzip compressed response
GZIP = True
# get runtime configuration
CSW = server.Csw(CONFIG)
# set compression level
if CSW.config.has_option('server', 'gzip_compresslevel'):
GZIP_COMPRESSLEVEL = \
int(CSW.config.get('server', 'gzip_compresslevel'))
else:
GZIP_COMPRESSLEVEL = 0
# go!
OUTP = CSW.dispatch_cgi()
sys.stdout.write("Content-Type:%s\r\n" % CSW.contenttype)
if GZIP and GZIP_COMPRESSLEVEL > 0:
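    # gzip the response body in memory so the matching Content-Encoding/Length headers can be emitted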
import gzip
BUF = StringIO()
GZIPFILE = gzip.GzipFile(mode='wb', fileobj=BUF,
compresslevel=GZIP_COMPRESSLEVEL)
GZIPFILE.write(OUTP)
GZIPFILE.close()
OUTP = BUF.getvalue()
sys.stdout.write('Content-Encoding: gzip\r\n')
sys.stdout.write('Content-Length: %d\r\n' % len(OUTP))
sys.stdout.write('\r\n')
sys.stdout.write(OUTP)
|
#!/usr/bin/python -u
# -*- coding: iso-8859-15 -*-
# =================================================================
#
# $Id$
#
# Authors: Tom Kralidis <[email protected]>
#
# Copyright (c) 2010 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
# CGI wrapper for pycsw
import cgitb
cgitb.enable()
import os, sys
from StringIO import StringIO
from server import server
CONFIG = 'default.cfg'
GZIP = False
if os.environ.has_key('PYCSW_CONFIG'):
CONFIG = os.environ['PYCSW_CONFIG']
if os.environ['QUERY_STRING'].lower().find('config') != -1:
for kvp in os.environ['QUERY_STRING'].split('&'):
if kvp.lower().find('config') != -1:
CONFIG = kvp.split('=')[1]
if (os.environ.has_key('HTTP_ACCEPT_ENCODING') and
os.environ['HTTP_ACCEPT_ENCODING'].find('gzip') != -1):
# set for gzip compressed response
GZIP = True
# get runtime configuration
CSW = server.Csw(CONFIG)
# set compression level
if CSW.config.has_option('server', 'gzip_compresslevel'):
GZIP_COMPRESSLEVEL = \
int(CSW.config.get('server', 'gzip_compresslevel'))
else:
GZIP_COMPRESSLEVEL = 0
# go!
OUTP = CSW.dispatch_cgi()
sys.stdout.write("Content-Type:%s\r\n" % CSW.contenttype)
if GZIP and GZIP_COMPRESSLEVEL > 0:
import gzip
BUF = StringIO()
GZIPFILE = gzip.GzipFile(mode='wb', fileobj=BUF,
compresslevel=GZIP_COMPRESSLEVEL)
GZIPFILE.write(OUTP)
GZIPFILE.close()
OUTP = BUF.getvalue()
sys.stdout.write('Content-Encoding: gzip\r\n')
sys.stdout.write('Content-Length: %d\r\n' % len(OUTP))
sys.stdout.write('\r\n')
sys.stdout.write(OUTP)
|
mit
|
Python
|
b666228405e9b23e65d6d631968a7f6f334b6b46
|
change string for utf8 translation (#48)
|
GoogleCloudPlatform/python-docs-samples,GoogleCloudPlatform/python-docs-samples,GoogleCloudPlatform/python-docs-samples,GoogleCloudPlatform/python-docs-samples
|
translation/samples/snippets/snippets_test.py
|
translation/samples/snippets/snippets_test.py
|
# -*- coding: utf-8 -*-
# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import snippets
def test_detect_language(capsys):
snippets.detect_language('Hæ sæta')
out, _ = capsys.readouterr()
assert 'is' in out
def test_list_languages(capsys):
snippets.list_languages()
out, _ = capsys.readouterr()
assert 'Icelandic (is)' in out
def test_list_languages_with_target(capsys):
snippets.list_languages_with_target('is')
out, _ = capsys.readouterr()
assert u'íslenska (is)' in out
def test_translate_text(capsys):
snippets.translate_text('is', 'Hello world')
out, _ = capsys.readouterr()
assert u'Halló heimur' in out
def test_translate_utf8(capsys):
text = u'파인애플 13개'
snippets.translate_text('en', text)
out, _ = capsys.readouterr()
assert u'13 pineapples' in out
|
# -*- coding: utf-8 -*-
# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import snippets
def test_detect_language(capsys):
snippets.detect_language('Hæ sæta')
out, _ = capsys.readouterr()
assert 'is' in out
def test_list_languages(capsys):
snippets.list_languages()
out, _ = capsys.readouterr()
assert 'Icelandic (is)' in out
def test_list_languages_with_target(capsys):
snippets.list_languages_with_target('is')
out, _ = capsys.readouterr()
assert u'íslenska (is)' in out
def test_translate_text(capsys):
snippets.translate_text('is', 'Hello world')
out, _ = capsys.readouterr()
assert u'Halló heimur' in out
def test_translate_utf8(capsys):
text = u'나는 파인애플을 좋아한다.'
snippets.translate_text('en', text)
out, _ = capsys.readouterr()
assert u'I like pineapple' in out
|
apache-2.0
|
Python
|
833cd8342385fc095181afc3306ce04414bfd447
|
Add workaround for destroying models too quickly.
|
mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju
|
perfscale_mass_model_destruction.py
|
perfscale_mass_model_destruction.py
|
#!/usr/bin/env python
"""Perfscale test measuring adding and destroying a large number of models.
Steps taken in this test:
- Bootstraps a provider
- Creates x amount of models and waits for them to be ready
- Delete all the models at once.
"""
import argparse
from datetime import datetime
import logging
import sys
from time import sleep
from deploy_stack import (
BootstrapManager,
)
from generate_perfscale_results import (
DeployDetails,
TimingData,
run_perfscale_test,
)
from utility import (
add_basic_testing_arguments,
configure_logging,
)
log = logging.getLogger("perfscale_mass_model_destruction")
__metaclass__ = type
def perfscale_assess_model_destruction(client, args):
"""Create a bunch of models and then destroy them all."""
model_count = args.model_count
all_models = []
for item in xrange(0, model_count):
model_name = 'model{}'.format(item)
log.info('Creating model: {}'.format(model_name))
new_model = client.add_model(client.env.clone(model_name))
new_model.wait_for_started()
all_models.append(new_model)
# Workaround for bug: https://bugs.launchpad.net/juju/+bug/1635052
# Noted here: https://bugs.launchpad.net/juju-ci-tools/+bug/1635109
sleep(10)
destruction_start = datetime.utcnow()
for doomed in all_models:
doomed.destroy_model()
destruction_end = datetime.utcnow()
destruction_timing = TimingData(destruction_start, destruction_end)
return DeployDetails(
'Destroy {} models'.format(model_count),
{'Model Count': model_count},
destruction_timing)
def parse_args(argv):
"""Parse all arguments."""
parser = argparse.ArgumentParser(
description="Perfscale bundle deployment test.")
add_basic_testing_arguments(parser)
parser.add_argument(
'--model-count',
type=int,
help='Number of models to create.',
default=100)
return parser.parse_args(argv)
def main(argv=None):
args = parse_args(argv)
configure_logging(args.verbose)
bs_manager = BootstrapManager.from_args(args)
run_perfscale_test(perfscale_assess_model_destruction, bs_manager, args)
return 0
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
"""Perfscale test measuring adding and destroying a large number of models.
Steps taken in this test:
- Bootstraps a provider
- Creates x amount of models and waits for them to be ready
- Delete all the models at once.
"""
import argparse
from datetime import datetime
import logging
import sys
from deploy_stack import (
BootstrapManager,
)
from generate_perfscale_results import (
DeployDetails,
TimingData,
run_perfscale_test,
)
from utility import (
add_basic_testing_arguments,
configure_logging,
)
log = logging.getLogger("perfscale_mass_model_destruction")
__metaclass__ = type
def perfscale_assess_model_destruction(client, args):
"""Create a bunch of models and then destroy them all."""
model_count = args.model_count
all_models = []
for item in xrange(0, model_count):
model_name = 'model{}'.format(item)
log.info('Creating model: {}'.format(model_name))
new_model = client.add_model(client.env.clone(model_name))
new_model.wait_for_started()
all_models.append(new_model)
destruction_start = datetime.utcnow()
for doomed in all_models:
doomed.destroy_model()
destruction_end = datetime.utcnow()
destruction_timing = TimingData(destruction_start, destruction_end)
return DeployDetails(
'Destroy {} models'.format(model_count),
{'Model Count': model_count},
destruction_timing)
def parse_args(argv):
"""Parse all arguments."""
parser = argparse.ArgumentParser(
description="Perfscale bundle deployment test.")
add_basic_testing_arguments(parser)
parser.add_argument(
'--model-count',
type=int,
help='Number of models to create.',
default=100)
return parser.parse_args(argv)
def main(argv=None):
args = parse_args(argv)
configure_logging(args.verbose)
bs_manager = BootstrapManager.from_args(args)
run_perfscale_test(perfscale_assess_model_destruction, bs_manager, args)
return 0
if __name__ == '__main__':
sys.exit(main())
|
agpl-3.0
|
Python
|
f29ff5eaa24b0671066f145d76b53e534a574119
|
Set "text/javascript" type if given a callback
|
Endika/freegeoip,stea4lth/freegeoip,NickTaporuk/freegeoip,danielstreit/freegeoip,wackadoo/freegeoip,mavelar/freegeoip,thebakeryio/freegeoip,btsantos/freegeoip,btsantos/freegeoip,NickTaporuk/freegeoip,pocket-playlab/freegeoip,btsantos/freegeoip,wackadoo/freegeoip,mcdelaney/freegeoip,My-Sign/freegeoip,GhaziTriki/freegeoip,danielstreit/freegeoip,mcdelaney/freegeoip,mavelar/freegeoip,thebakeryio/freegeoip,bcbroussard/freegeoip,pocket-playlab/freegeoip,stea4lth/freegeoip,Sweazer/freegeoip,Sweazer/freegeoip,Endika/freegeoip,stea4lth/freegeoip,GhaziTriki/freegeoip,Sweazer/freegeoip,Endika/freegeoip,GhaziTriki/freegeoip,bcbroussard/freegeoip,arnononline/freegeoip,wackadoo/freegeoip,pocket-playlab/freegeoip,mcdelaney/freegeoip,My-Sign/freegeoip,NickTaporuk/freegeoip,mavelar/freegeoip,danielstreit/freegeoip,bcbroussard/freegeoip,My-Sign/freegeoip,thebakeryio/freegeoip
|
freegeoip/geoip.py
|
freegeoip/geoip.py
|
#!/usr/bin/env python
# coding: utf-8
#
# Copyright 2010 Alexandre Fiori
# freegeoip.net
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cyclone.web
import cyclone.escape
import socket
from twisted.python import log
from twisted.internet import defer
import freegeoip.search
class BaseHandler(cyclone.web.RequestHandler):
@defer.inlineCallbacks
def get(self, address):
try:
ip, data = yield freegeoip.search.geoip(self.settings.db,
address or self.request.remote_ip)
if data:
data = cyclone.escape.json_decode(data[0][0])
data["ip"] = ip
except socket.error:
raise cyclone.web.HTTPError(404)
except ValueError:
raise cyclone.web.HTTPError(400)
except Exception, e:
log.err("search.geoip('%s') failed: %s" % (address, e))
raise cyclone.web.HTTPError(503)
if data:
self.dump(data)
else:
raise cyclone.web.HTTPError(404)
def dump(self, data):
raise NotImplementedError
class CsvHandler(BaseHandler):
def dump(self, data):
self.set_header("Content-Type", "text/csv")
self.render("geoip.csv", data=data)
class XmlHandler(BaseHandler):
def dump(self, data):
self.set_header("Content-Type", "text/xml")
self.render("geoip.xml", data=data)
class JsonHandler(BaseHandler):
def dump(self, data):
callback = self.get_argument("callback", None)
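        # JSONP: wrap the payload in the caller-supplied function and serve it as JavaScript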
if callback:
self.set_header("Content-Type", "text/javascript")
self.finish("%s(%s);" % (callback, cyclone.escape.json_encode(data)))
else:
self.set_header("Content-Type", "application/json")
self.finish(cyclone.escape.json_encode(data))
|
#!/usr/bin/env python
# coding: utf-8
#
# Copyright 2010 Alexandre Fiori
# freegeoip.net
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cyclone.web
import cyclone.escape
import socket
from twisted.python import log
from twisted.internet import defer
import freegeoip.search
class BaseHandler(cyclone.web.RequestHandler):
@defer.inlineCallbacks
def get(self, address):
try:
ip, data = yield freegeoip.search.geoip(self.settings.db,
address or self.request.remote_ip)
if data:
data = cyclone.escape.json_decode(data[0][0])
data["ip"] = ip
except socket.error:
raise cyclone.web.HTTPError(404)
except ValueError:
raise cyclone.web.HTTPError(400)
except Exception, e:
log.err("search.geoip('%s') failed: %s" % (address, e))
raise cyclone.web.HTTPError(503)
if data:
self.dump(data)
else:
raise cyclone.web.HTTPError(404)
def dump(self, data):
raise NotImplementedError
class CsvHandler(BaseHandler):
def dump(self, data):
self.set_header("Content-Type", "text/csv")
self.render("geoip.csv", data=data)
class XmlHandler(BaseHandler):
def dump(self, data):
self.set_header("Content-Type", "text/xml")
self.render("geoip.xml", data=data)
class JsonHandler(BaseHandler):
def dump(self, data):
callback = self.get_argument("callback", None)
self.set_header("Content-Type", "application/json")
if callback:
self.finish("%s(%s);" % (callback, cyclone.escape.json_encode(data)))
else:
self.finish(cyclone.escape.json_encode(data))
|
bsd-3-clause
|
Python
|
c9917b3dc54290bb3fc7c977e8c1db76ac60cf82
|
Update project queries
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
polyaxon/api/projects/queries.py
|
polyaxon/api/projects/queries.py
|
from django.db.models import Count, Q
from db.models.projects import Project
projects = Project.objects.select_related('user')
projects_details = projects.select_related('repo').annotate(
Count('experiments', distinct=True),
Count('jobs', distinct=True),
Count('build_jobs', distinct=True),
Count('experiment_groups', distinct=True),
independent_experiments__count=Count(
'experiments',
filter=Q(experiments__experiment_group__isnull=True),
distinct=True))
|
from django.db.models import Count, Q
from db.models.projects import Project
projects = Project.objects.select_related('user')
projects_details = projects.select_related('repo').annotate(
Count('experiments', distinct=True),
Count('jobs', distinct=True),
Count('build_jobs', distinct=True),
Count('experiment_groups', distinct=True)).annotate(
independent_experiments__count=Count(
'experiments',
filter=Q(experiments__experiment_group__isnull=True),
distinct=True))
|
apache-2.0
|
Python
|
086b7a7de994e30d2e5defa214eca846862aec59
|
update default configuration in config
|
mahak/nova,phenoxim/nova,hanlind/nova,mikalstill/nova,rajalokan/nova,jianghuaw/nova,jianghuaw/nova,Juniper/nova,mahak/nova,rajalokan/nova,klmitch/nova,jianghuaw/nova,rajalokan/nova,openstack/nova,rahulunair/nova,gooddata/openstack-nova,mikalstill/nova,Juniper/nova,vmturbo/nova,vmturbo/nova,mahak/nova,mikalstill/nova,jianghuaw/nova,vmturbo/nova,klmitch/nova,Juniper/nova,klmitch/nova,gooddata/openstack-nova,Juniper/nova,vmturbo/nova,rajalokan/nova,rahulunair/nova,openstack/nova,phenoxim/nova,rahulunair/nova,gooddata/openstack-nova,openstack/nova,klmitch/nova,gooddata/openstack-nova,hanlind/nova,hanlind/nova
|
nova/common/config.py
|
nova/common/config.py
|
# Copyright 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_middleware import cors
def set_middleware_defaults():
"""Update default configuration options for oslo.middleware."""
cors.set_defaults(
allow_headers=['X-Auth-Token',
'X-Openstack-Request-Id',
'X-Identity-Status',
'X-Roles',
'X-Service-Catalog',
'X-User-Id',
'X-Tenant-Id'],
expose_headers=['X-Auth-Token',
'X-Openstack-Request-Id',
'X-Subject-Token',
'X-Service-Token'],
allow_methods=['GET',
'PUT',
'POST',
'DELETE',
'PATCH']
)
|
# Copyright 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_middleware import cors
def set_middleware_defaults():
"""Update default configuration options for oslo.middleware."""
# CORS Defaults
# TODO(krotscheck): Update with https://review.openstack.org/#/c/285368/
cfg.set_defaults(cors.CORS_OPTS,
allow_headers=['X-Auth-Token',
'X-Openstack-Request-Id',
'X-Identity-Status',
'X-Roles',
'X-Service-Catalog',
'X-User-Id',
'X-Tenant-Id'],
expose_headers=['X-Auth-Token',
'X-Openstack-Request-Id',
'X-Subject-Token',
'X-Service-Token'],
allow_methods=['GET',
'PUT',
'POST',
'DELETE',
'PATCH']
)
|
apache-2.0
|
Python
|
bc224499e2f4f663a1fe5e41cbfad691e7c04de4
|
Drop unused import
|
swift-lang/swift-t,swift-lang/swift-t,swift-lang/swift-t,swift-lang/swift-t,swift-lang/swift-t,swift-lang/swift-t,swift-lang/swift-t
|
turbine/code/py/turbine_helpers.py
|
turbine/code/py/turbine_helpers.py
|
# Copyright 2013 University of Chicago and Argonne National Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
# TURBINE HELPERS PY
# Python helpers for JSON module
import json
# Type classes for comparison:
_zero = 0
_zerof = 0.0
type_str = "x".__class__
type_int = _zero.__class__
type_float = _zerof.__class__
type_list = [].__class__
type_dict = {}.__class__
type_none = None.__class__
def set_key_type(k):
""" Convert to integer if possible """
try:
result = int(k)
except ValueError:
result = k
return result
def json_path(J, path):
""" Reusable function to search a JSON tree """
J = json.loads(J)
P = path.split(",")
for p in P:
if len(p) > 0:
k = set_key_type(p)
J = J[k]
return J
def json_type(J, path):
""" Obtain the type of the entry at given path in the JSON tree """
J = json_path(J, path)
c = J.__class__
if c == type_str:
return "string"
elif c == type_int:
return "int"
elif c == type_float:
return "float"
elif c == type_list:
return "array"
elif c == type_dict:
return "object"
elif c == type_none:
return "null"
else:
raise Exception("json_type: ERROR class='%s'" % str(c))
def json_object_names(J, path):
""" Assume dict and return all names at given path """
J = json_path(J, path)
L = []
for i in J.keys():
L.append(i)
result = ",".join(L)
return result
def json_array_size(J, path):
""" Assume list and return length of it """
J = json_path(J, path)
return str(len(J))
def json_get(J, path):
""" Return whatever is at the given path (usually scalar) """
J = json_path(J, path)
if J == None:
return "null"
return str(J)
|
# Copyright 2013 University of Chicago and Argonne National Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
# TURBINE HELPERS PY
# Python helpers for JSON module
import json
import sys
# Type classes for comparison:
_zero = 0
_zerof = 0.0
type_str = "x".__class__
type_int = _zero.__class__
type_float = _zerof.__class__
type_list = [].__class__
type_dict = {}.__class__
type_none = None.__class__
def set_key_type(k):
""" Convert to integer if possible """
try:
result = int(k)
except ValueError:
result = k
return result
def json_path(J, path):
""" Reusable function to search a JSON tree """
J = json.loads(J)
P = path.split(",")
for p in P:
if len(p) > 0:
k = set_key_type(p)
J = J[k]
return J
def json_type(J, path):
""" Obtain the type of the entry at given path in the JSON tree """
J = json_path(J, path)
c = J.__class__
if c == type_str:
return "string"
elif c == type_int:
return "int"
elif c == type_float:
return "float"
elif c == type_list:
return "array"
elif c == type_dict:
return "object"
elif c == type_none:
return "null"
else:
raise Exception("json_type: ERROR class='%s'" % str(c))
def json_object_names(J, path):
""" Assume dict and return all names at given path """
J = json_path(J, path)
L = []
for i in J.keys():
L.append(i)
result = ",".join(L)
return result
def json_array_size(J, path):
""" Assume list and return length of it """
J = json_path(J, path)
return str(len(J))
def json_get(J, path):
""" Return whatever is at the given path (usually scalar) """
J = json_path(J, path)
if J == None:
return "null"
return str(J)
|
apache-2.0
|
Python
|
63cb9a8b3acb78be155bbc770cdd1d06170eccc1
|
Fix customer filter field.
|
opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind
|
src/nodeconductor_assembly_waldur/invoices/filters.py
|
src/nodeconductor_assembly_waldur/invoices/filters.py
|
import django_filters
from django.conf import settings
from django.core import exceptions
from django.db.models import Q
from django import forms
from django.utils import timezone
from nodeconductor.core import filters as core_filters
from nodeconductor.structure import filters as structure_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
class PaymentDetailsFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.PaymentDetails
fields = '__all__'
class AccountingStartDateFilter(core_filters.BaseExternalFilter):
def filter(self, request, queryset, view):
if not settings.INVOICES['ENABLE_ACCOUNTING_START_DATE']:
return queryset
value = request.query_params.get('accounting_is_running')
boolean_field = forms.NullBooleanField()
try:
value = boolean_field.to_python(value)
except exceptions.ValidationError:
value = None
if value is None:
return queryset
query = Q(payment_details__isnull=True) | Q(payment_details__accounting_start_date__gt=timezone.now())
if value:
return queryset.exclude(query)
else:
return queryset.filter(query)
structure_filters.ExternalCustomerFilterBackend.register(AccountingStartDateFilter())
|
import django_filters
from django.conf import settings
from django.core import exceptions
from django.db.models import Q, BooleanField
from django.utils import timezone
from nodeconductor.core import filters as core_filters
from nodeconductor.structure import filters as structure_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
class PaymentDetailsFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.PaymentDetails
fields = '__all__'
class AccountingStartDateFilter(core_filters.BaseExternalFilter):
def filter(self, request, queryset, view):
if not settings.INVOICES['ENABLE_ACCOUNTING_START_DATE']:
return queryset
value = request.query_params.get('accounting_is_running')
boolean_field = BooleanField()
try:
value = boolean_field.to_python(value)
except exceptions.ValidationError:
value = None
if value is None:
return queryset
query = Q(payment_details__isnull=True) | Q(payment_details__accounting_start_date__gt=timezone.now())
if value:
return queryset.exclude(query)
else:
return queryset.filter(query)
structure_filters.ExternalCustomerFilterBackend.register(AccountingStartDateFilter())
|
mit
|
Python
|
77301c29c0f5db1e132b01a883eadcb79df5fd3f
|
Reduce timeout value
|
stopstalk/stopstalk-deployment,stopstalk/stopstalk-deployment,stopstalk/stopstalk-deployment,stopstalk/stopstalk-deployment,stopstalk/stopstalk-deployment
|
models/000_config.py
|
models/000_config.py
|
"""
Configure StopStalk as required
"""
"""
Copyright (c) 2015-2017 Raj Patel([email protected]), StopStalk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from gluon import *
from collections import OrderedDict
# List all the profile sites here
# To disable any of the profile sites
# - Just remove that site from the dictionary
# Site Name => Site website url
# OrderedDict is used to maintain the order of insertion
current.SITES = OrderedDict()
current.SITES["CodeChef"] = "http://www.codechef.com/"
current.SITES["CodeForces"] = "http://www.codeforces.com/"
current.SITES["Spoj"] = "https://www.spoj.com/"
current.SITES["HackerEarth"] = "https://www.hackerearth.com/"
current.SITES["HackerRank"] = "https://www.hackerrank.com/"
current.SITES["UVa"] = "https://uva.onlinejudge.org/"
# If you are under a PROXY uncomment this and comment the next line
#current.PROXY = {"http": "http://proxy.iiit.ac.in:8080/",
# "https": "https://proxy.iiit.ac.in:8080/"}
# If you are not under a PROXY
current.PROXY = {}
# The initial date from which the submissions need to be added
current.INITIAL_DATE = "2013-01-01 00:00:00"
# Number of submissions per page
current.PER_PAGE = 100
# Maximum number of requests to make if a website is not responding
current.MAX_TRIES_ALLOWED = 2
# Maximum time that a request can take to return a response(in seconds)
current.TIMEOUT = 5
# Number of problems to be shown in Trending page
current.PROBLEMS_PER_PAGE = 15
# Number of days in the past that should be considered for trending problems
current.PAST_DAYS = 10
# =============================================================================
|
"""
Configure StopStalk as required
"""
"""
Copyright (c) 2015-2017 Raj Patel([email protected]), StopStalk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from gluon import *
from collections import OrderedDict
# List all the profile sites here
# To disable any of the profile sites
# - Just remove that site from the dictionary
# Site Name => Site website url
# OrderedDict is used to maintain the order of insertion
current.SITES = OrderedDict()
current.SITES["CodeChef"] = "http://www.codechef.com/"
current.SITES["CodeForces"] = "http://www.codeforces.com/"
current.SITES["Spoj"] = "https://www.spoj.com/"
current.SITES["HackerEarth"] = "https://www.hackerearth.com/"
current.SITES["HackerRank"] = "https://www.hackerrank.com/"
current.SITES["UVa"] = "https://uva.onlinejudge.org/"
# If you are under a PROXY uncomment this and comment the next line
#current.PROXY = {"http": "http://proxy.iiit.ac.in:8080/",
# "https": "https://proxy.iiit.ac.in:8080/"}
# If you are not under a PROXY
current.PROXY = {}
# The initial date from which the submissions need to be added
current.INITIAL_DATE = "2013-01-01 00:00:00"
# Number of submissions per page
current.PER_PAGE = 100
# Maximum number of requests to make if a website is not responding
current.MAX_TRIES_ALLOWED = 5
# Maximum time that a request can take to return a response(in seconds)
current.TIMEOUT = 20
# Number of problems to be shown in Trending page
current.PROBLEMS_PER_PAGE = 15
# Number of days in the past that should be considered for trending problems
current.PAST_DAYS = 10
# =============================================================================
|
mit
|
Python
|
fc75f5843af70c09e0d63284277bf88689cbb06d
|
Add apidoc to doc building
|
mrjmad/invocations,pyinvoke/invocations,alex/invocations,singingwolfboy/invocations
|
invocations/docs.py
|
invocations/docs.py
|
import os
from invoke.tasks import task
from invoke.runner import run
docs_dir = 'docs'
build = os.path.join(docs_dir, '_build')
@task
def clean_docs():
run("rm -rf %s" % build)
@task
def browse_docs():
run("open %s" % os.path.join(build, 'index.html'))
@task
def api_docs(target, output="api", exclude=""):
"""
Runs ``sphinx-apidoc`` to autogenerate your API docs.
Must give target directory/package as ``target``. Results are written out
to ``docs/<output>`` (``docs/api`` by default).
To exclude certain output files from the final build give ``exclude`` as a
comma separated list of file paths.
"""
output = os.path.join('docs', output)
# Have to make these absolute or apidoc is dumb :(
exclude = map(
lambda x: os.path.abspath(os.path.join(os.getcwd(), x)),
exclude.split(',')
)
run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude)))
@task
def docs(clean=False, browse=False, api_target=None, api_output=None,
api_exclude=None):
"""
Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing.
Can also build API docs by giving ``api_target`` and optionally
``api_output`` and/or ``api_exclude``.
"""
if api_target:
kwargs = {'target': api_target}
if api_output:
kwargs['output'] = api_output
if api_exclude:
kwargs['exclude'] = api_exclude
api_docs.body(**kwargs)
if clean:
clean_docs.body()
run("sphinx-build %s %s" % (docs_dir, build), pty=True)
if browse:
browse_docs.body()
|
import os
from invoke.tasks import task
from invoke.runner import run
docs_dir = 'docs'
build = os.path.join(docs_dir, '_build')
@task
def clean_docs():
run("rm -rf %s" % build)
@task
def browse_docs():
run("open %s" % os.path.join(build, 'index.html'))
@task
def docs(clean=False, browse=False):
if clean:
clean_docs.body()
run("sphinx-build %s %s" % (docs_dir, build), pty=True)
if browse:
browse_docs.body()
|
bsd-2-clause
|
Python
|
0a07f6ac82f099d836eb5276063adab245979258
|
rename `recall` to `call`
|
okuta/chainer,chainer/chainer,niboshi/chainer,tkerola/chainer,wkentaro/chainer,wkentaro/chainer,keisuke-umezawa/chainer,chainer/chainer,hvy/chainer,pfnet/chainer,okuta/chainer,hvy/chainer,wkentaro/chainer,niboshi/chainer,wkentaro/chainer,niboshi/chainer,hvy/chainer,okuta/chainer,okuta/chainer,chainer/chainer,chainer/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,niboshi/chainer,hvy/chainer
|
chainer/training/triggers/once_trigger.py
|
chainer/training/triggers/once_trigger.py
|
class OnceTrigger(object):
"""Trigger based on the starting point of the iteration.
    This trigger fires only once, at the starting point of the iteration.
    There are two ways to specify the starting point: fire only at the
    starting point of the whole iteration, or fire again when training is
    resumed.
Args:
call_on_resume (bool): Whether the extension is called again or not
when restored from a snapshot. It is set to ``False`` by default.
"""
def __init__(self, call_on_resume=False):
self._call_on_resume = call_on_resume
self._flag_called = False
def trigger(self, trainer):
if self._flag_called:
return False
self._flag_called = True
return True
@property
def skip_initialize(self):
"""The flag decide to call `Extension.initialize` or not.
If this flag is exist and set `True`, `Extension.initialize` is
skipped.
"""
return self._flag_called
def serialize(self, serializer):
if not self._call_on_resume:
self._flag_called = serializer('_flag_called', self._flag_called)
|
class OnceTrigger(object):
"""Trigger based on the starting point of the iteration.
    This trigger fires only once, at the starting point of the iteration.
    There are two ways to specify the starting point: fire only at the
    starting point of the whole iteration, or recall when training is
    resumed.
Args:
recall_on_resume (bool): Whether the extension is recalled or not when
restored from a snapshot. It is set to ``False`` by default.
"""
def __init__(self, recall_on_resume=False):
self._recall_on_resume = recall_on_resume
self._flag_called = False
def trigger(self, trainer):
if self._flag_called:
return False
self._flag_called = True
return True
@property
def skip_initialize(self):
"""The flag decide to call `Extension.initialize` or not.
If this flag is exist and set `True`, `Extension.initialize` is
skipped.
"""
return self._flag_called
def serialize(self, serializer):
if not self._recall_on_resume:
self._flag_called = serializer('_flag_called', self._flag_called)
|
mit
|
Python
|
2d6906bc58275b18102b4523a4faa5078a6e74f1
|
fix wrong description (all?)
|
chainer/chainercv,yuyu2172/chainercv,chainer/chainercv,pfnet/chainercv,yuyu2172/chainercv
|
chainercv/transforms/image/random_crop.py
|
chainercv/transforms/image/random_crop.py
|
import random
import six
def random_crop(img, output_shape, return_slices=False, copy=False):
"""Crop array randomly into `output_shape`.
The input image is cropped by a randomly selected region whose shape
is :obj:`output_shape`.
Args:
img (~numpy.ndarray): An image array to be cropped. This is in
CHW format.
output_shape (tuple): the size of output image after cropping.
            This value is :math:`(height, width)`.
return_slices (bool): If :obj:`True`, this function returns
information of slices.
copy (bool): If :obj:`False`, a view of :obj:`img` is returned.
Returns:
This function returns :obj:`out_img, slice_H, slice_W` if
:obj:`return_slices = True`. Otherwise, this returns
:obj:`out_img`.
Note that :obj:`out_img` is the transformed image array.
Also, :obj:`slice_H` and :obj:`slice_W` are slices used to crop the
input image. The following relationship is satisfied.
.. code::
out_img = img[:, slice_H, slice_W]
"""
H, W = output_shape
if img.shape[1] == H:
start_H = 0
elif img.shape[1] > H:
start_H = random.choice(six.moves.range(img.shape[1] - H))
else:
raise ValueError('shape of image is larger than output shape')
slice_H = slice(start_H, start_H + H)
if img.shape[2] == W:
start_W = 0
elif img.shape[2] > W:
start_W = random.choice(six.moves.range(img.shape[2] - W))
else:
raise ValueError('shape of image is larger than output shape')
slice_W = slice(start_W, start_W + W)
img = img[:, slice_H, slice_W]
if copy:
img = img.copy()
if return_slices:
return img, slice_H, slice_W
else:
return img
|
import random
import six
def random_crop(img, output_shape, return_slices=False, copy=False):
"""Crop array randomly into `output_shape`.
All arrays will be cropped by the same region randomly selected. The
output will all be in shape :obj:`output_shape`.
Args:
img (~numpy.ndarray): An image array to be cropped. This is in
CHW format.
output_shape (tuple): the size of output image after cropping.
            This value is :math:`(height, width)`.
return_slices (bool): If :obj:`True`, this function returns
information of slices.
copy (bool): If :obj:`False`, a view of :obj:`img` is returned.
Returns:
This function returns :obj:`out_img, slice_H, slice_W` if
:obj:`return_slices = True`. Otherwise, this returns
:obj:`out_img`.
Note that :obj:`out_img` is the transformed image array.
Also, :obj:`slice_H` and :obj:`slice_W` are slices used to crop the
input image. The following relationship is satisfied.
.. code::
out_img = img[:, slice_H, slice_W]
"""
H, W = output_shape
if img.shape[1] == H:
start_H = 0
elif img.shape[1] > H:
start_H = random.choice(six.moves.range(img.shape[1] - H))
else:
raise ValueError('shape of image is larger than output shape')
slice_H = slice(start_H, start_H + H)
if img.shape[2] == W:
start_W = 0
elif img.shape[2] > W:
start_W = random.choice(six.moves.range(img.shape[2] - W))
else:
raise ValueError('shape of image is larger than output shape')
slice_W = slice(start_W, start_W + W)
img = img[:, slice_H, slice_W]
if copy:
img = img.copy()
if return_slices:
return img, slice_H, slice_W
else:
return img
|
mit
|
Python
|
57e177e47bcc54683654e5d0de81af6e0cbd803d
|
update version
|
jorgensoares/pimat,jorgensoares/pimat,jorgensoares/pimat,jorgensoares/pimat
|
pimat_web/version.py
|
pimat_web/version.py
|
__version__ = '0.6.18'
|
__version__ = '0.6.17'
|
mit
|
Python
|
e3b6b9864376f2dabe42b6d80d4a5db65cb85d30
|
Update docs for 'prep_command()'
|
sid-kap/pants,UnrememberMe/pants,wisechengyi/pants,sid-kap/pants,di0spyr0s/pants,15Dkatz/pants,gmalmquist/pants,megaserg/pants,lahosken/pants,digwanderlust/pants,foursquare/pants,digwanderlust/pants,foursquare/pants,15Dkatz/pants,twitter/pants,mateor/pants,cevaris/pants,pombredanne/pants,gmalmquist/pants,dturner-tw/pants,dgomez10/xanon,pombredanne/pants,peiyuwang/pants,15Dkatz/pants,kslundberg/pants,15Dkatz/pants,kwlzn/pants,jessrosenfield/pants,digwanderlust/pants,sameerparekh/pants,kslundberg/pants,baroquebobcat/pants,UnrememberMe/pants,pombredanne/pants,cevaris/pants,UnrememberMe/pants,baroquebobcat/pants,sameerparekh/pants,gmalmquist/pants,kslundberg/pants,landism/pants,jtrobec/pants,ity/pants,foursquare/pants,slyphon/pants,dbentley/pants,pgroudas/pants,peiyuwang/pants,mateor/pants,TansyArron/pants,mateor/pants,foursquare/pants,ericzundel/pants,sameerparekh/pants,Gabriel439/pants,twitter/pants,dbentley/pants,benjyw/pants,digwanderlust/pants,twitter/pants,dturner-tw/pants,manasapte/pants,megaserg/pants,scode/pants,sameerparekh/pants,lahosken/pants,slyphon/pants,15Dkatz/pants,jtrobec/pants,cevaris/pants,kwlzn/pants,ericzundel/pants,cevaris/pants,foursquare/pants,di0spyr0s/pants,sid-kap/pants,peiyuwang/pants,digwanderlust/pants,Gabriel439/pants,wisechengyi/pants,dgomez10/xanon,foursquare/pants,UnrememberMe/pants,slyphon/pants,dgomez10/xanon,fkorotkov/pants,jtrobec/pants,TansyArron/pants,mateor/pants,di0spyr0s/pants,dgomez10/xanon,fkorotkov/pants,slyphon/pants,baroquebobcat/pants,Gabriel439/pants,pantsbuild/pants,pgroudas/pants,peiyuwang/pants,ity/pants,landism/pants,TansyArron/pants,baroquebobcat/pants,gmalmquist/pants,pgroudas/pants,pantsbuild/pants,peiyuwang/pants,areitz/pants,kwlzn/pants,Gabriel439/pants,dbentley/pants,dgomez10/xanon,dturner-tw/pants,pombredanne/pants,benjyw/pants,dgomez10/xanon,mateor/pants,manasapte/pants,sid-kap/pants,landism/pants,kwlzn/pants,tdyas/pants,di0spyr0s/pants,jessrosenfield/pants,UnrememberMe/pants,digwanderlust/pants,laurentgo/pants,megaserg/pants,jsirois/pants,dgomez10/xanon,lahosken/pants,UnrememberMe/pants,pantsbuild/pants,Gabriel439/pants,tdyas/pants,dgomez10/xanon,qma/pants,tdyas/pants,peiyuwang/pants,scode/pants,twitter/pants,peiyuwang/pants,foursquare/pants,megaserg/pants,landism/pants,twitter/pants,kslundberg/pants,megaserg/pants,ity/pants,jtrobec/pants,fkorotkov/pants,areitz/pants,qma/pants,jsirois/pants,baroquebobcat/pants,areitz/pants,scode/pants,jessrosenfield/pants,gmalmquist/pants,areitz/pants,tdyas/pants,ity/pants,UnrememberMe/pants,pgroudas/pants,di0spyr0s/pants,cevaris/pants,qma/pants,ericzundel/pants,ericzundel/pants,tdyas/pants,scode/pants,benjyw/pants,lahosken/pants,laurentgo/pants,benjyw/pants,pantsbuild/pants,dgomez10/xanon,jessrosenfield/pants,tdyas/pants,sameerparekh/pants,mateor/pants,kwlzn/pants,pombredanne/pants,15Dkatz/pants,manasapte/pants,fkorotkov/pants,foursquare/pants,wisechengyi/pants,kslundberg/pants,ericzundel/pants,Gabriel439/pants,dbentley/pants,pantsbuild/pants,areitz/pants,kslundberg/pants,di0spyr0s/pants,landism/pants,foursquare/pants,fkorotkov/pants,Gabriel439/pants,dturner-tw/pants,pombredanne/pants,wisechengyi/pants,scode/pants,wisechengyi/pants,kwlzn/pants,laurentgo/pants,tdyas/pants,manasapte/pants,baroquebobcat/pants,scode/pants,megaserg/pants,fkorotkov/pants,baroquebobcat/pants,fkorotkov/pants,manasapte/pants,15Dkatz/pants,dgomez10/xanon,slyphon/pants,laurentgo/pants,slyphon/pants,wisechengyi/pants,dbentley/pants,jessrosenfield/pants,benjyw/pants,benjyw/pants,slyphon/pants,manasapte/pants,twitter/pants,kwlz
n/pants,lahosken/pants,mateor/pants,areitz/pants,scode/pants,dbentley/pants,ericzundel/pants,megaserg/pants,manasapte/pants,TansyArron/pants,ity/pants,pombredanne/pants,benjyw/pants,twitter/pants,UnrememberMe/pants,lahosken/pants,twitter/pants,pgroudas/pants,mateor/pants,sameerparekh/pants,laurentgo/pants,15Dkatz/pants,baroquebobcat/pants,lahosken/pants,gmalmquist/pants,sid-kap/pants,baroquebobcat/pants,TansyArron/pants,sameerparekh/pants,pgroudas/pants,cevaris/pants,laurentgo/pants,dturner-tw/pants,qma/pants,ity/pants,areitz/pants,ericzundel/pants,jessrosenfield/pants,tdyas/pants,twitter/pants,jtrobec/pants,peiyuwang/pants,kslundberg/pants,ericzundel/pants,dturner-tw/pants,pantsbuild/pants,qma/pants,pantsbuild/pants,wisechengyi/pants,TansyArron/pants,landism/pants,ity/pants,jessrosenfield/pants,di0spyr0s/pants,cevaris/pants,sid-kap/pants,jtrobec/pants,landism/pants,jsirois/pants,dturner-tw/pants,jtrobec/pants,UnrememberMe/pants,gmalmquist/pants,dbentley/pants,wisechengyi/pants,qma/pants,fkorotkov/pants,lahosken/pants,landism/pants,dgomez10/xanon,sid-kap/pants,tdyas/pants,qma/pants,wisechengyi/pants,TansyArron/pants
|
src/python/pants/backend/core/targets/prep_command.py
|
src/python/pants/backend/core/targets/prep_command.py
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
from pants.base.target import Target
class PrepCommand(Target):
"""A command that must be run before some other target can be tested.
For example, you can use `prep_command()` to execute a script that sets up tunnels to database
servers. These tunnels could then be leveraged by integration tests.
Pants will only execute the `prep_command()` under the test goal, when testing targets that
depend on the `prep_command()` target.
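    For instance, a minimal BUILD entry (the target name, script path, and
    args below are hypothetical, shown only for illustration) might look
    like:

        prep_command(
            name='setup_db_tunnel',
            prep_executable='scripts/open_tunnel.sh',
            prep_args=['--port', '3306'],
        )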
"""
def __init__(self, prep_executable=None, prep_args=None, payload=None, prep_environ=False, **kwargs):
"""
:param prep_executable: The path to the executable that should be run.
    :param prep_args: A list of command-line args to the executable.
:param prep_environ: If True, the output of the command will be treated as
a \\\\0-separated list of key=value pairs to insert into the environment.
Note that this will pollute the environment for all future tests, so
avoid it if at all possible.
"""
payload = payload or Payload()
payload.add_fields({
'prep_command_executable': PrimitiveField(prep_executable),
'prep_command_args': PrimitiveField(prep_args or []),
'prep_environ': PrimitiveField(prep_environ),
})
super(PrepCommand, self).__init__(payload=payload, **kwargs)
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
from pants.base.target import Target
class PrepCommand(Target):
"""A command that must be run before some other target can be built.
For example, a script that sets up tunnels to database servers
    might need to be run before running integration tests.
"""
def __init__(self, prep_executable=None, prep_args=None, payload=None, prep_environ=False, **kwargs):
"""
:param prep_executable: The path to the executable that should be run.
    :param prep_args: A list of command-line args to the executable.
:param prep_environ: If True, the output of the command will be treated as
a \0-separated list of key=value pairs to insert into the environment.
Note that this will pollute the environment for all future tests, so
avoid it if at all possible.
"""
payload = payload or Payload()
payload.add_fields({
'prep_command_executable': PrimitiveField(prep_executable),
'prep_command_args': PrimitiveField(prep_args or []),
'prep_environ': PrimitiveField(prep_environ),
})
super(PrepCommand, self).__init__(payload=payload, **kwargs)
|
apache-2.0
|
Python
|
a9ac098ec492739f37005c9bd6278105df0261c5
|
Add fields to save question url and annexure links
|
mthipparthi/parliament-search
|
parliamentsearch/items.py
|
parliamentsearch/items.py
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class MemberofParliament(scrapy.Item):
"""
Data structure to define Member of Parliament information
"""
mp_id = scrapy.Field()
mp_name = scrapy.Field()
mp_constituency = scrapy.Field()
mp_party = scrapy.Field()
mp_photo = scrapy.Field()
class RajyaSabhaQuestion(scrapy.Item):
"""
Data structure to define a Rajya Sabha question
"""
q_no = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
class LokSabhaQuestion(scrapy.Item):
"""
Data structure to define a Lok Sabha question
"""
q_no = scrapy.Field()
q_session = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
q_url = scrapy.Field()
q_annex = scrapy.Field()
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class MemberofParliament(scrapy.Item):
"""
Data structure to define Member of Parliament information
"""
mp_id = scrapy.Field()
mp_name = scrapy.Field()
mp_constituency = scrapy.Field()
mp_party = scrapy.Field()
mp_photo = scrapy.Field()
class RajyaSabhaQuestion(scrapy.Item):
"""
Data structure to define a Rajya Sabha question
"""
q_no = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
class LokSabhaQuestion(scrapy.Item):
"""
Data structure to define a Lok Sabha question
"""
q_no = scrapy.Field()
q_session = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
|
mit
|
Python
|
20e63fb5b5a02966acbe66f3cda19bc59ff89934
|
Set a default value for default_roles
|
stackforge/monasca-log-api,openstack/monasca-log-api,openstack/monasca-log-api,stackforge/monasca-log-api,stackforge/monasca-log-api,openstack/monasca-log-api
|
monasca_log_api/conf/role_middleware.py
|
monasca_log_api/conf/role_middleware.py
|
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
role_m_opts = [
cfg.ListOpt(name='path',
default='/',
help='List of paths where middleware applies to'),
cfg.ListOpt(name='default_roles',
default=['monasca-user'],
help='List of roles allowed to enter api'),
cfg.ListOpt(name='agent_roles',
default=None,
help=('List of roles, that if set, mean that request '
'comes from agent, thus is authorized in the same '
'time')),
cfg.ListOpt(name='delegate_roles',
default=['admin'],
help=('Roles that are allowed to POST logs on '
'behalf of another tenant (project)'))
]
role_m_group = cfg.OptGroup(name='roles_middleware', title='roles_middleware')
def register_opts(conf):
conf.register_group(role_m_group)
conf.register_opts(role_m_opts, role_m_group)
def list_opts():
return role_m_group, role_m_opts
|
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
role_m_opts = [
cfg.ListOpt(name='path',
default='/',
help='List of paths where middleware applies to'),
cfg.ListOpt(name='default_roles',
default=None,
help='List of roles allowed to enter api'),
cfg.ListOpt(name='agent_roles',
default=None,
help=('List of roles, that if set, mean that request '
'comes from agent, thus is authorized in the same '
'time')),
cfg.ListOpt(name='delegate_roles',
default=['admin'],
help=('Roles that are allowed to POST logs on '
'behalf of another tenant (project)'))
]
role_m_group = cfg.OptGroup(name='roles_middleware', title='roles_middleware')
def register_opts(conf):
conf.register_group(role_m_group)
conf.register_opts(role_m_opts, role_m_group)
def list_opts():
return role_m_group, role_m_opts
|
apache-2.0
|
Python
|
dd725349e0613461bdbe75a0c32115b323e9ccc3
|
change settings import in wsgi for Travis CI
|
whitews/ReFlow,whitews/ReFlow,whitews/ReFlow
|
reflow/wsgi.py
|
reflow/wsgi.py
|
"""
WSGI config for reflow project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
# Set matplotlib configuration directory, else Django complains it is not writable
# We'll just use a tempfile
import tempfile
os.environ['MPLCONFIGDIR'] = tempfile.mkdtemp()
paths = [
'/srv/django-projects/ReFlow',
'/srv/django-projects/ReFlow/reflow'
]
for path in paths:
if path not in sys.path:
sys.path.append(path)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "reflow.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
import settings_sample as settings
if settings.INTERACTIVE_DEBUG:
class Debugger:
def __init__(self, object):
self.__object = object
def __call__(self, *args, **kwargs):
import pdb
debugger = pdb.Pdb()
debugger.use_rawinput = 0
debugger.reset()
sys.settrace(debugger.trace_dispatch)
try:
return self.__object(*args, **kwargs)
finally:
debugger.quitting = 1
sys.settrace(None)
application = Debugger(get_wsgi_application())
else:
application = get_wsgi_application()
|
"""
WSGI config for reflow project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
# Set matplotlib configuration directory, else Django complains it is not writable
# We'll just use a tempfile
import tempfile
os.environ['MPLCONFIGDIR'] = tempfile.mkdtemp()
paths = [
'/srv/django-projects/ReFlow',
'/srv/django-projects/ReFlow/reflow'
]
for path in paths:
if path not in sys.path:
sys.path.append(path)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "reflow.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
import settings
if settings.INTERACTIVE_DEBUG:
class Debugger:
def __init__(self, object):
self.__object = object
def __call__(self, *args, **kwargs):
import pdb
debugger = pdb.Pdb()
debugger.use_rawinput = 0
debugger.reset()
sys.settrace(debugger.trace_dispatch)
try:
return self.__object(*args, **kwargs)
finally:
debugger.quitting = 1
sys.settrace(None)
application = Debugger(get_wsgi_application())
else:
application = get_wsgi_application()
|
bsd-3-clause
|
Python
|
c41f589a24108562b6dcc5eb303a183b2686f8e0
|
Adjust hashkey after PHP example
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
bluebottle/payments_telesom/gateway.py
|
bluebottle/payments_telesom/gateway.py
|
import hashlib
from bluebottle.payments.exception import PaymentException
from django.utils import timezone
from suds.client import Client
from suds.plugin import MessagePlugin
class NameSpacePlugin(MessagePlugin):
def sending(self, context):
print context.envelope
return context
class TelesomClient(object):
"""
API Client for Telesom Zaad.
This is a wrapper around the SOAP service methods,
providing more Python-friendly wrappers.
"""
def __init__(self, merchant_id, merchant_key, username, password, api_url):
"""
Initialize the client.
"""
self.client = Client(api_url + '?WSDL', plugins=[NameSpacePlugin()])
self.merchant_id = merchant_id
self.merchant_key = merchant_key
self.username = username
self.password = password
def create(self, mobile='', amount=0, description=''):
"""
Create the payment in Telesom.
"""
# We should not use actual IP address.
ip = '::1'
date = timezone.now().strftime('%d/%m/%Y')
username = self.username
password = self.password
uniquekey = self.merchant_id
account = self.merchant_key
# From PHP:
# $msg = $username.$password."::1".$merchant.$uniquekey. $dates.$mobile.$amount.$description;
hash = "{0}{1}{2}{3}{4}{5}{6}{7}{8}".format(
username, password, ip, account, uniquekey, date, mobile, amount, description
)
key = hashlib.md5(hash).hexdigest()
reply = self.client.service.PaymentRequest(
pMsisdn=mobile,
pAmount=amount,
Category=description,
MerchantID=self.merchant_id,
hashkey=key
)
# 5001! Invalid Username/Password/Hashkey Try Again!-1
# 2001! Success, Waiting Confirmation !265
res = reply.split('!')
if res[0] == '2001':
return res[2]
else:
raise PaymentException(res[1])
|
import hashlib
from django.utils import timezone
import ipgetter
from suds.client import Client
from suds.plugin import MessagePlugin
class NameSpacePlugin(MessagePlugin):
def sending(self, context):
print context.envelope
return context
class TelesomClient(object):
"""
API Client for Telesom Zaad.
This is a wrapper around the SOAP service methods,
providing more Python-friendly wrappers.
"""
def __init__(self, merchant_id, merchant_key, api_url):
"""
Initialize the client.
"""
self.client = Client(api_url + '?wsdl', plugins=[NameSpacePlugin()])
self.merchant_id = merchant_id
self.merchant_key = merchant_key
def create(self, subscriber=None, amount=0, description=''):
"""
Create the payment in Telesom.
"""
ip = ipgetter.myip()
date = timezone.now().strftime('%d/%m/%Y')
# username = 'shadir'
password = 'ieu45fi33%334'
key = '334fr43453423d'
merchant = '400032'
# key = subscriber + amount + account + description + Password + IPAddress + UniqueKey + date
key = hashlib.md5("{0}{1}{2}{3}{4}{5}{6}{7}".format(
subscriber, amount, merchant, description, password, ip, key, date
)).hexdigest().upper()
print subscriber, amount, merchant, description, password, ip, key, date
reply = self.client.service.PaymentRequest(
pMsisdn=subscriber,
pAmount=amount,
Category=description,
MerchantID=self.merchant_id,
hashkey=key
)
print reply
return reply
|
bsd-3-clause
|
Python
|
6fb1b24a3cf1a4cdb3bd35c6f575d96cb2da9415
|
Add binding for DSA_size
|
skeuomorf/cryptography,Hasimir/cryptography,sholsapp/cryptography,skeuomorf/cryptography,dstufft/cryptography,kimvais/cryptography,kimvais/cryptography,Hasimir/cryptography,skeuomorf/cryptography,dstufft/cryptography,sholsapp/cryptography,kimvais/cryptography,dstufft/cryptography,sholsapp/cryptography,bwhmather/cryptography,Ayrx/cryptography,Ayrx/cryptography,Hasimir/cryptography,sholsapp/cryptography,bwhmather/cryptography,dstufft/cryptography,skeuomorf/cryptography,Ayrx/cryptography,bwhmather/cryptography,kimvais/cryptography,Hasimir/cryptography,Ayrx/cryptography,dstufft/cryptography,bwhmather/cryptography
|
cryptography/hazmat/bindings/openssl/dsa.py
|
cryptography/hazmat/bindings/openssl/dsa.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
INCLUDES = """
#include <openssl/dsa.h>
"""
TYPES = """
typedef struct dsa_st {
// prime number (public)
BIGNUM *p;
// 160-bit subprime, q | p-1 (public)
BIGNUM *q;
// generator of subgroup (public)
BIGNUM *g;
// private key x
BIGNUM *priv_key;
// public key y = g^x
BIGNUM *pub_key;
...;
} DSA;
typedef struct {
BIGNUM *r;
BIGNUM *s;
} DSA_SIG;
"""
FUNCTIONS = """
DSA *DSA_generate_parameters(int, unsigned char *, int, int *, unsigned long *,
void (*)(int, int, void *), void *);
int DSA_generate_key(DSA *);
DSA *DSA_new(void);
void DSA_free(DSA *);
DSA_SIG *DSA_SIG_new(void);
void DSA_SIG_free(DSA_SIG *);
int i2d_DSA_SIG(const DSA_SIG *, unsigned char **);
DSA_SIG *d2i_DSA_SIG(DSA_SIG **, const unsigned char **, long);
int DSA_size(const DSA *);
"""
MACROS = """
int DSA_generate_parameters_ex(DSA *, int, unsigned char *, int,
int *, unsigned long *, BN_GENCB *);
"""
CUSTOMIZATIONS = """
"""
CONDITIONAL_NAMES = {}
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
INCLUDES = """
#include <openssl/dsa.h>
"""
TYPES = """
typedef struct dsa_st {
// prime number (public)
BIGNUM *p;
// 160-bit subprime, q | p-1 (public)
BIGNUM *q;
// generator of subgroup (public)
BIGNUM *g;
// private key x
BIGNUM *priv_key;
// public key y = g^x
BIGNUM *pub_key;
...;
} DSA;
typedef struct {
BIGNUM *r;
BIGNUM *s;
} DSA_SIG;
"""
FUNCTIONS = """
DSA *DSA_generate_parameters(int, unsigned char *, int, int *, unsigned long *,
void (*)(int, int, void *), void *);
int DSA_generate_key(DSA *);
DSA *DSA_new(void);
void DSA_free(DSA *);
DSA_SIG *DSA_SIG_new(void);
void DSA_SIG_free(DSA_SIG *);
int i2d_DSA_SIG(const DSA_SIG *, unsigned char **);
DSA_SIG *d2i_DSA_SIG(DSA_SIG **, const unsigned char **, long);
"""
MACROS = """
int DSA_generate_parameters_ex(DSA *, int, unsigned char *, int,
int *, unsigned long *, BN_GENCB *);
"""
CUSTOMIZATIONS = """
"""
CONDITIONAL_NAMES = {}
|
bsd-3-clause
|
Python
|
9bff4e9ed59d2c15b2da681476385274f5ef9059
|
Fix create_sample_event
|
kevinlondon/sentry,imankulov/sentry,JTCunning/sentry,kevinastone/sentry,JTCunning/sentry,felixbuenemann/sentry,boneyao/sentry,argonemyth/sentry,zenefits/sentry,ewdurbin/sentry,jean/sentry,camilonova/sentry,vperron/sentry,imankulov/sentry,drcapulet/sentry,llonchj/sentry,JamesMura/sentry,mvaled/sentry,wujuguang/sentry,rdio/sentry,camilonova/sentry,llonchj/sentry,boneyao/sentry,ewdurbin/sentry,jean/sentry,SilentCircle/sentry,Kryz/sentry,TedaLIEz/sentry,looker/sentry,kevinlondon/sentry,pauloschilling/sentry,jokey2k/sentry,fotinakis/sentry,nicholasserra/sentry,JackDanger/sentry,SilentCircle/sentry,BayanGroup/sentry,fuziontech/sentry,BayanGroup/sentry,ifduyue/sentry,JackDanger/sentry,fotinakis/sentry,ifduyue/sentry,JamesMura/sentry,JackDanger/sentry,zenefits/sentry,hongliang5623/sentry,camilonova/sentry,ewdurbin/sentry,songyi199111/sentry,nicholasserra/sentry,Kryz/sentry,felixbuenemann/sentry,BuildingLink/sentry,mvaled/sentry,wong2/sentry,JamesMura/sentry,alexm92/sentry,mvaled/sentry,fotinakis/sentry,JTCunning/sentry,rdio/sentry,gencer/sentry,wong2/sentry,fuziontech/sentry,gg7/sentry,looker/sentry,gencer/sentry,BuildingLink/sentry,JamesMura/sentry,kevinastone/sentry,looker/sentry,daevaorn/sentry,Natim/sentry,gencer/sentry,daevaorn/sentry,zenefits/sentry,kevinlondon/sentry,SilentCircle/sentry,BuildingLink/sentry,ngonzalvez/sentry,drcapulet/sentry,Natim/sentry,jean/sentry,fotinakis/sentry,felixbuenemann/sentry,jokey2k/sentry,argonemyth/sentry,imankulov/sentry,songyi199111/sentry,JamesMura/sentry,pauloschilling/sentry,mvaled/sentry,zenefits/sentry,llonchj/sentry,korealerts1/sentry,beeftornado/sentry,boneyao/sentry,gg7/sentry,songyi199111/sentry,hongliang5623/sentry,BuildingLink/sentry,korealerts1/sentry,ifduyue/sentry,pauloschilling/sentry,wujuguang/sentry,rdio/sentry,alexm92/sentry,ngonzalvez/sentry,zenefits/sentry,looker/sentry,Natim/sentry,vperron/sentry,alexm92/sentry,drcapulet/sentry,mitsuhiko/sentry,argonemyth/sentry,1tush/sentry,wong2/sentry,beeftornado/sentry,ifduyue/sentry,beeftornado/sentry,gencer/sentry,daevaorn/sentry,jean/sentry,hongliang5623/sentry,korealerts1/sentry,BayanGroup/sentry,SilentCircle/sentry,mvaled/sentry,mitsuhiko/sentry,kevinastone/sentry,rdio/sentry,jean/sentry,gencer/sentry,1tush/sentry,fuziontech/sentry,ifduyue/sentry,mvaled/sentry,ngonzalvez/sentry,BuildingLink/sentry,TedaLIEz/sentry,vperron/sentry,jokey2k/sentry,looker/sentry,Kryz/sentry,1tush/sentry,TedaLIEz/sentry,nicholasserra/sentry,daevaorn/sentry,wujuguang/sentry,gg7/sentry
|
src/sentry/management/commands/create_sample_event.py
|
src/sentry/management/commands/create_sample_event.py
|
"""
sentry.management.commands.create_sample_event
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django.core.management.base import BaseCommand, CommandError, make_option
class Command(BaseCommand):
help = 'Creates a sample event in Sentry (if applicable)'
option_list = BaseCommand.option_list + (
make_option('--project', dest='project'),
make_option('--platform', dest='platform'),
)
def handle(self, **options):
from django.conf import settings
from sentry.constants import PLATFORM_LIST
from sentry.models import Project
from sentry.utils.samples import create_sample_event
if not options['project']:
project = Project.objects.get(id=settings.SENTRY_PROJECT)
else:
if options['project'].isdigit():
project = Project.objects.get(id=options['project'])
elif '/' in options['project']:
t_slug, p_slug = options['project'].split('/', 1)
project = Project.objects.get(slug=p_slug, team__slug=t_slug)
else:
raise CommandError('Project must be specified as team-slug/project-slug or a project id')
if options['platform'] not in PLATFORM_LIST:
raise CommandError('Invalid platform. Must specify one of: %s' % ', '.join(PLATFORM_LIST))
platform = options['platform'] or project.platform
event = create_sample_event(project, platform)
if not event:
raise CommandError('Unable to create an event for platform %r' % (str(platform),))
self.stdout.write('Event created: %s' % (event.group.get_absolute_url(),))
|
"""
sentry.management.commands.create_sample_event
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django.core.management.base import BaseCommand, CommandError, make_option
class Command(BaseCommand):
help = 'Creates a sample event in Sentry (if applicable)'
option_list = BaseCommand.option_list + (
make_option('--project', dest='project'),
make_option('--platform', dest='platform'),
)
def handle(self, **options):
from django.conf import settings
from sentry.constants import PLATFORM_LIST
from sentry.models import Project
from sentry.utils.samples import create_sample_event
if not options['project']:
project = Project.objects.get(settings.SENTRY_DEFAULT_PROJECT)
else:
if options['project'].isdigit():
project = Project.objects.get(id=options['project'])
elif '/' in options['project']:
t_slug, p_slug = options['project'].split('/', 1)
project = Project.objects.get(slug=p_slug, team__slug=t_slug)
else:
raise CommandError('Project must be specified as team-slug/project-slug or a project id')
if options['platform'] not in PLATFORM_LIST:
raise CommandError('Invalid platform. Must specify one of: %s' % ', '.join(PLATFORM_LIST))
platform = options['platform'] or project.platform
event = create_sample_event(project, platform)
if not event:
raise CommandError('Unable to create an event for platform %r' % (str(platform),))
self.stdout.write('Event created: %s' % (event.group.get_absolute_url(),))
|
bsd-3-clause
|
Python
|
893e05540c640c4598477a39688a773556bebad9
|
Update HDF5 to 1.8.16
|
LLNL/spack,LLNL/spack,tmerrick1/spack,lgarren/spack,TheTimmy/spack,EmreAtes/spack,mfherbst/spack,TheTimmy/spack,iulian787/spack,tmerrick1/spack,iulian787/spack,LLNL/spack,matthiasdiener/spack,lgarren/spack,skosukhin/spack,lgarren/spack,EmreAtes/spack,LLNL/spack,EmreAtes/spack,iulian787/spack,mfherbst/spack,TheTimmy/spack,matthiasdiener/spack,EmreAtes/spack,skosukhin/spack,skosukhin/spack,TheTimmy/spack,matthiasdiener/spack,lgarren/spack,tmerrick1/spack,EmreAtes/spack,matthiasdiener/spack,tmerrick1/spack,tmerrick1/spack,mfherbst/spack,matthiasdiener/spack,iulian787/spack,mfherbst/spack,krafczyk/spack,krafczyk/spack,lgarren/spack,mfherbst/spack,krafczyk/spack,krafczyk/spack,iulian787/spack,TheTimmy/spack,skosukhin/spack,skosukhin/spack,LLNL/spack,krafczyk/spack
|
var/spack/packages/hdf5/package.py
|
var/spack/packages/hdf5/package.py
|
from spack import *
class Hdf5(Package):
"""HDF5 is a data model, library, and file format for storing and managing
data. It supports an unlimited variety of datatypes, and is designed for
flexible and efficient I/O and for high volume and complex data.
"""
homepage = "http://www.hdfgroup.org/HDF5/"
url = "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz"
list_url = "http://www.hdfgroup.org/ftp/HDF5/releases"
list_depth = 3
version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618')
version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24')
version('1.8.13', 'c03426e9e77d7766944654280b467289')
depends_on("mpi")
depends_on("zlib")
# TODO: currently hard-coded to use OpenMPI
def install(self, spec, prefix):
configure(
"--prefix=%s" % prefix,
"--with-zlib=%s" % spec['zlib'].prefix,
"--enable-parallel",
"--enable-shared",
"CC=%s" % spec['mpich'].prefix.bin + "/mpicc",
"CXX=%s" % spec['mpich'].prefix.bin + "/mpic++")
make()
make("install")
def url_for_version(self, version):
v = str(version)
if version == Version("1.2.2"):
return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + ".tar.gz"
elif version < Version("1.7"):
return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + version.up_to(2) + "/hdf5-" + v + ".tar.gz"
else:
return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + "/src/hdf5-" + v + ".tar.gz"
|
from spack import *
class Hdf5(Package):
"""HDF5 is a data model, library, and file format for storing and managing
data. It supports an unlimited variety of datatypes, and is designed for
flexible and efficient I/O and for high volume and complex data.
"""
homepage = "http://www.hdfgroup.org/HDF5/"
url = "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz"
list_url = "http://www.hdfgroup.org/ftp/HDF5/releases"
list_depth = 3
version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24')
version('1.8.13', 'c03426e9e77d7766944654280b467289')
depends_on("mpi")
depends_on("zlib")
# TODO: currently hard-coded to use OpenMPI
def install(self, spec, prefix):
configure(
"--prefix=%s" % prefix,
"--with-zlib=%s" % spec['zlib'].prefix,
"--enable-parallel",
"--enable-shared",
"CC=%s" % spec['mpich'].prefix.bin + "/mpicc",
"CXX=%s" % spec['mpich'].prefix.bin + "/mpic++")
make()
make("install")
def url_for_version(self, version):
v = str(version)
if version == Version("1.2.2"):
return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + ".tar.gz"
elif version < Version("1.7"):
return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + version.up_to(2) + "/hdf5-" + v + ".tar.gz"
else:
return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + "/src/hdf5-" + v + ".tar.gz"
|
lgpl-2.1
|
Python
|
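The url_for_version hook above encodes how the HDF5 download layout changed across release lines. A plain-Python sketch of the same branching, using tuple comparison in place of Spack's Version class (the helper name is made up):

def hdf5_url(version):
    # version is a string such as '1.8.16'
    parts = tuple(int(p) for p in version.split('.'))
    base = "http://www.hdfgroup.org/ftp/HDF5/releases"
    if parts == (1, 2, 2):
        return "%s/hdf5-%s.tar.gz" % (base, version)
    if parts < (1, 7):
        major_minor = ".".join(version.split(".")[:2])
        return "%s/hdf5-%s/hdf5-%s.tar.gz" % (base, major_minor, version)
    return "%s/hdf5-%s/src/hdf5-%s.tar.gz" % (base, version, version)

assert hdf5_url("1.8.16").endswith("/src/hdf5-1.8.16.tar.gz")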
f28daad980dd95584dabe83a102ecdd0e1cac517
|
remove reference to unused summaries file
|
magenta/music-spectrogram-diffusion
|
music_spectrogram_diffusion/__init__.py
|
music_spectrogram_diffusion/__init__.py
|
# Copyright 2022 The Music Spectrogram Diffusion Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base module for Music Spectrogram Diffusion."""
from music_spectrogram_diffusion import audio_codecs
from music_spectrogram_diffusion import datasets
from music_spectrogram_diffusion import event_codec
from music_spectrogram_diffusion import inference
from music_spectrogram_diffusion import layers
from music_spectrogram_diffusion import metrics
from music_spectrogram_diffusion import note_sequences
from music_spectrogram_diffusion import preprocessors
from music_spectrogram_diffusion import run_length_encoding
from music_spectrogram_diffusion import tasks
from music_spectrogram_diffusion import vocabularies
|
# Copyright 2022 The Music Spectrogram Diffusion Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base module for Music Spectrogram Diffusion."""
from music_spectrogram_diffusion import audio_codecs
from music_spectrogram_diffusion import datasets
from music_spectrogram_diffusion import event_codec
from music_spectrogram_diffusion import inference
from music_spectrogram_diffusion import layers
from music_spectrogram_diffusion import metrics
from music_spectrogram_diffusion import note_sequences
from music_spectrogram_diffusion import preprocessors
from music_spectrogram_diffusion import run_length_encoding
from music_spectrogram_diffusion import summaries
from music_spectrogram_diffusion import tasks
from music_spectrogram_diffusion import vocabularies
|
apache-2.0
|
Python
|
376b8aa5b77066e06c17f41d65fe32a3c2bdef1f
|
Add a default value to the header limit
|
a2ohm/geo
|
geo.py
|
geo.py
|
#! /usr/bin/python3
# -*- coding: utf-8 -*-
"""
This script transforms a md file into plain html in the context of the
documentation for Kit&Pack.
"""
import mmap
import yaml
print("---------------------------- geo --")
print("-- by [email protected] --")
print("-----------------------------------")
doc_in = "./001-v2-doc.md"
class geoReader():
def __init__(self, doc_in):
self.doc_in = doc_in
self.header = None
self.header_limit = -1
def __enter__(self):
"""Open the file.
"""
self.f = open(self.doc_in, 'r')
return self
def __exit__(self, type, value, traceback):
"""Close the file.
"""
self.f.close()
def parseHeader(self):
"""Parse the header of the file.
"""
s = mmap.mmap(self.f.fileno(), 0, access=mmap.ACCESS_READ)
self.header_limit = s.find(b'---')
if self.header_limit != -1:
self.header = yaml.load(s[0:self.header_limit])
print(self.header['name'])
else:
print("Cannot load the header")
# Read the document
with geoReader(doc_in) as g:
g.parseHeader()
|
#! /usr/bin/python3
# -*- coding: utf-8 -*-
"""
This script transforms a md file into plain html in the context of the
documentation for Kit&Pack.
"""
import mmap
import yaml
print("---------------------------- geo --")
print("-- by [email protected] --")
print("-----------------------------------")
doc_in = "./001-v2-doc.md"
class geoReader():
def __init__(self, doc_in):
self.doc_in = doc_in
self.header = None
def __enter__(self):
"""Open the file.
"""
self.f = open(self.doc_in, 'r')
return self
def __exit__(self, type, value, traceback):
"""Close the file.
"""
self.f.close()
def parseHeader(self):
"""Parse the header of the file.
"""
s = mmap.mmap(self.f.fileno(), 0, access=mmap.ACCESS_READ)
self.header_limit = s.find(b'---')
if self.header_limit != -1:
self.header = yaml.load(s[0:self.header_limit])
print(self.header['name'])
else:
print("Cannot load the header")
# Read the document
with geoReader(doc_in) as g:
g.parseHeader()
|
mit
|
Python
|
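The fix above gives header_limit a default of -1 in __init__, so the attribute exists even when parseHeader() was never called. A small demonstration of the failure mode that avoids (class names are illustrative):

class WithoutDefault(object):
    def parse(self):
        self.header_limit = 0   # only set when parsing actually runs

class WithDefault(object):
    def __init__(self):
        self.header_limit = -1  # safe sentinel before any parsing

try:
    WithoutDefault().header_limit
except AttributeError as err:
    print("unset attribute raises:", err)

print(WithDefault().header_limit)   # -1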
8f3ff0cfd70bfe4eaa9e017323971bad453c93f5
|
set edit as bot
|
legoktm/legobot-old,legoktm/legobot-old
|
trunk/toolserver/pui.py
|
trunk/toolserver/pui.py
|
#!usr/bin/python
# -*- coding: utf-8 -*
#
# (C) Legoktm 2008-2009, MIT License
#
import re, sys, os
sys.path.append(os.environ['HOME'] + '/pythonwikibot')
#sys.path.append('/Users/kman/projects/pywikibot')
import wiki
wiki.setUser('Legobot')
page = wiki.Page('Wikipedia:Possibly unfree images')
try:
wikitext = state0 = page.get()
except wiki.IsRedirectPage:
page = wiki.Page('Wikipedia:Possibly unfree files')
wikitext = state0 = page.get()
wikitext = re.compile(r'\n==New listings==', re.IGNORECASE).sub(r'\n*[[/{{subst:#time:Y F j|-14 days}}]]\n==New listings==', wikitext)
EditMsg = 'Adding new day to holding cell'
wiki.showDiff(state0, wikitext)
page.put(wikitext,EditMsg, bot=True)
|
#!usr/bin/python
# -*- coding: utf-8 -*
#
# (C) Legoktm 2008-2009, MIT License
#
import re, sys, os
sys.path.append(os.environ['HOME'] + '/pythonwikibot')
#sys.path.append('/Users/kman/projects/pywikibot')
import wiki
wiki.setUser('Legobot')
page = wiki.Page('Wikipedia:Possibly unfree images')
try:
wikitext = state0 = page.get()
except wiki.IsRedirectPage:
page = wiki.Page('Wikipedia:Possibly unfree files')
wikitext = state0 = page.get()
wikitext = re.compile(r'\n==New listings==', re.IGNORECASE).sub(r'\n*[[/{{subst:#time:Y F j|-14 days}}]]\n==New listings==', wikitext)
EditMsg = 'Adding new day to holding cell'
wiki.showDiff(state0, wikitext)
page.put(wikitext,EditMsg)
|
mit
|
Python
|
fb51b056ce909028ee4b0cc3e790ae202d8711af
|
Update P06_factorialLog disable logging
|
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
|
books/AutomateTheBoringStuffWithPython/Chapter10/P06_factorialLog.py
|
books/AutomateTheBoringStuffWithPython/Chapter10/P06_factorialLog.py
|
# This program calculates factorial and logs debug messages
import logging
logging.basicConfig(level=logging.DEBUG, format=" %(asctime)s - %(levelname)s - %(message)s")
logging.disable(logging.CRITICAL) # Stop logging, comment out to debug
logging.debug("Start of program")
def factorial(n):
logging.debug("Start of factorial(%s%%)" % n)
total = 1
for i in range(1, n + 1):
total *= i
logging.debug("i is " + str(i) + ", total is " + str(total))
logging.debug("End of factorial(%s%%)" % n)
return total
print(factorial(5))
logging.debug("End of program")
|
# This program calculates factorial and logs debug messages
import logging
logging.basicConfig(level=logging.DEBUG, format=" %(asctime)s - %(levelname)s - %(message)s")
logging.debug("Start of program")
def factorial(n):
logging.debug("Start of factorial(%s%%)" % n)
total = 1
for i in range(1, n + 1):
total *= i
logging.debug("i is " + str(i) + ", total is " + str(total))
logging.debug("End of factorial(%s%%)" % n)
return total
print(factorial(5))
logging.debug("End of program")
|
mit
|
Python
|
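The single added line relies on logging.disable(), which suppresses every record at or below the given level process-wide, so the debug calls can stay in place. A minimal standard-library demonstration:

import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s - %(message)s")
logging.debug("visible: logging fully enabled")

logging.disable(logging.CRITICAL)   # mute everything up to and including CRITICAL
logging.debug("never printed")
logging.critical("never printed either")

logging.disable(logging.NOTSET)     # undo the suppression, e.g. when debugging again
logging.debug("visible again")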
6facb0f33a8cf53041d9fa1562376e43e6d6194f
|
add init for smiles2graph
|
snap-stanford/ogb
|
ogb/utils/__init__.py
|
ogb/utils/__init__.py
|
try:
from .mol import smiles2graph
except ImportError:
pass
|
mit
|
Python
|
|
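The new __init__.py uses the optional-dependency pattern: the import is attempted, and if the underlying extra is missing the package still imports, just without smiles2graph. A generic sketch of the same pattern (numpy stands in for any optional dependency):

try:
    import numpy as np
    HAS_NUMPY = True
except ImportError:
    np = None
    HAS_NUMPY = False

def mean(values):
    # Fast path only when the optional dependency is available.
    if HAS_NUMPY:
        return float(np.mean(values))
    return float(sum(values)) / len(values)

print(mean([1, 2, 3]))   # works either way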
fdae17a50223c2f9b8ba4a665fc24726e2c2ce14
|
Add auth header to the fixture loader
|
matthewfranglen/postgres-elasticsearch-fdw
|
tests/lib/es_tools.py
|
tests/lib/es_tools.py
|
""" Commands for interacting with Elastic Search """
# pylint: disable=broad-except
from os.path import join
import requests
from lib.tools import TEST_FOLDER
def es_is_available():
""" Test if Elastic Search is running """
try:
return (
requests.get("http://localhost:9200", auth=("elastic", "changeme")).json()[
"tagline"
]
== "You Know, for Search"
)
except Exception:
return False
def load_json_file(filename):
""" Load JSON file into Elastic Search """
url = "http://localhost:9200/_bulk"
path = join(TEST_FOLDER, "data", filename)
headers = {"Content-Type": "application/x-ndjson"}
with open(path, "r") as handle:
body = handle.read().encode(encoding="utf-8")
return requests.post(
url, headers=headers, data=body, auth=("elastic", "changeme")
)
|
""" Commands for interacting with Elastic Search """
# pylint: disable=broad-except
from os.path import join
import requests
from lib.tools import TEST_FOLDER
def es_is_available():
""" Test if Elastic Search is running """
try:
return (
requests.get("http://localhost:9200").json()["tagline"]
== "You Know, for Search"
)
except Exception:
return False
def load_json_file(filename):
""" Load JSON file into Elastic Search """
url = "http://localhost:9200/_bulk"
path = join(TEST_FOLDER, "data", filename)
headers = {"Content-Type": "application/x-ndjson"}
with open(path, "r") as handle:
body = handle.read().encode(encoding="utf-8")
return requests.post(url, headers=headers, data=body)
|
mit
|
Python
|
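The fixture helpers above now pass the same auth tuple to every call. When several requests share credentials and headers, a requests.Session can carry them once; a short sketch under the same localhost/changeme assumptions as the tests:

import requests

session = requests.Session()
session.auth = ("elastic", "changeme")                    # sent with every request
session.headers["Content-Type"] = "application/x-ndjson"

# Equivalent to the explicit keyword arguments in the helpers above:
#   session.get("http://localhost:9200")
#   session.post("http://localhost:9200/_bulk", data=body)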
19e347716b5efcbaaf857a2805bd5f7ed5d5ec04
|
Patch waagent unit to kill process instead of entire control group
|
soumyanishan/azure-linux-extensions,krkhan/azure-linux-extensions,Azure/azure-linux-extensions,Azure/azure-linux-extensions,Azure/azure-linux-extensions,vityagi/azure-linux-extensions,vityagi/azure-linux-extensions,andyliuliming/azure-linux-extensions,varunkumta/azure-linux-extensions,jasonzio/azure-linux-extensions,bpramod/azure-linux-extensions,bpramod/azure-linux-extensions,vityagi/azure-linux-extensions,varunkumta/azure-linux-extensions,bpramod/azure-linux-extensions,soumyanishan/azure-linux-extensions,jasonzio/azure-linux-extensions,bpramod/azure-linux-extensions,Azure/azure-linux-extensions,krkhan/azure-linux-extensions,jasonzio/azure-linux-extensions,bpramod/azure-linux-extensions,andyliuliming/azure-linux-extensions,jasonzio/azure-linux-extensions,bpramod/azure-linux-extensions,Azure/azure-linux-extensions,soumyanishan/azure-linux-extensions,soumyanishan/azure-linux-extensions,soumyanishan/azure-linux-extensions,bpramod/azure-linux-extensions,varunkumta/azure-linux-extensions,vityagi/azure-linux-extensions,krkhan/azure-linux-extensions,andyliuliming/azure-linux-extensions,andyliuliming/azure-linux-extensions,varunkumta/azure-linux-extensions,vityagi/azure-linux-extensions,krkhan/azure-linux-extensions,vityagi/azure-linux-extensions,Azure/azure-linux-extensions,Azure/azure-linux-extensions,vityagi/azure-linux-extensions
|
VMEncryption/main/oscrypto/encryptstates/PrereqState.py
|
VMEncryption/main/oscrypto/encryptstates/PrereqState.py
|
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
from pprint import pprint
class PrereqState(OSEncryptionState):
def __init__(self, context):
super(PrereqState, self).__init__('PrereqState', context)
def should_enter(self):
self.context.logger.log("Verifying if machine should enter prereq state")
if not super(PrereqState, self).should_enter():
return False
self.context.logger.log("Performing enter checks for prereq state")
return True
def enter(self):
if not self.should_enter():
return
self.context.logger.log("Entering prereq state")
distro_info = self.context.distro_patcher.distro_info
self.context.logger.log("Distro info: {0}, {1}".format(distro_info[0], distro_info[1]))
if distro_info[0] == 'redhat' and distro_info[1] == '7.2':
self.context.logger.log("Enabling OS volume encryption on RHEL 7.2")
else:
raise Exception("OS volume encryption is not supported for distro {0} {1}".format(distro_info[0],
distro_info[1]))
self.context.distro_patcher.install_extras()
self._patch_waagent()
self.command_executor.Execute('systemctl daemon-reload', True)
def should_exit(self):
self.context.logger.log("Verifying if machine should exit prereq state")
return super(PrereqState, self).should_exit()
def _patch_waagent(self):
self.context.logger.log("Patching waagent")
contents = None
with open('/usr/lib/systemd/system/waagent.service', 'r') as f:
contents = f.read()
contents = re.sub(r'\[Service\]\n', '[Service]\nKillMode=process\n', contents)
with open('/usr/lib/systemd/system/waagent.service', 'w') as f:
f.write(contents)
self.context.logger.log("waagent patched successfully")
|
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
from pprint import pprint
class PrereqState(OSEncryptionState):
def __init__(self, context):
super(PrereqState, self).__init__('PrereqState', context)
def should_enter(self):
self.context.logger.log("Verifying if machine should enter prereq state")
if not super(PrereqState, self).should_enter():
return False
self.context.logger.log("Performing enter checks for prereq state")
return True
def enter(self):
if not self.should_enter():
return
self.context.logger.log("Entering prereq state")
distro_info = self.context.distro_patcher.distro_info
self.context.logger.log("Distro info: {0}, {1}".format(distro_info[0], distro_info[1]))
if distro_info[0] == 'redhat' and distro_info[1] == '7.2':
self.context.logger.log("Enabling OS volume encryption on RHEL 7.2")
else:
raise Exception("OS volume encryption is not supported for distro {0} {1}".format(distro_info[0],
distro_info[1]))
self.context.distro_patcher.install_extras()
def should_exit(self):
self.context.logger.log("Verifying if machine should exit prereq state")
return super(PrereqState, self).should_exit()
|
apache-2.0
|
Python
|
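The new _patch_waagent step inserts KillMode=process right after the [Service] header with a regex substitution, so systemd stops only the main process instead of the whole control group. A self-contained sketch of that text transformation on an in-memory unit file:

import re

unit = "[Unit]\nDescription=demo\n\n[Service]\nExecStart=/bin/true\n"
patched = re.sub(r'\[Service\]\n', '[Service]\nKillMode=process\n', unit)
assert "KillMode=process" in patched
print(patched)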
455874cae74a34e610650e5b5618b64fe808ea1c
|
fix docstring syntax error
|
ncareol/ncharts,ncareol/ncharts,ncareol/ncharts,ncareol/ncharts,ncareol/ncharts
|
ncharts/ncharts/templatetags/filters.py
|
ncharts/ncharts/templatetags/filters.py
|
from django import template
from ncharts import models as nc_models
register = template.Library()
@register.filter
def get_long_name(vs, v):
"""Get 'long_name' value of vs[v] """
try:
return vs[v]['long_name']
except:
return ''
@register.filter
def get_item(dictionary, key):
return dictionary.get(key)
@register.filter
def get_key_values(var_name, variables):
for var in variables:
if var.choice_label == var_name:
return var
@register.filter
def make_tabs(variables, dset):
return dset.make_tabs(variables)
@register.filter
def make_project_tabs(projects):
"""Get the dictionary of years and projects from models.py"""
return nc_models.Project.make_tabs(projects)
|
from django import template
from ncharts import models as nc_models
register = template.Library()
@register.filter
def get_long_name(vs, v):
"""Get 'long_name' value of vs[v] """
try:
return vs[v]['long_name']
except:
return ''
@register.filter
def get_item(dictionary, key):
return dictionary.get(key)
@register.filter
def get_key_values(var_name, variables):
for var in variables:
if var.choice_label == var_name:
return var
@register.filter
def make_tabs(variables, dset):
return dset.make_tabs(variables)
@register.filter
"""Get the dictionary of years and projects from models.py"""
def make_project_tabs(projects):
return nc_models.Project.make_tabs(projects)
|
bsd-2-clause
|
Python
|
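The bug fixed above was a docstring placed between @register.filter and the def line; a docstring must be the first statement inside the function body, and a bare string after a decorator is a SyntaxError. Compare:

# Correct: the string is the first statement of the body.
def make_project_tabs(projects):
    """Get the dictionary of years and projects."""
    return projects

# Broken (what the old file did) -- a string expression may not follow
# a decorator, so this does not even compile:
#
#   @register.filter
#   """Get the dictionary of years and projects."""
#   def make_project_tabs(projects):
#       ...

print(make_project_tabs.__doc__)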
f7d4be60dd246193fe269dc1caaf8208bd4dba22
|
improve output of compare_dfa.py.
|
Lind-Project/native_client,Lind-Project/native_client,Lind-Project/native_client,Lind-Project/native_client,Lind-Project/native_client,Lind-Project/native_client
|
src/trusted/validator_ragel/unreviewed/compare_dfa.py
|
src/trusted/validator_ragel/unreviewed/compare_dfa.py
|
#!/usr/bin/python
# Copyright (c) 2013 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import dfa_parser
visited_pairs = set()
def Traverse(state1, state2, path):
if (state1, state2) in visited_pairs:
return
if state1.is_accepting != state2.is_accepting:
print map(hex, path), state1.is_accepting
print map(hex, path), state2.is_accepting
sys.exit(1)
visited_pairs.add((state1, state2))
for byte in range(256):
new_path = path + [byte]
t1 = state1.forward_transitions.get(byte)
t2 = state2.forward_transitions.get(byte)
if (t1 is None) != (t2 is None):
t = t1 or t2
s = t.to_state
path_to_accepting = new_path
while not s.is_accepting:
b = min(s.forward_transitions)
path_to_accepting.append(b)
s = s.forward_transitions[b].to_state
if t1 is not None:
print map(hex, path_to_accepting), True
print map(hex, path), '...', False
else:
print map(hex, path), '...', False
print map(hex, path_to_accepting), True
sys.exit(1)
if t1 is None:
continue
Traverse(t1.to_state, t2.to_state, new_path)
def main():
filename1, filename2 = sys.argv[1:]
_, start_state1 = dfa_parser.ParseXml(filename1)
_, start_state2 = dfa_parser.ParseXml(filename2)
Traverse(start_state1, start_state2, [])
print 'automata are equivalent'
if __name__ == '__main__':
main()
|
#!/usr/bin/python
# Copyright (c) 2013 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import dfa_parser
visited_pairs = set()
def Traverse(state1, state2, path):
if (state1, state2) in visited_pairs:
return
if state1.is_accepting != state2.is_accepting:
print map(hex, path)
print state1.is_accepting
print state2.is_accepting
sys.exit(1)
visited_pairs.add((state1, state2))
for byte in range(256):
new_path = path + [byte]
t1 = state1.forward_transitions.get(byte)
t2 = state2.forward_transitions.get(byte)
if (t1 is None) != (t2 is None):
print map(hex, new_path)
print t1 is not None
print t2 is not None
sys.exit(1)
if t1 is None:
continue
Traverse(t1.to_state, t2.to_state, new_path)
def main():
filename1, filename2 = sys.argv[1:]
_, start_state1 = dfa_parser.ParseXml(filename1)
_, start_state2 = dfa_parser.ParseXml(filename2)
Traverse(start_state1, start_state2, [])
print 'automata are equivalent'
if __name__ == '__main__':
main()
|
bsd-3-clause
|
Python
|
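The improved diagnostics above extend a mismatching path byte-by-byte, always taking the smallest outgoing byte, until an accepting state is reached. A standalone sketch of that completion walk over a toy state class (State is illustrative, not the validator's type):

class State(object):
    def __init__(self, accepting=False):
        self.accepting = accepting
        self.transitions = {}    # byte -> State

def path_to_accepting(state, path):
    # Deterministically extend `path` until an accepting state is hit.
    while not state.accepting:
        byte = min(state.transitions)
        path.append(byte)
        state = state.transitions[byte]
    return path

a, b, c = State(), State(), State(accepting=True)
a.transitions[0x90] = b
b.transitions[0x0f] = c
print([hex(x) for x in path_to_accepting(a, [])])   # ['0x90', '0xf']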
7331fa69c6cd2f09b1711272278a9684af5cf9c1
|
fix attachfilename
|
Phantasus/intelmq,aaronkaplan/intelmq-old,aaronkaplan/intelmq-old,aaronkaplan/intelmq-old,s4n7h0/intelmq
|
intelmq/bots/collectors/mail/mail-attach.py
|
intelmq/bots/collectors/mail/mail-attach.py
|
import re
import imbox
import zipfile
from intelmq.lib.bot import Bot, sys
from intelmq.bots.collectors.mail.lib import Mail
class MailAttachCollectorBot(Bot):
def process(self):
mailbox = imbox.Imbox(self.parameters.mail_host, self.parameters.mail_user, self.parameters.mail_password, self.parameters.mail_ssl)
emails = mailbox.messages(folder=self.parameters.folder, unread=True)
if emails:
for uid, message in emails:
if self.parameters.subject_regex and not re.search(self.parameters.subject_regex, message.subject):
continue
self.logger.info("Reading email report")
for attach in message.attachments:
if not attach:
continue
attach_name = attach['filename'][1:len(attach['filename'])-1] # remove quote marks from filename
if re.search(self.parameters.attach_regex, attach_name):
if self.parameters.attach_unzip:
zipped = zipfile.ZipFile(attach['content'])
report = zipped.read(zipped.namelist()[0])
else:
report = attach['content']
self.send_message(report)
mailbox.mark_seen(uid)
self.logger.info("Email report read")
if __name__ == "__main__":
bot = MailAttachCollectorBot(sys.argv[1])
bot.start()
|
import re
import imbox
import zipfile
from intelmq.lib.bot import Bot, sys
from intelmq.bots.collectors.mail.lib import Mail
class MailAttachCollectorBot(Bot):
def process(self):
mailbox = imbox.Imbox(self.parameters.mail_host, self.parameters.mail_user, self.parameters.mail_password, self.parameters.mail_ssl)
emails = mailbox.messages(folder=self.parameters.folder, unread=True)
if emails:
for uid, message in emails:
if self.parameters.subject_regex and not re.search(self.parameters.subject_regex, message.subject):
continue
self.logger.info("Reading email report")
for attach in message.attachments:
if not attach:
continue
if re.search(self.parameters.attach_regex, attach['filename']):
if self.parameters.attach_unzip:
zipped = zipfile.ZipFile(attach['content'])
report = zipped.read(zipped.namelist()[0])
else:
report = attach['content']
self.send_message(report)
mailbox.mark_seen(uid)
self.logger.info("Email report read")
if __name__ == "__main__":
bot = MailAttachCollectorBot(sys.argv[1])
bot.start()
|
agpl-3.0
|
Python
|
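The fix above slices off the first and last character because imbox reports attachment filenames wrapped in quote marks. Slicing assumes the quotes are always there; a slightly more defensive equivalent (a sketch, not the intelmq helper):

def clean_attach_name(raw):
    # Remove one layer of surrounding double quotes, if present.
    if raw.startswith('"') and raw.endswith('"') and len(raw) >= 2:
        return raw[1:-1]
    return raw

assert clean_attach_name('"report.zip"') == 'report.zip'
assert clean_attach_name('report.zip') == 'report.zip'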
b481426e52661b702fa014a86c68b015f46feb1f
|
remove deprecated test suite declarations
|
OCA/account-financial-tools,OCA/account-financial-tools
|
account_invoice_constraint_chronology/tests/__init__.py
|
account_invoice_constraint_chronology/tests/__init__.py
|
# -*- coding: utf-8 -*-
#
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
# All Rights Reserved
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contact a Free Software
# Service Company.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from . import test_account_constraint_chronology
|
# -*- coding: utf-8 -*-
#
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
# All Rights Reserved
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contact a Free Software
# Service Company.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from . import test_account_constraint_chronology
fast_suite = [
test_account_constraint_chronology,
]
checks = [
test_account_constraint_chronology,
]
|
agpl-3.0
|
Python
|
7fef3fbd8f4a68a2cf584721930c276eb49b16ee
|
Fix issue in infer_android_package_name with tests in //javatests
|
android/android-test,android/android-test,android/android-test,android/android-test,android/android-test
|
build_extensions/infer_android_package_name.bzl
|
build_extensions/infer_android_package_name.bzl
|
"""A rule for inferring an android package name."""
def infer_android_package_name():
"""Infer an android package name based on current path below 'javatests'"""
path = native.package_name()
javatests_index = path.rindex("javatests/") + len("javatests/")
return path[javatests_index:].replace("/", ".")
|
"""A rule for inferring an android package name."""
def infer_android_package_name():
"""Infer an android package name based on current path below 'javatests'"""
path = native.package_name()
javatests_index = path.rindex("/javatests/") + len("/javatests/")
return path[javatests_index:].replace("/", ".")
|
apache-2.0
|
Python
|
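The one-character fix above matters because native.package_name() returns a path without a leading slash, so a test package sitting directly under javatests/ made rindex("/javatests/") raise ValueError. A plain-Python reproduction:

def infer_package(path, marker="javatests/"):
    i = path.rindex(marker) + len(marker)
    return path[i:].replace("/", ".")

assert infer_package("javatests/com/example/app") == "com.example.app"
assert infer_package("foo/javatests/com/example") == "com.example"

try:
    "javatests/com/example/app".rindex("/javatests/")
except ValueError:
    print("old marker fails for packages directly under //javatests")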
c2f668b6b403bde09485595c2ac2852220739c93
|
Fix docstring.
|
taschini/morepath,faassen/morepath,morepath/morepath
|
morepath/toposort.py
|
morepath/toposort.py
|
from .error import TopologicalSortError
def topological_sort(l, get_depends):
result = []
marked = set()
temporary_marked = set()
def visit(n):
if n in marked:
return
if n in temporary_marked:
raise TopologicalSortError("Not a DAG")
temporary_marked.add(n)
for m in get_depends(n):
visit(m)
marked.add(n)
result.append(n)
for n in l:
visit(n)
return result
def toposorted(infos):
"""Sort infos topologically.
Info object must have a key attribute, and before and after
attributes that return a list of keys.
"""
key_to_info = {}
depends = {}
for info in infos:
key_to_info[info.key] = info
depends[info.key] = []
for info in infos:
for after in info.after:
after_info = key_to_info[after]
depends[info.key].append(after_info)
for before in info.before:
before_info = key_to_info[before]
depends[before_info.key].append(info)
return topological_sort(
infos, lambda info: depends[info.key])
|
from .error import TopologicalSortError
def topological_sort(l, get_depends):
result = []
marked = set()
temporary_marked = set()
def visit(n):
if n in marked:
return
if n in temporary_marked:
raise TopologicalSortError("Not a DAG")
temporary_marked.add(n)
for m in get_depends(n):
visit(m)
marked.add(n)
result.append(n)
for n in l:
visit(n)
return result
def toposorted(infos):
"""Sort infos topologically.
Info object must have a key attribute, and before and after
methods that return a list of keys.
"""
key_to_info = {}
depends = {}
for info in infos:
key_to_info[info.key] = info
depends[info.key] = []
for info in infos:
for after in info.after:
after_info = key_to_info[after]
depends[info.key].append(after_info)
for before in info.before:
before_info = key_to_info[before]
depends[before_info.key].append(info)
return topological_sort(
infos, lambda info: depends[info.key])
|
bsd-3-clause
|
Python
|
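A short usage example for toposorted above, with a throwaway Info class (not part of morepath) showing how before/after keys translate into ordering:

class Info(object):
    def __init__(self, key, before=(), after=()):
        self.key = key
        self.before = list(before)
        self.after = list(after)

infos = [
    Info('middleware'),
    Info('auth', after=['middleware']),   # auth must come after middleware
    Info('routing', before=['auth']),     # routing must come before auth
]

# With the toposorted() defined above, both middleware and routing end
# up before auth in the result:
#   order = [info.key for info in toposorted(infos)]
#   assert order.index('auth') > order.index('middleware')
#   assert order.index('auth') > order.index('routing')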
da8b7471ee773c835edae6f333938caa34e17cbd
|
Update wrap.py
|
davidone/misc,davidone/misc
|
tesco-delivery-bot/wrap.py
|
tesco-delivery-bot/wrap.py
|
#!/usr/local/bin/python3.7
import subprocess
import os
import http.client, urllib
PO_API_TOKEN = ""
PO_USER_KEY = ""
NODE_BIN = "/usr/local/bin/node"
class cd:
"""Context manager for changing the current working directory"""
def __init__(self, newPath):
self.newPath = os.path.expanduser(newPath)
def __enter__(self):
self.savedPath = os.getcwd()
os.chdir(self.newPath)
def __exit__(self, etype, value, traceback):
os.chdir(self.savedPath)
def define_po_keys():
global PO_API_TOKEN, PO_USER_KEY
try:
PO_API_TOKEN = os.environ["PO_API_TOKEN"]
PO_USER_KEY = os.environ["PO_USER_KEY"]
except KeyError as err:
print(
f"Error: {err}. Check if your environment defines PO_API_TOKEN and PO_USER_KEY"
)
exit(1)
if not PO_API_TOKEN or not PO_USER_KEY:
print(f"Error: PushOver token or key are empty.")
exit(1)
def check_tesco() -> list:
with cd("~/delivery-slot-bot"):
result = subprocess.run(
[NODE_BIN, "delivery-slots.js"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
if result.stderr:
print(f"ERROR: {result.stderr}")
return []
result_list = result.stdout.split("\n")
print(result_list)
def process_tesco(t_list) -> str:
return "ok world"
def send_po(message) -> bool:
if not message:
print("message is emtpy")
return True
conn = http.client.HTTPSConnection("api.pushover.net:443")
conn.request(
"POST",
"/1/messages.json",
urllib.parse.urlencode(
{"token": PO_API_TOKEN, "user": PO_USER_KEY, "message": message,}
),
{"Content-type": "application/x-www-form-urlencoded"},
)
res = conn.getresponse()
if res.status not in range(200, 300):
return False
return True
if __name__ == "__main__":
define_po_keys()
res_tesco = check_tesco()
message = process_tesco(res_tesco)
send_po(message)
|
import subprocess
import os
import http.client, urllib
PO_API_TOKEN = ""
PO_USER_KEY = ""
class cd:
"""Context manager for changing the current working directory"""
def __init__(self, newPath):
self.newPath = os.path.expanduser(newPath)
def __enter__(self):
self.savedPath = os.getcwd()
os.chdir(self.newPath)
def __exit__(self, etype, value, traceback):
os.chdir(self.savedPath)
def define_po_keys() -> None:
global PO_API_TOKEN, PO_USER_KEY
try:
PO_API_TOKEN = os.environ["PO_API_TOKEN"]
PO_USER_KEY = os.environ["PO_USER_KEY"]
except KeyError as err:
print(f"Error: {err}. Check if your environment defines PO_API_TOKEN and PO_USER_KEY")
exit(1)
if not PO_API_TOKEN or not PO_USER_KEY:
print(f"Error: PushOver token or key are empty.")
exit(1)
def check_tesco() -> list:
with cd("/path/delivery-slot-bot"):
result = subprocess.run(["node", "delivery-slots.js"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
if result.stderr:
print(f"ERROR: {result.stderr}")
return []
result_list = result.stdout.split('\n')
print(result_list)
def process_tesco(t_list) -> str:
pass
def send_po(message) -> bool:
if not message:
print("Message is emtpy")
return True
conn = http.client.HTTPSConnection("api.pushover.net:443")
conn.request("POST", "/1/messages.json",
urllib.parse.urlencode({
"token": PO_API_TOKEN,
"user": PO_USER_KEY,
"message": message,
}), { "Content-type": "application/x-www-form-urlencoded" })
res = conn.getresponse()
if (res.status not in range(200, 300)):
return False
return True
if __name__ == "__main__":
|
mit
|
Python
|
577a526284236cf9a31b3b339902bbeed4991b63
|
work on profile tests, part-baked
|
obsoleter/petl,alimanfoo/petl,rs/petl,rs/petl,thatneat/petl,Marketing1by1/petl,obsoleter/petl,obsoleter/petl,psnj/petl,rs/petl
|
petl/test/test_profile.py
|
petl/test/test_profile.py
|
"""
TODO doc me
"""
from petl.profile import *
def test_profile():
"""
TODO doc me
"""
table = [['foo', 'bar'],
['A', 1],
['B', 2],
['B', '3', True],
['D', 'xyz'],
['E']]
profiler = Profiler(table)
# profile the table with default analyses - list field names and
# report the sample size used for profiling
profile = profiler.profile()
assert profile['general']['default']['field_names'] == ('foo', 'bar')
assert profile['general']['default']['sample_size'] == 5
# add row lengths analysis
profiler.add(RowLengths)
profile = profiler.profile()
assert profile['general']['row_lengths']['max_row_length'] == 3
assert profile['general']['row_lengths']['min_row_length'] == 1
assert profile['general']['row_lengths']['mean_row_length'] == 2
# add distinct values analysis on field 'foo'
profiler.add(DistinctValues, field='foo')
profile = profiler.profile()
assert profile['fields']['foo']['distinct_values'] == {'A': 1, 'B': 2, 'D': 1, 'E': 1}
# add basic statistics analysis on field 'bar'
profiler.add(BasicStatistics, field='foo')
profile = profiler.profile()
assert profile['fields']['bar']['basic_statistics']['min'] == 1
assert profile['fields']['bar']['basic_statistics']['max'] == 3
assert profile['fields']['bar']['basic_statistics']['mean'] == 2
assert profile['fields']['bar']['basic_statistics']['sum'] == 6
assert profile['fields']['bar']['basic_statistics']['count'] == 3
assert profile['fields']['bar']['basic_statistics']['errors'] == 2
# add types analysis on all fields
profiler.add(Types, all_fields=True) # TODO a different method?
profile = profiler.profile()
assert profile['fields']['foo']['types']['actual_types'] == {'string': 5}
assert profile['fields']['foo']['types']['applicable_types'] == {'string': 5}
assert profile['fields']['foo']['types']['inferred_type'] == 'string'
assert profile['fields']['bar']['types']['actual_types'] == {'int': 2, 'string': 2}
assert profile['fields']['foo']['types']['applicable_types'] == {'int': 3, 'float': 3, 'string': 5}
assert profile['fields']['foo']['types']['inferred_type'] == 'int'
|
"""
TODO doc me
"""
from petl.profile import *
def test_profile():
"""
TODO doc me
"""
table = [['foo', 'bar'],
['A', 1],
['B', 2],
['B', '3', True],
['D', 'xyz'],
['E']]
profiler = Profiler(table)
profile = profiler.profile()
assert profile['general']['field_names'] == ('foo', 'bar')
assert profile['general']['sample_size'] == 5
profiler.add(record_lengths)
profile = profiler.profile()
assert profile['general']['max_row_length'] == 3
assert profile['general']['min_row_length'] == 1
assert profile['general']['mean_row_length'] == 2
profiler.add(distinct_values, field='foo')
profile = profiler.profile()
assert profile['fields']['foo']['distinct_values']['values'] == {'A', 'B', 'D', 'E'}
assert profile['fields']['foo']['distinct_values']['counts']['A'] == 1
assert profile['fields']['foo']['distinct_values']['counts']['B'] == 2
assert profile['fields']['foo']['distinct_values']['counts']['D'] == 1
assert profile['fields']['foo']['distinct_values']['counts']['E'] == 1
assert 'C' not in profile['fields']['foo']['distinct_values']['counts']
|
mit
|
Python
|
87244598ed08e790835818656ecba0178bb7ca89
|
Upgrade to a better version
|
leosartaj/fsplit
|
fsplit/__init__.py
|
fsplit/__init__.py
|
#!/usr/bin/env python2
##
# fsplit
# https://github.com/leosartaj/fsplit.git
#
# Copyright (c) 2014 Sartaj Singh
# Licensed under the MIT license.
##
from .info import __version__ # define __version__ variable
from .info import __desc__ # define __desc__ variable for description
|
#!/usr/bin/env python2
##
# fsplit
# https://github.com/leosartaj/fsplit.git
#
# Copyright (c) 2014 Sartaj Singh
# Licensed under the MIT license.
##
from info import __version__ # define __version__ variable
from info import __desc__ # define __desc__ variable for description
|
mit
|
Python
|
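The upgrade above switches to explicit relative imports, which Python 3 requires inside packages (the implicit form was removed, and on Python 2 it is ambiguous whenever a top-level module of the same name exists). A hypothetical layout showing the two spellings:

# fsplit/
#     __init__.py    ->  from .info import __version__     (explicit, py2+py3)
#     info.py        ->  __version__ = '0.1.0'
#
# The old spelling `from info import __version__` resolves against
# sys.path instead of the package, so it breaks under Python 3 and can
# silently pick up an unrelated top-level `info` module under Python 2.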
85124382cd3f90b439b27c8dd5a82f47925ddab7
|
fix settings print
|
jblance/mpp-solar
|
mppsolar/__init__.py
|
mppsolar/__init__.py
|
# -*- coding: utf-8 -*-
# !/usr/bin/python
import logging
from argparse import ArgumentParser
# import mppcommands
import mpputils
logger = logging.getLogger()
# if __name__ == '__main__':
def main():
parser = ArgumentParser(description='MPP Solar Command Utility')
parser.add_argument('-c', '--command', help='Command to run', default='QID')
parser.add_argument('-ll', '--loglevel',
type=str,
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
help='Set the logging level')
parser.add_argument('-d', '--device', type=str, help='Serial device to communicate with', default='/dev/ttyUSB0')
parser.add_argument('-b', '--baud', type=int, help='Baud rate for serial communications', default=2400)
parser.add_argument('-l', '--listknown', action='store_true', help='List known commands')
parser.add_argument('-s', '--getStatus', action='store_true', help='Get Inverter Status')
parser.add_argument('-t', '--getSettings', action='store_true', help='Get Inverter Settings')
parser.add_argument('-H', '--makepretty', action='store_true', help='Display result with descriptions etc if possible')
args = parser.parse_args()
logging.basicConfig(level=args.loglevel)
logging.debug('command %s', args.command)
logging.debug('Serial device used: %s, baud rate: %d', args.device, args.baud)
# mp = mppcommands.mppCommands(args.device, args.baud)
mp = mpputils.mppUtils(args.device, args.baud)
if(args.listknown):
for line in mp.getKnownCommands():
print line
elif(args.getStatus):
fullStatus = mp.getFullStatus()
for key in fullStatus:
print "{}\t{} {}".format(key, fullStatus[key]['value'], fullStatus[key]['unit'])
elif(args.getSettings):
settings = mp.getSettings()
for key in settings:
print "{}\t{}\t{} {}".format(key, settings[key]['default'],
settings[key]['value'],
settings[key]['unit'])
else:
# TODO: check if command is valid
# maybe check if query or setter and ...
if(args.makepretty):
for line in mp.getResponsePretty(args.command):
print line
else:
print mp.getResponse(args.command)
|
# -*- coding: utf-8 -*-
# !/usr/bin/python
import logging
from argparse import ArgumentParser
# import mppcommands
import mpputils
logger = logging.getLogger()
# if __name__ == '__main__':
def main():
parser = ArgumentParser(description='MPP Solar Command Utility')
parser.add_argument('-c', '--command', help='Command to run', default='QID')
parser.add_argument('-ll', '--loglevel',
type=str,
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
help='Set the logging level')
parser.add_argument('-d', '--device', type=str, help='Serial device to communicate with', default='/dev/ttyUSB0')
parser.add_argument('-b', '--baud', type=int, help='Baud rate for serial communications', default=2400)
parser.add_argument('-l', '--listknown', action='store_true', help='List known commands')
parser.add_argument('-s', '--getStatus', action='store_true', help='Get Inverter Status')
parser.add_argument('-t', '--getSettings', action='store_true', help='Get Inverter Settings')
parser.add_argument('-H', '--makepretty', action='store_true', help='Display result with descriptions etc if possible')
args = parser.parse_args()
logging.basicConfig(level=args.loglevel)
logging.debug('command %s', args.command)
logging.debug('Serial device used: %s, baud rate: %d', args.device, args.baud)
# mp = mppcommands.mppCommands(args.device, args.baud)
mp = mpputils.mppUtils(args.device, args.baud)
if(args.listknown):
for line in mp.getKnownCommands():
print line
elif(args.getStatus):
fullStatus = mp.getFullStatus()
for key in fullStatus:
print key, fullStatus[key]['value'], fullStatus[key]['unit']
elif(args.getSettings):
for line in mp.getSettings():
print line, line['value'], line['unit']
else:
# TODO: check if command is valid
# maybe check if query or setter and ...
if(args.makepretty):
for line in mp.getResponsePretty(args.command):
print line
else:
print mp.getResponse(args.command)
|
mit
|
Python
|
bafdbd28e35d80d28bfb82c23532533cb2915066
|
Add docs for MissingInputFiles 'message' arg.
|
hantek/fuel,rodrigob/fuel,dmitriy-serdyuk/fuel,codeaudit/fuel,udibr/fuel,mjwillson/fuel,dribnet/fuel,capybaralet/fuel,aalmah/fuel,glewis17/fuel,glewis17/fuel,vdumoulin/fuel,dmitriy-serdyuk/fuel,dwf/fuel,bouthilx/fuel,mila-udem/fuel,chrishokamp/fuel,udibr/fuel,janchorowski/fuel,dwf/fuel,dribnet/fuel,markusnagel/fuel,aalmah/fuel,markusnagel/fuel,orhanf/fuel,capybaralet/fuel,rodrigob/fuel,dhruvparamhans/fuel,dhruvparamhans/fuel,janchorowski/fuel,mila-udem/fuel,bouthilx/fuel,harmdevries89/fuel,hantek/fuel,harmdevries89/fuel,chrishokamp/fuel,codeaudit/fuel,orhanf/fuel,vdumoulin/fuel,mjwillson/fuel
|
fuel/exceptions.py
|
fuel/exceptions.py
|
class AxisLabelsMismatchError(ValueError):
"""Raised when a pair of axis labels tuples do not match."""
class ConfigurationError(Exception):
"""Error raised when a configuration value is requested but not set."""
class MissingInputFiles(Exception):
"""Exception raised by a converter when input files are not found.
Parameters
----------
message : str
The error message to be associated with this exception.
filenames : list
A list of filenames that were not found.
"""
def __init__(self, message, filenames):
self.filenames = filenames
super(MissingInputFiles, self).__init__(message, filenames)
class NeedURLPrefix(Exception):
"""Raised when a URL is not provided for a file."""
|
class AxisLabelsMismatchError(ValueError):
"""Raised when a pair of axis labels tuples do not match."""
class ConfigurationError(Exception):
"""Error raised when a configuration value is requested but not set."""
class MissingInputFiles(Exception):
"""Exception raised by a converter when input files are not found.
Parameters
----------
filenames : list
A list of filenames that were not found.
"""
def __init__(self, message, filenames):
self.filenames = filenames
super(MissingInputFiles, self).__init__(message, filenames)
class NeedURLPrefix(Exception):
"""Raised when a URL is not provided for a file."""
|
mit
|
Python
|
0da95bdfc184614edca41b41ac3409295352fff6
|
Update days_between.py
|
JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking
|
checkio/python/oreilly/days_between.py
|
checkio/python/oreilly/days_between.py
|
import datetime
def days_diff(date1, date2):
d1 = datetime.datetime(*date1)
d2 = datetime.datetime(*date2)
return abs((d2-d1).days)
if __name__ == '__main__':
#These "asserts" using only for self-checking and not necessary for auto-testing
assert days_diff((1982, 4, 19), (1982, 4, 22)) == 3
assert days_diff((2014, 1, 1), (2014, 8, 27)) == 238
assert days_diff((2014, 8, 27), (2014, 1, 1)) == 238
|
mit
|
Python
|
|
e35649188f10e99381926318192c856e85245ef9
|
update apk version to support toast
|
openatx/uiautomator2,openatx/uiautomator2,openatx/uiautomator2
|
uiautomator2/version.py
|
uiautomator2/version.py
|
# coding: utf-8
#
__apk_version__ = '1.0.12'
# 1.0.12 add toast support
# 1.0.11 add auto install support
# 1.0.10 fix service not started bug
# 1.0.9 fix apk version code and version name
# ERR: 1.0.8 bad version number. show ip on notification
# ERR: 1.0.7 bad version number. new input method, some bug fix
__atx_agent_version__ = '0.2.1'
# 0.2.1 support occupy /minicap connection
# 0.2.0 add session support
# 0.1.8 fix screenshot always the same image. (BUG in 0.1.7), add /shell/stream add timeout for /shell
# 0.1.7 fix dns resolve error in /install
# 0.1.6 change download logic. auto fix orientation
# 0.1.5 add singlefight for minicap and minitouch, proxy dial-timeout change 30 to 10
# 0.1.4 phone remote control
# 0.1.2 /download support
# 0.1.1 minicap buildin
|
# coding: utf-8
#
__apk_version__ = '1.0.11'
# 1.0.11 add auto install support
# 1.0.10 fix service not started bug
# 1.0.9 fix apk version code and version name
# ERR: 1.0.8 bad version number. show ip on notification
# ERR: 1.0.7 bad version number. new input method, some bug fix
__atx_agent_version__ = '0.2.1'
# 0.2.1 support occupy /minicap connection
# 0.2.0 add session support
# 0.1.8 fix screenshot always the same image. (BUG in 0.1.7), add /shell/stream add timeout for /shell
# 0.1.7 fix dns resolve error in /install
# 0.1.6 change download logic. auto fix orientation
# 0.1.5 add singlefight for minicap and minitouch, proxy dial-timeout change 30 to 10
# 0.1.4 phone remote control
# 0.1.2 /download support
# 0.1.1 minicap buildin
|
mit
|
Python
|
28c6e6747a6d0bd924f838ed4f846b01c247012b
|
Remove unused tempproject and tempscene
|
Previz-app/previz-python-api
|
previz/testsutils.py
|
previz/testsutils.py
|
import functools
from . import PrevizProject
class Decorators(object):
def __init__(self, api_token, api_root, new_project_prefix = 'cf-'):
self.api_root = api_root
self.api_token = api_token
self.new_project_prefix = new_project_prefix
def project(self, project_id):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
p = PrevizProject(self.api_root, self.api_token, project_id)
project = p.project(include=['scenes'])
func(project=project, *args, **kwargs)
return wrapper
return decorator
def tempproject(self):
'''Returning an existing project while the API v2 is being worked on'''
return self.project('8d9e684f-0763-4756-844b-d0219a4f3f9a')
def scene(self, scene_id):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
project_id = kwargs['project']['id']
p = PrevizProject(self.api_root, self.api_token, project_id)
scene = p.scene(scene_id, include=[])
func(scene=scene, *args, **kwargs)
#p = PrevizProject(self.api_root, self.api_token, project_id)
#func(project=p.project(include=['scenes']), *args, **kwargs)
return wrapper
return decorator
def tempscene(self):
'''Returning an existing scene while the API v2 is being worked on'''
return self.scene('5a56a895-46ef-4f0f-862c-38ce14f6275b')
|
import functools
from . import PrevizProject
class Decorators(object):
def __init__(self, api_token, api_root, new_project_prefix = 'cf-'):
self.api_root = api_root
self.api_token = api_token
self.new_project_prefix = new_project_prefix
def project(self, project_id):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
p = PrevizProject(self.api_root, self.api_token, project_id)
project = p.project(include=['scenes'])
func(project=project, *args, **kwargs)
return wrapper
return decorator
def tempproject(self):
'''Returning an existing project while the API v2 is being worked on'''
return self.project('8d9e684f-0763-4756-844b-d0219a4f3f9a')
def scene(self, scene_id):
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
project_id = kwargs['project']['id']
p = PrevizProject(self.api_root, self.api_token, project_id)
scene = p.scene(scene_id, include=[])
func(scene=scene, *args, **kwargs)
#p = PrevizProject(self.api_root, self.api_token, project_id)
#func(project=p.project(include=['scenes']), *args, **kwargs)
return wrapper
return decorator
def tempscene(self):
'''Returning an existing scene while the API v2 is being worked on'''
return self.scene('5a56a895-46ef-4f0f-862c-38ce14f6275b')
def tempproject(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
api_root = os.environ[PREVIZ_API_ROOT_ENVVAR]
api_token = os.environ[PREVIZ_API_TOKEN_ENVVAR]
project_name = 'cf-' + func.__qualname__
p = PrevizProject(api_root, api_token)
#p.project_id = p.new_project(project_name)['id']
p.project_id = 'a5ff9cef-4904-4dc3-8a3c-821a219c891e' # p.project_id
func(project_id=p.project_id, *args, **kwargs)
#p.delete_project()
return wrapper
def tempscene(func):
pass
|
mit
|
Python
|
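The Decorators class above is a decorator factory: each call such as self.project(project_id) returns a decorator that injects a keyword argument into the wrapped test. A minimal standalone version of that shape:

import functools

def inject(name, value):
    # Return a decorator that passes `value` as keyword argument `name`.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            kwargs[name] = value
            return func(*args, **kwargs)
        return wrapper
    return decorator

@inject('project', {'id': 'demo-project'})
def show(project):
    print(project['id'])

show()   # prints: demo-project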
1b16467c9d24c770578d5f94e9715d754885de98
|
correct output at switch on
|
coolchip/amp-switch
|
printStatusChange.py
|
printStatusChange.py
|
import re
def readStatus():
status = ''
f = open('/proc/asound/card0/pcm0p/sub0/status', 'r')
for line in f:
matchObj = re.match(r'state.*', line)
if matchObj:
status = matchObj.group()
break
matchObj = re.match(r'closed', line)
if matchObj:
status = matchObj.group()
break
return status
import time
SHUTDOWN_TIME = 10
def do_main_program( console ):
loop = True
shutdownTimer = SHUTDOWN_TIME
power = 0
while loop:
status = readStatus()
if status == "closed":
if power == 1:
if shutdownTimer == 0:
print("Power OFF")
power = 0
else:
shutdownTimer = shutdownTimer-1
print("count down... " + str(shutdownTimer))
else:
if power == 0:
print("Power ON")
power = 1
shutdownTimer = SHUTDOWN_TIME
if shutdownTimer != SHUTDOWN_TIME:
shutdownTimer = SHUTDOWN_TIME
print("Stopping count down (Power is still ON)")
time.sleep(1)
#fp = open('status.log','a')
#fp.write(status+"\n")
#fp.close()
import os
from optparse import OptionParser
import daemon
if __name__ == "__main__":
parser = OptionParser( os.path.relpath(__file__) + " [-c] | [-d]" )
parser.add_option("-d", "--daemon", action="store_true", dest="daemon", default=False, help="start as daemon")
parser.add_option("-c", "--console", action="store_true", dest="console", default=False, help="output on console")
(optionen, args) = parser.parse_args()
if optionen.daemon:
with daemon.DaemonContext():
do_main_program(False)
else:
do_main_program(optionen.console)
sys.exit(0)
|
import re
def readStatus():
status = ''
f = open('/proc/asound/card0/pcm0p/sub0/status', 'r')
for line in f:
matchObj = re.match(r'state.*', line)
if matchObj:
status = matchObj.group()
break
matchObj = re.match(r'closed', line)
if matchObj:
status = matchObj.group()
break
return status
import time
SHUTDOWN_TIME = 10
def do_main_program( console ):
loop = True
shutdownTimer = SHUTDOWN_TIME
power = 0
while loop:
status = readStatus()
if status == "closed":
if power == 1:
if shutdownTimer == 0:
print("Power OFF")
power = 0
else:
shutdownTimer = shutdownTimer-1
print("count down... " + str(shutdownTimer))
else:
if power == 0:
print("Power ON")
power = 1
if shutdownTimer != SHUTDOWN_TIME:
shutdownTimer = SHUTDOWN_TIME
print("(stop count down) Power still ON")
time.sleep(1)
#fp = open('status.log','a')
#fp.write(status+"\n")
#fp.close()
import os
from optparse import OptionParser
import daemon
if __name__ == "__main__":
parser = OptionParser( os.path.relpath(__file__) + " [-c] | [-d]" )
parser.add_option("-d", "--daemon", action="store_true", dest="daemon", default=False, help="start as daemon")
parser.add_option("-c", "--console", action="store_true", dest="console", default=False, help="output on console")
(optionen, args) = parser.parse_args()
if optionen.daemon:
with daemon.DaemonContext():
do_main_program(False)
else:
do_main_program(optionen.console)
sys.exit(0)
|
mit
|
Python
|
f54802514b6d3ba66269c4e09640d2de7a7dade2
|
Fix regression in filesystem watcher
|
raxod502/straight.el,raxod502/straight.el
|
watcher/straight_watch_callback.py
|
watcher/straight_watch_callback.py
|
#!/usr/bin/env -S python3 -u
import os
import pathlib
import sys
WATCHEXEC_VAR_COMMON = "WATCHEXEC_COMMON_PATH"
WATCHEXEC_VARS = [
"WATCHEXEC_CREATED_PATH",
"WATCHEXEC_REMOVED_PATH",
"WATCHEXEC_RENAMED_PATH",
"WATCHEXEC_WRITTEN_PATH",
"WATCHEXEC_META_CHANGED_PATH",
]
def die(message):
print(message, file=sys.stderr)
sys.exit(1)
def usage():
return "usage: python -m straight_watch_callback <repos-dir> <modified-dir>"
def path_contains(parent, child):
parent = pathlib.Path(parent).resolve()
child = pathlib.Path(child).resolve()
return parent in child.parents
def path_strip(parent, child):
parent = pathlib.Path(parent).parts
child = pathlib.Path(child).parts
return child[len(parent)]
def main(args):
if len(args) != 2:
die(usage())
repos_dir, modified_dir = args
repos_dir = pathlib.Path(repos_dir).resolve()
modified_dir = pathlib.Path(modified_dir).resolve()
paths = []
for var in WATCHEXEC_VARS:
if var in os.environ:
for path in os.environ[var].split(os.pathsep):
paths.append(path)
if not paths:
die("straight_watch_callback.py: watchexec gave no modified files")
if WATCHEXEC_VAR_COMMON in os.environ:
common = os.environ[WATCHEXEC_VAR_COMMON]
# Yes, string concatenation. For some reason when a common
# prefix is used, the individual paths start with a slash even
# though they're actually relative to the prefix.
paths = [common + path for path in paths]
paths = [pathlib.Path(path).resolve() for path in paths]
paths = sorted(set(paths))
repos = set()
for path in paths:
print("detect modification: {}".format(path), file=sys.stderr)
if repos_dir in path.parents:
repo = path.relative_to(repos_dir).parts[0]
repos.add(repo)
if repos:
modified_dir.mkdir(parents=True, exist_ok=True)
repos = sorted(repos)
for repo in repos:
print("--> mark for rebuild: {}".format(repo), file=sys.stderr)
with open(modified_dir / repo, "w"):
pass
if __name__ == "__main__":
main(sys.argv[1:])
|
#!/usr/bin/env python3 -u
import os
import pathlib
import sys
WATCHEXEC_VAR_COMMON = "WATCHEXEC_COMMON_PATH"
WATCHEXEC_VARS = [
"WATCHEXEC_CREATED_PATH",
"WATCHEXEC_REMOVED_PATH",
"WATCHEXEC_RENAMED_PATH",
"WATCHEXEC_WRITTEN_PATH",
"WATCHEXEC_META_CHANGED_PATH",
]
def die(message):
print(message, file=sys.stderr)
sys.exit(1)
def usage():
return "usage: python -m straight_watch_callback <repos-dir> <modified-dir>"
def path_contains(parent, child):
parent = pathlib.Path(parent).resolve()
child = pathlib.Path(child).resolve()
return parent in child.parents
def path_strip(parent, child):
parent = pathlib.Path(parent).parts
child = pathlib.Path(child).parts
return child[len(parent)]
def main(args):
if len(args) != 2:
die(usage())
repos_dir, modified_dir = args
repos_dir = pathlib.Path(repos_dir).resolve()
modified_dir = pathlib.Path(modified_dir).resolve()
paths = []
for var in WATCHEXEC_VARS:
if var in os.environ:
for path in os.environ[var].split(os.pathsep):
paths.append(path)
if not paths:
die("straight_watch_callback.py: watchexec gave no modified files")
if WATCHEXEC_VAR_COMMON in os.environ:
common = os.environ[WATCHEXEC_VAR_COMMON]
# Yes, string concatenation. For some reason when a common
# prefix is used, the individual paths start with a slash even
# though they're actually relative to the prefix.
paths = [common + path for path in paths]
paths = [pathlib.Path(path).resolve() for path in paths]
paths = sorted(set(paths))
repos = set()
for path in paths:
print("detect modification: {}".format(path), file=sys.stderr)
if repos_dir in path.parents:
repo = path.relative_to(repos_dir).parts[0]
repos.add(repo)
if repos:
modified_dir.mkdir(parents=True, exist_ok=True)
repos = sorted(repos)
for repo in repos:
print("--> mark for rebuild: {}".format(repo), file=sys.stderr)
with open(modified_dir / repo, "w"):
pass
if __name__ == "__main__":
main(sys.argv[1:])
|
mit
|
Python
|
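The shebang fix above uses env -S, which splits the remainder of the line into separate arguments; without it, Linux passes "python3 -u" to env as one word, and env looks for an executable literally named that. A quick check of the splitting behaviour (requires GNU coreutils env):

import subprocess

# env -S splits "echo split" into ["echo", "split"] before exec'ing,
# then appends the remaining argument "ok".
out = subprocess.run(
    ["/usr/bin/env", "-S", "echo split", "ok"],
    capture_output=True, text=True,
)
print(out.stdout.strip())   # split ok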
ee1f59a8a61709bebc4f08adce520ab251576148
|
Use a single query to fetch the guardian and hub.
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
us_ignite/hubs/views.py
|
us_ignite/hubs/views.py
|
from django.db.models import Q
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.http import Http404
from django.template.response import TemplateResponse
from django.shortcuts import get_object_or_404, redirect
from us_ignite.hubs.models import Hub, HubRequest
from us_ignite.hubs import forms, mailer
@login_required
def hub_application(request):
"""View to submit a ``Hub`` for consideration"""
object_list = HubRequest.objects.filter(
~Q(status=HubRequest.REMOVED), user=request.user)
if request.method == 'POST':
form = forms.HubRequestForm(request.POST)
if form.is_valid():
instance = form.save(commit=False)
instance.user = request.user
instance.save()
# Notify US Ignite about this request:
mailer.notify_request(instance)
msg = 'The registration for "%s" has been submitted.' % instance.name
messages.success(request, msg)
return redirect('home')
else:
form = forms.HubRequestForm()
context = {
'form': form,
'object_list': object_list,
}
return TemplateResponse(request, 'hubs/object_application.html', context)
def hub_detail(request, slug):
"""Homepage of a Ignite Community.
This view aggregates all the content related to this ``Hub``.
"""
instance = get_object_or_404(
Hub.objects.select_related('guardian'), slug=slug)
if not instance.is_published() and not instance.is_guardian(request.user):
raise Http404
context = {
'object': instance,
}
return TemplateResponse(request, 'hubs/object_detail.html', context)
|
from django.db.models import Q
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.http import Http404
from django.template.response import TemplateResponse
from django.shortcuts import get_object_or_404, redirect
from us_ignite.hubs.models import Hub, HubRequest
from us_ignite.hubs import forms, mailer
@login_required
def hub_application(request):
"""View to submit a ``Hub`` for consideration"""
object_list = HubRequest.objects.filter(
~Q(status=HubRequest.REMOVED), user=request.user)
if request.method == 'POST':
form = forms.HubRequestForm(request.POST)
if form.is_valid():
instance = form.save(commit=False)
instance.user = request.user
instance.save()
# Notify US Ignite about this request:
mailer.notify_request(instance)
msg = 'The registration for "%s" has been submitted.' % instance.name
messages.success(request, msg)
return redirect('home')
else:
form = forms.HubRequestForm()
context = {
'form': form,
'object_list': object_list,
}
return TemplateResponse(request, 'hubs/object_application.html', context)
def hub_detail(request, slug):
"""Homepage of a Ignite Community.
This view aggregates all the content related to this ``Hub``.
"""
instance = get_object_or_404(Hub, slug=slug)
if not instance.is_published() and not instance.is_guardian(request.user):
raise Http404
context = {
'object': instance,
}
return TemplateResponse(request, 'hubs/object_detail.html', context)
|
bsd-3-clause
|
Python
|
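A hedged note on the change above: select_related('guardian') makes Django fetch the hub and its guardian user in one SQL JOIN instead of two queries, so the later instance.is_guardian(request.user) check costs nothing extra. The helper below is illustrative only, not part of the project:

from django.shortcuts import get_object_or_404
from us_ignite.hubs.models import Hub

def get_hub_with_guardian(slug):
    # One query: the related guardian row is pulled in via a JOIN.
    return get_object_or_404(Hub.objects.select_related('guardian'), slug=slug)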
0fdb93fb73142315fe404b9a161ef19af0d920cd
|
Add simple test for config builder
|
beezz/pg_bawler,beezz/pg_bawler
|
tests/test_bawlerd.py
|
tests/test_bawlerd.py
|
import io
import os
from textwrap import dedent
from pg_bawler import bawlerd
class TestBawlerdConfig:
def test_build_config_location_list(self):
assert not bawlerd.conf.build_config_location_list(locations=())
user_conf = os.path.join(
os.path.expanduser('~'),
bawlerd.conf.DEFAULT_CONFIG_FILENAME)
system_conf = os.path.join(
'/etc/pg_bawler',
bawlerd.conf.DEFAULT_CONFIG_FILENAME)
assert user_conf in bawlerd.conf.build_config_location_list()
assert system_conf in bawlerd.conf.build_config_location_list()
def test__load_file(self):
config = bawlerd.conf._load_file(io.StringIO(dedent("""\
logging:
formatters:
standard:
format: \"%(asctime)s %(levelname)s] %(name)s: %(message)s\"
handlers:
default:
level: "INFO"
formatter: standard
class: logging.StreamHandler
loggers:
"":
handlers: ["default"]
level: INFO
propagate: True
""")))
assert 'logging' in config
def test_read_config_files(self):
config_base = os.path.join(
os.path.abspath(os.path.dirname(__file__)), 'configs')
locations = [
os.path.join(config_base, 'etc'),
os.path.join(config_base, 'home'),
]
config = bawlerd.conf.read_config_files(
bawlerd.conf.build_config_location_list(locations=locations))
assert config['common']['listen_timeout'] == 40
assert 'logging' in config
|
import io
import os
from textwrap import dedent
from pg_bawler import bawlerd
class TestBawlerdConfig:
def test_build_config_location_list(self):
assert not bawlerd.conf.build_config_location_list(locations=())
user_conf = os.path.join(
os.path.expanduser('~'),
bawlerd.conf.DEFAULT_CONFIG_FILENAME)
system_conf = os.path.join(
'/etc/pg_bawler',
bawlerd.conf.DEFAULT_CONFIG_FILENAME)
assert user_conf in bawlerd.conf.build_config_location_list()
assert system_conf in bawlerd.conf.build_config_location_list()
def test__load_file(self):
config = bawlerd.conf._load_file(io.StringIO(dedent("""\
logging:
formatters:
standard:
format: \"%(asctime)s %(levelname)s] %(name)s: %(message)s\"
handlers:
default:
level: "INFO"
formatter: standard
class: logging.StreamHandler
loggers:
"":
handlers: ["default"]
level: INFO
propagate: True
""")))
assert 'logging' in config
|
bsd-3-clause
|
Python
|
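The test above feeds YAML through io.StringIO, so the loader evidently accepts any file-like object. A minimal stand-in assuming PyYAML (the real _load_file implementation is not shown in the record):

import io
import yaml

def load_file(stream):
    # Parse configuration from any file-like object.
    return yaml.safe_load(stream)

config = load_file(io.StringIO("common:\n  listen_timeout: 40\n"))
assert config["common"]["listen_timeout"] == 40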
e62b62107ef44fec414a3a932a3e3e7bad211f34
|
add django-extensions for easier local view debugging
|
coffeestats/coffeestats-django,coffeestats/coffeestats-django,coffeestats/coffeestats-django,coffeestats/coffeestats-django
|
coffeestats/coffeestats/settings/local.py
|
coffeestats/coffeestats/settings/local.py
|
# -*- python -*-
# pymode:lint_ignore=W0401,E501
"""Development settings and globals."""
from __future__ import absolute_import
from .base import * # noqa
# ######### DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# ######### END DEBUG CONFIGURATION
# ######### EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# ######### END EMAIL CONFIGURATION
# ######### CACHE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
# ######### END CACHE CONFIGURATION
# ######### TOOLBAR CONFIGURATION
# See: http://django-debug-toolbar.readthedocs.org/en/latest/installation.html#explicit-setup # noqa
INSTALLED_APPS += (
'debug_toolbar',
'django_extensions',
)
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
DEBUG_TOOLBAR_PATCH_SETTINGS = False
# http://django-debug-toolbar.readthedocs.org/en/latest/installation.html
INTERNAL_IPS = ('127.0.0.1', '10.0.2.2')
# ######### END TOOLBAR CONFIGURATION
|
# -*- python -*-
# pymode:lint_ignore=W0401,E501
"""Development settings and globals."""
from __future__ import absolute_import
from .base import * # noqa
# ######### DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# ######### END DEBUG CONFIGURATION
# ######### EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# ######### END EMAIL CONFIGURATION
# ######### CACHE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
# ######### END CACHE CONFIGURATION
# ######### TOOLBAR CONFIGURATION
# See: http://django-debug-toolbar.readthedocs.org/en/latest/installation.html#explicit-setup # noqa
INSTALLED_APPS += (
'debug_toolbar',
)
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
DEBUG_TOOLBAR_PATCH_SETTINGS = False
# http://django-debug-toolbar.readthedocs.org/en/latest/installation.html
INTERNAL_IPS = ('127.0.0.1', '10.0.2.2')
# ######### END TOOLBAR CONFIGURATION
|
mit
|
Python
|
4b0902e960f08f9ef99ce211b98e211b1685f68b
|
Update walletchangepass.py
|
bhok/PPYK,bhok/PPYK,bhok/PPYK,bhok/PPYK,bhok/PPYK
|
contrib/wallettools/walletchangepass.py
|
contrib/wallettools/walletchangepass.py
|
from jsonrpc import ServiceProxy
access = ServiceProxy("http://127.0.0.1:6647")
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
|
from jsonrpc import ServiceProxy
access = ServiceProxy("http://127.0.0.1:46393")
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
|
mit
|
Python
|
af0a932e8097701179310501d75888d7d4617dac
|
verify codes against the SdI Schema for fatturapa
|
OCA/l10n-italy,OCA/l10n-italy,OCA/l10n-italy
|
l10n_it_payment_reason/tests/test_reason.py
|
l10n_it_payment_reason/tests/test_reason.py
|
from odoo.exceptions import ValidationError
from odoo.tests.common import TransactionCase
from odoo.addons.l10n_it_account.tools.account_tools import fpa_schema_get_enum
class TestReasons(TransactionCase):
def setUp(self):
super(TestReasons, self).setUp()
self.reason_model = self.env["payment.reason"]
self.reason_b = self.env.ref("l10n_it_payment_reason.b")
def test_reasons(self):
with self.assertRaises(ValidationError):
self.reason_model.create({"code": "B", "name": "Test"})
name = self.reason_b.name_get()
self.assertEqual(
name,
[
(
self.reason_b.id,
"B - Utilizzazione economica, da parte dell'autore ...",
)
],
)
def test_compare_with_fpa_schema(self):
"""Check that the values we define in this module are
the same as those defined in FPA xsd"""
my_codes = self.reason_model.search([]).mapped("code")
# from fatturapa xml Schema
xsd_codes = [
code for code, descr in fpa_schema_get_enum("CausalePagamentoType")
]
# XXX hardcoded - obsolete code, that is still supported by Schema
xsd_codes.remove("Z")
self.assertCountEqual(my_codes, xsd_codes)
|
from odoo.exceptions import ValidationError
from odoo.tests.common import TransactionCase
class TestReasons(TransactionCase):
def setUp(self):
super(TestReasons, self).setUp()
self.reason_model = self.env["payment.reason"]
self.reason_b = self.env.ref("l10n_it_payment_reason.b")
def test_reasons(self):
with self.assertRaises(ValidationError):
self.reason_model.create({"code": "B", "name": "Test"})
name = self.reason_b.name_get()
self.assertEqual(
name,
[
(
self.reason_b.id,
"B - Utilizzazione economica, da parte dell'autore ...",
)
],
)
|
agpl-3.0
|
Python
|
c59a2ce0a6b164e7af26d73a553349937642c370
|
write frozen graph instead of just the graph def
|
astromme/classify-handwritten-characters,astromme/classify-handwritten-characters
|
write_pb_file.py
|
write_pb_file.py
|
#!/usr/bin/env python3
from gnt_model import model, error_rate, IMAGE_HEIGHT, IMAGE_WIDTH, PIXEL_DEPTH
import os
import sys
import tensorflow as tf
from tensorflow.python.platform import gfile
from tensorflow.python.framework import graph_util
def main():
if len(sys.argv) != 3:
print('Usage: {} checkpoint_path output_dir'.format(sys.argv[0]))
sys.exit()
_, checkpoint_path, output_dir = sys.argv
node_image_raw = tf.placeholder("float", shape=[None, 784], name="input")
node_normalized_image = tf.reshape(node_image_raw, [1, IMAGE_HEIGHT, IMAGE_WIDTH, 1]) / PIXEL_DEPTH - 0.5
node_logits = model(node_normalized_image)
node_predictions = tf.nn.softmax(node_logits, name="output")
saver = tf.train.Saver()
init_op = tf.group(tf.global_variables_initializer(),
tf.local_variables_initializer())
with tf.Session() as sess:
print('loading model')
sess.run(init_op)
saver.restore(sess, checkpoint_path)
pb_filename = os.path.join(output_dir, 'frozen_character_model_graph.pb')
graph_def = tf.get_default_graph().as_graph_def()
for node in graph_def.node:
node.device = ""
output_graph_def = graph_util.convert_variables_to_constants(
sess,
graph_def,
['output'])
print('writing {}'.format(pb_filename))
with gfile.GFile(pb_filename, "wb") as f:
f.write(output_graph_def.SerializeToString())
print("%d ops in the final graph." % len(output_graph_def.node))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
from gnt_model import model, error_rate, IMAGE_HEIGHT, IMAGE_WIDTH, PIXEL_DEPTH
import sys
import tensorflow as tf
def main():
if len(sys.argv) != 3:
print('Usage: {} modelpath outputdir'.format(sys.argv[0]))
sys.exit()
_, model_path, output_dir = sys.argv
node_image_raw = tf.placeholder("float", shape=[None, 784], name="input")
node_normalized_image = tf.reshape(node_image_raw, [1, IMAGE_HEIGHT, IMAGE_WIDTH, 1]) / PIXEL_DEPTH - 0.5
node_logits = model(node_normalized_image)
node_predictions = tf.nn.softmax(node_logits, name="output")
saver = tf.train.Saver()
init_op = tf.group(tf.global_variables_initializer(),
tf.local_variables_initializer())
with tf.Session() as sess:
print('loading model')
sess.run(init_op)
saver.restore(sess, model_path)
pb_filename = 'character_model_graph.pb.txt'
print('writing {}'.format(pb_filename))
graph_def = tf.get_default_graph().as_graph_def()
tf.train.write_graph(graph_def, output_dir, pb_filename, as_text=True)
if __name__ == '__main__':
main()
|
mit
|
Python
|
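A hedged companion sketch for the record above, assuming TensorFlow 1.x: reading the frozen .pb back into a graph, the mirror image of what write_pb_file.py produces. The constants baked in by convert_variables_to_constants mean no Saver or checkpoint is needed at load time.

import tensorflow as tf
from tensorflow.python.platform import gfile

def load_frozen_graph(pb_path):
    # Deserialize the GraphDef and import it into a fresh Graph.
    with gfile.GFile(pb_path, "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
    with tf.Graph().as_default() as graph:
        tf.import_graph_def(graph_def, name="")
    return graph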
6ecccc9d5600f46e4810ab3fdf82d29268105abc
|
use watchdog instead of bare inotifyx for recursive file system monitoring
|
bbiskup/pytest-purkinje,bbiskup/pytest-purkinje
|
purkinje_pytest/testrunner.py
|
purkinje_pytest/testrunner.py
|
# -*- coding: utf-8 -*-
"""Automatic test execution"""
from __future__ import print_function
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import logging
import time
logger = logging.getLogger(__file__)
#WATCH_MASK = inotify.IN_CLOSE_WRITE | inotify.IN_DELETE
# Extensions of files to be watched
EXTENSIONS = ['.py']
class Handler(FileSystemEventHandler):
"""Triggers test execution when project contents change
"""
def on_created(self, event):
self._trigger(event)
def on_deleted(self, event):
self._trigger(event)
def on_modified(self, event):
self._trigger(event)
def on_moved(self, event):
self._trigger(event)
def _filter(self, path):
"""Determine whether a file is relevant to test execution"""
return path.endswith('.py')
def _trigger(self, event):
if self._filter(event.src_path):
print('##### Trigger: {} ####'.format(event))
class TestRunner:
"""Watches project directory and executes test when relevant files
have been changed
TODO: ensure that ulimit is sufficient for number of
files to be monitored
"""
def __init__(self, dir):
self._dir = dir
#self._fd = inotify.init()
# self._wd = inotify.add_watch(self._fd,
# self._dir,
# WATCH_MASK)
self.event_handler = Handler()
self.observer = Observer()
def start(self):
"""Watch directory forever and execute test cases
"""
print('{}: watching directory "{}"'.format(self.__class__, self._dir))
self.observer.schedule(self.event_handler, self._dir, recursive=True)
self.observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
self.observer.stop()
def _handle_event(self, e):
print('Event: {}'.format(e))
def main():
fw = TestRunner('.')
fw.start()
|
# -*- coding: utf-8 -*-
"""Automatic test execution"""
from __future__ import print_function
from watchdog.observers import Observer
from watchdog.events import LoggingEventHandler
import logging
import time
logger = logging.getLogger(__file__)
#WATCH_MASK = inotify.IN_CLOSE_WRITE | inotify.IN_DELETE
# Extensions of files to be watched
EXTENSIONS = ['.py']
class TestRunner:
"""Watches project directory and executes test when relevant files
have been changed
TODO: ensure that ulimit is sufficient for number of
files to be monitored
"""
def __init__(self, dir):
self._dir = dir
#self._fd = inotify.init()
# self._wd = inotify.add_watch(self._fd,
# self._dir,
# WATCH_MASK)
logging.basicConfig(level=logging.INFO,
format='%(asctime)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
self.event_handler = LoggingEventHandler()
self.observer = Observer()
def start(self):
"""Watch directory forever and execute test cases
"""
print('{}: watching directory "{}"'.format(self.__class__, self._dir))
self.observer.schedule(self.event_handler, self._dir, recursive=True)
self.observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
self.observer.stop()
def _filter(self, events):
"""Select files that are relevant to test execution"""
print('Before filter: {}'.format(events))
for event in events:
n = event.name
if n.endswith('.py'):
yield event
def _handle_event(self, e):
print('Event: {}'.format(e))
def main():
fw = TestRunner('.')
fw.start()
|
mit
|
Python
|
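A hedged alternative to the suffix check in the record above: watchdog ships PatternMatchingEventHandler, which filters events by glob pattern, so the hand-rolled _filter helper could be dropped. A sketch, not the project's code:

from watchdog.events import PatternMatchingEventHandler
from watchdog.observers import Observer

class PyFileHandler(PatternMatchingEventHandler):
    def __init__(self):
        # Only events whose path matches *.py reach the callbacks.
        super(PyFileHandler, self).__init__(patterns=['*.py'])

    def on_any_event(self, event):
        print('relevant change: {}'.format(event.src_path))

observer = Observer()
observer.schedule(PyFileHandler(), '.', recursive=True)
observer.start()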
c052aaca11f8fdcd465ee986548db8141b27a197
|
bump to 0.7.2
|
martin-hunt/hublib
|
hublib/__init__.py
|
hublib/__init__.py
|
from pint import UnitRegistry
ureg = UnitRegistry()
ureg.autoconvert_offset_to_baseunit = True
Q_ = ureg.Quantity
__version__ = "0.7.2"
|
from pint import UnitRegistry
ureg = UnitRegistry()
ureg.autoconvert_offset_to_baseunit = True
Q_ = ureg.Quantity
__version__ = "0.7.1"
|
mit
|
Python
|
8959801a293fa33cb7625b524c6e9226af7253b2
|
Use separate HITTypeIds for live vs. sandbox
|
suchow/judicious,suchow/judicious,suchow/judicious
|
recruiters.py
|
recruiters.py
|
"""Recruiters of judicious humans."""
import logging
import os
import boto3
# Set up logging.
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s [recruiter.1]: %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
class Recruiter(object):
"""Generic recruiter."""
def recruit(self):
raise NotImplementedError
class HotAirRecruiter(Recruiter):
"""Talks about recruiting, but does not recruit."""
def recruit(self):
logger.info("Recruiting a participant.")
class MTurkRecruiter(Recruiter):
"""Recruits from Amazon Mechanical Turk."""
def __init__(self):
self.mode = os.environ["JUDICIOUS_MTURK_MODE"]
if self.mode == "sandbox":
self._client = boto3.client(
service_name='mturk',
endpoint_url='https://mturk-requester-sandbox.us-east-1.amazonaws.com',
)
elif self.mode == "live":
self._client = boto3.client(
service_name='mturk',
region_name="us-east-1",
)
def _print_balance(self):
balance = self._client.get_account_balance()['AvailableBalance']
logger.info("Current MTurk balance is ${}.".format(balance))
def recruit(self):
if self.mode == "sandbox":
HITTypeId = os.environ["JUDICIOUS_MTURK_HIT_TYPE_ID_SANDBOX"]
elif self.mode == "live":
HITTypeId = os.environ["JUDICIOUS_MTURK_HIT_TYPE_ID_LIVE"]
response = self._client.create_hit_with_hit_type(
HITTypeId=HITTypeId,
MaxAssignments=1,
LifetimeInSeconds=int(os.environ["JUDICIOUS_MTURK_LIFETIME"]),
Question=open("external.xml", "r").read(),
)
logger.info("Created HIT with ID {}".format(response['HIT']['HITId']))
|
"""Recruiters of judicious humans."""
import logging
import os
import boto3
# Set up logging.
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s [recruiter.1]: %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
class Recruiter(object):
"""Generic recruiter."""
def recruit(self):
raise NotImplementedError
class HotAirRecruiter(Recruiter):
"""Talks about recruiting, but does not recruit."""
def recruit(self):
logger.info("Recruiting a participant.")
class MTurkRecruiter(Recruiter):
"""Recruits from Amazon Mechanical Turk."""
def __init__(self):
self.mode = os.environ["JUDICIOUS_MTURK_MODE"]
if self.mode == "sandbox":
self._client = boto3.client(
service_name='mturk',
endpoint_url='https://mturk-requester-sandbox.us-east-1.amazonaws.com',
)
elif self.mode == "live":
self._client = boto3.client(
service_name='mturk',
region_name="us-east-1",
)
def _print_balance(self):
balance = self._client.get_account_balance()['AvailableBalance']
logger.info("Current MTurk balance is ${}.".format(balance))
def recruit(self):
response = self._client.create_hit_with_hit_type(
HITTypeId=os.environ["JUDICIOUS_MTURK_HIT_TYPE_ID"],
MaxAssignments=1,
LifetimeInSeconds=int(os.environ["JUDICIOUS_MTURK_LIFETIME"]),
Question=open("external.xml", "r").read(),
)
logger.info("Created HIT with ID {}".format(response['HIT']['HITId']))
|
mit
|
Python
|
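A small hedged refactor of the branch above (the env var names are the record's; the helper itself is ours): mapping mode to the matching HITTypeId variable keeps recruit() flat and fails loudly on an unknown mode.

import os

def hit_type_id(mode):
    # Pick the environment variable that matches the current MTurk mode;
    # an unexpected mode raises KeyError instead of silently passing.
    key = {
        "sandbox": "JUDICIOUS_MTURK_HIT_TYPE_ID_SANDBOX",
        "live": "JUDICIOUS_MTURK_HIT_TYPE_ID_LIVE",
    }[mode]
    return os.environ[key]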
3ca6affb630f0dea9b414f5405acae7a20f213d2
|
add request apis
|
ryerh/py-crawler,ryerh/py-crawler,ryerh/py-crawler
|
crawler/spiders/movies_spider.py
|
crawler/spiders/movies_spider.py
|
# -*- coding: utf-8 -*-
import scrapy
import json
from core.models import db, Movie
from crawler.spiders.subjects_spider import save_subject_detail
class MoviesSpider(scrapy.Spider):
name = 'MoviesSpider'
allowed_domains = ['douban.com']
start_urls = [
'https://frodo.douban.com/jsonp/subject_collection/movie_showing/items?start=0&count=100',
'https://frodo.douban.com/jsonp/subject_collection/movie_latest/items?start=0&count=100',
# 'https://api.douban.com/v2/movie/coming_soon',
# 'https://api.douban.com/v2/movie/in_theaters',
# 'https://api.douban.com/v2/movie/top250',
]
def parse(self, response):
res = json.loads(response.body)
for subject in res['subjects']:
save_subject_detail(subject['id'])
|
# -*- coding: utf-8 -*-
import scrapy
import json
from core.models import db, Movie
from crawler.spiders.subjects_spider import save_subject_detail
class MoviesSpider(scrapy.Spider):
name = 'MoviesSpider'
allowed_domains = ['douban.com']
start_urls = [
'https://frodo.douban.com/jsonp/subject_collection/movie_showing/items?os=ios&callback=&start=0&count=30',
# 'https://api.douban.com/v2/movie/coming_soon',
# 'https://api.douban.com/v2/movie/in_theaters',
# 'https://api.douban.com/v2/movie/top250',
]
def parse(self, response):
res = json.loads(response.body)
for subject in res['subjects']:
save_subject_detail(subject['id'])
|
apache-2.0
|
Python
|
ac7090293c4686b071ca314c37304b2eeddec766
|
Fix isort problems
|
pretix/pretix-cartshare,pretix/pretix-cartshare,pretix/pretix-cartshare
|
pretix_cartshare/migrations/0002_auto_20161008_1047.py
|
pretix_cartshare/migrations/0002_auto_20161008_1047.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-08 10:47
from __future__ import unicode_literals
import pretix_cartshare.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pretix_cartshare', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='sharedcart',
name='cart_id',
field=models.CharField(default=pretix_cartshare.models.generate_cart_id, max_length=255, verbose_name='Cart ID'),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-08 10:47
from __future__ import unicode_literals
from django.db import migrations, models
import pretix_cartshare.models
class Migration(migrations.Migration):
dependencies = [
('pretix_cartshare', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='sharedcart',
name='cart_id',
field=models.CharField(default=pretix_cartshare.models.generate_cart_id, max_length=255, verbose_name='Cart ID'),
),
]
|
apache-2.0
|
Python
|
1b7289dd8b72cb67fe5c369e99b60e5cd0e85958
|
Fix neovim support
|
prashanthellina/follow-markdown-links
|
ftplugin/markdown/follow_markdown_links.py
|
ftplugin/markdown/follow_markdown_links.py
|
import os
import re
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
import vim
DEFAULT_EXTENSION = 'md'
MAX_LINE_LEN = 1024
def _extract_link_under_cursor():
_, col = vim.current.window.cursor
line = vim.current.line
# skip long lines to stop hogging CPU in vim
if len(line) >= MAX_LINE_LEN:
return
# find the markdown link substring from line
start_pos = line[:col].rfind("[")
if start_pos < 0: return
end_pos = line[col:].rfind(")")
if end_pos < 0: return
end_pos += (col + 1)
link = line[start_pos:end_pos]
return link
def _is_local_link(link):
link = urlparse(link)
return not link.netloc
def _resolve_link(link):
buf_path = os.path.dirname(vim.current.buffer.name)
return os.path.join(buf_path, link)
def _ensure_extension(link):
name = os.path.basename(link)
if '.' not in name:
return link + '.' + DEFAULT_EXTENSION
return link
def follow_link():
link = _extract_link_under_cursor()
if not link: return
# extract link text and link url
link = re.findall(r'^\[([^]]*)\]\(([^)]*)\)$', link)
if not link: return
# if not local link then stop
text, link = link[0]
if not _is_local_link(link): return
# Support [Text]() cases; Assume Text as link
# Also assume default extension
if not link: link = text
link = _ensure_extension(link)
# Resolve link (if relative) with relation
# to current file in buffer
link = _resolve_link(link)
# Open if exists
if os.path.exists(link):
return vim.command('e %s' % link)
# Directory path does not exist. Ask user to create it.
dirpath = os.path.dirname(link)
if not os.path.exists(dirpath):
confirm_fn = vim.Function('confirm')
msg = '"%s" does not exist. create?' % dirpath
result = confirm_fn(msg, "&Yes\n&No")
if result != 1: return
os.makedirs(dirpath)
# Open as new file
return vim.command('e %s' % link)
|
import re
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from vim import *
DEFAULT_EXTENSION = 'md'
MAX_LINE_LEN = 1024
def _extract_link_under_cursor():
_, col = current.window.cursor
line = current.line
# skip long lines to stop hogging CPU in vim
if len(line) >= MAX_LINE_LEN:
return
# find the markdown link substring from line
start_pos = line[:col].rfind("[")
if start_pos < 0: return
end_pos = line[col:].rfind(")")
if end_pos < 0: return
end_pos += (col + 1)
link = line[start_pos:end_pos]
return link
def _is_local_link(link):
link = urlparse(link)
return not link.netloc
def _resolve_link(link):
buf_path = os.path.dirname(current.buffer.name)
return os.path.join(buf_path, link)
def _ensure_extension(link):
name = os.path.basename(link)
if '.' not in name:
return link + '.' + DEFAULT_EXTENSION
return link
def follow_link():
link = _extract_link_under_cursor()
if not link: return
# extract link text and link url
link = re.findall(r'^\[([^]]*)\]\(([^)]*)\)$', link)
if not link: return
# if not local link then stop
text, link = link[0]
if not _is_local_link(link): return
# Support [Text]() cases; Assume Text as link
# Also assume default extension
if not link: link = text
link = _ensure_extension(link)
# Resolve link (if relative) with relation
# to current file in buffer
link = _resolve_link(link)
# Open if exists
if os.path.exists(link):
return command('e %s' % link)
# Directory path does not exist. Ask user to create it.
dirpath = os.path.dirname(link)
if not os.path.exists(dirpath):
confirm_fn = Function('confirm')
msg = '"%s" does not exist. create?' % dirpath
result = confirm_fn(msg, "&Yes\n&No")
if result != 1: return
os.makedirs(dirpath)
# Open as new file
return command('e %s' % link)
|
mit
|
Python
|
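The _is_local_link check above leans on urlparse: a URL without a network location is treated as a file on disk. A self-contained illustration (Python 3 import shown; the plugin falls back to the urlparse module on Python 2):

from urllib.parse import urlparse

def is_local(link):
    # https://example.com/x.md has a netloc; notes/todo.md does not.
    return not urlparse(link).netloc

assert is_local('notes/todo.md')
assert not is_local('https://example.com/x.md')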
44532322f4a183bd535de22374d149a6e2f8176a
|
Fix pep8 errors
|
ivoire/RandoAmisSecours,ivoire/RandoAmisSecours
|
RandoAmisSecours/views/reporting.py
|
RandoAmisSecours/views/reporting.py
|
# -*- coding: utf-8 -*-
# vim: set ts=4
# Copyright 2014 Rémi Duraffort
# This file is part of RandoAmisSecours.
#
# RandoAmisSecours is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RandoAmisSecours is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with RandoAmisSecours. If not, see <http://www.gnu.org/licenses/>
from __future__ import unicode_literals
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.models import User
from django.utils.timezone import datetime, utc
from RandoAmisSecours.models import Outing, DRAFT, CONFIRMED, FINISHED
from datetime import timedelta
@staff_member_required
def index(request):
user_count = User.objects.count()
return render_to_response('RandoAmisSecours/reporting/index.html',
{'user_count': user_count},
context_instance=RequestContext(request))
@staff_member_required
def outings(request):
return render_to_response('RandoAmisSecours/reporting/outings.html',
context_instance=RequestContext(request))
@staff_member_required
def users(request):
now = datetime.utcnow().replace(tzinfo=utc)
users_list = User.objects.all()
joining_dates = [0] * 366
last_logins = [0] * 366
for user in users_list:
days_delta = (now - user.date_joined).days
if days_delta <= 365:
joining_dates[365 - days_delta] += 1
days_delta = (now - user.last_login).days
if days_delta <= 365:
last_logins[365 - days_delta] += 1
return render_to_response('RandoAmisSecours/reporting/users.html',
{'joining_dates': joining_dates,
'last_logins': last_logins},
context_instance=RequestContext(request))
|
# -*- coding: utf-8 -*-
# vim: set ts=4
# Copyright 2014 Rémi Duraffort
# This file is part of RandoAmisSecours.
#
# RandoAmisSecours is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RandoAmisSecours is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with RandoAmisSecours. If not, see <http://www.gnu.org/licenses/>
from __future__ import unicode_literals
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.models import User
from django.utils.timezone import datetime, utc
from RandoAmisSecours.models import Outing, DRAFT, CONFIRMED, FINISHED
from datetime import timedelta
@staff_member_required
def index(request):
user_count = User.objects.count()
return render_to_response('RandoAmisSecours/reporting/index.html',
{'user_count': user_count},
context_instance=RequestContext(request))
@staff_member_required
def outings(request):
return render_to_response('RandoAmisSecours/reporting/outings.html',
context_instance=RequestContext(request))
@staff_member_required
def users(request):
now = datetime.utcnow().replace(tzinfo=utc)
users_list = User.objects.all()
joining_dates = [0] * 366
last_logins = [0] * 366
for user in users_list:
days_delta = (now - user.date_joined).days
if days_delta <= 365:
joining_dates[365 - days_delta] += 1
days_delta = (now - user.last_login).days
if days_delta <= 365:
last_logins[365 - days_delta] += 1
return render_to_response('RandoAmisSecours/reporting/users.html',
{'joining_dates': joining_dates,
'last_logins': last_logins},
context_instance=RequestContext(request))
|
agpl-3.0
|
Python
|
b8e7b9d9316e84fec4c8524eab6839e82b8534c1
|
Fix typo in add_subset
|
PennyQ/glue-3d-viewer,astrofrog/glue-3d-viewer,glue-viz/glue-3d-viewer,astrofrog/glue-vispy-viewers,glue-viz/glue-vispy-viewers,PennyQ/astro-vispy
|
glue_vispy_viewers/volume/volume_viewer.py
|
glue_vispy_viewers/volume/volume_viewer.py
|
from ..common.vispy_data_viewer import BaseVispyViewer
from .layer_artist import VolumeLayerArtist
from .layer_style_widget import VolumeLayerStyleWidget
class VispyVolumeViewer(BaseVispyViewer):
LABEL = "3D Volume Rendering"
_layer_style_widget_cls = VolumeLayerStyleWidget
def add_data(self, data):
if data in self._layer_artist_container:
return True
layer_artist = VolumeLayerArtist(data, vispy_viewer=self._vispy_widget)
if len(self._layer_artist_container) == 0:
self._options_widget.set_limits(*layer_artist.bbox)
self._layer_artist_container.append(layer_artist)
return True
def add_subset(self, subset):
if subset in self._layer_artist_container:
return
if subset.to_mask().ndim != 3:
return
layer_artist = VolumeLayerArtist(subset, vispy_viewer=self._vispy_widget)
self._layer_artist_container.append(layer_artist)
def _add_subset(self, message):
self.add_subset(message.subset)
def _update_attributes(self, index=None, layer_artist=None):
pass
|
from ..common.vispy_data_viewer import BaseVispyViewer
from .layer_artist import VolumeLayerArtist
from .layer_style_widget import VolumeLayerStyleWidget
class VispyVolumeViewer(BaseVispyViewer):
LABEL = "3D Volume Rendering"
_layer_style_widget_cls = VolumeLayerStyleWidget
def add_data(self, data):
if data in self._layer_artist_container:
return True
layer_artist = VolumeLayerArtist(data, vispy_viewer=self._vispy_widget)
if len(self._layer_artist_container) == 0:
self._options_widget.set_limits(*layer_artist.bbox)
self._layer_artist_container.append(layer_artist)
return True
def add_subset(self, subset):
if subset in self._layer_artist_container:
return
if subset.to_mask().ndim != 3:
return
layer_artist = VolumeLayerArtist(message.subset, vispy_viewer=self._vispy_widget)
self._layer_artist_container.append(layer_artist)
def _add_subset(self, message):
self.add_subset(message.subset)
def _update_attributes(self, index=None, layer_artist=None):
pass
|
bsd-2-clause
|
Python
|
872e008b3986b18a7c01cd47e91a7ef39e21006b
|
Remove unused parameter in test (#5423)
|
quantumlib/Cirq,quantumlib/Cirq,quantumlib/Cirq,quantumlib/Cirq,quantumlib/Cirq
|
cirq-core/cirq/contrib/paulistring/pauli_string_dag_test.py
|
cirq-core/cirq/contrib/paulistring/pauli_string_dag_test.py
|
# Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cirq
from cirq.contrib.paulistring import convert_and_separate_circuit, pauli_string_dag_from_circuit
def test_pauli_string_dag_from_circuit():
q0, q1, q2 = cirq.LineQubit.range(3)
c_orig = cirq.testing.nonoptimal_toffoli_circuit(q0, q1, q2)
c_left, _ = convert_and_separate_circuit(c_orig)
c_left_dag = pauli_string_dag_from_circuit(c_left)
c_left_reordered = c_left_dag.to_circuit()
cirq.testing.assert_allclose_up_to_global_phase(
c_left.unitary(), c_left_reordered.unitary(), atol=1e-7
)
|
# Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import cirq
from cirq.contrib.paulistring import convert_and_separate_circuit, pauli_string_dag_from_circuit
@pytest.mark.parametrize('repetition', range(6))
def test_pauli_string_dag_from_circuit(repetition):
q0, q1, q2 = cirq.LineQubit.range(3)
c_orig = cirq.testing.nonoptimal_toffoli_circuit(q0, q1, q2)
c_left, _ = convert_and_separate_circuit(c_orig)
c_left_dag = pauli_string_dag_from_circuit(c_left)
c_left_reordered = c_left_dag.to_circuit()
cirq.testing.assert_allclose_up_to_global_phase(
c_left.unitary(), c_left_reordered.unitary(), atol=1e-7
)
|
apache-2.0
|
Python
|
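For context on what the commit above removed, a hedged minimal example of pytest parametrization: the decorator runs the test once per value, which is why six identical repetitions of a deterministic test added nothing here.

import pytest

@pytest.mark.parametrize('n', range(3))
def test_square_is_nonnegative(n):
    # Collected as three separate test cases: n=0, n=1, n=2.
    assert n * n >= 0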
d7a665a3947e04c4689db67f35827db2cc3a6842
|
Update randrect.pyde
|
kantel/processingpy,kantel/processingpy,kantel/processingpy
|
sketches/randrect/randrect.pyde
|
sketches/randrect/randrect.pyde
|
from random import randint, choice
a = 220
colors = [color(155, 46, 105, a), color(217, 55, 80, a), color(226, 114, 79, a),
color(243, 220, 123, a), color(78, 147, 151, a)]
def setup():
size(600, 600)
rectMode(CENTER)
strokeWeight(5)
# noStroke()
background(255)
def draw():
for _ in range(200):
fill(choice(colors))
rect(randint(0, width), randint(0, height), randint(10, width - 200),
randint(10, int((height/2) - 100)))
noLoop()
|
from random import randint, choice
a = 220
colors = [color(155, 46, 105, a), color(217, 55, 80, a), color(226, 114, 79, a),
color(243, 220, 123, a), color(78, 147, 151, a)]
def setup():
size(940, 300)
rectMode(CENTER)
strokeWeight(2)
# noStroke()
background(255)
def draw():
for _ in range(200):
fill(choice(colors))
rect(randint(0, width), randint(0, height), randint(10, width - 200),
randint(10, int((height/2) - 100)))
noLoop()
|
mit
|
Python
|
4ff146a7e81fe4b065a551e29f42ef9b2a973823
|
document title edit
|
servee/django-servee-document,servee/django-servee-document
|
servee_document/views.py
|
servee_document/views.py
|
import json
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.contrib.auth.decorators import login_required
from .models import Document
@csrf_exempt
@require_POST
@login_required
def upload_documents(request):
documents = []
for f in request.FILES.getlist("file"):
obj = Document.objects.create(document=f)
documents.append({"filelink": obj.document.url})
return HttpResponse(json.dumps(documents), mimetype="application/json")
@login_required
def recent_documents(request):
documents = [
{"thumb": obj.document.url, "document": obj.document.url, "title": obj.title}
for obj in Document.objects.all().order_by("-uploaded")[:20]
]
return HttpResponse(json.dumps(documents), mimetype="application/json")
|
import json
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.contrib.auth.decorators import login_required
from .models import Document
@csrf_exempt
@require_POST
@login_required
def upload_documents(request):
documents = []
for f in request.FILES.getlist("file"):
obj = Document.objects.create(document=f)
documents.append({"filelink": obj.document.url})
return HttpResponse(json.dumps(documents), mimetype="application/json")
@login_required
def recent_documents(request):
documents = [
{"thumb": obj.document.url, "document": obj.document.url, "title": obj.document.name}
for obj in Document.objects.all().order_by("-uploaded")[:20]
]
return HttpResponse(json.dumps(documents), mimetype="application/json")
|
bsd-3-clause
|
Python
|
4aad6aa1b50f8b0c86c49867bc75081b03b03086
|
remove unused import
|
fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary
|
repository/shimclient.py
|
repository/shimclient.py
|
#
# Copyright (c) 2005 rpath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.opensource.org/licenses/cpl.php.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
import netclient
class ShimNetClient(netclient.NetworkRepositoryClient):
"""
A subclass of NetworkRepositoryClient which can take a NetworkRepositoryServer
instance (plus a few other pieces of information) and expose the netclient
interface without the overhead of XMLRPC.
"""
def __init__(self, server, protocol, port, authToken, repMap):
netclient.NetworkRepositoryClient.__init__(self, repMap)
self.server = ShimServerProxy(server, protocol, port, authToken)
class _ShimMethod(netclient._Method):
def __init__(self, server, protocol, port, authToken, name):
self._server = server
self._authToken = authToken
self._name = name
self._protocol = protocol
self._port = port
def __repr__(self):
return "<server._ShimMethod(%r)>" % (self._ShimMethod__name)
def __call__(self, *args):
isException, result = self._server.callWrapper(
self._protocol, self._port,
self._name, self._authToken, args)
if not isException:
return result
else:
self.handleError(result)
class ShimServerProxy(netclient.ServerProxy):
def __init__(self, server, protocol, port, authToken):
self._authToken = authToken
self._server = server
self._protocol = protocol
self._port = port
def __getattr__(self, name):
return _ShimMethod(self._server,
self._protocol, self._port,
self._authToken, name)
|
#
# Copyright (c) 2005 rpath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.opensource.org/licenses/cpl.php.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
import netclient
import netrepos
class ShimNetClient(netclient.NetworkRepositoryClient):
"""
A subclass of NetworkRepositoryClient which can take a NetworkRepositoryServer
instance (plus a few other pieces of information) and expose the netclient
interface without the overhead of XMLRPC.
"""
def __init__(self, server, protocol, port, authToken, repMap):
netclient.NetworkRepositoryClient.__init__(self, repMap)
self.server = ShimServerProxy(server, protocol, port, authToken)
class _ShimMethod(netclient._Method):
def __init__(self, server, protocol, port, authToken, name):
self._server = server
self._authToken = authToken
self._name = name
self._protocol = protocol
self._port = port
def __repr__(self):
return "<server._ShimMethod(%r)>" % (self._ShimMethod__name)
def __call__(self, *args):
isException, result = self._server.callWrapper(
self._protocol, self._port,
self._name, self._authToken, args)
if not isException:
return result
else:
self.handleError(result)
class ShimServerProxy(netclient.ServerProxy):
def __init__(self, server, protocol, port, authToken):
self._authToken = authToken
self._server = server
self._protocol = protocol
self._port = port
def __getattr__(self, name):
return _ShimMethod(self._server,
self._protocol, self._port,
self._authToken, name)
|
apache-2.0
|
Python
|
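A hedged distillation of the proxy trick above: __getattr__ manufactures a callable per method name, so the shim forwards arbitrary XMLRPC-style calls without declaring each one. The class below only echoes its inputs; it is an illustration, not conary code.

class EchoProxy(object):
    def __getattr__(self, name):
        # Any attribute lookup becomes a method that reports its own
        # name and arguments instead of crossing a network boundary.
        def call(*args):
            return (name, args)
        return call

proxy = EchoProxy()
assert proxy.ping('x') == ('ping', ('x',))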
c36282a41c248ad3d2405a0461195c679ef5327c
|
disable socket timeout, set heartbeat to 10 from 30
|
hiidef/hiispider,hiidef/hiispider
|
hiispider/amqp/amqp.py
|
hiispider/amqp/amqp.py
|
import specs
from twisted.internet.protocol import ClientCreator
from twisted.internet import reactor
from txamqp.client import TwistedDelegate
from txamqp.protocol import AMQClient
import txamqp.spec
def createClient(amqp_host, amqp_vhost, amqp_port=5672):
amqp_spec = txamqp.spec.loadString(specs.v0_8)
amqp_delegate = TwistedDelegate()
client = ClientCreator(reactor,
AMQClient,
delegate=amqp_delegate,
vhost=amqp_vhost,
spec=amqp_spec,
heartbeat=10).connectTCP(amqp_host, amqp_port, timeout=0)
return client
|
import specs
from twisted.internet.protocol import ClientCreator
from twisted.internet import reactor
from txamqp.client import TwistedDelegate
from txamqp.protocol import AMQClient
import txamqp.spec
def createClient(amqp_host, amqp_vhost, amqp_port=5672):
amqp_spec = txamqp.spec.loadString(specs.v0_8)
amqp_delegate = TwistedDelegate()
client = ClientCreator(reactor,
AMQClient,
delegate=amqp_delegate,
vhost=amqp_vhost,
spec=amqp_spec,
heartbeat=30).connectTCP(amqp_host, amqp_port)
return client
|
mit
|
Python
|
1198d398fc8051142c700991c1360750b0857dc4
|
Update mp3test.py
|
sstocker46/pyrobotlab,MyRobotLab/pyrobotlab,mecax/pyrobotlab,mecax/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,sstocker46/pyrobotlab,sstocker46/pyrobotlab
|
home/Markus/mp3test.py
|
home/Markus/mp3test.py
|
# this is a test script
from java.lang import String
from org.myrobotlab.service import Speech
from org.myrobotlab.service import Sphinx
from org.myrobotlab.service import Runtime
import random
mouth = Runtime.createAndStart("mouth","Speech")
music = 1
# add python as a listener of the "stopped" event from audioFile
mouth.audioFile.addListener("stopped", python.name, "stopped")
def play():
number = str(random.randint(1, 8))
# usually you need to escape backslash
mouth.audioFile.playFile("C:\\Users\\Markus\\Music\\Robynsfavoriter\\music" + str(number) + ".mp3", False)
print number
mouth.speak("playing song number" + str(number))
# stopped method is called when at the end of an audio file
def stopped():
if music == 1:
print("I have started playing")
global music
music = 2
elif music == 2:
global music
music = 1
play()
play()
|
# this is a test script
# i have a folder with the mp3 files named from music1 to music8.
# it randomly chooses the files. no problem
# but i want to change the sleep(120) so the next starts when the previous is finished
from java.lang import String
from org.myrobotlab.service import Speech
from org.myrobotlab.service import Sphinx
from org.myrobotlab.service import Runtime
import random
mouth = Runtime.createAndStart("mouth","Speech")
music = 1
# add python as a listener of the "stopped" event from audioFile
mouth.audioFile.addListener("stopped", python.name, "stopped")
def play():
number = str(random.randint(1, 8))
# usually you need to escape backslash
mouth.audioFile.playFile("C:\\Users\\Markus\\Music\\Robynsfavoriter\\music" + str(number) + ".mp3", False)
print number
mouth.speak("playing song number" + str(number))
# stopped method is called when at the end of an audio file
def stopped():
if music == 1:
print("I have started playing")
global music
music = 2
elif music == 2:
global music
music = 1
play()
play()
|
apache-2.0
|
Python
|
7388de0439913a8a33ac47a3cec14546e2860737
|
Add code example with loggable quantities in _CustomAction
|
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
|
hoomd/custom_action.py
|
hoomd/custom_action.py
|
from abc import ABC, abstractmethod
from hoomd.parameterdicts import ParameterDict
from hoomd.operation import _HOOMDGetSetAttrBase
class _CustomAction(ABC):
"""Base class for all Python ``Action``s.
This class must be the parent class for all Python ``Action``s. This class
requires all subclasses to implement the act method which performs the
Python object's task whether that be updating the system, writing output, or
analyzing some property of the system.
To use subclasses of this class, the object must be passed as an argument
for the `hoomd.python_action._CustomOperation` constructor.
If the pressure, rotational kinetic energy, or external field virial is
needed for a subclass, the flags attribute of the class needs to be set with
the appropriate flags from `hoomd.util.ParticleDataFlags`.
.. code-block:: python
from hoomd.python_action import _CustomAction
from hoomd.util import ParticleDataFlags
class ExampleActionWithFlag(_CustomAction):
flags = [ParticleDataFlags.ROTATIONAL_KINETIC_ENERGY,
ParticleDataFlags.PRESSURE_TENSOR,
ParticleDataFlags.EXTERNAL_FIELD_VIRIAL]
def act(self, timestep):
pass
For advertising loggable quantities through the
`hoomd.python_action._CustomOperation` object, the class attribute
``log_quantities`` can be used. The dictionary expects string keys with the
name of the loggable and `hoomd.logger.LoggerQuantity` objects as the
values.
.. code-block:: python
from hoomd.python_action import _CustomAction
from hoomd.logger import LoggerQuantity
class ExampleActionWithFlag(_CustomAction):
def __init__(self):
self.log_quantities = {
'loggable': LoggerQuantity('scalar_loggable',
self.__class__,
flag='scalar')}
def loggable(self):
return 42
def act(self, timestep):
pass
"""
flags = []
log_quantities = {}
def __init__(self):
pass
def attach(self, simulation):
self._state = simulation.state
def detach(self):
if hasattr(self, '_state'):
del self._state
@abstractmethod
def act(self, timestep):
pass
class _InternalCustomAction(_CustomAction, _HOOMDGetSetAttrBase):
"""An internal class for Python ``Action``s.
Gives additional support in using HOOMD constructs like ``ParameterDict``s
and ``TypeParameters``.
"""
pass
|
from abc import ABC, abstractmethod
from hoomd.parameterdicts import ParameterDict
from hoomd.operation import _HOOMDGetSetAttrBase
class _CustomAction(ABC):
"""Base class for all Python ``Action``s.
This class must be the parent class for all Python ``Action``s. This class
requires all subclasses to implement the act method which performs the
Python object's task whether that be updating the system, writing output, or
analyzing some property of the system.
To use subclasses of this class, the object must be passed as an argument
for the `hoomd.python_action._CustomOperation` constructor.
If the pressure, rotational kinetic energy, or external field virial is
needed for a subclass, the flags attribute of the class needs to be set with
the appropriate flags from `hoomd.util.ParticleDataFlags`.
.. code-block:: python
from hoomd.python_action import _CustomAction
from hoomd.util import ParticleDataFlags
class ExampleActionWithFlag(_CustomAction):
flags = [ParticleDataFlags.ROTATIONAL_KINETIC_ENERGY,
ParticleDataFlags.PRESSURE_TENSOR,
ParticleDataFlags.EXTERNAL_FIELD_VIRIAL]
def act(self, timestep):
pass
For advertising loggable quantities through the
`hoomd.python_action._CustomOperation` object, the class attribute
``log_quantities`` can be used. The dictionary expects string keys with the
name of the loggable and `hoomd.logger.LoggerQuantity` objects as the
values.
"""
flags = []
log_quantities = {}
def __init__(self):
pass
def attach(self, simulation):
self._state = simulation.state
def detach(self):
if hasattr(self, '_state'):
del self._state
@abstractmethod
def act(self, timestep):
pass
class _InternalCustomAction(_CustomAction, _HOOMDGetSetAttrBase):
"""An internal class for Python ``Action``s.
Gives additional support in using HOOMD constructs like ``ParameterDict``s
and ``TypeParameters``.
"""
pass
|
bsd-3-clause
|
Python
|
94ecbdc67dd72c671862aea29fd5525ea92650d8
|
Update model.py
|
wfclark/hamlet,wfclark/hamlet
|
hurricane/model.py
|
hurricane/model.py
|
import sys
import os
import urllib2
import datetime
import time
import psycopg2
import pandas
from subprocess import call, Popen
# pull the 6 hr forecast track forecast from NHC
#os.system("wget http://www.srh.noaa.gov/ridge2/Precip/qpehourlyshape/latest/last_1_hours.tar.gz -O last_1_hours.tar.gz")
#os.system("mv last_1_hours.tar.gz last_1_hours.tar")
#os.system("tar xvf last_1_hours.tar")
#last_1hr_shp = './latest/last_1_hours.shp'
#last_hr_shp2pgsql = 'ogr2ogr -f "PostgreSQL" PG:"user=postgres dbname=hamlet password=password" {} -t_srs EPSG:4326 -nln last_1hr_qpe -overwrite'.format(last_1hr_shp)
#print last_hr_shp2pgsql
#call(last_hr_shp2pgsql, shell = True)
conn_string = "dbname='hamlethurricane' user=postgres port='5432' host='127.0.0.1' password='password'"
print "Connecting to database..."
try:
conn = psycopg2.connect(conn_string)
except Exception as e:
print str(e)
sys.exit()
print "Connected!\n"
dataframe_cur = conn.cursor()
dataframe_cur.execute("""Select * from hurricane_irene""")
data = dataframe_cur.fetchall()
colnames = [desc[0] for desc in dataframe_cur.description]
dataframe = pandas.DataFrame(data)
dataframe.columns = colnames
print data
print dataframe
conn.commit()
num_feat = len(data)
for i in range(len(data)):
os.system('pgsql2shp -f {} -u postgres dbname=hamlet password=password {} -t_srs EPSG:4326 -nln last_1hr_qpe -overwrite')
#drop_cur.close()
# hurricane_cur = conn.cursor()
# hurricane_cur.execute("""
# create table roads_flooded_bunco as
# select
# a.gid,
# street_nam,
# sum(b.globvalue),
# a.geom
# from conterlines_poly as a
# inner join last_1hr_qpe as b
# on st_dwithin(a.geom::geometry(MULTIpolygon, 4326), b.wkb_geometry::geometry(point, 4326), 0.025)
# group by a.gid, a.street_nam, a.geom;""")
# conn.commit()
|
bsd-3-clause
|
Python
|
|
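The cursor-to-DataFrame dance in the record above is common enough to factor out; a hedged helper (ours, not the project's):

import pandas

def frame_from_cursor(cur):
    # cursor.description carries one 7-tuple per column; element 0 is
    # the column name, which becomes the DataFrame header.
    rows = cur.fetchall()
    columns = [desc[0] for desc in cur.description]
    return pandas.DataFrame(rows, columns=columns)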
c78cb26ff07712027b3ae340d6209482e8708641
|
implement single-length tuple
|
tlevine/vlermv
|
vlermv/transformers/raw.py
|
vlermv/transformers/raw.py
|
import posixpath
error_msg = '''The index must be a string.'''
def to_path(key):
if isinstance(key, tuple) and len(key) == 1:
key = key[0]
if hasattr(key, 'strip') and hasattr(key, 'split'):
return tuple(key.strip('/').split('/'))
else:
raise TypeError('Key must be string-like or a tuple of length one.')
def from_path(path):
return posixpath.join(*path)
|
import posixpath
error_msg = '''The index must be a string.'''
def to_path(key):
return tuple(key.strip('/').split('/'))
def from_path(path):
return posixpath.join(*path)
|
agpl-3.0
|
Python
|
edb6a027aae1656d07112c0dcb455d6891a44992
|
split around nan test is passing
|
moorepants/BicycleDataProcessor,moorepants/BicycleDataProcessor
|
test_data_processor.py
|
test_data_processor.py
|
import DataProcessor as dp
import numpy as np
def test_unsize_vector():
n = 3
a = np.ones(n)
b = np.append(a, np.array([np.nan, np.nan]))
c = dp.unsize_vector(a, n)
assert (a == c).all()
def test_time_vector():
numSamples = 100
sampleRate = 50
time = dp.time_vector(numSamples, sampleRate)
assert (time == np.linspace(0., 2. - 1. / 50., num=100)).all()
def test_split_around_nan():
# build an array of length 25 with some nan values
a = np.ones(25) * np.nan
b = np.arange(25)
for i in b:
if i not in [0, 5, 20, 24]:
a[i] = b[i]
# run the function and test the results
indices, arrays = dp.split_around_nan(a)
assert len(indices) == 7
assert indices[0] == (0, 1)
assert indices[1] == (1, 5)
assert indices[2] == (5, 6)
assert indices[3] == (6, 20)
assert indices[4] == (20, 21)
assert indices[5] == (21, 24)
assert indices[6] == (24, 25)
# build an array of length 25 with some nan values
a = np.ones(25) * np.nan
b = np.arange(25)
for i in b:
if i not in [5, 20]:
a[i] = b[i]
# run the function and test the results
indices, arrays = dp.split_around_nan(a)
assert len(indices) == 5
assert indices[0] == (0, 5)
assert indices[1] == (5, 6)
assert indices[2] == (6, 20)
assert indices[3] == (20, 21)
assert indices[4] == (21, 25)
a = np.array([np.nan, 1, 2, 3, np.nan, np.nan, 6, 7, np.nan])
# run the function and test the results
indices, arrays = dp.split_around_nan(a)
assert len(indices) == 6
assert indices[0] == (0, 1)
assert indices[1] == (1, 4)
assert indices[2] == (4, 5)
assert indices[3] == (5, 6)
assert indices[4] == (6, 8)
assert indices[5] == (8, 9)
|
import DataProcessor as dp
import numpy as np
def test_unsize_vector():
n = 3
a = np.ones(n)
b = np.append(a, np.array([np.nan, np.nan]))
c = dp.unsize_vector(a, n)
assert (a == c).all()
def test_time_vector():
numSamples = 100
sampleRate = 50
time = dp.time_vector(numSamples, sampleRate)
assert (time == np.linspace(0., 2. - 1. / 50., num=100)).all()
def test_split_around_nan():
# build an array of length 25 with some nan values
a = np.ones(25) * np.nan
b = np.arange(25)
for i in b:
if i not in [0, 5, 20, 24]:
a[i] = b[i]
# run the function and test the results
indices, arrays = dp.split_around_nan(a)
assert indices[0] == (1, 5)
assert indices[1] == (6, 20)
assert indices[2] == (21, 24)
# build an array of length 25 with some nan values
a = np.ones(25) * np.nan
b = np.arange(25)
for i in b:
if i not in [5, 20]:
a[i] = b[i]
# run the function and test the results
indices, arrays = dp.split_around_nan(a)
assert indices[0] == (0, 5)
assert indices[1] == (6, 20)
assert indices[2] == (21, 25)
|
bsd-2-clause
|
Python
|
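For orientation, a hedged sketch of run-based NaN segmentation with numpy. Note this is not the library's exact contract: the tests above show dp.split_around_nan emitting each NaN as its own one-element segment, whereas this sketch merges consecutive NaNs into a single run.

import numpy as np

def runs(a):
    # (start, stop) pairs of maximal runs that are uniformly NaN or
    # uniformly finite, covering the whole array.
    isnan = np.isnan(a).astype(int)
    edges = np.flatnonzero(np.diff(isnan)) + 1
    bounds = np.concatenate(([0], edges, [len(a)]))
    return list(zip(bounds[:-1], bounds[1:]))

a = np.array([np.nan, 1, 2, 3, np.nan, np.nan, 6, 7, np.nan])
assert runs(a) == [(0, 1), (1, 4), (4, 6), (6, 8), (8, 9)]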
e5c3ece6ebad2b7ab524e074fd982e7fc11497b1
|
set better filter
|
rrooij/sitewatcher_telegram
|
watcher/tweakerswatcher.py
|
watcher/tweakerswatcher.py
|
import requests
import json
import os.path
from watcher.watcher import Watcher
class TweakersWatcher(Watcher):
watcher_name = 'Tweakers Pricewatch'
filename = 'site_tweakers.txt'
def parse_site(self):
url = 'https://tweakers.net/xmlhttp/xmlHttp.php?application=tweakbase&type=filter&action=deals&dayOffset=1&minRelativePriceDrop=0.4&maxRelativePriceDrop=1&minAbsolutePriceDrop=30&maxAbsolutePriceDrop=&minCurrentPrice=0&maxCurrentPrice=&minPrices=3&minViews=0&of=absolutePriceDrop&od=desc&output=json'
request = requests.get(url)
json_object = json.loads(request.text)
return json_object['data']['html']
def check_price_error(self):
url = 'https://tweakers.net/pricewatch/deals/#filter:q1ZKSaz0T0srTi1RsjLUUcpNrAhKzUksySxLDSjKTE51KcovgEhk5jkmFefnlJYgSxgZgGWcS4uKUvNKwBJKVhAxMKcYpheLoQZ6ZmCpsMzUcqA6g1oA'
message_text = 'Mogelijke prijsfout, check: {0}'.format(url)
html = self.parse_site()
if not os.path.isfile(self.filename):
self.write_to_file(self.filename, html)
exit(0)
else:
with open(self.filename, 'r') as f:
file_content = f.read()
if file_content != html:
self.send_telegram(self.watcher_name, message_text)
self.write_to_file(self.filename, html)
|
import requests
import json
import os.path
from watcher.watcher import Watcher
class TweakersWatcher(Watcher):
watcher_name = 'Tweakers Pricewatch'
filename = 'site_tweakers.txt'
def parse_site(self):
url = 'https://tweakers.net/xmlhttp/xmlHttp.php?application=tweakbase&type=filter&action=deals&fromHash=1&currFilters=q1ZKSaz0T0srTi1RsjLUUcpNrAhKzUksySxLDSjKTE51KcovgEhk5jkmFefnlJYgSxgZgGWcS4uKUvNKwBJKVhAxMKcYpheLoQZ6ZmCpsMzUcqA6g1oA&output=json';
request = requests.get(url)
json_object = json.loads(request.text)
return json_object['data']['html']
def check_price_error(self):
url = 'https://tweakers.net/pricewatch/deals/#filter:q1ZKSaz0T0srTi1RsjLUUcpNrAhKzUksySxLDSjKTE51KcovgEhk5jkmFefnlJYgSxgZgGWcS4uKUvNKwBJKVhAxMKcYpheLoQZ6ZmCpsMzUcqA6g1oA'
message_text = 'Mogelijke prijsfout, check: {0}'.format(url)
html = self.parse_site()
if not os.path.isfile(self.filename):
self.write_to_file(self.filename, html)
exit(0)
else:
with open(self.filename, 'r') as f:
file_content = f.read()
if file_content != html:
self.send_telegram(self.watcher_name, message_text)
self.write_to_file(self.filename, html)
|
agpl-3.0
|
Python
|
c0566ab5f4dabdf0c366d6b6a32cbd8ca3fb4a75
|
use idle_add callback for animation
|
jswhit/pygrib,jswhit/pygrib
|
test/animate.py
|
test/animate.py
|
import matplotlib
matplotlib.use('GTKAgg')
import pygrib, time ,gobject
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.basemap import Basemap
# animation example.
grbs = pygrib.open('../sampledata/safrica.grib2')
# grab all "brightness temp" grib messages.
btemps = [grb for grb in grbs if grb['name']=='Brightness temperature']
lats, lons = grb.latlons()
projd = grb.projparams
grbs.close()
# create a map projection for the domain, plot 1st image on it.
m =\
Basemap(projection=projd['proj'],lat_ts=projd['lat_ts'],lon_0=projd['lon_0'],\
lat_0=projd['lat_0'],rsphere=(projd['a'],projd['b']),\
llcrnrlat=lats[0,0],urcrnrlat=lats[-1,-1],\
llcrnrlon=lons[0,0],urcrnrlon=lons[-1,-1],resolution='i')
plt.figure(figsize=(8,7))
m.drawcoastlines()
m.drawcountries()
grb = btemps[0]
im = m.imshow(grb['values'],interpolation='nearest',vmin=230,vmax=310)
plt.colorbar(orientation='horizontal')
m.drawparallels(np.arange(-80,10,10),labels=[1,0,0,0])
m.drawmeridians(np.arange(-80,81,20),labels=[0,0,0,1])
txt = plt.title(grb,fontsize=8)
manager = plt.get_current_fig_manager()
def updatefig(*args):
global cnt, loop, delay
grb = btemps[cnt]
im.set_data(grb['values'])
txt.set_text(repr(grb))
manager.canvas.draw()
if cnt==0: time.sleep(delay)
cnt = cnt+1
if cnt==len(btemps):
loop = loop + 1
print 'done loop = ',loop
if loop == loops:
print 'all done - close plot window to exit'
return False
else:
cnt = 0
return True
else:
return True
cnt = 0
delay = 5
loops = 4
loop = 0
gobject.idle_add(updatefig)
plt.show()
|
import matplotlib
matplotlib.use('GTKAgg')
import pygrib
import matplotlib.pyplot as plt
import numpy as np
import time
from mpl_toolkits.basemap import Basemap
# animation example.
grbs = pygrib.open('../sampledata/safrica.grib2')
# grab all "brightness temp" grib messages.
btemps = [grb for grb in grbs if grb['name']=='Brightness temperature']
lats, lons = grb.latlons()
projd = grb.projparams
grbs.close()
print projd
# create a map projection for the domain, plot 1st image on it.
m =\
Basemap(projection=projd['proj'],lat_ts=projd['lat_ts'],lon_0=projd['lon_0'],\
lat_0=projd['lat_0'],rsphere=(projd['a'],projd['b']),\
llcrnrlat=lats[0,0],urcrnrlat=lats[-1,-1],\
llcrnrlon=lons[0,0],urcrnrlon=lons[-1,-1],resolution='i')
plt.ion() # set interactive mode on
plt.figure(figsize=(8,7))
m.drawcoastlines()
m.drawcountries()
grb = btemps[0]
im = m.imshow(grb['values'],interpolation='nearest',vmin=230,vmax=310)
plt.colorbar(orientation='horizontal')
m.drawparallels(np.arange(-80,10,10),labels=[1,0,0,0])
m.drawmeridians(np.arange(-80,81,20),labels=[0,0,0,1])
plt.title(grb,fontsize=8)
plt.draw()
# loop 4 times, plot all images sequentially.
for loop in range(4):
time.sleep(5)
for grb in btemps:
print grb
im.set_data(grb['values'])
plt.title(grb,fontsize=8)
plt.draw()
time.sleep(5)
|
mit
|
Python
|
65f5695b90054f73d7119f0c50be51f61de777fa
|
Print the time of checking status at github.
|
kaushik94/tardis,orbitfold/tardis,kaushik94/tardis,orbitfold/tardis,orbitfold/tardis,kaushik94/tardis,orbitfold/tardis,kaushik94/tardis
|
tardis/tests/tests_slow/runner.py
|
tardis/tests/tests_slow/runner.py
|
import argparse
import datetime
import json
import os
import time
import requests
from tardis import __githash__ as tardis_githash
parser = argparse.ArgumentParser(description="Run slow integration tests")
parser.add_argument("--yaml", dest="yaml_filepath",
help="Path to YAML config file for integration tests.")
parser.add_argument("--atomic-dataset", dest="atomic_dataset",
help="Path to atomic dataset.")
test_command = (
"python setup.py test --test-path=tardis/tests/tests_slow/test_integration.py "
"--args=\"-rs --integration-tests={0} --atomic-dataset={1} --remote-data\""
)
if __name__ == "__main__":
args = parser.parse_args()
while True:
gh_request = requests.get(
"https://api.github.com/repos/tardis-sn/tardis/branches/master"
)
gh_master_head_data = json.loads(gh_request.content)
gh_tardis_githash = gh_master_head_data['commit']['sha']
if gh_tardis_githash != tardis_githash:
os.system("git pull origin master")
os.system(test_command.format(args.yaml_filepath,
args.atomic_dataset))
else:
checked = datetime.datetime.now()
print "Up-to-date. Checked on {0} {1}".format(
checked.strftime("%d-%b-%Y"), checked.strftime("%H:%M:%S")
)
time.sleep(600)
|
import argparse
import json
import os
import time
import requests
from tardis import __githash__ as tardis_githash
parser = argparse.ArgumentParser(description="Run slow integration tests")
parser.add_argument("--yaml", dest="yaml_filepath",
help="Path to YAML config file for integration tests.")
parser.add_argument("--atomic-dataset", dest="atomic_dataset",
help="Path to atomic dataset.")
test_command = (
"python setup.py test --test-path=tardis/tests/tests_slow/test_integration.py "
"--args=\"-rs --integration-tests={0} --atomic-dataset={1} --remote-data\""
)
if __name__ == "__main__":
args = parser.parse_args()
while True:
gh_request = requests.get(
"https://api.github.com/repos/tardis-sn/tardis/branches/master"
)
gh_master_head_data = json.loads(gh_request.content)
gh_tardis_githash = gh_master_head_data['commit']['sha']
if gh_tardis_githash != tardis_githash:
os.system("git pull origin master")
os.system(test_command.format(args.yaml_filepath,
args.atomic_dataset))
else:
time.sleep(600)
|
bsd-3-clause
|
Python
|
08a65747d608fcc530adf6291a95104d4348eae6
|
apply RatingTargetMixin to test model
|
hzy/django-polarize,pombredanne/django-polarize
|
tests/models.py
|
tests/models.py
|
from __future__ import unicode_literals
from django.db import models
from generic_ratings.model_mixins import RatingTargetMixin
class TextSnippet(RatingTargetMixin, models.Model):
text = models.TextField()
user = models.ForeignKey('auth.User')
|
from __future__ import unicode_literals
from django.db import models
class TextSnippet(models.Model):
text = models.TextField()
user = models.ForeignKey('auth.User')
|
mit
|
Python
|
5935a9f0ec35774f95b32465134d88d3e087fd1b
|
Use newer sphinx if available.
|
salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb
|
tools/sphinx.py
|
tools/sphinx.py
|
# Simple Sphinx tool and builder.
import os
from SCons.Script import *
# Build sphinx documentation:
def _action_sphinx(target, source, env):
sourcedir = os.path.dirname(source[0].path)
outdir = os.path.dirname(target[0].path)
app = "%s %s %s %s" % (env['SPHINX_BUILD'], env['SPHINX_OPTS'],
sourcedir, outdir)
ret = env.Execute([app, 'tools/munge-sphinx-perl.pl'])
if not ret:
print "Build finished. The HTML pages are in " + outdir
return ret
def generate(env):
"""Add builders and construction variables for the sphinx tool."""
import SCons.Builder
builder = SCons.Builder.Builder(action=_action_sphinx)
# Use Unix 'install' rather than env.InstallAs(), due to scons bug #1751
install = SCons.Builder.Builder(action="install -d ${TARGET.dir} && " + \
"install -d ${TARGET.dir}/_static && " + \
"install -d ${TARGET.dir}/_sources && " + \
"install -d ${TARGET.dir}/modules && " + \
"install -d ${TARGET.dir}/_sources/modules && " + \
"install ${SOURCE.dir}/*.html ${TARGET.dir} && " + \
"install ${SOURCE.dir}/*.js ${TARGET.dir} && " + \
"install ${SOURCE.dir}/modules/*.html " + \
"${TARGET.dir}/modules && " + \
"install ${SOURCE.dir}/_sources/*.txt " + \
"${TARGET.dir}/_sources && " + \
"install ${SOURCE.dir}/_sources/modules/* " + \
"${TARGET.dir}/_sources/modules && " + \
"install ${SOURCE.dir}/_static/* ${TARGET.dir}/_static")
env.Append(BUILDERS = {'Sphinx': builder, 'SphinxInstall':install})
if os.path.exists('/usr/bin/sphinx-1.0-build'):
env.AppendUnique(SPHINX_BUILD='/usr/bin/sphinx-1.0-build')
else:
env.AppendUnique(SPHINX_BUILD='/usr/bin/sphinx-build')
env.AppendUnique(SPHINX_OPTS='-a -E -b html')
def exists(env):
"""Make sure sphinx tools exist."""
return env.Detect("sphinx")
|
# Simple Sphinx tool and builder.
import os
from SCons.Script import *
# Build sphinx documentation:
def _action_sphinx(target, source, env):
sourcedir = os.path.dirname(source[0].path)
outdir = os.path.dirname(target[0].path)
app = "%s %s %s %s" % (env['SPHINX_BUILD'], env['SPHINX_OPTS'],
sourcedir, outdir)
ret = env.Execute([app, 'tools/munge-sphinx-perl.pl'])
if not ret:
print "Build finished. The HTML pages are in " + outdir
return ret
def generate(env):
"""Add builders and construction variables for the sphinx tool."""
import SCons.Builder
builder = SCons.Builder.Builder(action=_action_sphinx)
# Use Unix 'install' rather than env.InstallAs(), due to scons bug #1751
install = SCons.Builder.Builder(action="install -d ${TARGET.dir} && " + \
"install -d ${TARGET.dir}/_static && " + \
"install -d ${TARGET.dir}/_sources && " + \
"install -d ${TARGET.dir}/modules && " + \
"install -d ${TARGET.dir}/_sources/modules && " + \
"install ${SOURCE.dir}/*.html ${TARGET.dir} && " + \
"install ${SOURCE.dir}/*.js ${TARGET.dir} && " + \
"install ${SOURCE.dir}/modules/*.html " + \
"${TARGET.dir}/modules && " + \
"install ${SOURCE.dir}/_sources/*.txt " + \
"${TARGET.dir}/_sources && " + \
"install ${SOURCE.dir}/_sources/modules/* " + \
"${TARGET.dir}/_sources/modules && " + \
"install ${SOURCE.dir}/_static/* ${TARGET.dir}/_static")
env.Append(BUILDERS = {'Sphinx': builder, 'SphinxInstall':install})
env.AppendUnique(SPHINX_BUILD='/usr/bin/sphinx-build')
env.AppendUnique(SPHINX_OPTS='-a -E -b html')
def exists(env):
"""Make sure sphinx tools exist."""
return env.Detect("sphinx")
|
lgpl-2.1
|
Python
|
0f02210e4f57f0023a2be6b11059bc5754f4d467
|
Update version.py
|
mbakker7/ttim,mbakker7/ttim
|
ttim/version.py
|
ttim/version.py
|
__version__='0.4.1'
#__build__='4.0.0.0'
|
__version__='0.4.0'
#__build__='4.0.0.0'
|
mit
|
Python
|
1df785d6f337fa2dc9e42c808fab79a5a2827258
|
remove line to run tests
|
RENCI/xDCIShare,hydroshare/hydroshare,RENCI/xDCIShare,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,FescueFungiShare/hydroshare,hydroshare/hydroshare,RENCI/xDCIShare,FescueFungiShare/hydroshare,hydroshare/hydroshare,ResearchSoftwareInstitute/MyHPOM,hydroshare/hydroshare,RENCI/xDCIShare,hydroshare/hydroshare,FescueFungiShare/hydroshare,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare
|
hs_modflow_modelinstance/tests/__init__.py
|
hs_modflow_modelinstance/tests/__init__.py
|
|
from test_modflow_modelinstance_metadata import *
|
bsd-3-clause
|
Python
|
40a98808da485edeaa87bb30f0e0749401c500da
|
update documentation configuration
|
sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs
|
_doc/sphinxdoc/source/conf3/conf.py
|
_doc/sphinxdoc/source/conf3/conf.py
|
import sys
import os
import datetime
import re
import sphinx_bootstrap_theme
source_path = os.path.normpath(
os.path.join(
os.path.abspath(
os.path.split(__file__)[0]),
".."))
try:
from conf_base import *
except ImportError:
sys.path.append(source_path)
from conf_base import *
html_theme = 'bootstrap'
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
templates_path = [os.path.join(source_path, 'phdoc_static3')]
html_static_path = templates_path
if not os.path.exists(templates_path[0]):
raise FileNotFoundError(templates_path[0])
html_logo = "project_ico_small.png"
html_sidebars = {}
if html_theme == "bootstrap":
html_theme_options = {
'navbar_title': "home",
'navbar_site_name': "Site",
'navbar_links': [
("XD", "http://www.xavierdupre.fr", True),
("blog", "blog/main_0000.html", True),
("index", "genindex"),
],
'navbar_sidebarrel': False,
'navbar_pagenav': True,
'navbar_pagenav_name': "Page",
'globaltoc_depth': 3,
'globaltoc_includehidden': "true",
'navbar_class': "navbar navbar-inverse",
'navbar_fixed_top': "true",
'source_link_position': "footer",
'bootswatch_theme': "yeti",
# united = weird colors, sandstone=green, simplex=red, paper=trop bleu
# lumen: OK
# to try, yeti, flatly, paper
'bootstrap_version': "3",
}
blog_root = "http://www.xavierdupre.fr/app/ensae_teaching_cs/helpsphinx3/"
blog_background = False
html_context = {
'css_files': get_default_stylesheet() + ['_static/my-styles.css', '_static/gallery.css'],
}
|
import sys
import os
import datetime
import re
import sphinx_bootstrap_theme
source_path = os.path.normpath(
os.path.join(
os.path.abspath(
os.path.split(__file__)[0]),
".."))
try:
from conf_base import *
except ImportError:
sys.path.append(source_path)
from conf_base import *
html_theme = 'bootstrap'
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
templates_path = [os.path.join(source_path, 'phdoc_static3')]
html_static_path = templates_path
if not os.path.exists(templates_path[0]):
raise FileNotFoundError(templates_path[0])
html_logo = "project_ico_small.png"
html_sidebars = {}
if html_theme == "bootstrap":
html_theme_options = {
'navbar_title': "home",
'navbar_site_name': "Site",
'navbar_links': [
("XD", "http://www.xavierdupre.fr", True),
("blog", "blog/main_0000.html", True),
("index", "genindex"),
],
'navbar_sidebarrel': False,
'navbar_pagenav': True,
'navbar_pagenav_name': "Page",
'globaltoc_depth': 3,
'globaltoc_includehidden': "true",
'navbar_class': "navbar navbar-inverse",
'navbar_fixed_top': "true",
'source_link_position': "nav",
'bootswatch_theme': "yeti",
# united = weird colors, sandstone=green, simplex=red, paper=trop bleu
# lumen: OK
# to try, yeti, flatly, paper
'bootstrap_version': "3",
}
blog_root = "http://www.xavierdupre.fr/app/ensae_teaching_cs/helpsphinx3/"
blog_background = False
html_context = {
'css_files': get_default_stylesheet() + ['_static/my-styles.css', '_static/gallery.css'],
}
|
mit
|
Python
|
e4942c16322829d37f780d539517fe10e50e0e39
|
Fix bad var
|
basepi/hubble,basepi/hubble
|
hubblestack/extmods/grains/splunkconfig.py
|
hubblestack/extmods/grains/splunkconfig.py
|
# -*- coding: utf-8 -*-
'''
Attempt to load alternate splunk config from the hubble.d/ directory and store
in grains for use by the splunk returners. This way splunk config changes don't
require a hubble restart.
'''
import os
import yaml
def splunkconfig():
'''
Walk the hubble.d/ directory and read in any .conf files using YAML. If
splunk config is found, place it in grains and return.
'''
configdir = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble.d')
ret = {}
if not os.path.isdir(configdir):
return ret
try:
for root, dirs, files in os.walk(configdir):
for f in files:
if f.endswith('.conf'):
fpath = os.path.join(root, f)
try:
with open(fpath, 'r') as fh:
config = yaml.safe_load(fh)
if config.get('hubblestack', {}).get('returner', {}).get('splunk'):
ret = {'hubblestack': config['hubblestack']}
except:
pass
except:
pass
return ret
|
# -*- coding: utf-8 -*-
'''
Attempt to load alternate splunk config from the hubble.d/ directory and store
in grains for use by the splunk returners. This way splunk config changes don't
require a hubble restart.
'''
import os
import yaml
def splunkconfig():
'''
Walk the hubble.d/ directory and read in any .conf files using YAML. If
splunk config is found, place it in grains and return.
'''
configdir = os.path.join(os.path.dirname(__opts__['configfile']), 'hubble.d')
ret = {}
if not os.path.isdir(configdir):
return ret
try:
for root, dirs, files in os.walk(configdir):
for f in files:
if f.endswith('.conf'):
fpath = os.path.join(root, fpath)
try:
with open(fpath, 'r') as fh:
config = yaml.safe_load(fh)
if config.get('hubblestack', {}).get('returner', {}).get('splunk'):
ret = {'hubblestack': config['hubblestack']}
except:
pass
except:
pass
return ret
|
apache-2.0
|
Python
|