commit: stringlengths (40–40)
subject: stringlengths (4–1.73k)
repos: stringlengths (5–127k)
old_file: stringlengths (2–751)
new_file: stringlengths (2–751)
new_contents: stringlengths (1–8.98k)
old_contents: stringlengths (0–6.59k)
license: stringclasses (13 values)
lang: stringclasses (23 values)
4d79d49eb6e542f43636c6232c98953ffc1b28d3
clean up stitcher script
360ls/desktop,360ls/desktop,360ls/desktop
app/services/stitcher.py
app/services/stitcher.py
#!/usr/bin/env python
"""
Script for streaming a camera feed
"""
import sys
import signal
import argparse
import subprocess

try:
    import cv2
except:
    raise Exception('OpenCV is not installed')


def parse_args():
    """
    Parses command line arguments
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', default='')
    parser.add_argument('-i', type=int, default=0)
    parser.add_argument('-p', dest='preview', action='store_true')
    parser.add_argument('-s', dest='stream', action='store_true')
    parser.add_argument('--width', type=int, default=640)
    parser.add_argument('--height', type=int, default=480)
    parser.add_argument('--url', dest='url', default='rtmp://54.227.214.22:1935/live/myStream')
    parser.set_defaults(preview=False)
    parser.set_defaults(stream=False)
    return parser.parse_args()


def check_index(index):
    """
    Checks if given index is valid
    """
    sample_cap = cv2.VideoCapture(index)
    frame = sample_cap.grab()
    sample_cap.release()
    return frame


def main():
    """
    Parses command line arguments and starts the stitcher
    """
    args = parse_args()
    dest = args.f
    index = args.i
    height = args.height
    width = args.width
    cap = cv2.VideoCapture(index)
    codec = cv2.cv.CV_FOURCC('m', 'p', '4', 'v')
    out = cv2.VideoWriter(dest, codec, 20.0, (width, height))
    dimensions = str(width) + 'x' + str(height)

    def handler(signum, frame):  # pylint: disable=unused-argument
        """
        Interrupt handler
        """
        # When everything done, release the capture
        cap.release()
        out.release()
        cv2.destroyAllWindows()
        sys.exit(0)

    signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGTERM, handler)

    if args.stream:
        proc = subprocess.Popen([
            'ffmpeg', '-y', '-f', 'rawvideo', '-s', dimensions,
            '-pix_fmt', 'bgr24', '-i', 'pipe:0', '-vcodec', 'libx264',
            '-pix_fmt', 'uyvy422', '-r', '28', '-an', '-f', 'flv',
            args.url], stdin=subprocess.PIPE)

    while True:
        # Capture frame-by-frame
        _, frame = cap.read()
        frame = cv2.resize(frame, (width, height))

        if not args.preview:
            out.write(frame)

        if args.stream:
            proc.stdin.write(frame.tostring())

        # Display the resulting frame
        cv2.imshow('frame', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break


if __name__ == "__main__":
    main()
#!/usr/bin/env python
import sys
import signal, os
import argparse
import subprocess

try:
    import cv2
except:
    raise Exception('OpenCV is not installed')


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', default='')
    parser.add_argument('-i', type=int, default=0)
    parser.add_argument('-p', dest='preview', action='store_true')
    parser.add_argument('-s', dest='stream', action='store_true')
    parser.add_argument('--width', type=int, default=640)
    parser.add_argument('--height', type=int, default=480)
    parser.add_argument('--url', dest='url', default='rtmp://54.227.214.22:1935/live/myStream')
    parser.set_defaults(preview=False)
    parser.set_defaults(stream=False)
    return parser.parse_args()


def check_index(index):
    sample_cap = cv2.VideoCapture(index)
    frame = sample_cap.grab()
    sample_cap.release()
    if frame:
        return True
    else:
        return False


def handler(signum, frame):
    # When everything done, release the capture
    cap.release()
    out.release()
    cv2.destroyAllWindows()
    sys.exit(0)

signal.signal(signal.SIGINT, handler)
signal.signal(signal.SIGTERM, handler)

ext = ''
args = parse_args()
dest = args.f + ext
index = args.i
height = args.height
width = args.width
cap = cv2.VideoCapture(index)
codec = cv2.cv.CV_FOURCC('m', 'p', '4', 'v')
out = cv2.VideoWriter(dest, codec, 20.0, (width, height));
dimensions = str(width) + 'x' + str(height)

if args.stream:
    proc = subprocess.Popen([
        'ffmpeg', '-y', '-f', 'rawvideo', '-s', dimensions,
        '-pix_fmt', 'bgr24', '-i', 'pipe:0', '-vcodec', 'libx264',
        '-pix_fmt', 'uyvy422', '-r', '28', '-an',
        '-f', 'flv', args.url], stdin=subprocess.PIPE)

while(True):
    # Capture frame-by-frame
    ret, frame = cap.read()
    frame = cv2.resize(frame, (width, height))

    if not args.preview:
        out.write(frame);

    if args.stream:
        proc.stdin.write(frame.tostring())

    # Display the resulting frame
    cv2.imshow('frame', frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
mit
Python
54682633b6312cb942d819be9c02beb2b5ad7bef
Add edit-entry, single-entry, new_entry.
steventhan/learning-journal,steventhan/learning-journal,steventhan/learning-journal
learning_journal/learning_journal/views.py
learning_journal/learning_journal/views.py
from pyramid.response import Response
import os

HERE = os.path.dirname(__file__)


def home_page(request):
    imported_text = open(os.path.join(HERE + '/static/', 'index.html')).read()
    return Response(imported_text)


def view_entry(request):
    imported_text = open(os.path.join(HERE + '/static/', 'single-entry.html')).read()
    return Response(imported_text)


def new_entry(request):
    imported_text = open(os.path.join(HERE + '/static/', 'new-entry.html')).read()
    return Response(imported_text)


def edit_entry(request):
    imported_text = open(os.path.join(HERE + '/static/', 'edit-entry.html')).read()
    return Response(imported_text)


def includeme(config):
    config.add_view(home_page, route_name='home')
    config.add_view(view_entry, route_name='single-entry')
    config.add_view(new_entry, route_name='new-entry')
    config.add_view(edit_entry, route_name='edit-entry')
from pyramid.response import Response
import os

HERE = os.path.dirname(__file__)


def home_page(request):
    imported_text = open(os.path.join(HERE + '/static/', 'index.html')).read()
    return Response(imported_text)


def includeme(config):
    config.add_view(home_page, route_name='home')
mit
Python
0b11bf48989673245adbc89aa6f65c85debafd9f
Make sure billing/shipping aren't populated if they aren't there
armstrong/armstrong.apps.donations,armstrong/armstrong.apps.donations
armstrong/apps/donations/backends.py
armstrong/apps/donations/backends.py
from armstrong.utils.backends import GenericBackend
from billing import get_gateway

from . import forms


class AuthorizeNetBackend(object):
    def get_form_class(self):
        return forms.CreditCardDonationForm

    def purchase(self, donation, form):
        authorize = get_gateway("authorize_net")
        authorize.purchase(donation.amount,
                           form.get_credit_card(donation.donor),
                           options=self.get_options(donation))

    def get_options(self, donation):
        donor = donation.donor
        r = {}
        if donor.address:
            r["billing_address"] = {
                "name": donor.name,
                "address1": donor.address.address,
                "city": donor.address.city,
                "state": donor.address.state,
                # TODO: Support other countries
                "country": "US",
                "zip": donor.address.zipcode,
            }
        if donor.mailing_address:
            r["shipping_address"] = {
                "name": donor.name,
                "address1": donor.mailing_address.address,
                "city": donor.mailing_address.city,
                "state": donor.mailing_address.state,
                # TODO: Support other countries
                "country": "US",
                "zip": donor.mailing_address.zipcode,
            }
        return r


raw_backend = GenericBackend("ARMSTRONG_DONATIONS_BACKEND", defaults=[
    "armstrong.apps.donations.backends.AuthorizeNetBackend",
])

get_backend = raw_backend.get_backend
from armstrong.utils.backends import GenericBackend
from billing import get_gateway

from . import forms


class AuthorizeNetBackend(object):
    def get_form_class(self):
        return forms.CreditCardDonationForm

    def purchase(self, donation, form):
        authorize = get_gateway("authorize_net")
        authorize.purchase(donation.amount,
                           form.get_credit_card(donation.donor),
                           options=self.get_options(donation))

    def get_options(self, donation):
        donor = donation.donor
        return {
            "billing_address": {
                "name": donor.name,
                "address1": donor.address.address,
                "city": donor.address.city,
                "state": donor.address.state,
                # TODO: Support other countries
                "country": "US",
                "zip": donor.address.zipcode,
            },
            "shipping_address": {
                "name": donor.name,
                "address1": donor.mailing_address.address,
                "city": donor.mailing_address.city,
                "state": donor.mailing_address.state,
                # TODO: Support other countries
                "country": "US",
                "zip": donor.mailing_address.zipcode,
            }
        }


raw_backend = GenericBackend("ARMSTRONG_DONATIONS_BACKEND", defaults=[
    "armstrong.apps.donations.backends.AuthorizeNetBackend",
])

get_backend = raw_backend.get_backend
apache-2.0
Python
5efddf26176ac778556a3568bf97c2e70daac866
Replace many double quotes with single quotes
samjabrahams/anchorhub
anchorhub/settings/default_settings.py
anchorhub/settings/default_settings.py
""" Defaults for all settings used by AnchorHub """ WRAPPER = '{ }' INPUT = '.' OUTPUT = 'out-anchorhub' ARGPARSER = { 'description': "anchorhub parses through Markdown files and precompiles " "links to specially formatted anchors." } ARGPARSE_INPUT = { 'help': "Path of directory tree to be parsed", } ARGPARSE_OUTPUT = { 'help': "Desired output location (default is \"" + OUTPUT + "\")", 'default': OUTPUT } ARGPARSE_OVERWRITE = { 'help': "Overwrite input files; ignore output location" } ARGPARSE_EXTENSION = { 'help': "Indicate which file extensions to search and run anchorhub on.", 'default': [".md"] } ARGPARSE_WRAPPER = { 'help': "Specify custom wrapper format (default is \"" + WRAPPER + "\")", 'default': WRAPPER }
""" Defaults for all settings used by AnchorHub """ WRAPPER = "{ }" INPUT = "." OUTPUT = "out-anchorhub" ARGPARSER = { "description": "anchorhub parses through Markdown files and precompiles " "links to specially formatted anchors." } ARGPARSE_INPUT = { "help": "Path of directory tree to be parsed", } ARGPARSE_OUTPUT = { "help": "Desired output location (default is \"" + OUTPUT + "\")", "default": OUTPUT } ARGPARSE_OVERWRITE = { "help": "Overwrite input files; ignore output location" } ARGPARSE_EXTENSION = { "help": "Indicate which file extensions to search and run anchorhub on.", "default": [".md"] } ARGPARSE_WRAPPER = { "help": "Specify custom wrapper format (default is \"" + WRAPPER + "\")", "default": WRAPPER }
apache-2.0
Python
0ac45656e3b76564d1e1752dd16ae91cfc918134
Set room maximum capacity
andela-bmwenda/amity-cp1
app/room.py
app/room.py
class Room(object):
    """Room class that creates rooms in amity
    """

    def __init__(self, room_name, room_type, capacity, occupants):
        self.room_name = room_name
        self.room_type = room_type
        self.capacity = capacity
        self.occupants = []


class LivingSpace(Room):
    """Creates living spaces and inherits from Room"""

    def __init__(self, room_name):
        super(LivingSpace, self).__init__(
            room_name, room_type="Living Space", capacity=4, occupants=[])


class Office(Room):
    """Creates offices and inherits from room"""

    def __init__(self, room_name):
        super(Office, self).__init__(
            room_name, room_type="Office", capacity=6, occupants=[])
class Room(object):
    """Room class that creates rooms in amity
    """

    def __init__(self, room_name, room_type, capacity, occupants):
        self.room_name = room_name
        self.room_type = room_type
        self.capacity = capacity
        self.occupants = []


class LivingSpace(Room):
    """Creates living spaces and inherits from Room"""

    def __init__(self, room_name):
        super(LivingSpace, self).__init__(
            room_name, room_type="Living Space", capacity=4, occupants=[])


class Office(Room):
    """Creates offices and inherits from room"""

    def __init__(self, room_name):
        super(Office, self).__init__(
            room_name, room_type="Office", capacity=4, occupants=[])
mit
Python
e3a84c3ccadb98ecb5dae563475de5108d46cf9d
Format external_plugin_dependencies.bzl with buildifier
GerritCodeReview/plugins_quota,GerritCodeReview/plugins_quota,GerritCodeReview/plugins_quota
external_plugin_deps.bzl
external_plugin_deps.bzl
load("//tools/bzl:maven_jar.bzl", "maven_jar") def external_plugin_deps(): maven_jar( name = "mockito", artifact = "org.mockito:mockito-core:2.15.0", sha1 = "b84bfbbc29cd22c9529409627af6ea2897f4fa85", deps = [ "@byte_buddy//jar", "@byte_buddy_agent//jar", "@objenesis//jar", ], ) BYTE_BUDDY_VER = "1.7.9" maven_jar( name = "byte_buddy", artifact = "net.bytebuddy:byte-buddy:" + BYTE_BUDDY_VER, sha1 = "51218a01a882c04d0aba8c028179cce488bbcb58", ) maven_jar( name = "byte_buddy_agent", artifact = "net.bytebuddy:byte-buddy-agent:" + BYTE_BUDDY_VER, sha1 = "a6c65f9da7f467ee1f02ff2841ffd3155aee2fc9", ) maven_jar( name = "objenesis", artifact = "org.objenesis:objenesis:2.6", sha1 = "639033469776fd37c08358c6b92a4761feb2af4b", )
load("//tools/bzl:maven_jar.bzl", "maven_jar") def external_plugin_deps(): maven_jar( name = "mockito", artifact = "org.mockito:mockito-core:2.15.0", sha1 = "b84bfbbc29cd22c9529409627af6ea2897f4fa85", deps = [ "@byte_buddy//jar", "@byte_buddy_agent//jar", "@objenesis//jar", ], ) BYTE_BUDDY_VER = "1.7.9" maven_jar( name = "byte_buddy", artifact = "net.bytebuddy:byte-buddy:" + BYTE_BUDDY_VER, sha1 = "51218a01a882c04d0aba8c028179cce488bbcb58", ) maven_jar( name = "byte_buddy_agent", artifact = "net.bytebuddy:byte-buddy-agent:" + BYTE_BUDDY_VER, sha1 = "a6c65f9da7f467ee1f02ff2841ffd3155aee2fc9", ) maven_jar( name = "objenesis", artifact = "org.objenesis:objenesis:2.6", sha1 = "639033469776fd37c08358c6b92a4761feb2af4b", )
apache-2.0
Python
0a902a5afd43fe817320bd1e400828abcd1faa83
make su promt AIX compatible
thaim/ansible,thaim/ansible
lib/ansible/utils/su_prompts.py
lib/ansible/utils/su_prompts.py
# -*- coding: utf-8 -*-
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

import re

SU_PROMPT_LOCALIZATIONS = [
    'Password',
    '암호',
    'パスワード',
    'Adgangskode',
    'Contraseña',
    'Contrasenya',
    'Hasło',
    'Heslo',
    'Jelszó',
    'Lösenord',
    'Mật khẩu',
    'Mot de passe',
    'Parola',
    'Parool',
    'Pasahitza',
    'Passord',
    'Passwort',
    'Salasana',
    'Sandi',
    'Senha',
    'Wachtwoord',
    'ססמה',
    'Лозинка',
    'Парола',
    'Пароль',
    'गुप्तशब्द',
    'शब्दकूट',
    'సంకేతపదము',
    'හස්පදය',
    '密码',
    '密碼',
]

SU_PROMPT_LOCALIZATIONS_RE = re.compile("|".join(['(\w+\'s )?' + x + ' ?: ?' for x in SU_PROMPT_LOCALIZATIONS]), flags=re.IGNORECASE)

def check_su_prompt(data):
    return bool(SU_PROMPT_LOCALIZATIONS_RE.match(data))
# -*- coding: utf-8 -*-
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

import re

SU_PROMPT_LOCALIZATIONS = [
    'Password',
    '암호',
    'パスワード',
    'Adgangskode',
    'Contraseña',
    'Contrasenya',
    'Hasło',
    'Heslo',
    'Jelszó',
    'Lösenord',
    'Mật khẩu',
    'Mot de passe',
    'Parola',
    'Parool',
    'Pasahitza',
    'Passord',
    'Passwort',
    'Salasana',
    'Sandi',
    'Senha',
    'Wachtwoord',
    'ססמה',
    'Лозинка',
    'Парола',
    'Пароль',
    'गुप्तशब्द',
    'शब्दकूट',
    'సంకేతపదము',
    'හස්පදය',
    '密码',
    '密碼',
]

SU_PROMPT_LOCALIZATIONS_RE = re.compile("|".join([x + ' ?: ?' for x in SU_PROMPT_LOCALIZATIONS]), flags=re.IGNORECASE)

def check_su_prompt(data):
    return bool(SU_PROMPT_LOCALIZATIONS_RE.match(data))
mit
Python
f41714fb34c82d523b212f0a4301757969598373
Simplify correct function.
pinireznik/antitude,pinireznik/antitude,pinireznik/antitude,pinireznik/antitude
agents/serf/scripts/AgentEventHandler.py
agents/serf/scripts/AgentEventHandler.py
#!/usr/bin/python

import re
import os
import SerfCID


class AgentEventHandler:
    def __init__(self, payload="", CID="", envVarGetter=""):
        self.payload = payload
        self.CID = CID
        self.TARGET_STRING = "TARGET"
        self.TARGET_ALL_STRING = self.TARGET_STRING + "=ALL"
        self.envVarGetter = envVarGetter

    def getPayload(self):
        return self.payload

    def getCID(self):
        return self.CID

    def getArgumentPair(self, argumentKey):
        searchObj = re.search(r"%s=[^ ]*" % argumentKey, self.payload)
        if searchObj:
            return searchObj.group()
        else:
            return None

    def getArgumentValue(self, argumentPair):
        searchObj = re.search(r'[^=]*$', argumentPair)
        if searchObj:
            return searchObj.group()
        else:
            return None

    def getEnvVar(self, envVarName):
        return self.envVarGetter.get(envVarName)

    def correctTarget(self):
        argumentPair = self.getArgumentPair(self.TARGET_STRING)
        if (argumentPair is None):  # TARGET = ALL
            return True
        return self.getArgumentValue(argumentPair) == self.CID


if __name__ == '__main__':
    PAYLOAD = raw_input()
    CID = SerfCID.SerfCID.getCID()
    envVarGetter = os.environ
    print PAYLOAD
    print CID

    agentEventHandler = AgentEventHandler(PAYLOAD, CID, envVarGetter)
    if not agentEventHandler.correctTarget():
        print "It's not for me!"
    else:
        print "It's for me!"
#!/usr/bin/python

import re
import os
import SerfCID


class AgentEventHandler:
    def __init__ (self, payload="", CID="", envVarGetter=""):
        self.payload = payload
        self.CID = CID
        self.TARGET_STRING = "TARGET"
        self.TARGET_ALL_STRING = self.TARGET_STRING + "=ALL"
        self.envVarGetter = envVarGetter

    def getPayload(self):
        return self.payload

    def getCID(self):
        return self.CID

    def getArgumentPair(self, argumentKey):
        searchObj = re.search(r"%s=[^ ]*" % argumentKey, self.payload)
        if searchObj:
            return searchObj.group()
        else:
            return None

    def getArgumentValue(self, argumentPair):
        searchObj = re.search(r'[^=]*$', argumentPair)
        if searchObj:
            return searchObj.group()
        else:
            return None

    def getEnvVar(self, envVarName):
        return self.envVarGetter.get(envVarName)

    def correctTarget(self):
        argumentPair = None
        if self.getArgumentPair(self.TARGET_STRING) == None:
            argumentPair = self.TARGET_ALL_STRING
        else:
            argumentPair = self.getArgumentPair(self.TARGET_STRING)

        if self.getArgumentValue(argumentPair) == self.CID or self.getArgumentValue(argumentPair) == "ALL":
            return True
        else:
            return False


if __name__ == '__main__':
    PAYLOAD = raw_input()
    CID = SerfCID.SerfCID.getCID()
    envVarGetter = os.environ
    print PAYLOAD
    print CID

    agentEventHandler = AgentEventHandler(PAYLOAD, CID, envVarGetter)
    if not agentEventHandler.correctTarget():
        print "It's not for me!"
    else:
        print "It's for me!"
apache-2.0
Python
502ffa8fe93e0066f3553039493ef8b552069141
Add KDumpConf shared mapper.
RedHatInsights/insights-core,RedHatInsights/insights-core
falafel/mappers/kdump.py
falafel/mappers/kdump.py
import re

from falafel.core import computed, MapperOutput
from falafel.core.plugins import mapper


@mapper("cmdline")
def crashkernel_enabled(context):
    """
    Determine if kernel is configured to reserve memory for the crashkernel
    """
    for line in context.content:
        if 'crashkernel' in line:
            return True


@mapper("systemctl_list-unit-files")
@mapper("chkconfig")
def kdump_service_enabled(context):
    """
    Determine if kdump service is enabled with system

    RHEL5/6 uses chkconfig and if enabled will look something like this:
    kdump   0:off   1:off   2:off   3:on    4:on    5:on    6:off

    RHEL7 uses systemctl list-unit-files and if enabled will look like this:
    kdump.service   enabled
    """
    for line in context.content:
        if line.startswith('kdump') and (':on' in line or 'enabled' in line):
            return True


@mapper("kdump.conf")
class KDumpConf(MapperOutput):

    @staticmethod
    def parse_content(content):
        data = {}
        for line in content:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            k, v = line.split(' ', 1)
            data[k.strip()] = v.strip()
        return data

    @computed
    def using_local_disk(self):
        KDUMP_NETWORK_REGEX = re.compile(r'^\s*(ssh|nfs4?|net)\s+', re.I)
        KDUMP_LOCAL_DISK_REGEX = re.compile(r'^\s*(ext[234]|raw|xfs|btrfs|minix)\s+', re.I)
        local_disk = True
        for k in self.data.keys():
            if KDUMP_NETWORK_REGEX.search(k):
                local_disk = False
            elif KDUMP_LOCAL_DISK_REGEX.search(k):
                local_disk = True
        return local_disk


@mapper("kdump.conf")
def kdump_using_local_disk(context):
    """
    Determine if kdump service is using local disk
    """
    KDUMP_NETWORK_REGEX = re.compile(r'^\s*(ssh|nfs4?|net)\s+', re.I)
    KDUMP_LOCAL_DISK_REGEX = re.compile(r'^\s*(ext[234]|raw|xfs|btrfs|minix)\s+', re.I)
    local_disk = True
    for line in context.content:
        if line.startswith('#') or line == '':
            continue
        elif KDUMP_NETWORK_REGEX.search(line):
            local_disk = False
        elif KDUMP_LOCAL_DISK_REGEX.search(line):
            local_disk = True
    return local_disk
import re

from falafel.core.plugins import mapper


@mapper("cmdline")
def crashkernel_enabled(context):
    """
    Determine if kernel is configured to reserve memory for the crashkernel
    """
    for line in context.content:
        if 'crashkernel' in line:
            return True


@mapper("systemctl_list-unit-files")
@mapper("chkconfig")
def kdump_service_enabled(context):
    """
    Determine if kdump service is enabled with system

    RHEL5/6 uses chkconfig and if enabled will look something like this:
    kdump   0:off   1:off   2:off   3:on    4:on    5:on    6:off

    RHEL7 uses systemctl list-unit-files and if enabled will look like this:
    kdump.service   enabled
    """
    for line in context.content:
        if line.startswith('kdump') and ('on' in line or 'enabled' in line):
            return True


@mapper("kdump.conf")
def kdump_using_local_disk(context):
    """
    Determine if kdump service is using local disk
    """
    KDUMP_NETWORK_REGEX = re.compile(r'^\s*(ssh|nfs4?|net)\s+', re.I)
    KDUMP_LOCAL_DISK_REGEX = re.compile(r'^\s*(ext[234]|raw|xfs|btrfs|minix)\s+', re.I)
    local_disk = True
    for line in context.content:
        if line.startswith('#') or line == '':
            continue
        elif KDUMP_NETWORK_REGEX.search(line):
            local_disk = False
        elif KDUMP_LOCAL_DISK_REGEX.search(line):
            local_disk = True
    return local_disk
apache-2.0
Python
4bce4da138c4dc8060e11451eb21fb2f7c0891f5
Fix issue #1: Runserver failed when having callback function in root URLconf
zniper/django-quickadmin
quickadmin/register.py
quickadmin/register.py
from django.contrib import admin
from django.utils.module_loading import import_module
from django.db.models import get_models
from django.conf import settings
from django.conf.urls import include, url
from django.utils.log import getLogger

from .config import QADMIN_DEFAULT_EXCLUDES, USE_APPCONFIG

logger = getLogger(__name__)

EXCL_MODELS = len('.models')


def filter_models(exclude=None):
    """Returns all found models within this Django instance"""
    # Get all the models from installed apps
    model_list = []
    if USE_APPCONFIG:
        from django.apps import apps
        found = apps.get_models()
    else:
        found = get_models()

    # Exclude some models
    excl_set = list(getattr(settings, 'QADMIN_EXCLUDES', []))
    if getattr(settings, 'QADMIN_EXCLUDE_STOCK', True):
        excl_set.extend(QADMIN_DEFAULT_EXCLUDES)

    for model in found:
        app_name = model.__module__[:-EXCL_MODELS]
        full_name = '.'.join([app_name, model.__name__])
        if full_name in excl_set or app_name in excl_set:
            continue
        model_list.append(model)
    return model_list


def update_admin_urls():
    """Admin urls set have to be updated or all new registered models
    will be shown as disabled in admin area"""
    # Delete the old admin URLs
    old_pattern = None
    admin_regex = r'^admin/'
    project_urls = import_module(settings.ROOT_URLCONF)
    for url_item in project_urls.urlpatterns:
        try:
            if url_item.app_name == 'admin':
                old_pattern = url_item
                admin_regex = url_item.regex.pattern
                project_urls.urlpatterns.remove(url_item)
                break
        except AttributeError:
            # Bypass the non-admin URLconf
            logger.error('Error when finding and removing old admin URLconf.')

    # Reload updated admin URLs
    try:
        admin.autodiscover()
        project_urls.urlpatterns.append(
            url(admin_regex, include(admin.site.urls))
        )
    except:
        logger.error('Error when updating new admin URLconfs.')
        if old_pattern:
            project_urls.urlpatterns.append(old_pattern)


def register_models():
    """Register all models insde specific application"""
    for model in filter_models():
        try:
            admin.site.register(model)
        except admin.sites.AlreadyRegistered:
            logger.error('The model "%s" is already registered' % model.__name__)
    update_admin_urls()
from django.contrib import admin
from django.utils.module_loading import import_module
from django.db.models import get_models
from django.conf import settings
from django.conf.urls import include, url
from django.utils.log import getLogger

from .config import QADMIN_DEFAULT_EXCLUDES, USE_APPCONFIG

logger = getLogger(__name__)

EXCL_MODELS = len('.models')


def filter_models(exclude=None):
    """Returns all found models within this Django instance"""
    # Get all the models from installed apps
    model_list = []
    if USE_APPCONFIG:
        from django.apps import apps
        found = apps.get_models()
    else:
        found = get_models()

    # Exclude some models
    excl_set = list(getattr(settings, 'QADMIN_EXCLUDES', []))
    if getattr(settings, 'QADMIN_EXCLUDE_STOCK', True):
        excl_set.extend(QADMIN_DEFAULT_EXCLUDES)

    for model in found:
        app_name = model.__module__[:-EXCL_MODELS]
        full_name = '.'.join([app_name, model.__name__])
        if full_name in excl_set or app_name in excl_set:
            continue
        model_list.append(model)
    return model_list


def update_admin_urls():
    """Admin urls set have to be updated or all new registered models
    will be shown as disabled in admin area"""
    # Delete the old admin URLs
    old_pattern = None
    admin_regex = r'^admin/'
    project_urls = import_module(settings.ROOT_URLCONF)
    for url_item in project_urls.urlpatterns:
        if url_item.app_name == 'admin':
            old_pattern = url_item
            admin_regex = url_item.regex.pattern
            project_urls.urlpatterns.remove(url_item)
            break

    # Reload updated admin URLs
    try:
        admin.autodiscover()
        project_urls.urlpatterns.append(
            url(admin_regex, include(admin.site.urls))
        )
    except:
        logger.error('Error when updating new admin urls.')
        if old_pattern:
            project_urls.urlpatterns.append(old_pattern)


def register_models():
    """Register all models insde specific application"""
    for model in filter_models():
        try:
            admin.site.register(model)
        except admin.sites.AlreadyRegistered:
            logger.error('The model "%s" is already registered' % model.__name__)
    update_admin_urls()
mit
Python
ff90958a0c79936d5056840ba03a5863bcdef099
Mark as test as "todo" for now.
emgee/formal,emgee/formal,emgee/formal
formal/test/test_util.py
formal/test/test_util.py
from twisted.trial import unittest

from formal import util


class TestUtil(unittest.TestCase):

    def test_validIdentifier(self):
        self.assertEquals(util.validIdentifier('foo'), True)
        self.assertEquals(util.validIdentifier('_foo'), True)
        self.assertEquals(util.validIdentifier('_foo_'), True)
        self.assertEquals(util.validIdentifier('foo2'), True)
        self.assertEquals(util.validIdentifier('Foo'), True)
        self.assertEquals(util.validIdentifier(' foo'), False)
        self.assertEquals(util.validIdentifier('foo '), False)
        self.assertEquals(util.validIdentifier('9'), False)
    test_validIdentifier.todo = "Fails due to weird import poblem"
from twisted.trial import unittest

from formal import util


class TestUtil(unittest.TestCase):

    def test_validIdentifier(self):
        self.assertEquals(util.validIdentifier('foo'), True)
        self.assertEquals(util.validIdentifier('_foo'), True)
        self.assertEquals(util.validIdentifier('_foo_'), True)
        self.assertEquals(util.validIdentifier('foo2'), True)
        self.assertEquals(util.validIdentifier('Foo'), True)
        self.assertEquals(util.validIdentifier(' foo'), False)
        self.assertEquals(util.validIdentifier('foo '), False)
        self.assertEquals(util.validIdentifier('9'), False)
mit
Python
2e7271a33e098d7cdef15207e8caa05e644c3223
Use full URI for build failure reasons
bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,dropbox/changes
changes/buildfailures/testfailure.py
changes/buildfailures/testfailure.py
from __future__ import absolute_import

from jinja2 import Markup

from changes.buildfailures.base import BuildFailure
from changes.utils.http import build_uri


class TestFailure(BuildFailure):
    def get_html_label(self, build):
        link = build_uri('/projects/{0}/builds/{1}/tests/?result=failed'.format(build.project.slug, build.id.hex))

        try:
            test_failures = (
                s.value for s in build.stats
                if s.name == 'test_failures'
            ).next()
        except StopIteration:
            return Markup('There were an <a href="{link}">unknown number of test failures</a>.'.format(
                link=link,
            ))

        return Markup('There were <a href="{link}">{count} failing tests</a>.'.format(
            link=link,
            count=test_failures,
        ))
from __future__ import absolute_import

from jinja2 import Markup

from changes.buildfailures.base import BuildFailure


class TestFailure(BuildFailure):
    def get_html_label(self, build):
        link = '/projects/{0}/builds/{1}/tests/?result=failed'.format(build.project.slug, build.id.hex)

        try:
            test_failures = (
                s.value for s in build.stats
                if s.name == 'test_failures'
            ).next()
        except StopIteration:
            return Markup('There were an <a href="{link}">unknown number of test failures</a>.'.format(
                link=link,
            ))

        return Markup('There were <a href="{link}">{count} failing tests</a>.'.format(
            link=link,
            count=test_failures,
        ))
apache-2.0
Python
4c303007d6418e2a2f1b2e1778d6b7d0c0573c74
Raise read-only fs on touch
bussiere/gitfs,PressLabs/gitfs,PressLabs/gitfs,ksmaheshkumar/gitfs,rowhit/gitfs
gitfs/views/read_only.py
gitfs/views/read_only.py
from errno import EROFS

from fuse import FuseOSError

from gitfs import FuseMethodNotImplemented
from .view import View


class ReadOnlyView(View):
    def getxattr(self, path, fh):
        raise FuseMethodNotImplemented

    def open(self, path, flags):
        return 0

    def create(self, path, fh):
        raise FuseOSError(EROFS)

    def write(self, path, fh):
        raise FuseOSError(EROFS)

    def opendir(self, path):
        return 0

    def releasedir(self, path, fi):
        return 0

    def flush(self, path, fh):
        return 0

    def release(self, path, fh):
        return 0

    def access(self, path, amode):
        return 0

    def mkdir(self, path, mode):
        raise FuseOSError(EROFS)

    def utimens(self, path, times=None):
        raise FuseOSError(EROFS)
import os

from errno import EROFS

from fuse import FuseOSError

from gitfs import FuseMethodNotImplemented
from .view import View


class ReadOnlyView(View):
    def getxattr(self, path, fh):
        raise FuseMethodNotImplemented

    def open(self, path, flags):
        return 0

    def create(self, path, fh):
        raise FuseOSError(EROFS)

    def write(self, path, fh):
        raise FuseOSError(EROFS)

    def opendir(self, path):
        return 0

    def releasedir(self, path, fi):
        return 0

    def flush(self, path, fh):
        return 0

    def release(self, path, fh):
        return 0

    def access(self, path, amode):
        return 0

    def mkdir(self, path, mode):
        raise FuseOSError(EROFS)
apache-2.0
Python
7c3a164b74f345be07843482728b2e0b33a927bc
bump minor version
swistakm/graceful
src/graceful/__init__.py
src/graceful/__init__.py
# -*- coding: utf-8 -*-
VERSION = (0, 1, 0)  # PEP 386  # noqa
__version__ = ".".join([str(x) for x in VERSION])  # noqa

"""
Minimalist framework for self-descriptive RESTful APIs build on top of falcon.

It is inspired by Django REST Framework package. Mostly by how object
serialization is done but more emphasis is put on API to being
self-descriptive.
"""
# -*- coding: utf-8 -*-
VERSION = (0, 0, 4)  # PEP 386  # noqa
__version__ = ".".join([str(x) for x in VERSION])  # noqa

"""
Minimalist framework for self-descriptive RESTful APIs build on top of falcon.

It is inspired by Django REST Framework package. Mostly by how object
serialization is done but more emphasis is put on API to being
self-descriptive.
"""
bsd-3-clause
Python
9b5d2929f58a3155edc5f03a3cca14ff25356021
Remove pidfile in Container.kill()
ianpreston/cask,ianpreston/cask
src/libcask/container.py
src/libcask/container.py
import os
import os.path
import time
import signal
import subprocess

import libcask.attach


class Container(object):
    def __init__(
        self,
        name,
        root_path,
        pid_path,
        hostname,
        ipaddr,
        ipaddr_host,
        entry_point,
    ):
        # Human-readable name for this container
        self.name = name

        # Path to the filesystem root directory of the container
        self.root_path = root_path

        # Path to the pidfile of the container
        self.pid_path = pid_path

        # Hostname of the container
        self.hostname = hostname

        # IP Address of the container's virtual ethernet interface
        self.ipaddr = ipaddr

        # IP Address of the host's end of the virtual ethernet pair
        self.ipaddr_host = ipaddr

        # Command to run in the new container
        self.entry_point = entry_point

    def pid(self):
        try:
            with open(self.pid_path, 'r') as f:
                return int(f.read())
        except IOError:
            return None

    def create(self):
        os.makedirs(self.root_path)
        os.makedirs(os.path.dirname(self.pid_path))

    def status(self):
        pid = self.pid()
        status_path = '/proc/{pid}/status'.format(pid=pid)
        try:
            with open(status_path, 'r') as f:
                return True
        except IOError:
            return False

    def start(self):
        entry = self.entry_point.split(' ')
        args = ['./cask-clone', self.root_path, self.pid_path] + entry
        with open('/dev/null', 'rwb') as devnull:
            subprocess.Popen(args, stdin=devnull, stdout=devnull, stderr=devnull)

        # TODO - Properly await existence of pidfile. This /sucks/.
        time.sleep(1)
        print 'pid:', self.pid()

    def attach(self):
        return libcask.attach.Attachment(self.pid())

    def kill(self, sig=None):
        os.kill(self.pid(), sig or signal.SIGKILL)
        os.unlink(self.pid_path)
import os
import os.path
import time
import signal
import subprocess

import libcask.attach


class Container(object):
    def __init__(
        self,
        name,
        root_path,
        pid_path,
        hostname,
        ipaddr,
        ipaddr_host,
        entry_point,
    ):
        # Human-readable name for this container
        self.name = name

        # Path to the filesystem root directory of the container
        self.root_path = root_path

        # Path to the pidfile of the container
        self.pid_path = pid_path

        # Hostname of the container
        self.hostname = hostname

        # IP Address of the container's virtual ethernet interface
        self.ipaddr = ipaddr

        # IP Address of the host's end of the virtual ethernet pair
        self.ipaddr_host = ipaddr

        # Command to run in the new container
        self.entry_point = entry_point

    def pid(self):
        try:
            with open(self.pid_path, 'r') as f:
                return int(f.read())
        except IOError:
            return None

    def create(self):
        os.makedirs(self.root_path)
        os.makedirs(os.path.dirname(self.pid_path))

    def status(self):
        pid = self.pid()
        status_path = '/proc/{pid}/status'.format(pid=pid)
        try:
            with open(status_path, 'r') as f:
                return True
        except IOError:
            return False

    def start(self):
        entry = self.entry_point.split(' ')
        args = ['./cask-clone', self.root_path, self.pid_path] + entry
        with open('/dev/null', 'rwb') as devnull:
            subprocess.Popen(args, stdin=devnull, stdout=devnull, stderr=devnull)

        # TODO - Properly await existence of pidfile. This /sucks/.
        time.sleep(1)
        print 'pid:', self.pid()

    def attach(self):
        return libcask.attach.Attachment(self.pid())

    def kill(self, sig=None):
        os.kill(self.pid(), sig or signal.SIGKILL)
mit
Python
6f61215263cfe02dc50e508514d1d23208e46d92
Allow modules context processor works without request.user Fix #524
viewflow/django-material,viewflow/django-material,viewflow/django-material
material/frontend/context_processors.py
material/frontend/context_processors.py
from . import modules as modules_registry


def modules(request):
    """Add current module and modules list to the template context."""
    module = None
    if request.resolver_match:
        module = getattr(request.resolver_match.url_name, 'module', None)

    return {
        'modules': modules_registry.available_modules(request.user) if hasattr(request, 'user') else [],
        'current_module': module,
    }
from . import modules as modules_registry


def modules(request):
    """Add current module and modules list to the template context."""
    if not hasattr(request, 'user'):
        raise ValueError('modules context processor requires "django.contrib.auth.context_processors.auth"'
                         'to be in TEMPLATE_CONTEXT_PROCESSORS in your settings file.')

    module = None
    if request.resolver_match:
        module = getattr(request.resolver_match.url_name, 'module', None)

    return {
        'modules': modules_registry.available_modules(request.user),
        'current_module': module,
    }
bsd-3-clause
Python
0d8c30e58d8b53f90f9318cdf3db26ed1e272602
Fix pep8 violation.
CredoReference/edx-platform,Stanford-Online/edx-platform,edx/edx-platform,a-parhom/edx-platform,Stanford-Online/edx-platform,mitocw/edx-platform,Edraak/edraak-platform,Stanford-Online/edx-platform,angelapper/edx-platform,cpennington/edx-platform,edx/edx-platform,philanthropy-u/edx-platform,eduNEXT/edunext-platform,Edraak/edraak-platform,stvstnfrd/edx-platform,msegado/edx-platform,procangroup/edx-platform,a-parhom/edx-platform,teltek/edx-platform,arbrandes/edx-platform,proversity-org/edx-platform,edx-solutions/edx-platform,ESOedX/edx-platform,philanthropy-u/edx-platform,eduNEXT/edunext-platform,msegado/edx-platform,ahmedaljazzar/edx-platform,stvstnfrd/edx-platform,angelapper/edx-platform,cpennington/edx-platform,angelapper/edx-platform,EDUlib/edx-platform,teltek/edx-platform,appsembler/edx-platform,ahmedaljazzar/edx-platform,Stanford-Online/edx-platform,mitocw/edx-platform,BehavioralInsightsTeam/edx-platform,gsehub/edx-platform,arbrandes/edx-platform,Edraak/edraak-platform,edx-solutions/edx-platform,arbrandes/edx-platform,gsehub/edx-platform,msegado/edx-platform,EDUlib/edx-platform,ESOedX/edx-platform,arbrandes/edx-platform,eduNEXT/edx-platform,BehavioralInsightsTeam/edx-platform,BehavioralInsightsTeam/edx-platform,gsehub/edx-platform,kmoocdev2/edx-platform,ahmedaljazzar/edx-platform,eduNEXT/edx-platform,jolyonb/edx-platform,TeachAtTUM/edx-platform,CredoReference/edx-platform,gymnasium/edx-platform,BehavioralInsightsTeam/edx-platform,kmoocdev2/edx-platform,angelapper/edx-platform,ahmedaljazzar/edx-platform,mitocw/edx-platform,gsehub/edx-platform,philanthropy-u/edx-platform,jolyonb/edx-platform,cpennington/edx-platform,stvstnfrd/edx-platform,edx/edx-platform,gymnasium/edx-platform,jolyonb/edx-platform,appsembler/edx-platform,procangroup/edx-platform,philanthropy-u/edx-platform,edx-solutions/edx-platform,kmoocdev2/edx-platform,EDUlib/edx-platform,Edraak/edraak-platform,cpennington/edx-platform,eduNEXT/edunext-platform,msegado/edx-platform,eduNEXT/edunext-platform,a-parhom/edx-platform,TeachAtTUM/edx-platform,teltek/edx-platform,appsembler/edx-platform,CredoReference/edx-platform,eduNEXT/edx-platform,teltek/edx-platform,kmoocdev2/edx-platform,stvstnfrd/edx-platform,appsembler/edx-platform,msegado/edx-platform,gymnasium/edx-platform,mitocw/edx-platform,a-parhom/edx-platform,TeachAtTUM/edx-platform,ESOedX/edx-platform,procangroup/edx-platform,EDUlib/edx-platform,jolyonb/edx-platform,proversity-org/edx-platform,proversity-org/edx-platform,gymnasium/edx-platform,edx/edx-platform,procangroup/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,TeachAtTUM/edx-platform,ESOedX/edx-platform,proversity-org/edx-platform,kmoocdev2/edx-platform,CredoReference/edx-platform
lms/envs/static.py
lms/envs/static.py
""" This config file runs the simplest dev environment using sqlite, and db-based sessions. Assumes structure: /envroot/ /db # This is where it'll write the database file /edx-platform # The location of this repo /log # Where we're going to write log files """ # We intentionally define lots of variables that aren't used, and # want to import all variables from base settings files # pylint: disable=wildcard-import, unused-wildcard-import from .common import * from openedx.core.lib.derived import derive_settings from openedx.core.lib.logsettings import get_logger_config STATIC_GRAB = True LOGGING = get_logger_config(ENV_ROOT / "log", logging_env="dev") DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ENV_ROOT / "db" / "edx.db", 'ATOMIC_REQUESTS': True, }, 'student_module_history': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ENV_ROOT / "db" / "student_module_history.db", 'ATOMIC_REQUESTS': True, } } CACHES = { # This is the cache used for most things. # In staging/prod envs, the sessions also live here. 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'edx_loc_mem_cache', 'KEY_FUNCTION': 'util.memcache.safe_key', }, # The general cache is what you get if you use our util.cache. It's used for # things like caching the course.xml file for different A/B test groups. # We set it to be a DummyCache to force reloading of course.xml in dev. # In staging environments, we would grab VERSION from data uploaded by the # push process. 'general': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', 'KEY_PREFIX': 'general', 'VERSION': 4, 'KEY_FUNCTION': 'util.memcache.safe_key', } } # Dummy secret key for dev SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd' ############################ FILE UPLOADS (for discussion forums) ############################# DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' MEDIA_ROOT = ENV_ROOT / "uploads" MEDIA_URL = "/discussion/upfiles/" FILE_UPLOAD_TEMP_DIR = ENV_ROOT / "uploads" FILE_UPLOAD_HANDLERS = [ 'django.core.files.uploadhandler.MemoryFileUploadHandler', 'django.core.files.uploadhandler.TemporaryFileUploadHandler', ] ########################## Derive Any Derived Settings ####################### derive_settings(__name__)
""" This config file runs the simplest dev environment using sqlite, and db-based sessions. Assumes structure: /envroot/ /db # This is where it'll write the database file /edx-platform # The location of this repo /log # Where we're going to write log files """ # We intentionally define lots of variables that aren't used, and # want to import all variables from base settings files # pylint: disable=wildcard-import, unused-wildcard-import from .common import * from openedx.core.lib.derived import derive_settings from openedx.core.lib.logsettings import get_logger_config STATIC_GRAB = True LOGGING = get_logger_config(ENV_ROOT / "log", logging_env="dev", ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ENV_ROOT / "db" / "edx.db", 'ATOMIC_REQUESTS': True, }, 'student_module_history': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ENV_ROOT / "db" / "student_module_history.db", 'ATOMIC_REQUESTS': True, } } CACHES = { # This is the cache used for most things. # In staging/prod envs, the sessions also live here. 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'edx_loc_mem_cache', 'KEY_FUNCTION': 'util.memcache.safe_key', }, # The general cache is what you get if you use our util.cache. It's used for # things like caching the course.xml file for different A/B test groups. # We set it to be a DummyCache to force reloading of course.xml in dev. # In staging environments, we would grab VERSION from data uploaded by the # push process. 'general': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', 'KEY_PREFIX': 'general', 'VERSION': 4, 'KEY_FUNCTION': 'util.memcache.safe_key', } } # Dummy secret key for dev SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd' ############################ FILE UPLOADS (for discussion forums) ############################# DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' MEDIA_ROOT = ENV_ROOT / "uploads" MEDIA_URL = "/discussion/upfiles/" FILE_UPLOAD_TEMP_DIR = ENV_ROOT / "uploads" FILE_UPLOAD_HANDLERS = [ 'django.core.files.uploadhandler.MemoryFileUploadHandler', 'django.core.files.uploadhandler.TemporaryFileUploadHandler', ] ########################## Derive Any Derived Settings ####################### derive_settings(__name__)
agpl-3.0
Python
1af17b029cef4c3a197fd3a4813fc704cb277e59
use the correct name
geometalab/drf-utm-zone-info,geometalab/drf-utm-zone-info,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend
osmaxx-py/osmaxx/excerptexport/urls.py
osmaxx-py/osmaxx/excerptexport/urls.py
from django.conf.urls import url
from django.contrib.auth.views import login, logout
from django.views.generic import TemplateView

from osmaxx.excerptexport.views import (
    list_downloads,
    download_file,
    extraction_order_status,
    list_orders,
    NewExtractionOrderView,
    access_denied,
)

excerpt_export_urlpatterns = [
    url(r'^$', TemplateView.as_view(template_name="excerptexport/templates/index.html"), name='index'),
    url(r'^access_denied/$', access_denied, name='access_denied'),
    url(r'^downloads/$', list_downloads, name='downloads'),
    url(r'^downloads/(?P<uuid>[A-Za-z0-9_-]+)/$', download_file, name='download'),
    url(r'^orders/$', list_orders, name='orders'),
    url(r'^orders/new/$', NewExtractionOrderView.as_view(), name='new'),
    url(r'^orders/(?P<extraction_order_id>[0-9]+)$', extraction_order_status, name='status')
]

login_logout_patterns = [
    url(r'^login/$', login, {'template_name': 'excerptexport/templates/login.html'}, name='login'),
    url(r'^logout/$', logout, {'template_name': 'excerptexport/templates/logout.html'}, name='logout'),
]

urlpatterns = excerpt_export_urlpatterns + login_logout_patterns
from django.conf.urls import url
from django.contrib.auth.views import login, logout
from django.views.generic import TemplateView

from osmaxx.excerptexport.views import (
    list_downloads,
    download_file,
    extraction_order_status,
    list_orders,
    NewExtractionOrderView,
    access_denied,
)

except_export_urlpatterns = [
    url(r'^$', TemplateView.as_view(template_name="excerptexport/templates/index.html"), name='index'),
    url(r'^access_denied/$', access_denied, name='access_denied'),
    url(r'^downloads/$', list_downloads, name='downloads'),
    url(r'^downloads/(?P<uuid>[A-Za-z0-9_-]+)/$', download_file, name='download'),
    url(r'^orders/$', list_orders, name='orders'),
    url(r'^orders/new/$', NewExtractionOrderView.as_view(), name='new'),
    url(r'^orders/(?P<extraction_order_id>[0-9]+)$', extraction_order_status, name='status')
]

login_logout_patterns = [
    url(r'^login/$', login, {'template_name': 'excerptexport/templates/login.html'}, name='login'),
    url(r'^logout/$', logout, {'template_name': 'excerptexport/templates/logout.html'}, name='logout'),
]

urlpatterns = except_export_urlpatterns + login_logout_patterns
isc
Python
dda9b7576269f7dfc7ca864da33f6b047228e667
remove armeabi and mips targets
Akaaba/pdraw,Akaaba/pdraw,Akaaba/pdraw,Akaaba/pdraw
config/buildcfg.py
config/buildcfg.py
import sys, os

import dragon
import apps_tools.android as android
import apps_tools.ios as ios

android_pdraw_dir = os.path.join(dragon.WORKSPACE_DIR, "packages", "pdraw")
android_jni_dir = os.path.join(android_pdraw_dir, "libpdraw", "android", "jni")
android_app_dir = os.path.join(android_pdraw_dir, "apps", "pdraw_android")

#===============================================================================
# Android
#===============================================================================
if dragon.VARIANT == "android":
    android_abis = ["armeabi-v7a", "arm64-v8a", "x86"]

    android.add_task_build_common(android_abis)

    android.add_ndk_build_task(
        name="build-jni",
        desc="Build native libs & jni",
        subtasks=["build-common"],
        calldir=android_jni_dir,
        module="libpdraw",
        abis=android_abis,
        extra_args=["PACKAGES_DIR={}".format(os.path.join(dragon.WORKSPACE_DIR, "packages"))]
    )

    android.add_ndk_build_task(
        name="clean-jni",
        desc="Clean native libs & jni",
        calldir=android_jni_dir,
        module="libpdraw",
        abis=android_abis,
        extra_args=["PACKAGES_DIR={}".format(os.path.join(dragon.WORKSPACE_DIR, "packages")), "clean"],
        ignore_failure=True
    )

    android.add_gradle_task(
        name="build-app",
        desc="Build the PDrAW Android app in debug",
        subtasks=["build-jni"],
        calldir=android_app_dir,
        target="assembleDebug"
    )

    android.add_gradle_task(
        name="clean-app",
        desc="Clean the PDrAW Android app",
        subtasks=["clean-jni"],
        calldir=android_app_dir,
        target="clean"
    )

    dragon.add_meta_task(
        name="build",
        desc="Build libs & app",
        subtasks=["build-app"]
    )

    dragon.add_meta_task(
        name="clean",
        desc="Clean libs & app",
        subtasks=["clean-app"]
    )
import sys, os

import dragon
import apps_tools.android as android
import apps_tools.ios as ios

android_pdraw_dir = os.path.join(dragon.WORKSPACE_DIR, "packages", "pdraw")
android_jni_dir = os.path.join(android_pdraw_dir, "libpdraw", "android", "jni")
android_app_dir = os.path.join(android_pdraw_dir, "apps", "pdraw_android")

#===============================================================================
# Android
#===============================================================================
if dragon.VARIANT == "android":
    android_abis = ["armeabi", "armeabi-v7a", "arm64-v8a", "mips", "x86"]

    android.add_task_build_common(android_abis)

    android.add_ndk_build_task(
        name="build-jni",
        desc="Build native libs & jni",
        subtasks=["build-common"],
        calldir=android_jni_dir,
        module="libpdraw",
        abis=android_abis,
        extra_args=["PACKAGES_DIR={}".format(os.path.join(dragon.WORKSPACE_DIR, "packages"))]
    )

    android.add_ndk_build_task(
        name="clean-jni",
        desc="Clean native libs & jni",
        calldir=android_jni_dir,
        module="libpdraw",
        abis=android_abis,
        extra_args=["PACKAGES_DIR={}".format(os.path.join(dragon.WORKSPACE_DIR, "packages")), "clean"],
        ignore_failure=True
    )

    android.add_gradle_task(
        name="build-app",
        desc="Build the PDrAW Android app in debug",
        subtasks=["build-jni"],
        calldir=android_app_dir,
        target="assembleDebug"
    )

    android.add_gradle_task(
        name="clean-app",
        desc="Clean the PDrAW Android app",
        subtasks=["clean-jni"],
        calldir=android_app_dir,
        target="clean"
    )

    dragon.add_meta_task(
        name="build",
        desc="Build libs & app",
        subtasks=["build-app"]
    )

    dragon.add_meta_task(
        name="clean",
        desc="Clean libs & app",
        subtasks=["clean-app"]
    )
bsd-3-clause
Python
b46cf3c17afb7300d7a72725e70650c59a1e67ad
Update fun.py
DNAGamer/Helix3
code/fun.py
code/fun.py
import asyncio
import discord
from discord.ext import commands


class Fun:
    def __init__(self, bot):
        self.bot = bot

    @commands.command(pass_context=True, no_pm=True)
    async def ping(message):
        if message.content.startswith('!ping'):
            return await my_bot.say("Hello, world!")
import asyncio
import discord
from discord.ext import commands


class Fun:
    def __init__(self, bot):
        self.bot = bot

    @commands.command(pass_context=True, no_pm=True)
    async def ping(message):
        if message.content.startswith('!ping'):
            await client.send_message(message.channel, 'Pong!')
mit
Python
a307c5fc2555d282dfa6193cdbcfb2d15e185c0c
Allow query without table to run
lebinh/aq
aq/parsers.py
aq/parsers.py
import collections
from collections import namedtuple

from six import string_types

from aq.errors import QueryParsingError
from aq.select_parser import select_stmt, ParseException

TableId = namedtuple('TableId', ('database', 'table', 'alias'))
QueryMetadata = namedtuple('QueryMetadata', ('tables',))


class SelectParser(object):
    def __init__(self, options):
        self.options = options

    @staticmethod
    def parse_query(query):
        try:
            parse_result = select_stmt.parseString(query, parseAll=True)
        except ParseException as e:
            raise QueryParsingError(e)

        tables = [parse_table_id(tid) for tid in parse_result.table_ids]
        parsed_query = concat(parse_result)
        return parsed_query, QueryMetadata(tables=tables)


def parse_table_id(table_id):
    database = table_id.database[0] if table_id.database else None
    table = table_id.table[0] if table_id.table else None
    alias = table_id.alias[0] if table_id.alias else None
    return TableId(database, table, alias)


def flatten(nested_list):
    for item in nested_list:
        if isinstance(item, collections.Iterable) and not isinstance(item, string_types):
            for nested_item in flatten(item):
                yield nested_item
        else:
            yield item


def concat(tokens):
    return ' '.join(flatten(tokens))
from collections import namedtuple
import collections

from six import string_types

from aq.errors import QueryParsingError
from aq.select_parser import select_stmt, ParseException

TableId = namedtuple('TableId', ('database', 'table', 'alias'))
QueryMetadata = namedtuple('QueryMetadata', ('tables',))


class SelectParser(object):
    def __init__(self, options):
        self.options = options

    @staticmethod
    def parse_query(query):
        try:
            parse_result = select_stmt.parseString(query, parseAll=True)
        except ParseException as e:
            raise QueryParsingError(e)

        if not parse_result.table:
            raise QueryParsingError('No table specified in query')

        tables = [parse_table_id(tid) for tid in parse_result.table_ids]
        parsed_query = concat(parse_result)
        return parsed_query, QueryMetadata(tables=tables)


def parse_table_id(table_id):
    database = table_id.database[0] if table_id.database else None
    table = table_id.table[0] if table_id.table else None
    alias = table_id.alias[0] if table_id.alias else None
    return TableId(database, table, alias)


def flatten(nested_list):
    for item in nested_list:
        if isinstance(item, collections.Iterable) and not isinstance(item, string_types):
            for nested_item in flatten(item):
                yield nested_item
        else:
            yield item


def concat(tokens):
    return ' '.join(flatten(tokens))
mit
Python
f48c0b25556c3ea89dcb3bd4c4d9608730689be8
Make sure www.example.test is checked.
adelton/webauthinfra,adelton/webauthinfra,adelton/webauthinfra,adelton/webauthinfra
src/test-saml.py
src/test-saml.py
#!/usr/bin/python3

from sys import argv
from xvfbwrapper import Xvfb
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions

print("Admin password " + argv[1])

with Xvfb() as xvfb:
    driver = webdriver.Firefox(log_path = "/tmp/geckodriver.log")
    driver.get("https://www.example.test/")
    title = driver.find_element_by_xpath("//h1/a")
    print(title.text)
    assert "Not logged in" in title.text
    logon_link = driver.find_element_by_xpath("//a[@href][text() = 'login']")
    current_url = driver.current_url
    logon_link.click()
    WebDriverWait(driver, 15).until(expected_conditions.url_changes(current_url))
    print(driver.current_url)
    logon_form = driver.find_element_by_xpath("//form[input[@name = 'ipsilon_transaction_id']]")
    logon_form.find_element_by_id("login_name").send_keys("admin")
    logon_form.find_element_by_id("login_password").send_keys(argv[1])
    current_url = driver.current_url
    logon_form.submit()
    WebDriverWait(driver, 15).until(expected_conditions.url_to_be("https://www.example.test/"))
    print(driver.current_url)
    title = driver.find_element_by_xpath("//h1/a")
    print(title.text)
    assert "Logged in as admin" in title.text
    groups = driver.find_element_by_xpath("//tr[td/text() = 'Member of groups']/td[2]")
    print(groups.text)
    assert "ext:admins" in groups.text
    driver.quit()
#!/usr/bin/python3

from sys import argv
from xvfbwrapper import Xvfb
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions

print("Admin password " + argv[1])

with Xvfb() as xvfb:
    driver = webdriver.Firefox(log_path = "/tmp/geckodriver.log")
    driver.get("https://www.example.test/")
    title = driver.find_element_by_xpath("//h1/a")
    print(title.text)
    assert "Not logged in" in title.text
    logon_link = driver.find_element_by_xpath("//a[@href][text() = 'login']")
    current_url = driver.current_url
    logon_link.click()
    WebDriverWait(driver, 15).until(expected_conditions.url_changes(current_url))
    print(driver.current_url)
    logon_form = driver.find_element_by_xpath("//form[input[@name = 'ipsilon_transaction_id']]")
    logon_form.find_element_by_id("login_name").send_keys("admin")
    logon_form.find_element_by_id("login_password").send_keys(argv[1])
    current_url = driver.current_url
    logon_form.submit()
    WebDriverWait(driver, 15).until(expected_conditions.url_changes(current_url))
    print(driver.current_url)
    title = driver.find_element_by_xpath("//h1/a")
    print(title.text)
    assert "Logged in as admin" in title.text
    groups = driver.find_element_by_xpath("//tr[td/text() = 'Member of groups']/td[2]")
    print(groups.text)
    assert "ext:admins" in groups.text
    driver.quit()
apache-2.0
Python
0bbfcaabcee591ca19702ec071d711ac411597fd
Increment version to 0.2.4
approvals/ApprovalTests.Python,approvals/ApprovalTests.Python,tdpreece/ApprovalTests.Python,approvals/ApprovalTests.Python
approvaltests/version.py
approvaltests/version.py
version_number = "0.2.4"
version_number = "0.2.3"
apache-2.0
Python
1eda7cfbda31ab7b39182e4a2fdacf8bfcf147a2
Update __init__.py
williamFalcon/pytorch-lightning,williamFalcon/pytorch-lightning
pytorch_lightning/__init__.py
pytorch_lightning/__init__.py
"""Root package info.""" __version__ = '0.10.0rc1' __author__ = 'William Falcon et al.' __author_email__ = '[email protected]' __license__ = 'Apache-2.0' __copyright__ = 'Copyright (c) 2018-2020, %s.' % __author__ __homepage__ = 'https://github.com/PyTorchLightning/pytorch-lightning' # this has to be simple string, see: https://github.com/pypa/twine/issues/522 __docs__ = ( "PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers." " Scale your models. Write less boilerplate." ) __long_docs__ = """ Lightning is a way to organize your PyTorch code to decouple the science code from the engineering. It's more of a style-guide than a framework. In Lightning, you organize your code into 3 distinct categories: 1. Research code (goes in the LightningModule). 2. Engineering code (you delete, and is handled by the Trainer). 3. Non-essential research code (logging, etc. this goes in Callbacks). Although your research/production project might start simple, once you add things like GPU AND TPU training, 16-bit precision, etc, you end up spending more time engineering than researching. Lightning automates AND rigorously tests those parts for you. Overall, Lightning guarantees rigorously tested, correct, modern best practices for the automated parts. Documentation ------------- - https://pytorch-lightning.readthedocs.io/en/latest - https://pytorch-lightning.readthedocs.io/en/stable """ import logging as python_logging _logger = python_logging.getLogger("lightning") _logger.addHandler(python_logging.StreamHandler()) _logger.setLevel(python_logging.INFO) try: # This variable is injected in the __builtins__ by the build # process. It used to enable importing subpackages of skimage when # the binaries are not built __LIGHTNING_SETUP__ except NameError: __LIGHTNING_SETUP__ = False if __LIGHTNING_SETUP__: import sys # pragma: no-cover sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover # We are not importing the rest of the lightning during the build process, as it may not be compiled yet else: from pytorch_lightning.core import LightningDataModule, LightningModule from pytorch_lightning.core.step_result import TrainResult, EvalResult from pytorch_lightning.callbacks import Callback from pytorch_lightning.trainer import Trainer from pytorch_lightning.utilities.seed import seed_everything from pytorch_lightning import metrics __all__ = [ 'Trainer', 'LightningDataModule', 'LightningModule', 'Callback', 'seed_everything', 'metrics', 'EvalResult', 'TrainResult', ] # necessary for regular bolts imports. Skip exception since bolts is not always installed try: from pytorch_lightning import bolts except ImportError: pass # __call__ = __all__ # for compatibility with namespace packages __import__('pkg_resources').declare_namespace(__name__)
"""Root package info.""" __version__ = '0.9.1rc4' __author__ = 'William Falcon et al.' __author_email__ = '[email protected]' __license__ = 'Apache-2.0' __copyright__ = 'Copyright (c) 2018-2020, %s.' % __author__ __homepage__ = 'https://github.com/PyTorchLightning/pytorch-lightning' # this has to be simple string, see: https://github.com/pypa/twine/issues/522 __docs__ = ( "PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers." " Scale your models. Write less boilerplate." ) __long_docs__ = """ Lightning is a way to organize your PyTorch code to decouple the science code from the engineering. It's more of a style-guide than a framework. In Lightning, you organize your code into 3 distinct categories: 1. Research code (goes in the LightningModule). 2. Engineering code (you delete, and is handled by the Trainer). 3. Non-essential research code (logging, etc. this goes in Callbacks). Although your research/production project might start simple, once you add things like GPU AND TPU training, 16-bit precision, etc, you end up spending more time engineering than researching. Lightning automates AND rigorously tests those parts for you. Overall, Lightning guarantees rigorously tested, correct, modern best practices for the automated parts. Documentation ------------- - https://pytorch-lightning.readthedocs.io/en/latest - https://pytorch-lightning.readthedocs.io/en/stable """ import logging as python_logging _logger = python_logging.getLogger("lightning") _logger.addHandler(python_logging.StreamHandler()) _logger.setLevel(python_logging.INFO) try: # This variable is injected in the __builtins__ by the build # process. It used to enable importing subpackages of skimage when # the binaries are not built __LIGHTNING_SETUP__ except NameError: __LIGHTNING_SETUP__ = False if __LIGHTNING_SETUP__: import sys # pragma: no-cover sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover # We are not importing the rest of the lightning during the build process, as it may not be compiled yet else: from pytorch_lightning.core import LightningDataModule, LightningModule from pytorch_lightning.core.step_result import TrainResult, EvalResult from pytorch_lightning.callbacks import Callback from pytorch_lightning.trainer import Trainer from pytorch_lightning.utilities.seed import seed_everything from pytorch_lightning import metrics __all__ = [ 'Trainer', 'LightningDataModule', 'LightningModule', 'Callback', 'seed_everything', 'metrics', 'EvalResult', 'TrainResult', ] # necessary for regular bolts imports. Skip exception since bolts is not always installed try: from pytorch_lightning import bolts except ImportError: pass # __call__ = __all__ # for compatibility with namespace packages __import__('pkg_resources').declare_namespace(__name__)
apache-2.0
Python
d24cf56d0ad2e8388eb931b10c170df86870c5b0
Update __init__.py (#4308)
williamFalcon/pytorch-lightning,williamFalcon/pytorch-lightning
pytorch_lightning/__init__.py
pytorch_lightning/__init__.py
"""Root package info.""" __version__ = '1.0.4rc0' __author__ = 'William Falcon et al.' __author_email__ = '[email protected]' __license__ = 'Apache-2.0' __copyright__ = 'Copyright (c) 2018-2020, %s.' % __author__ __homepage__ = 'https://github.com/PyTorchLightning/pytorch-lightning' # this has to be simple string, see: https://github.com/pypa/twine/issues/522 __docs__ = ( "PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers." " Scale your models. Write less boilerplate." ) __long_docs__ = """ Lightning is a way to organize your PyTorch code to decouple the science code from the engineering. It's more of a style-guide than a framework. In Lightning, you organize your code into 3 distinct categories: 1. Research code (goes in the LightningModule). 2. Engineering code (you delete, and is handled by the Trainer). 3. Non-essential research code (logging, etc. this goes in Callbacks). Although your research/production project might start simple, once you add things like GPU AND TPU training, 16-bit precision, etc, you end up spending more time engineering than researching. Lightning automates AND rigorously tests those parts for you. Overall, Lightning guarantees rigorously tested, correct, modern best practices for the automated parts. Documentation ------------- - https://pytorch-lightning.readthedocs.io/en/latest - https://pytorch-lightning.readthedocs.io/en/stable """ import logging as python_logging _logger = python_logging.getLogger("lightning") _logger.addHandler(python_logging.StreamHandler()) _logger.setLevel(python_logging.INFO) try: # This variable is injected in the __builtins__ by the build # process. It used to enable importing subpackages of skimage when # the binaries are not built __LIGHTNING_SETUP__ except NameError: __LIGHTNING_SETUP__ = False if __LIGHTNING_SETUP__: import sys # pragma: no-cover sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover # We are not importing the rest of the lightning during the build process, as it may not be compiled yet else: from pytorch_lightning.core import LightningDataModule, LightningModule from pytorch_lightning.callbacks import Callback from pytorch_lightning.trainer import Trainer from pytorch_lightning.utilities.seed import seed_everything from pytorch_lightning import metrics __all__ = [ 'Trainer', 'LightningDataModule', 'LightningModule', 'Callback', 'seed_everything', 'metrics', ] # necessary for regular bolts imports. Skip exception since bolts is not always installed try: from pytorch_lightning import bolts except ImportError: pass # __call__ = __all__ # for compatibility with namespace packages __import__('pkg_resources').declare_namespace(__name__)
"""Root package info.""" __version__ = '1.0.3' __author__ = 'William Falcon et al.' __author_email__ = '[email protected]' __license__ = 'Apache-2.0' __copyright__ = 'Copyright (c) 2018-2020, %s.' % __author__ __homepage__ = 'https://github.com/PyTorchLightning/pytorch-lightning' # this has to be simple string, see: https://github.com/pypa/twine/issues/522 __docs__ = ( "PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers." " Scale your models. Write less boilerplate." ) __long_docs__ = """ Lightning is a way to organize your PyTorch code to decouple the science code from the engineering. It's more of a style-guide than a framework. In Lightning, you organize your code into 3 distinct categories: 1. Research code (goes in the LightningModule). 2. Engineering code (you delete, and is handled by the Trainer). 3. Non-essential research code (logging, etc. this goes in Callbacks). Although your research/production project might start simple, once you add things like GPU AND TPU training, 16-bit precision, etc, you end up spending more time engineering than researching. Lightning automates AND rigorously tests those parts for you. Overall, Lightning guarantees rigorously tested, correct, modern best practices for the automated parts. Documentation ------------- - https://pytorch-lightning.readthedocs.io/en/latest - https://pytorch-lightning.readthedocs.io/en/stable """ import logging as python_logging _logger = python_logging.getLogger("lightning") _logger.addHandler(python_logging.StreamHandler()) _logger.setLevel(python_logging.INFO) try: # This variable is injected in the __builtins__ by the build # process. It used to enable importing subpackages of skimage when # the binaries are not built __LIGHTNING_SETUP__ except NameError: __LIGHTNING_SETUP__ = False if __LIGHTNING_SETUP__: import sys # pragma: no-cover sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover # We are not importing the rest of the lightning during the build process, as it may not be compiled yet else: from pytorch_lightning.core import LightningDataModule, LightningModule from pytorch_lightning.callbacks import Callback from pytorch_lightning.trainer import Trainer from pytorch_lightning.utilities.seed import seed_everything from pytorch_lightning import metrics __all__ = [ 'Trainer', 'LightningDataModule', 'LightningModule', 'Callback', 'seed_everything', 'metrics', ] # necessary for regular bolts imports. Skip exception since bolts is not always installed try: from pytorch_lightning import bolts except ImportError: pass # __call__ = __all__ # for compatibility with namespace packages __import__('pkg_resources').declare_namespace(__name__)
apache-2.0
Python
7523ff90cadcefe3d51682d3301f7ceb51c70ced
Revert "Corrige a resolução"
dvl/raspberry-pi_timelapse,dvl/raspberry-pi_timelapse
timelapse.py
timelapse.py
import os
import datetime
import time

import picamera

from PIL import Image, ImageStat, ImageFont, ImageDraw

with picamera.PiCamera() as camera:
    camera.resolution = (1024, 768)
    camera.rotation = 180

    time.sleep(2)  # camera warm-up time

    for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'):
        image = Image.open(filename)
        stat = ImageStat.Stat(image)
        r, g, b, _ = stat.mean

        if r < 50 and g < 50 and b < 50:
            print('[!] Lights must be powered off, sleeping...')
            try:
                os.unlink(filename)
            except:
                pass
            time.sleep(60 * 5)
        else:
            annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y')
            draw = ImageDraw.Draw(image)
            font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24)
            draw.text((10, 700), annotate_text, (255, 255, 0), font=font)
            image.save(filename)
            print('[!] Taken: {}'.format(filename))
            time.sleep(60 / 2)

        image.close()
import os
import datetime
import time

import picamera

from PIL import Image, ImageStat, ImageFont, ImageDraw

with picamera.PiCamera() as camera:
    camera.resolution = (1024, 728)
    camera.rotation = 180

    time.sleep(2)  # camera warm-up time

    for filename in camera.capture_continuous('images/img_{timestamp:%Y%m%d%H%M%S}.png'):
        image = Image.open(filename)
        stat = ImageStat.Stat(image)
        r, g, b, _ = stat.mean

        if r < 50 and g < 50 and b < 50:
            print('[!] Lights must be powered off, sleeping...')
            try:
                os.unlink(filename)
            except:
                pass
            time.sleep(60 * 5)
        else:
            annotate_text = datetime.datetime.now().strftime('%H:%M:%S @ %d/%m/%Y')
            draw = ImageDraw.Draw(image)
            font = ImageFont.truetype('/usr/share/fonts/truetype/roboto/Roboto-Regular.ttf', 24)
            draw.text((10, 700), annotate_text, (255, 255, 0), font=font)
            image.save(filename)
            print('[!] Taken: {}'.format(filename))
            time.sleep(60 / 2)

        image.close()
mit
Python
60f89131b8f18046e4504b20c64f95cb3b30085a
Make sure we allow https flv files
wevoice/wesub,norayr/unisubs,norayr/unisubs,ofer43211/unisubs,ujdhesa/unisubs,eloquence/unisubs,ofer43211/unisubs,pculture/unisubs,pculture/unisubs,eloquence/unisubs,ofer43211/unisubs,ReachingOut/unisubs,norayr/unisubs,wevoice/wesub,ReachingOut/unisubs,ReachingOut/unisubs,ReachingOut/unisubs,eloquence/unisubs,eloquence/unisubs,ofer43211/unisubs,ujdhesa/unisubs,wevoice/wesub,pculture/unisubs,ujdhesa/unisubs,pculture/unisubs,ujdhesa/unisubs,norayr/unisubs,wevoice/wesub
apps/videos/types/flv.py
apps/videos/types/flv.py
# Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2010 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.

from videos.types.base import VideoType
import re

URL_REGEX = re.compile('^http(s)?://.+/.+\.flv$', re.I)

class FLVVideoType(VideoType):

    abbreviation = 'L'
    name = 'FLV'

    @classmethod
    def matches_video_url(cls, url):
        url = cls.format_url(url)
        return bool(URL_REGEX.match(url))
# Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2010 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.

from videos.types.base import VideoType
import re

URL_REGEX = re.compile('^http://.+/.+\.flv$', re.I)

class FLVVideoType(VideoType):

    abbreviation = 'L'
    name = 'FLV'

    @classmethod
    def matches_video_url(cls, url):
        url = cls.format_url(url)
        return bool(URL_REGEX.match(url))
agpl-3.0
Python
b856016182a9a0c97ccb5e6593aa16f3a269bf79
fix ToDoList class method add_todo to pass non_boolean test
davidnjakai/bc-8-todo-console-application
todo_list.py
todo_list.py
import todo_item

class ToDoList(object):
    def __init__(self, name, description, todo_items):
        self.name = name
        self.description = description
        self.todo_items = todo_items

    def add_todo(self, content, complete = False, *args):
        if type(complete) != type(True):
            self.complete = False
            return
        item = todo_item.ToDoItem(content, complete, *args)
        self.todo_items.append(item)

    def finish_item(self, index):
        if index >= len(self.todo_items) or index < 0:
            return 'That to do item does not exist'
        self.todo_items[index] = True

    def edit_item(self, index, content):
        self.todo_items[index] = content

    def delete_item(self, index):
        del self.todo_items[index]

    def percentage_completed(self):
        completed_items = 0
        for item in self.todo_items:
            if item.complete:
                completed_items += 1
        percentage = 100 * (completed_items/len(self.todo_items))
        return percentage
import todo_item

class ToDoList(object):
    def __init__(self, name, description, todo_items):
        self.name = name
        self.description = description
        self.todo_items = todo_items

    def add_todo(self, content, complete = False, *args):
        item = todo_item.ToDoItem(content, complete, *args)
        self.todo_items.append(item)

    def finish_item(self, index):
        if index >= len(self.todo_items) or index < 0:
            return 'That to do item does not exist'
        self.todo_items[index] = True

    def edit_item(self, index, content):
        self.todo_items[index] = content

    def delete_item(self, index):
        del self.todo_items[index]

    def percentage_completed(self):
        completed_items = 0
        for item in self.todo_items:
            if item.complete:
                completed_items += 1
        percentage = 100 * (completed_items/len(self.todo_items))
        return percentage
mit
Python
c000dd1d0940b47c13761bb09e0cb50a2adc6a2e
Handle token_endpoint auth type in osc plugin
openstack/python-heatclient
heatclient/osc/plugin.py
heatclient/osc/plugin.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

"""OpenStackClient plugin for Orchestration service."""

import logging

from osc_lib import utils

LOG = logging.getLogger(__name__)

DEFAULT_ORCHESTRATION_API_VERSION = '1'
API_VERSION_OPTION = 'os_orchestration_api_version'
API_NAME = 'orchestration'
API_VERSIONS = {
    '1': 'heatclient.v1.client.Client',
}

def make_client(instance):
    """Returns an orchestration service client"""
    heat_client = utils.get_client_class(
        API_NAME,
        instance._api_version[API_NAME],
        API_VERSIONS)
    LOG.debug('Instantiating orchestration client: %s', heat_client)

    kwargs = {'region_name': instance.region_name,
              'interface': instance.interface}

    if instance.session:
        kwargs.update({'session': instance.session,
                       'service_type': API_NAME})
    elif instance.auth_plugin_name == 'token_endpoint':
        kwargs.update({'endpoint': instance.auth.url,
                       'token': instance.auth.token})
    else:
        endpoint = instance.get_endpoint_for_service_type(
            API_NAME,
            region_name=instance.region_name,
            interface=instance.interface,
        )
        kwargs.update({'endpoint': endpoint,
                       'auth_url': instance.auth.auth_url,
                       'username': instance.auth_ref.username,
                       'token': instance.auth_ref.auth_token})

    client = heat_client(**kwargs)
    return client

def build_option_parser(parser):
    """Hook to add global options"""
    parser.add_argument(
        '--os-orchestration-api-version',
        metavar='<orchestration-api-version>',
        default=utils.env(
            'OS_ORCHESTRATION_API_VERSION',
            default=DEFAULT_ORCHESTRATION_API_VERSION),
        help='Orchestration API version, default=' +
             DEFAULT_ORCHESTRATION_API_VERSION +
             ' (Env: OS_ORCHESTRATION_API_VERSION)')
    return parser
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

"""OpenStackClient plugin for Orchestration service."""

import logging

from osc_lib import utils

LOG = logging.getLogger(__name__)

DEFAULT_ORCHESTRATION_API_VERSION = '1'
API_VERSION_OPTION = 'os_orchestration_api_version'
API_NAME = 'orchestration'
API_VERSIONS = {
    '1': 'heatclient.v1.client.Client',
}

def make_client(instance):
    """Returns an orchestration service client"""
    heat_client = utils.get_client_class(
        API_NAME,
        instance._api_version[API_NAME],
        API_VERSIONS)
    LOG.debug('Instantiating orchestration client: %s', heat_client)

    kwargs = {'region_name': instance.region_name,
              'interface': instance.interface}

    if instance.session:
        kwargs.update({'session': instance.session,
                       'service_type': API_NAME})
    else:
        endpoint = instance.get_endpoint_for_service_type(
            API_NAME,
            region_name=instance.region_name,
            interface=instance.interface,
        )
        kwargs.update({'endpoint': endpoint,
                       'auth_url': instance.auth.auth_url,
                       'username': instance.auth_ref.username,
                       'token': instance.auth_ref.auth_token})

    client = heat_client(**kwargs)
    return client

def build_option_parser(parser):
    """Hook to add global options"""
    parser.add_argument(
        '--os-orchestration-api-version',
        metavar='<orchestration-api-version>',
        default=utils.env(
            'OS_ORCHESTRATION_API_VERSION',
            default=DEFAULT_ORCHESTRATION_API_VERSION),
        help='Orchestration API version, default=' +
             DEFAULT_ORCHESTRATION_API_VERSION +
             ' (Env: OS_ORCHESTRATION_API_VERSION)')
    return parser
apache-2.0
Python
ab365a6fdf39feed6f529a4a5170c2d9f674b706
fix unicode issue
nojhan/weboob-devel,laurent-george/weboob,yannrouillard/weboob,RouxRC/weboob,Konubinix/weboob,Boussadia/weboob,sputnick-dev/weboob,Boussadia/weboob,willprice/weboob,willprice/weboob,nojhan/weboob-devel,RouxRC/weboob,laurent-george/weboob,sputnick-dev/weboob,eirmag/weboob,eirmag/weboob,eirmag/weboob,sputnick-dev/weboob,yannrouillard/weboob,RouxRC/weboob,Konubinix/weboob,franek/weboob,willprice/weboob,Boussadia/weboob,frankrousseau/weboob,Konubinix/weboob,yannrouillard/weboob,laurent-george/weboob,franek/weboob,nojhan/weboob-devel,frankrousseau/weboob,franek/weboob,frankrousseau/weboob,Boussadia/weboob
weboob/backends/orange/pages/compose.py
weboob/backends/orange/pages/compose.py
# -*- coding: utf-8 -*-

# Copyright(C) 2010-2011 Nicolas Duhamel
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.

import re

from weboob.capabilities.messages import CantSendMessage
from weboob.tools.browser import BasePage

__all__ = ['ComposePage', 'ConfirmPage']

class ConfirmPage(BasePage):
    def on_loaded(self):
        pass

class ComposePage(BasePage):
    phone_regex = re.compile('^(\+33|0033|0)(6|7)(\d{8})$')

    def on_loaded(self):
        #Deal with bad encoding... for ie6 ...
        response = self.browser.response()
        response.set_data(response.get_data().decode('utf-8', 'ignore') )
        self.browser.set_response(response)

    def get_nb_remaining_free_sms(self):
        return "0"

    def post_message(self, message, sender):
        receiver = message.thread.id
        if self.phone_regex.match(receiver) is None:
            raise CantSendMessage(u'Invalid receiver: %s' % receiver)

        listetel = ",,"+ receiver

        #Fill the form
        self.browser.select_form(name="formulaire")
        self.browser.new_control("hidden", "autorize",{'value':''})
        self.browser.new_control("textarea", "msg", {'value':''})
        self.browser.set_all_readonly(False)

        self.browser["corpsms"] = message.content.encode('utf-8')
        self.browser["pays"] = "33"
        self.browser["listetel"] = listetel
        self.browser["reply"] = "2"
        self.browser["typesms"] = "2"
        self.browser["produit"] = "1000"
        self.browser["destToKeep"] = listetel
        self.browser["NUMTEL"] = sender
        self.browser["autorize"] = "1"
        self.browser["msg"] = message.content.encode('utf-8')
        self.browser.submit()
# -*- coding: utf-8 -*-

# Copyright(C) 2010-2011 Nicolas Duhamel
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.

import re

from weboob.capabilities.messages import CantSendMessage
from weboob.tools.browser import BasePage

__all__ = ['ComposePage', 'ConfirmPage']

class ConfirmPage(BasePage):
    def on_loaded(self):
        pass

class ComposePage(BasePage):
    phone_regex = re.compile('^(\+33|0033|0)(6|7)(\d{8})$')

    def on_loaded(self):
        #Deal with bad encoding... for ie6 ...
        response = self.browser.response()
        response.set_data(response.get_data().decode('utf-8', 'ignore') )
        self.browser.set_response(response)

    def get_nb_remaining_free_sms(self):
        return "0"

    def post_message(self, message, sender):
        receiver = message.thread.id
        if self.phone_regex.match(receiver) is None:
            raise CantSendMessage(u'Invalid receiver: %s' % receiver)

        listetel = ",,"+ receiver

        #Fill the form
        self.browser.select_form(name="formulaire")
        self.browser.new_control("hidden", "autorize",{'value':''})
        self.browser.new_control("textarea", "msg", {'value':''})
        self.browser.set_all_readonly(False)

        self.browser["corpsms"] = message.content
        self.browser["pays"] = "33"
        self.browser["listetel"] = listetel
        self.browser["reply"] = "2"
        self.browser["typesms"] = "2"
        self.browser["produit"] = "1000"
        self.browser["destToKeep"] = listetel
        self.browser["NUMTEL"] = sender
        self.browser["autorize"] = "1"
        self.browser["msg"] = message.content.encode('utf-8')
        self.browser.submit()
agpl-3.0
Python
ca327b35c2e45329962da0dc04cfe2354ffd8b35
add lcm gl support to testDrakeVisualizer.py
gizatt/director,rdeits/director,empireryan/director,RobotLocomotion/director,edowson/director,openhumanoids/director,openhumanoids/director,mithrandir123/director,manuelli/director,RobotLocomotion/director,edowson/director,empireryan/director,mitdrc/director,empireryan/director,RussTedrake/director,gizatt/director,rdeits/director,mitdrc/director,rdeits/director,gizatt/director,mithrandir123/director,mithrandir123/director,rdeits/director,mitdrc/director,gizatt/director,RobotLocomotion/director,RobotLocomotion/director,edowson/director,empireryan/director,rdeits/director,manuelli/director,patmarion/director,edowson/director,mithrandir123/director,patmarion/director,RussTedrake/director,RussTedrake/director,openhumanoids/director,manuelli/director,edowson/director,patmarion/director,mitdrc/director,manuelli/director,RussTedrake/director,mitdrc/director,mithrandir123/director,empireryan/director,gizatt/director,patmarion/director,patmarion/director,RussTedrake/director,openhumanoids/director,openhumanoids/director,RobotLocomotion/director,manuelli/director
src/python/tests/testDrakeVisualizer.py
src/python/tests/testDrakeVisualizer.py
from ddapp.consoleapp import ConsoleApp
from ddapp.screengrabberpanel import ScreenGrabberPanel
from ddapp.drakevisualizer import DrakeVisualizer
from ddapp.lcmgl import LCMGLManager
from ddapp import objectmodel as om
from ddapp import applogic

from PythonQt import QtCore, QtGui

class DrakeVisualizerApp(ConsoleApp):

    def __init__(self):
        ConsoleApp.__init__(self)

        self.view = self.createView()

        self.mainWindow = QtGui.QMainWindow()
        self.mainWindow.setCentralWidget(self.view)
        self.mainWindow.resize(768 * (16/9.0), 768)
        self.mainWindow.setWindowTitle('Drake Visualizer')
        self.mainWindow.setWindowIcon(QtGui.QIcon(':/images/drake_logo.png'))

        self.drakeVisualizer = DrakeVisualizer(self.view)
        self.lcmglManager = LCMGLManager(self.view)

        self.screenGrabberPanel = ScreenGrabberPanel(self.view)
        self.screenGrabberDock = self.addWidgetToDock(self.screenGrabberPanel.widget, QtCore.Qt.RightDockWidgetArea)
        self.screenGrabberDock.setVisible(False)

        model = om.getDefaultObjectModel()
        model.getTreeWidget().setWindowTitle('Scene Browser')
        model.getPropertiesPanel().setWindowTitle('Properties Panel')

        self.sceneBrowserDock = self.addWidgetToDock(model.getTreeWidget(), QtCore.Qt.LeftDockWidgetArea)
        self.propertiesDock = self.addWidgetToDock(model.getPropertiesPanel(), QtCore.Qt.LeftDockWidgetArea)
        self.sceneBrowserDock.setVisible(False)
        self.propertiesDock.setVisible(False)

        applogic.addShortcut(self.mainWindow, 'Ctrl+Q', self.quit)
        applogic.addShortcut(self.mainWindow, 'F1', self.toggleObjectModel)
        applogic.addShortcut(self.mainWindow, 'F2', self.toggleScreenGrabber)

    def toggleObjectModel(self):
        self.sceneBrowserDock.setVisible(not self.sceneBrowserDock.visible)
        self.propertiesDock.setVisible(not self.propertiesDock.visible)

    def toggleScreenGrabber(self):
        self.screenGrabberDock.setVisible(not self.screenGrabberDock.visible)

    def addWidgetToDock(self, widget, dockArea):
        dock = QtGui.QDockWidget()
        dock.setWidget(widget)
        dock.setWindowTitle(widget.windowTitle)
        self.mainWindow.addDockWidget(dockArea, dock)
        return dock

def main():

    # use global so the variable is available in the python console
    global app
    app = DrakeVisualizerApp()
    app.setupGlobals(globals())
    app.mainWindow.show()
    app.start()

if __name__ == '__main__':
    main()
from ddapp.consoleapp import ConsoleApp
from ddapp.screengrabberpanel import ScreenGrabberPanel
from ddapp.drakevisualizer import DrakeVisualizer
from ddapp import objectmodel as om
from ddapp import applogic

from PythonQt import QtCore, QtGui

class DrakeVisualizerApp(ConsoleApp):

    def __init__(self):
        ConsoleApp.__init__(self)

        self.view = self.createView()

        self.mainWindow = QtGui.QMainWindow()
        self.mainWindow.setCentralWidget(self.view)
        self.mainWindow.resize(768 * (16/9.0), 768)
        self.mainWindow.setWindowTitle('Drake Visualizer')
        self.mainWindow.setWindowIcon(QtGui.QIcon(':/images/drake_logo.png'))

        self.drakeVisualizer = DrakeVisualizer(self.view)

        self.screenGrabberPanel = ScreenGrabberPanel(self.view)
        self.screenGrabberDock = self.addWidgetToDock(self.screenGrabberPanel.widget, QtCore.Qt.RightDockWidgetArea)
        self.screenGrabberDock.setVisible(False)

        model = om.getDefaultObjectModel()
        model.getTreeWidget().setWindowTitle('Scene Browser')
        model.getPropertiesPanel().setWindowTitle('Properties Panel')

        self.sceneBrowserDock = self.addWidgetToDock(model.getTreeWidget(), QtCore.Qt.LeftDockWidgetArea)
        self.propertiesDock = self.addWidgetToDock(model.getPropertiesPanel(), QtCore.Qt.LeftDockWidgetArea)
        self.sceneBrowserDock.setVisible(False)
        self.propertiesDock.setVisible(False)

        applogic.addShortcut(self.mainWindow, 'Ctrl+Q', self.quit)
        applogic.addShortcut(self.mainWindow, 'F1', self.toggleObjectModel)
        applogic.addShortcut(self.mainWindow, 'F2', self.toggleScreenGrabber)

    def toggleObjectModel(self):
        self.sceneBrowserDock.setVisible(not self.sceneBrowserDock.visible)
        self.propertiesDock.setVisible(not self.propertiesDock.visible)

    def toggleScreenGrabber(self):
        self.screenGrabberDock.setVisible(not self.screenGrabberDock.visible)

    def addWidgetToDock(self, widget, dockArea):
        dock = QtGui.QDockWidget()
        dock.setWidget(widget)
        dock.setWindowTitle(widget.windowTitle)
        self.mainWindow.addDockWidget(dockArea, dock)
        return dock

def main():

    # use global so the variable is available in the python console
    global app
    app = DrakeVisualizerApp()
    app.setupGlobals(globals())
    app.mainWindow.show()
    app.start()

if __name__ == '__main__':
    main()
bsd-3-clause
Python
38939635530223ef7d736c19c9c2d666c67baca4
fix file format generated by ffhlwiki.py
FreiFunkMuenster/ffmap-backend,freifunk-mwu/ffmap-backend,FreifunkBremen/ffmap-backend,FreifunkJena/ffmap-backend,ffac/ffmap-backend,FreifunkBremen/ffmap-backend,ff-kbu/ffmap-backend,freifunkhamburg/ffmap-backend,FreiFunkMuenster/ffmap-backend,freifunk-kiel/ffmap-backend,rubo77/ffmap-backend,freifunk-mwu/ffmap-backend,freifunk-fulda/ffmap-backend,ffnord/ffmap-backend,mweinelt/ffmap-backend,freifunkhamburg/ffmap-backend,ff-kbu/ffmap-backend,kpcyrd/ffmap-backend,FreifunkMD/ffmap-backend,ffnord/ffmap-backend,FreifunkMD/ffmap-backend
ffhlwiki.py
ffhlwiki.py
#!/usr/bin/env python3

import json
import argparse
from itertools import zip_longest
from urllib.request import urlopen
from bs4 import BeautifulSoup

def import_wikigps(url):
    def fetch_wikitable(url):
        f = urlopen(url)
        soup = BeautifulSoup(f)
        table = soup.find_all("table")[0]
        rows = table.find_all("tr")

        headers = []
        data = []

        def maybe_strip(x):
            if isinstance(x.string, str):
                return x.string.strip()
            else:
                return ""

        for row in rows:
            tds = list([maybe_strip(x) for x in row.find_all("td")])
            ths = list([maybe_strip(x) for x in row.find_all("th")])

            if any(tds):
                data.append(tds)

            if any(ths):
                headers = ths

        nodes = []
        for d in data:
            nodes.append(dict(zip(headers, d)))

        return nodes

    nodes = fetch_wikitable(url)

    aliases = {}
    for node in nodes:
        try:
            node['MAC'] = node['MAC'].split(',')
        except KeyError:
            pass

        try:
            node['GPS'] = node['GPS'].split(',')
        except KeyError:
            pass

        try:
            node['Knotenname'] = node['Knotenname'].split(',')
        except KeyError:
            pass

        nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])

        for data in nodes:
            alias = {}
            mac = data[0].strip()

            if data[1]:
                alias['gps'] = data[1].strip()

            if data[2]:
                alias['name'] = data[2].strip()

            aliases[mac] = alias

    return aliases

parser = argparse.ArgumentParser()
parser.add_argument('url', help='wiki URL')
args = parser.parse_args()
options = vars(args)

aliases = import_wikigps(options['url'])
print(json.dumps(aliases))
#!/usr/bin/env python3

import json
import argparse
from itertools import zip_longest
from urllib.request import urlopen
from bs4 import BeautifulSoup

def import_wikigps(url):
    def fetch_wikitable(url):
        f = urlopen(url)
        soup = BeautifulSoup(f)
        table = soup.find_all("table")[0]
        rows = table.find_all("tr")

        headers = []
        data = []

        def maybe_strip(x):
            if isinstance(x.string, str):
                return x.string.strip()
            else:
                return ""

        for row in rows:
            tds = list([maybe_strip(x) for x in row.find_all("td")])
            ths = list([maybe_strip(x) for x in row.find_all("th")])

            if any(tds):
                data.append(tds)

            if any(ths):
                headers = ths

        nodes = []
        for d in data:
            nodes.append(dict(zip(headers, d)))

        return nodes

    nodes = fetch_wikitable(url)

    aliases = []
    for node in nodes:
        try:
            node['MAC'] = node['MAC'].split(',')
        except KeyError:
            pass

        try:
            node['GPS'] = node['GPS'].split(',')
        except KeyError:
            pass

        try:
            node['Knotenname'] = node['Knotenname'].split(',')
        except KeyError:
            pass

        nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])

        for data in nodes:
            alias = {}
            alias['mac'] = data[0].strip()

            if data[1]:
                alias['gps'] = data[1].strip()

            if data[2]:
                alias['name'] = data[2].strip()

            aliases.append(alias)

    return aliases

parser = argparse.ArgumentParser()
parser.add_argument('url', help='wiki URL')
args = parser.parse_args()
options = vars(args)

aliases = import_wikigps(options['url'])
print(json.dumps(aliases))
bsd-3-clause
Python
47c76074e010107fb3bfe3fc0f74482058efac50
Add support for constructor keyword arguments (i.e. pass them through to FilesystemCollection).
kirkeby/sheared
src/sheared/web/collections/entwined.py
src/sheared/web/collections/entwined.py
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import warnings

from entwine import entwine

from sheared.python import io
from sheared.python import log
from sheared.web.collections.filesystem import *

def entwined_handler(request, reply, collection, walker):
    if walker.root.endswith(collection.template_ext):
        templates = [walker.root]
        templates.extend(collection.page_templates)

        ctx = {}
        for i in range(len(templates)):
            last = i == len(templates) - 1
            r = entwine(io.readfile(templates[i]), ctx)
            if (not last) and r.strip():
                warnings.warn('ignored generated content from %s' % template,
                              UserWarning, stacklevel=2)

        reply.headers.setHeader('Content-Type', 'text/html')
        reply.headers.setHeader('Content-Length', len(r))
        reply.send(r)

    else:
        return normal_handler(request, reply, collection, walker)

class EntwinedCollection(FilesystemCollection):
    def __init__(self, page_templates, *a, **kw):
        FilesystemCollection.__init__(self, *a, **kw)
        self.page_templates = page_templates
        self.normal_handler = entwined_handler
        self.template_ext = '.html'
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003 Sune Kirkeby <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import warnings

from entwine import entwine

from sheared.python import io
from sheared.python import log
from sheared.web.collections.filesystem import *

def entwined_handler(request, reply, collection, walker):
    if walker.root.endswith(collection.template_ext):
        templates = [walker.root]
        templates.extend(collection.page_templates)

        ctx = {}
        for i in range(len(templates)):
            last = i == len(templates) - 1
            r = entwine(io.readfile(templates[i]), ctx)
            if (not last) and r.strip():
                warnings.warn('ignored generated content from %s' % template,
                              UserWarning, stacklevel=2)

        reply.headers.setHeader('Content-Type', 'text/html')
        reply.headers.setHeader('Content-Length', len(r))
        reply.send(r)

    else:
        return normal_handler(request, reply, collection, walker)

class EntwinedCollection(FilesystemCollection):
    def __init__(self, page_templates, *a):
        FilesystemCollection.__init__(self, *a)
        self.page_templates = page_templates
        self.normal_handler = entwined_handler
        self.template_ext = '.html'
mit
Python
6fcce1bcecb15000c671c706588d6fd0d92145e5
Add windbg info to header
snare/voltron,snare/voltron,snare/voltron,snare/voltron
voltron/entry.py
voltron/entry.py
""" This is the main entry point for Voltron from the debugger host's perspective. This file is loaded into the debugger through whatever means the given host supports. LLDB: (lldb) command script import /path/to/voltron/entry.py GDB: (gdb) source /path/to/voltron/entry.py VDB: (vdb) script /path/to/voltron/entry.py WinDbg/CDB (via PyKD): > .load pykd.pyd > !py --global C:\path\to\voltron\entry.py """ log = None try: import logging import os import sys blessed = None import blessed import voltron from voltron.plugin import pm from voltron.core import Server log = voltron.setup_logging('debugger') # figure out in which debugger host we are running try: import lldb host = "lldb" except ImportError: pass try: import gdb host = "gdb" except ImportError: pass if "vtrace" in locals(): host = "vdb" try: import pykd host = "windbg" except: pass if not host: raise Exception("No debugger host is present") # register any plugins that were loaded pm.register_plugins() # get the debugger plugin for the host we're in plugin = pm.debugger_plugin_for_host(host) # set up command and adaptor instances voltron.debugger = plugin.adaptor_class() voltron.command = plugin.command_class() # create and start the voltron server voltron.server = Server() if host != "gdb": voltron.server.start() print(blessed.Terminal().bold_red("Voltron loaded.")) if host == 'lldb': print("Run `voltron init` after you load a target.") except Exception as e: import traceback msg = "An error occurred while loading Voltron:\n\n{}".format(traceback.format_exc()) if blessed: msg = blessed.Terminal().bold_red(msg) if log: log.exception("Exception raised while loading Voltron") print(msg)
""" This is the main entry point for Voltron from the debugger host's perspective. This file is loaded into the debugger through whatever means the given host supports. In LLDB: (lldb) command script import /path/to/voltron/entry.py In GDB: (gdb) source /path/to/voltron/entry.py In VDB: (vdb) script /path/to/voltron/entry.py """ log = None try: import logging import os import sys blessed = None import blessed import voltron from voltron.plugin import pm from voltron.core import Server log = voltron.setup_logging('debugger') # figure out in which debugger host we are running try: import lldb host = "lldb" except ImportError: pass try: import gdb host = "gdb" except ImportError: pass if "vtrace" in locals(): host = "vdb" try: import pykd host = "windbg" except: pass if not host: raise Exception("No debugger host is present") # register any plugins that were loaded pm.register_plugins() # get the debugger plugin for the host we're in plugin = pm.debugger_plugin_for_host(host) # set up command and adaptor instances voltron.debugger = plugin.adaptor_class() voltron.command = plugin.command_class() # create and start the voltron server voltron.server = Server() if host != "gdb": voltron.server.start() print(blessed.Terminal().bold_red("Voltron loaded.")) if host == 'lldb': print("Run `voltron init` after you load a target.") except Exception as e: import traceback msg = "An error occurred while loading Voltron:\n\n{}".format(traceback.format_exc()) if blessed: msg = blessed.Terminal().bold_red(msg) if log: log.exception("Exception raised while loading Voltron") print(msg)
mit
Python
6aadcd9739e6cbee01164fccae56a37f6130455c
Add CLI for yatsm map; TODO port script
valpasq/yatsm,valpasq/yatsm,c11/yatsm,c11/yatsm,ceholden/yatsm,ceholden/yatsm
yatsm/cli/map.py
yatsm/cli/map.py
""" Command line interface for creating maps of YATSM algorithm output """ import datetime as dt import logging import os import re import click import numpy as np from osgeo import gdal import patsy from yatsm.cli.cli import (cli, date_arg, date_format_opt, rootdir_opt, resultdir_opt, exampleimg_opt) from yatsm.utils import find_results, iter_records, write_output from yatsm.regression import design_to_indices, design_coefs from yatsm.regression.transforms import harm gdal.AllRegister() gdal.UseExceptions() logger = logging.getLogger('yatsm') # QA/QC values for segment types _intersect_qa = 3 _after_qa = 2 _before_qa = 1 # Filters for results _result_record = 'yatsm_r*' # number of days in year _days = 365.25 w = 2 * np.pi / _days WARN_ON_EMPTY = False @cli.command(short_help='Make map of YATSM output for a given date') @click.argument('type', metavar='<type>', type=click.Choice(['coef', 'predict', 'class', 'pheno'])) @date_arg @click.argument('output', metavar='<output>', type=click.Path(writable=True, dir_okay=False, resolve_path=True)) @rootdir_opt @resultdir_opt @exampleimg_opt @date_format_opt @click.option('--warn-on-empty', is_flag=True, help='Warn user when reading in empty results files') @click.option('--after', is_flag=True, help='Use time segment after <date> if needed for map') @click.option('--before', is_flag=True, help='Use time segment before <date> if needed for map') @click.option('--qa', is_flag=True, help='Add QA band identifying segment type') @click.option('--predict-proba', 'predict_proba', is_flag=True, help='Include prediction probability band (scaled by 10,000)') @click.option('--band', '-b', multiple=True, metavar='<band>', help='Bands to export for coefficient/prediction maps') @click.option('--robust', is_flag=True, help='Use robust results for coefficient/prediction maps') @click.option('--coef', '-c', multiple=True, metavar='<coef>', help='Coefficients to export for coefficient maps') @click.pass_context def map(ctx, type, date, output, root, result, image, date_frmt, warn_on_empty, after, before, qa, predict_proba, band, robust, coef): """ Map types: coef, predict, class, pheno Map QA flags: - 1 => before - 2 => after - 3 => intersect Examples: > yatsm map --coef "intercept, slope" --band "3, 4, 5" --ndv -9999 coef ... 2000-01-01 coef_map.gtif > yatsm map --date "%Y-%j" predict 2000-001 prediction.gtif > yatsm map --result "YATSM_new" --after class 2000-01-01 LCmap.gtif Notes: - Image predictions will not use categorical information in timeseries models. """ raise NotImplementedError('CLI in place; TODO actual script')
""" Command line interface for creating maps of YATSM algorithm output """ from datetime import datetime as dt import logging import os import click import numpy as np from yatsm.cli.cli import cli logger = logging.getLogger('yatsm') @cli.command(short_help='Make map of YATSM output for a given date') @click.pass_context def map(ctx): """ Examples: > yatsm_map.py --coef "intercept, slope" --band "3, 4, 5" --ndv -9999 coef ... 2000-01-01 coef_map.gtif > yatsm_map.py --date "%Y-%j" predict 2000-001 prediction.gtif > yatsm_map.py --result "YATSM_new" --after class 2000-01-01 LCmap.gtif Notes: - Image predictions will not use categorical information in timeseries models. """ raise NotImplementedError('TODO')
mit
Python
3275827ef5578142e07747f9feacc4f47fc22006
Update factorial.py
mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview,mre/the-coding-interview
problems/factorial/factorial.py
problems/factorial/factorial.py
# Recursive factorial
def fac(n):
    return 1 if n == 1 else n * fac(n-1)

print(fac(3))   # 6
print(fac(33))  # 8683317618811886495518194401280000000

# Iterative factorial
def fac(n):
    res = i = 1
    while i <= n:
        res *= i
        i += 1
    return res

print(fac(3))   # 6
print(fac(33))  # 8683317618811886495518194401280000000
def fac(n):
    return 1 if n == 1 else n * fac(n-1)

print fac(3)
print fac(33)
mit
Python
b250cfacdb45d85bf6ef7f0a1f28b89935c24b9b
Update settings.py
shivamsupr/django-jinja2-globals
project-name/my_app/settings.py
project-name/my_app/settings.py
# Snippets from Actual Settings.py

TEMPLATES = [
    {
        'BACKEND': 'django_jinja.backend.Jinja2',
        "DIRS": ["PROJECT_ROOT_DIRECTORY", "..."],
        'APP_DIRS': True,
        'OPTIONS': {
            'match_extension': '.html',
            'context_processors': [
                'django.template.context_processors.request',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz'
            ],
            'globals': {
            },
            'extensions': DEFAULT_EXTENSIONS + [
                'pipeline.templatetags.ext.PipelineExtension',
            ],
        },
    },
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True
    },
]

# Auto Register Template Globals
_template_globals = {}
for object_name in dir(app_template_globals):
    _obj = getattr(app_template_globals, object_name)
    if callable(_obj) and not object_name.startswith('__'):
        _template_globals[object_name] = _obj.__module__ + '.' + _obj.__qualname__

TEMPLATES[0]['OPTIONS']['globals'].update(_template_globals)
# Snippets from Actual Settings.py

TEMPLATES = [
    {
        'BACKEND': 'django_jinja.backend.Jinja2',
        "DIRS": "PROJECT_ROOT_DIRECTORY",
        'APP_DIRS': True,
        'OPTIONS': {
            'match_extension': '.html',
            'context_processors': [
                'django.template.context_processors.request',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz'
            ],
            'globals': {
            },
            'extensions': DEFAULT_EXTENSIONS + [
                'pipeline.templatetags.ext.PipelineExtension',
            ],
        },
    },
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True
    },
]

# Auto Register Template Globals
_template_globals = {}
for object_name in dir(app_template_globals):
    _obj = getattr(app_template_globals, object_name)
    if callable(_obj) and not object_name.startswith('__'):
        _template_globals[object_name] = _obj.__module__ + '.' + _obj.__qualname__

TEMPLATES[0]['OPTIONS']['globals'].update(_template_globals)
mit
Python
94e85fb24a9b2c327094b880e05251ffb00c1335
Add urls for list by topic and by location
jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
bills/urls.py
bills/urls.py
from . import views
from django.conf.urls import url

urlpatterns = [
    url(r'^by_topic/', views.bill_list_by_topic),
    url(r'^by_location', views.bill_list_by_location),
    url(r'^latest_activity/', views.latest_bill_activity),
    url(r'^latest/', views.latest_bill_actions),
    url(r'^detail/(?P<bill_id>(.*))/$', views.bill_detail, name='bill_detail'),
]
from . import views
from django.conf.urls import url

urlpatterns = [
    url(r'^list/', views.bill_list),
    url(r'^latest_activity/', views.latest_bill_activity),
    url(r'^latest/', views.latest_bill_actions),
    url(r'^detail/(?P<bill_id>(.*))/$', views.bill_detail, name='bill_detail'),
]
mit
Python
1bc7937bf0c4c65996e586aef997250869bf5ed1
Use python from env.
AtomLinter/linter-pylama
bin/pylama.py
bin/pylama.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import sys
import os

from pylama.main import shell

if __name__ == '__main__':
    try:
        virtual_env = os.environ.get('VIRTUAL_ENV', '')
        activate_this = os.path.join(virtual_env, 'bin', 'activate_this.py')
        with open(activate_this) as f:
            exec(f.read(), dict(__file__=activate_this))
    except IOError:
        pass

    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(shell())
#!/usr/bin/python
# -*- coding: utf-8 -*-
import re
import sys
import os

from pylama.main import shell

if __name__ == '__main__':
    try:
        virtual_env = os.environ.get('VIRTUAL_ENV', '')
        activate_this = os.path.join(virtual_env, 'bin', 'activate_this.py')
        with open(activate_this) as f:
            exec(f.read(), dict(__file__=activate_this))
    except IOError:
        pass

    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(shell())
mit
Python
b2bc2f50c9866e758c242a6c8b57a86153cc418a
bump version
vmalloc/confetti,Infinidat/infi.conf
infi/conf/__version__.py
infi/conf/__version__.py
__version__ = "0.0.11"
__version__ = "0.0.10"
bsd-3-clause
Python
583a6319230b89a5f19c26e5bab83e28a5a4792e
Fix the bug: there is an error (cannot concatenate str and int objects) when the user does not specify the inputs.
ricardogsilva/PyWPS,jonas-eberle/pywps,geopython/pywps,ldesousa/PyWPS,bird-house/PyWPS,jachym/PyWPS,tomkralidis/pywps
pywps/processes/dummyprocess.py
pywps/processes/dummyprocess.py
""" DummyProcess to check the WPS structure Author: Jorge de Jesus ([email protected]) as suggested by Kor de Jong """ from pywps.Process import WPSProcess import types class Process(WPSProcess): def __init__(self): # init process WPSProcess.__init__(self, identifier = "dummyprocess", # must be same, as filename title="Dummy Process", version = "0.1", storeSupported = "true", statusSupported = "true", abstract="The Dummy process is used for testing the WPS structure. The process will accept 2 input numbers and will return the XML result with an add one and subtract one operation", grassLocation =False) self.Input1 = self.addLiteralInput(identifier = "input1", title = "Input1 number", type=types.IntType, default="100") self.Input2= self.addLiteralInput(identifier="input2", title="Input2 number", type=types.IntType, default="200") self.Output1=self.addLiteralOutput(identifier="output1", title="Output1 add 1 result") self.Output2=self.addLiteralOutput(identifier="output2",title="Output2 subtract 1 result" ) def execute(self): self.Output1.setValue(int(self.Input1.getValue())+1) self.Output2.setValue(int(self.Input1.getValue())-1) return
""" DummyProcess to check the WPS structure Author: Jorge de Jesus ([email protected]) as suggested by Kor de Jong """ from pywps.Process import WPSProcess class Process(WPSProcess): def __init__(self): # init process WPSProcess.__init__(self, identifier = "dummyprocess", # must be same, as filename title="Dummy Process", version = "0.1", storeSupported = "true", statusSupported = "true", abstract="The Dummy process is used for testing the WPS structure. The process will accept 2 input numbers and will return the XML result with an add one and subtract one operation", grassLocation =False) self.Input1 = self.addLiteralInput(identifier = "input1", title = "Input1 number", default="100") self.Input2= self.addLiteralInput(identifier="input2", title="Input2 number", default="200") self.Output1=self.addLiteralOutput(identifier="output1", title="Output1 add 1 result") self.Output2=self.addLiteralOutput(identifier="output2",title="Output2 subtract 1 result" ) def execute(self): self.Output1.setValue(self.Input1.getValue()+1) self.Output2.setValue(self.Input1.getValue()-1) return
mit
Python
cc7ffbe88b7b71b32e036be6080f03a353fdbafe
Revert to using get_task_logger
caktus/rapidsms,peterayeni/rapidsms,ehealthafrica-ci/rapidsms,lsgunth/rapidsms,lsgunth/rapidsms,peterayeni/rapidsms,caktus/rapidsms,lsgunth/rapidsms,peterayeni/rapidsms,caktus/rapidsms,peterayeni/rapidsms,catalpainternational/rapidsms,eHealthAfrica/rapidsms,ehealthafrica-ci/rapidsms,eHealthAfrica/rapidsms,eHealthAfrica/rapidsms,catalpainternational/rapidsms,catalpainternational/rapidsms,ehealthafrica-ci/rapidsms,catalpainternational/rapidsms,lsgunth/rapidsms
rapidsms/router/celery/tasks.py
rapidsms/router/celery/tasks.py
import celery

from celery.utils.log import get_task_logger

from rapidsms.errors import MessageSendingError

logger = get_task_logger(__name__)


@celery.task
def receive_async(text, connection_id, message_id, fields):
    """Task used to send inbound message through router phases."""
    from rapidsms.models import Connection
    from rapidsms.router import get_router
    logger.debug('receive_async: %s' % text)
    router = get_router()
    # reconstruct incoming message
    connection = Connection.objects.select_related().get(pk=connection_id)
    message = router.new_incoming_message(text=text, connections=[connection],
                                          id_=message_id, fields=fields)
    try:
        # call process_incoming directly to skip receive_incoming
        router.process_incoming(message)
    except Exception:
        logger.exception("Exception processing incoming message")
        raise


@celery.task
def send_async(backend_name, id_, text, identities, context):
    """Task used to send outgoing messages to backends."""
    logger.debug('send_async: %s' % text)
    from rapidsms.router import get_router
    router = get_router()
    try:
        router.send_to_backend(backend_name=backend_name, id_=id_, text=text,
                               identities=identities, context=context)
    except MessageSendingError:
        # This exception has already been logged in send_to_backend.
        # We'll simply pass here and not re-raise or log the exception again.
        pass
import celery
import logging

from rapidsms.errors import MessageSendingError

logger = logging.getLogger(__name__)


@celery.task
def receive_async(text, connection_id, message_id, fields):
    """Task used to send inbound message through router phases."""
    from rapidsms.models import Connection
    from rapidsms.router import get_router
    logger.debug('receive_async: %s' % text)
    router = get_router()
    # reconstruct incoming message
    connection = Connection.objects.select_related().get(pk=connection_id)
    message = router.new_incoming_message(text=text, connections=[connection],
                                          id_=message_id, fields=fields)
    try:
        # call process_incoming directly to skip receive_incoming
        router.process_incoming(message)
    except Exception:
        logger.exception("Exception processing incoming message")
        raise


@celery.task
def send_async(backend_name, id_, text, identities, context):
    """Task used to send outgoing messages to backends."""
    logger.debug('send_async: %s' % text)
    from rapidsms.router import get_router
    router = get_router()
    try:
        router.send_to_backend(backend_name=backend_name, id_=id_, text=text,
                               identities=identities, context=context)
    except MessageSendingError:
        # This exception has already been logged in send_to_backend.
        # We'll simply pass here and not re-raise or log the exception again.
        pass
bsd-3-clause
Python
83f2fe37c6eda993d6b9e2cf2d187646a366f6d8
Make timer daemon
ollien/playserver,ollien/playserver,ollien/playserver
playserver/trackchecker.py
playserver/trackchecker.py
from threading import Timer

from . import track

_listeners = []

class TrackChecker():
    def __init__(self, interval = 5):
        self.listeners = []
        self.CHECK_INTERVAL = interval
        self.currentSong = ""
        self.currentArtist = ""
        self.currentAlbum = ""
        self.timer = None

    def checkSong(self):
        song = track.getCurrentSong()
        artist = track.getCurrentArtist()
        album = track.getCurrentAlbum()
        if (song != self.currentSong or artist != self.currentArtist or album != self.currentAlbum):
            self.currentSong = song
            self.currentArtist = artist
            self.currentAlbum = album
            self._callListeners()
        if self.timer != None:
            self.startTimer()

    def registerListener(self, function):
        _listeners.append(function)

    def _callListeners(self):
        for listener in _listeners:
            listener()

    def startTimer(self):
        self.timer = Timer(self.CHECK_INTERVAL, self.checkSong)
        timer.daemon = True
        self.timer.start()

    def cancelTimer(self):
        self.timer.cancel()
        self.timer = None
from threading import Timer

from . import track

_listeners = []

class TrackChecker():
    def __init__(self, interval = 5):
        self.listeners = []
        self.CHECK_INTERVAL = interval
        self.currentSong = ""
        self.currentArtist = ""
        self.currentAlbum = ""
        self.timer = None

    def checkSong(self):
        song = track.getCurrentSong()
        artist = track.getCurrentArtist()
        album = track.getCurrentAlbum()
        if (song != self.currentSong or artist != self.currentArtist or album != self.currentAlbum):
            self.currentSong = song
            self.currentArtist = artist
            self.currentAlbum = album
            self._callListeners()
        if self.timer != None:
            self.startTimer()

    def registerListener(self, function):
        _listeners.append(function)

    def _callListeners(self):
        for listener in _listeners:
            listener()

    def startTimer(self):
        self.timer = Timer(self.CHECK_INTERVAL, self.checkSong)
        self.timer.start()

    def cancelTimer(self):
        self.timer.cancel()
        self.timer = None
mit
Python
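The record above marks the polling thread as a daemon so it cannot keep the process alive after the main thread exits (the new_contents originally wrote `timer.daemon` instead of `self.timer.daemon`, a NameError, fixed above). A minimal standalone sketch of the pattern; the names here are illustrative, not the project's:

```python
from threading import Timer
import time

def tick():
    print("checked current track")

timer = Timer(5.0, tick)   # Timer is a Thread subclass
timer.daemon = True        # daemon threads do not block interpreter exit
timer.start()
time.sleep(6)              # without this, the process may exit before tick fires
```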
0d3082f46f0ffccaca10d3f53f22e6403783d874
change the range of the mean transmittance plot.
yishayv/lyacorr,yishayv/lyacorr
plot_mean_transmittance.py
plot_mean_transmittance.py
import matplotlib.pyplot as plt import common_settings import mean_flux lya_center = 1215.67 settings = common_settings.Settings() m = mean_flux.MeanFlux.from_file(settings.get_mean_transmittance_npy()) fig = plt.figure() ax1 = fig.add_subplot(2, 1, 1) ax2 = ax1.twiny() ax1.plot(m.ar_z, m.get_weighted_mean()) # plt.plot(ar_z_range, np.ones(m.size)) ax1.set_ylabel(r"$\left< f_q(z)/C_q(z) \right> $") plt.ylim(0.0, 1.2) # add wavelength tick marks on top x_lim2 = tuple([lya_center * (1 + z) for z in ax1.get_xlim()]) ax2.set_xlim(x_lim2) plt.axis() ax3 = fig.add_subplot(2, 1, 2) ax3.plot(m.ar_z, m.ar_weights) ax3.plot(m.ar_z, m.ar_total_flux) ax3.set_xlim(ax1.get_xlim()) ax3.set_ylabel(r"$\sum_q f_q(z)/C_q(z)$") ax3.set_xlabel(r"$z$") plt.show()
import matplotlib.pyplot as plt import common_settings import mean_flux lya_center = 1215.67 settings = common_settings.Settings() m = mean_flux.MeanFlux.from_file(settings.get_mean_transmittance_npy()) fig = plt.figure() ax1 = fig.add_subplot(2, 1, 1) ax2 = ax1.twiny() ax1.plot(m.ar_z, m.get_weighted_mean()) # plt.plot(ar_z_range, np.ones(m.size)) ax1.set_ylabel(r"$\left< f_q(z)/C_q(z) \right> $") plt.ylim(0.5, 1.5) # add wavelength tick marks on top x_lim2 = tuple([lya_center * (1 + z) for z in ax1.get_xlim()]) ax2.set_xlim(x_lim2) plt.axis() ax3 = fig.add_subplot(2, 1, 2) ax3.plot(m.ar_z, m.ar_weights) ax3.plot(m.ar_z, m.ar_total_flux) ax3.set_xlim(ax1.get_xlim()) ax3.set_ylabel(r"$\sum_q f_q(z)/C_q(z)$") ax3.set_xlabel(r"$z$") plt.show()
mit
Python
7e8f8b7ba96ade849eaed239751ef3d00c57d0bd
Update plots_digits_classification.py
tomlof/scikit-learn,altairpearl/scikit-learn,hlin117/scikit-learn,MatthieuBizien/scikit-learn,waterponey/scikit-learn,hlin117/scikit-learn,mikebenfield/scikit-learn,meduz/scikit-learn,aflaxman/scikit-learn,btabibian/scikit-learn,nikitasingh981/scikit-learn,shyamalschandra/scikit-learn,Sentient07/scikit-learn,YinongLong/scikit-learn,MechCoder/scikit-learn,wlamond/scikit-learn,potash/scikit-learn,BiaDarkia/scikit-learn,mattilyra/scikit-learn,meduz/scikit-learn,waterponey/scikit-learn,nikitasingh981/scikit-learn,zihua/scikit-learn,HolgerPeters/scikit-learn,MatthieuBizien/scikit-learn,jakirkham/scikit-learn,xuewei4d/scikit-learn,olologin/scikit-learn,mjudsp/Tsallis,Vimos/scikit-learn,moutai/scikit-learn,mikebenfield/scikit-learn,btabibian/scikit-learn,ldirer/scikit-learn,sergeyf/scikit-learn,aabadie/scikit-learn,sanketloke/scikit-learn,dsquareindia/scikit-learn,zuku1985/scikit-learn,betatim/scikit-learn,ivannz/scikit-learn,amueller/scikit-learn,devanshdalal/scikit-learn,RPGOne/scikit-learn,Titan-C/scikit-learn,zorroblue/scikit-learn,xyguo/scikit-learn,tomlof/scikit-learn,saiwing-yeung/scikit-learn,bthirion/scikit-learn,ldirer/scikit-learn,themrmax/scikit-learn,HolgerPeters/scikit-learn,tomlof/scikit-learn,JPFrancoia/scikit-learn,pratapvardhan/scikit-learn,ogrisel/scikit-learn,btabibian/scikit-learn,TomDLT/scikit-learn,PatrickOReilly/scikit-learn,bnaul/scikit-learn,ClimbsRocks/scikit-learn,scikit-learn/scikit-learn,rishikksh20/scikit-learn,chrsrds/scikit-learn,raghavrv/scikit-learn,manhhomienbienthuy/scikit-learn,fabioticconi/scikit-learn,Akshay0724/scikit-learn,rvraghav93/scikit-learn,vortex-ape/scikit-learn,bthirion/scikit-learn,Sentient07/scikit-learn,imaculate/scikit-learn,equialgo/scikit-learn,wlamond/scikit-learn,anntzer/scikit-learn,toastedcornflakes/scikit-learn,ogrisel/scikit-learn,TomDLT/scikit-learn,sanketloke/scikit-learn,wazeerzulfikar/scikit-learn,IshankGulati/scikit-learn,sonnyhu/scikit-learn,alexeyum/scikit-learn,joernhees/scikit-learn,jblackburne/scikit-learn,bthirion/scikit-learn,JPFrancoia/scikit-learn,aabadie/scikit-learn,sergeyf/scikit-learn,espg/scikit-learn,clemkoa/scikit-learn,MechCoder/scikit-learn,wlamond/scikit-learn,jakirkham/scikit-learn,jakobworldpeace/scikit-learn,espg/scikit-learn,sergeyf/scikit-learn,bnaul/scikit-learn,glennq/scikit-learn,giorgiop/scikit-learn,sergeyf/scikit-learn,MechCoder/scikit-learn,nelson-liu/scikit-learn,betatim/scikit-learn,scikit-learn/scikit-learn,shyamalschandra/scikit-learn,tomlof/scikit-learn,aflaxman/scikit-learn,RomainBrault/scikit-learn,sanketloke/scikit-learn,alexeyum/scikit-learn,chrsrds/scikit-learn,Titan-C/scikit-learn,anntzer/scikit-learn,hrjn/scikit-learn,nelson-liu/scikit-learn,xyguo/scikit-learn,ndingwall/scikit-learn,hlin117/scikit-learn,joernhees/scikit-learn,betatim/scikit-learn,saiwing-yeung/scikit-learn,waterponey/scikit-learn,sanketloke/scikit-learn,saiwing-yeung/scikit-learn,herilalaina/scikit-learn,pianomania/scikit-learn,mikebenfield/scikit-learn,PatrickOReilly/scikit-learn,IshankGulati/scikit-learn,nhejazi/scikit-learn,chrsrds/scikit-learn,vortex-ape/scikit-learn,IshankGulati/scikit-learn,hrjn/scikit-learn,huzq/scikit-learn,fabioticconi/scikit-learn,moutai/scikit-learn,lesteve/scikit-learn,sonnyhu/scikit-learn,kevin-intel/scikit-learn,glemaitre/scikit-learn,fabioticconi/scikit-learn,RomainBrault/scikit-learn,alexsavio/scikit-learn,jaidevd/scikit-learn,manhhomienbienthuy/scikit-learn,Sentient07/scikit-learn,JPFrancoia/scikit-learn,HolgerPeters/scikit-learn,potash/scikit-learn,IshankGulati/scikit-learn,fabioticconi/
scikit-learn,RomainBrault/scikit-learn,shyamalschandra/scikit-learn,TomDLT/scikit-learn,glennq/scikit-learn,jakobworldpeace/scikit-learn,huzq/scikit-learn,ndingwall/scikit-learn,mattilyra/scikit-learn,amueller/scikit-learn,scikit-learn/scikit-learn,herilalaina/scikit-learn,pratapvardhan/scikit-learn,RPGOne/scikit-learn,Vimos/scikit-learn,amueller/scikit-learn,Akshay0724/scikit-learn,Titan-C/scikit-learn,Vimos/scikit-learn,nikitasingh981/scikit-learn,meduz/scikit-learn,ldirer/scikit-learn,toastedcornflakes/scikit-learn,manhhomienbienthuy/scikit-learn,mattilyra/scikit-learn,anntzer/scikit-learn,zihua/scikit-learn,chrsrds/scikit-learn,lesteve/scikit-learn,aabadie/scikit-learn,nelson-liu/scikit-learn,nelson-liu/scikit-learn,altairpearl/scikit-learn,jakirkham/scikit-learn,equialgo/scikit-learn,zorroblue/scikit-learn,mikebenfield/scikit-learn,amueller/scikit-learn,JPFrancoia/scikit-learn,pianomania/scikit-learn,equialgo/scikit-learn,clemkoa/scikit-learn,vinayak-mehta/scikit-learn,ndingwall/scikit-learn,pianomania/scikit-learn,alexsavio/scikit-learn,RPGOne/scikit-learn,manhhomienbienthuy/scikit-learn,vinayak-mehta/scikit-learn,alexeyum/scikit-learn,espg/scikit-learn,themrmax/scikit-learn,YinongLong/scikit-learn,nhejazi/scikit-learn,scikit-learn/scikit-learn,rvraghav93/scikit-learn,ogrisel/scikit-learn,altairpearl/scikit-learn,toastedcornflakes/scikit-learn,vortex-ape/scikit-learn,dsquareindia/scikit-learn,aflaxman/scikit-learn,jakobworldpeace/scikit-learn,joshloyal/scikit-learn,devanshdalal/scikit-learn,joshloyal/scikit-learn,herilalaina/scikit-learn,AlexandreAbraham/scikit-learn,olologin/scikit-learn,aabadie/scikit-learn,zihua/scikit-learn,olologin/scikit-learn,hrjn/scikit-learn,olologin/scikit-learn,AlexandreAbraham/scikit-learn,devanshdalal/scikit-learn,wazeerzulfikar/scikit-learn,joernhees/scikit-learn,giorgiop/scikit-learn,rishikksh20/scikit-learn,vinayak-mehta/scikit-learn,equialgo/scikit-learn,rishikksh20/scikit-learn,MatthieuBizien/scikit-learn,pratapvardhan/scikit-learn,joernhees/scikit-learn,devanshdalal/scikit-learn,ldirer/scikit-learn,imaculate/scikit-learn,huzq/scikit-learn,sonnyhu/scikit-learn,rvraghav93/scikit-learn,pianomania/scikit-learn,lesteve/scikit-learn,RPGOne/scikit-learn,zuku1985/scikit-learn,dsquareindia/scikit-learn,nhejazi/scikit-learn,AlexandreAbraham/scikit-learn,nikitasingh981/scikit-learn,mjudsp/Tsallis,PatrickOReilly/scikit-learn,zorroblue/scikit-learn,meduz/scikit-learn,PatrickOReilly/scikit-learn,giorgiop/scikit-learn,waterponey/scikit-learn,ndingwall/scikit-learn,ClimbsRocks/scikit-learn,joshloyal/scikit-learn,ivannz/scikit-learn,jblackburne/scikit-learn,jaidevd/scikit-learn,Akshay0724/scikit-learn,vortex-ape/scikit-learn,xuewei4d/scikit-learn,YinongLong/scikit-learn,moutai/scikit-learn,rishikksh20/scikit-learn,xuewei4d/scikit-learn,jakobworldpeace/scikit-learn,anntzer/scikit-learn,bnaul/scikit-learn,glennq/scikit-learn,raghavrv/scikit-learn,Sentient07/scikit-learn,betatim/scikit-learn,alexeyum/scikit-learn,wlamond/scikit-learn,lesteve/scikit-learn,raghavrv/scikit-learn,wazeerzulfikar/scikit-learn,alexsavio/scikit-learn,mattilyra/scikit-learn,moutai/scikit-learn,herilalaina/scikit-learn,TomDLT/scikit-learn,hrjn/scikit-learn,bnaul/scikit-learn,alexsavio/scikit-learn,zihua/scikit-learn,glemaitre/scikit-learn,kevin-intel/scikit-learn,mjudsp/Tsallis,Akshay0724/scikit-learn,imaculate/scikit-learn,ivannz/scikit-learn,zuku1985/scikit-learn,xyguo/scikit-learn,btabibian/scikit-learn,aflaxman/scikit-learn,wazeerzulfikar/scikit-learn,shyamalschandra/scikit-learn,MatthieuBiz
ien/scikit-learn,zorroblue/scikit-learn,kevin-intel/scikit-learn,Vimos/scikit-learn,jblackburne/scikit-learn,jblackburne/scikit-learn,vinayak-mehta/scikit-learn,hlin117/scikit-learn,glemaitre/scikit-learn,MechCoder/scikit-learn,glemaitre/scikit-learn,giorgiop/scikit-learn,mjudsp/Tsallis,potash/scikit-learn,raghavrv/scikit-learn,nhejazi/scikit-learn,HolgerPeters/scikit-learn,zuku1985/scikit-learn,bthirion/scikit-learn,themrmax/scikit-learn,RomainBrault/scikit-learn,rvraghav93/scikit-learn,jaidevd/scikit-learn,clemkoa/scikit-learn,potash/scikit-learn,mjudsp/Tsallis,kevin-intel/scikit-learn,espg/scikit-learn,BiaDarkia/scikit-learn,BiaDarkia/scikit-learn,dsquareindia/scikit-learn,imaculate/scikit-learn,jakirkham/scikit-learn,toastedcornflakes/scikit-learn,mattilyra/scikit-learn,pratapvardhan/scikit-learn,xyguo/scikit-learn,clemkoa/scikit-learn,joshloyal/scikit-learn,jaidevd/scikit-learn,xuewei4d/scikit-learn,BiaDarkia/scikit-learn,saiwing-yeung/scikit-learn,Titan-C/scikit-learn,ClimbsRocks/scikit-learn,huzq/scikit-learn,themrmax/scikit-learn,altairpearl/scikit-learn,sonnyhu/scikit-learn,YinongLong/scikit-learn,ogrisel/scikit-learn,ClimbsRocks/scikit-learn,AlexandreAbraham/scikit-learn,ivannz/scikit-learn,glennq/scikit-learn
examples/classification/plot_digits_classification.py
examples/classification/plot_digits_classification.py
""" ================================ Recognizing hand-written digits ================================ An example showing how the scikit-learn can be used to recognize images of hand-written digits. This example is commented in the :ref:`tutorial section of the user manual <introduction>`. """ print(__doc__) # Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org> # License: BSD 3 clause # Standard scientific Python imports import matplotlib.pyplot as plt # Import datasets, classifiers and performance metrics from sklearn import datasets, svm, metrics # The digits dataset digits = datasets.load_digits() # The data that we are interested in is made of 8x8 images of digits, let's # have a look at the first 4 images, stored in the `images` attribute of the # dataset. If we were working from image files, we could load them using # pylab.imread. Note that each image must have the same size. For these # images, we know which digit they represent: it is given in the 'target' of # the dataset. images_and_labels = list(zip(digits.images, digits.target)) for index, (image, label) in enumerate(images_and_labels[:4]): plt.subplot(2, 4, index + 1) plt.axis('off') plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest') plt.title('Training: %i' % label) # To apply a classifier on this data, we need to flatten the image, to # turn the data in a (samples, feature) matrix: n_samples = len(digits.images) data = digits.images.reshape((n_samples, -1)) # Create a classifier: a support vector classifier classifier = svm.SVC(gamma=0.001) # We learn the digits on the first half of the digits classifier.fit(data[:n_samples / 2], digits.target[:n_samples / 2]) # Now predict the value of the digit on the second half: expected = digits.target[n_samples / 2:] predicted = classifier.predict(data[n_samples / 2:]) print("Classification report for classifier %s:\n%s\n" % (classifier, metrics.classification_report(expected, predicted))) print("Confusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted)) images_and_predictions = list(zip(digits.images[n_samples / 2:], predicted)) for index, (image, prediction) in enumerate(images_and_predictions[:4]): plt.subplot(2, 4, index + 5) plt.axis('off') plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest') plt.title('Prediction: %i' % prediction) plt.show()
""" ================================ Recognizing hand-written digits ================================ An example showing how the scikit-learn can be used to recognize images of hand-written digits. This example is commented in the :ref:`tutorial section of the user manual <introduction>`. """ print(__doc__) # Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org> # License: BSD 3 clause # Standard scientific Python imports import matplotlib.pyplot as plt # Import datasets, classifiers and performance metrics from sklearn import datasets, svm, metrics # The digits dataset digits = datasets.load_digits() # The data that we are interested in is made of 8x8 images of digits, let's # have a look at the first 3 images, stored in the `images` attribute of the # dataset. If we were working from image files, we could load them using # pylab.imread. Note that each image must have the same size. For these # images, we know which digit they represent: it is given in the 'target' of # the dataset. images_and_labels = list(zip(digits.images, digits.target)) for index, (image, label) in enumerate(images_and_labels[:4]): plt.subplot(2, 4, index + 1) plt.axis('off') plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest') plt.title('Training: %i' % label) # To apply a classifier on this data, we need to flatten the image, to # turn the data in a (samples, feature) matrix: n_samples = len(digits.images) data = digits.images.reshape((n_samples, -1)) # Create a classifier: a support vector classifier classifier = svm.SVC(gamma=0.001) # We learn the digits on the first half of the digits classifier.fit(data[:n_samples / 2], digits.target[:n_samples / 2]) # Now predict the value of the digit on the second half: expected = digits.target[n_samples / 2:] predicted = classifier.predict(data[n_samples / 2:]) print("Classification report for classifier %s:\n%s\n" % (classifier, metrics.classification_report(expected, predicted))) print("Confusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted)) images_and_predictions = list(zip(digits.images[n_samples / 2:], predicted)) for index, (image, prediction) in enumerate(images_and_predictions[:4]): plt.subplot(2, 4, index + 5) plt.axis('off') plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest') plt.title('Prediction: %i' % prediction) plt.show()
bsd-3-clause
Python
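Independent of the comment fix, note that both versions slice with `n_samples / 2`, which is a float under Python 3 and raises `TypeError` when used as an index (the example predates Python 3 support). A small sketch of the floor-division form that keeps the index an int:

```python
import numpy as np

n_samples = 1797                  # len(digits.images) for the full digits dataset
data = np.zeros((n_samples, 64))  # stand-in for the flattened 8x8 images
half = n_samples // 2             # floor division keeps the index an int
train, test = data[:half], data[half:]
assert len(train) == 898 and len(test) == 899
```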
746eace7e4677b034743b25e0f8d53aabd07dd5c
Fix bugs?
matthewbentley/autopoke
autopoke.py
autopoke.py
#!/bin/env python from selenium import webdriver from selenium.common.exceptions import StaleElementReferenceException from time import sleep from getpass import getpass if __name__ == '__main__': driver = webdriver.phantomjs.webdriver.WebDriver() driver.get('https://facebook.com') driver.find_element_by_id('email').send_keys(input('Email: ')) driver.find_element_by_id('pass').send_keys(getpass()) driver.find_element_by_id('loginbutton').click() driver.get('https://facebook.com/pokes/') assert "Forgot password?" not in driver.page_source c = 0 e = 0 while True: try: for i in driver.find_elements_by_link_text("Poke Back"): i.click() c += 1 print("Clicked so far: " + str(c)) except StaleElementReferenceException: e += 1 if e == 10: print("Found exception, reloading page") driver.get('https://facebook.com/pokes/') e = 0 else: print("Found exception, doing nothing") sleep(1)
#!/bin/env python from selenium import webdriver from selenium.common.exceptions import StaleElementReferenceException from time import sleep from getpass import getpass if __name__ == '__main__': driver = webdriver.phantomjs.webdriver.WebDriver() driver.get('https://facebook.com') driver.find_element_by_id('email').send_keys(input('Email: ')) driver.find_element_by_id('pass').send_keys(getpass()) driver.find_element_by_id('loginbutton').click() driver.get('https://facebook.com/pokes/') assert "Forgot password?" not in driver.page_source c = 0 c2 = 0 while True: try: for i in driver.find_elements_by_link_text("Poke Back"): i.click() c += 1 c2 = 0 print("Clicked so far: " + str(c)) except StaleElementReferenceException: print("Found exception, reloading page") driver.get('https://facebook.com/pokes/') c2 += 1 if c2 % 121 == 0: print("No pokes in last minute. Reloading") driver.get('https://facebook.com/pokes/') sleep(0.5)
mit
Python
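The fix above counts consecutive `StaleElementReferenceException`s and only reloads the page after ten of them, instead of reloading on a fixed schedule. A slightly generalized standalone sketch of that retry shape; the helper name, threshold, and delay are illustrative:

```python
import time

def poll_with_reload(fetch, reload_page, threshold=10, delay=1.0):
    """Call fetch() forever; after `threshold` errors, reload the page."""
    errors = 0
    while True:
        try:
            fetch()
        except Exception:      # the record narrows this to StaleElementReferenceException
            errors += 1
            if errors >= threshold:
                reload_page()  # recover by re-fetching the DOM
                errors = 0
        time.sleep(delay)
```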
d9fb8d20948e76d4df176d083e4284d3c99258ca
return int index for userids in the Netflix dataset
Evfro/polara
polara/datasets/netflix.py
polara/datasets/netflix.py
import pandas as pd import tarfile def get_netflix_data(gz_file): movie_data = [] movie_inds = [] with tarfile.open(gz_file) as tar: training_data = tar.getmember('download/training_set.tar') with tarfile.open(fileobj=tar.extractfile(training_data)) as inner: for item in inner.getmembers(): if item.isfile(): f = inner.extractfile(item.name) df = pd.read_csv(f) movieid = df.columns[0] movie_inds.append(int(movieid[:-1])) movie_data.append(df[movieid]) data = pd.concat(movie_data, keys=movie_inds) data = data.reset_index().iloc[:, :3].rename(columns={'level_0': 'movieid', 'level_1': 'userid', 'level_2': 'rating'}) return data
import pandas as pd import tarfile def get_netflix_data(gz_file): movie_data = [] movie_name = [] with tarfile.open(gz_file) as tar: training_data = tar.getmember('download/training_set.tar') with tarfile.open(fileobj=tar.extractfile(training_data)) as inner: for item in inner.getmembers(): if item.isfile(): f = inner.extractfile(item.name) df = pd.read_csv(f) movieid = df.columns[0] movie_name.append(movieid) movie_data.append(df[movieid]) data = pd.concat(movie_data, keys=movie_name) data = data.reset_index().iloc[:, :3].rename(columns={'level_0': 'movieid', 'level_1': 'userid', 'level_2': 'rating'}) return data
mit
Python
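The change above passes parsed integer ids rather than the raw column-name strings as the `keys` argument, so the outer MultiIndex level, and therefore the `movieid` column after `reset_index()`, comes out with an integer dtype. A self-contained illustration:

```python
import pandas as pd

s1 = pd.Series([5, 3], index=[101, 102])    # the index plays the role of user ids
s2 = pd.Series([4], index=[101])

stacked = pd.concat([s1, s2], keys=[1, 2])  # int keys -> int outer index level
df = stacked.reset_index()
df.columns = ["movieid", "userid", "rating"]
print(df["movieid"].dtype)                  # int64, not object
```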
5e4b661c446ad3fc9d27e55c7b0cfc9b17e4d8f9
add comment
PythonZone/PyAlaOCL,megaplanet/PyAlaOCL
pyalaocl/useocl/state.py
pyalaocl/useocl/state.py
# coding=utf-8 """ Simple metamodel for object states. Contains definitions for: - State, - Object, - Link, - LinkObject. """ from collections import OrderedDict class State(object): def __init__(self): self.objects = OrderedDict() self.links = OrderedDict() self.linkObject = OrderedDict() class StateElement(object): def __init__(self, state): self.state = state class Object(StateElement): def __init__(self, state, className, name): super(Object,self).__init__(state) state.objects[name] = self self.name = name self.className = className self.attributes = OrderedDict() def set(self, name, value): self.attributes[name] = value class Link(StateElement): def __init__(self, state, associationName, objects): super(Link, self).__init__(state) link_name = '_'.join(map(lambda o: o.name, objects)) state.links[link_name] = self self.associationName = associationName self.roles = objects class LinkObject(StateElement): def __init__(self, state, associationClassName, name, objects) : super(LinkObject, self).__init__(state) state.linkObject[name] = self self.name = name self.className = associationClassName self.attributes = OrderedDict() self.roles = objects def set(self, name, value): self.attributes[name] = value
# coding=utf-8 from collections import OrderedDict class State(object): def __init__(self): self.objects = OrderedDict() self.links = OrderedDict() self.linkObject = OrderedDict() class StateElement(object): def __init__(self, state): self.state = state class Object(StateElement): def __init__(self, state, className, name): super(Object,self).__init__(state) state.objects[name] = self self.name = name self.className = className self.attributes = OrderedDict() def set(self, name, value): self.attributes[name] = value class Link(StateElement): def __init__(self, state, associationName, objects): super(Link, self).__init__(state) link_name = '_'.join(map(lambda o: o.name, objects)) state.links[link_name] = self self.associationName = associationName self.roles = objects class LinkObject(StateElement): def __init__(self, state, associationClassName, name, objects) : super(LinkObject, self).__init__(state) state.linkObject[name] = self self.name = name self.className = associationClassName self.attributes = OrderedDict() self.roles = objects def set(self, name, value): self.attributes[name] = value
mit
Python
c824120ea5a33d3ee4cebc61b5bdf6b8258cf11f
remove set_printoptions call from debugging
HIPS/autograd,barak/autograd,hips/autograd,HIPS/autograd,kcarnold/autograd,hips/autograd
autograd/scipy/linalg.py
autograd/scipy/linalg.py
from __future__ import division import scipy.linalg import autograd.numpy as anp from autograd.numpy.numpy_wrapper import wrap_namespace from autograd.numpy.linalg import atleast_2d_col as al2d wrap_namespace(scipy.linalg.__dict__, globals()) # populates module namespace def _flip(a, trans): if anp.iscomplexobj(a): return 'H' if trans in ('N', 0) else 'N' else: return 'T' if trans in ('N', 0) else 'N' def make_grad_solve_triangular(ans, a, b, trans=0, lower=False, **kwargs): tri = anp.tril if (lower ^ (_flip(a, trans) == 'N')) else anp.triu transpose = lambda x: x if _flip(a, trans) != 'N' else x.T def solve_triangular_grad(g): v = al2d(solve_triangular(a, g, trans=_flip(a, trans), lower=lower)) return -transpose(tri(anp.dot(v, al2d(ans).T))) return solve_triangular_grad solve_triangular.defgrad(make_grad_solve_triangular) solve_triangular.defgrad(lambda ans, a, b, trans=0, lower=False, **kwargs: lambda g: solve_triangular(a, g, trans=_flip(a, trans), lower=lower), argnum=1) def make_grad_sqrtm(ans, A, **kwargs): def sqrtm_grad(g): return solve_lyapunov(ans, g) return sqrtm_grad sqrtm.defgrad(make_grad_sqrtm)
from __future__ import division import scipy.linalg import autograd.numpy as anp from autograd.numpy.numpy_wrapper import wrap_namespace from autograd.numpy.linalg import atleast_2d_col as al2d anp.set_printoptions(precision=3) wrap_namespace(scipy.linalg.__dict__, globals()) # populates module namespace def _flip(a, trans): if anp.iscomplexobj(a): return 'H' if trans in ('N', 0) else 'N' else: return 'T' if trans in ('N', 0) else 'N' def make_grad_solve_triangular(ans, a, b, trans=0, lower=False, **kwargs): tri = anp.tril if (lower ^ (_flip(a, trans) == 'N')) else anp.triu transpose = lambda x: x if _flip(a, trans) != 'N' else x.T def solve_triangular_grad(g): v = al2d(solve_triangular(a, g, trans=_flip(a, trans), lower=lower)) return -transpose(tri(anp.dot(v, al2d(ans).T))) return solve_triangular_grad solve_triangular.defgrad(make_grad_solve_triangular) solve_triangular.defgrad(lambda ans, a, b, trans=0, lower=False, **kwargs: lambda g: solve_triangular(a, g, trans=_flip(a, trans), lower=lower), argnum=1) def make_grad_sqrtm(ans, A, **kwargs): def sqrtm_grad(g): return solve_lyapunov(ans, g) return sqrtm_grad sqrtm.defgrad(make_grad_sqrtm)
mit
Python
5dd8c7d2f14e6323655ca9eb879597ab8b2b0ec4
Fix battery voltage calculation
chrisb2/gate-alarm
gate_app.py
gate_app.py
from utime import sleep_ms, sleep import webrepl from mqtt import MQTTClient from machine import Pin, ADC, PWM import secrets # Pin constants LED1 = 16 # GPIO16, D0, Nodemcu led LED2 = 2 # GPIO2, D4, ESP8266 led SWITCH = 5 # GPIO5, D1 BATTERY = 0 # ADC0, A0 BUZZER = 14 # GPIO14, D5 # Resistors in voltage divider (ohms) # NodeMcu internal resister divider (from schematic) NODEMCU_RESISTOR_RATIO = (220 + 100) / 100 # External resister divider R1 = 9970 R2 = 9990 RESISTOR_RATIO = (R1 + R2) / R2 # ADC Reference voltage in Millivolts ADC_REF = 1000 # Average value from 100 reads when A0 is grounded ADC_OFFSET = 3 # Number of ADC reads to take average of ADC_READS = 30 GATE_STATUS_TOPIC = b"back-gate/status" GATE_UPDATE_TOPIC = b"back-gate/update" PAYLOAD_FORMAT = "field1=1&field2={0:.2f}\n" on_for_update = False def device_control(topic, msg): global on_for_update on_for_update = True print((topic, msg)) def run_gate(): global on_for_update c = MQTTClient("gate_client", secrets.MQTT_BROKER) c.set_callback(device_control) c.connect(clean_session=False) c.publish(GATE_STATUS_TOPIC, msg_payload()) c.subscribe(GATE_UPDATE_TOPIC, qos=1) c.check_msg() c.disconnect() flash_led(LED1) if not on_for_update: switch_off() webrepl.start() def gate_alarm(topic, msg): print((topic, msg)) sound_alarm() def run_base(): c = MQTTClient("gate_base_client", secrets.MQTT_BROKER) c.set_callback(gate_alarm) c.connect(clean_session=False) c.subscribe(GATE_STATUS_TOPIC) while True: c.wait_msg() def msg_payload(): return PAYLOAD_FORMAT.format(battery_voltage()) def battery_voltage(): # ADC read at pin A0 adc = ADC(BATTERY) sum = 0 for x in range(0, ADC_READS): sum += adc.read() return ADC_REF * NODEMCU_RESISTOR_RATIO * RESISTOR_RATIO * \ (sum / ADC_READS - ADC_OFFSET) / 1024 / 1000 def switch_off(): # Raise pin high to signal FET switch to turn off flash_led(LED2) pin = Pin(SWITCH, Pin.OUT) pin.on() def sound_alarm(): pwm = PWM(Pin(BUZZER), freq=500, duty=512) sleep(5) pwm.deinit() def flash_led(pin, count=1): pin = Pin(pin, Pin.OUT) pin.on() for x in range(0, count * 2): pin.value(not pin.value()) sleep_ms(100)
from utime import sleep_ms, sleep import webrepl from mqtt import MQTTClient from machine import Pin, ADC, PWM import secrets # Pin constants LED1 = 16 # GPIO16, D0, Nodemcu led LED2 = 2 # GPIO2, D4, ESP8266 led SWITCH = 5 # GPIO5, D1 BATTERY = 0 # ADC0, A0 BUZZER = 14 # GPIO14, D5 # Resistors in voltage divider (ohms) R1 = 9970 R2 = 994 RESISTOR_RATIO = (R1 + R2) / R2 # ADC Reference voltage in Millivolts ADC_REF = 3292 # Measured between 3.3V and GND pins ADC_READS = 30 GATE_STATUS_TOPIC = b"back-gate/status" GATE_UPDATE_TOPIC = b"back-gate/update" PAYLOAD_FORMAT = "field1=1&field2={0:.2f}\n" on_for_update = False def device_control(topic, msg): global on_for_update on_for_update = True print((topic, msg)) def run_gate(): global on_for_update c = MQTTClient("gate_client", secrets.MQTT_BROKER) c.set_callback(device_control) c.connect(clean_session=False) c.publish(GATE_STATUS_TOPIC, msg_payload()) c.subscribe(GATE_UPDATE_TOPIC, qos=1) c.check_msg() c.disconnect() flash_led(LED1) if not on_for_update: switch_off() webrepl.start() def gate_alarm(topic, msg): print((topic, msg)) sound_alarm() def run_base(): c = MQTTClient("gate_base_client", secrets.MQTT_BROKER) c.set_callback(gate_alarm) c.connect(clean_session=False) c.subscribe(GATE_STATUS_TOPIC) while True: c.wait_msg() def msg_payload(): return PAYLOAD_FORMAT.format(battery_voltage()) def battery_voltage(): # ADC read at pin A0 adc = ADC(BATTERY) sum = 0 for x in range(0, ADC_READS): sum += adc.read() return ADC_REF * RESISTOR_RATIO * (sum / ADC_READS) / 1024 / 1000 def switch_off(): # Raise pin high to signal FET switch to turn off flash_led(LED2) pin = Pin(SWITCH, Pin.OUT) pin.on() def sound_alarm(): pwm = PWM(Pin(BUZZER), freq=500, duty=512) sleep(5) pwm.deinit() def flash_led(pin, count=1): pin = Pin(pin, Pin.OUT) pin.on() for x in range(0, count * 2): pin.value(not pin.value()) sleep_ms(100)
mit
Python
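The corrected formula chains the NodeMCU's internal 220k/100k divider, the external R1/R2 divider, and the measured ADC offset. A back-of-the-envelope check with a mid-scale reading; the ADC value here is made up for illustration:

```python
ADC_REF = 1000                                 # mV at full scale on the internal ADC
NODEMCU_RESISTOR_RATIO = (220 + 100) / 100     # 3.2
RESISTOR_RATIO = (9970 + 9990) / 9990          # ~1.998
ADC_OFFSET = 3

adc_mean = 515                                 # roughly half of the 10-bit range
volts = (ADC_REF * NODEMCU_RESISTOR_RATIO * RESISTOR_RATIO
         * (adc_mean - ADC_OFFSET) / 1024 / 1000)
print(round(volts, 2))                         # ~3.2 V
```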
0a89c9e32e625e53cbe5ea151aff42031fb833a5
Add canonical link
yashodhank/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,mhbu50/frappe,almeidapaulopt/frappe,yashodhank/frappe,frappe/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,mhbu50/frappe,yashodhank/frappe,yashodhank/frappe,mhbu50/frappe,mhbu50/frappe,frappe/frappe,almeidapaulopt/frappe,frappe/frappe
frappe/website/page_controllers/base_template_page.py
frappe/website/page_controllers/base_template_page.py
import frappe from frappe.website.doctype.website_settings.website_settings import get_website_settings from frappe.website.page_controllers.web_page import WebPage from frappe.website.website_components.metatags import MetaTags class BaseTemplatePage(WebPage): def init_context(self): self.context = frappe._dict() self.context.update(get_website_settings()) self.context.update(frappe.local.conf.get("website_context") or {}) def add_csrf_token(self, html): if frappe.local.session: csrf_token = frappe.local.session.data.csrf_token return html.replace("<!-- csrf_token -->", f'<script>frappe.csrf_token = "{csrf_token}";</script>') return html def post_process_context(self): self.tags = MetaTags(self.path, self.context).tags self.context.metatags = self.tags self.set_base_template_if_missing() self.set_title_with_prefix() self.update_website_context() # set using frappe.respond_as_web_page if hasattr(frappe.local, 'response') and frappe.local.response.get('context'): self.context.update(frappe.local.response.context) # to be able to inspect the context dict # Use the macro "inspect" from macros.html self.context._context_dict = self.context self.context.canonical = frappe.utils.get_url(frappe.utils.escape_html(self.path)) # context sends us a new template path if self.context.template: self.template_path = self.context.template def set_base_template_if_missing(self): if not self.context.base_template_path: app_base = frappe.get_hooks("base_template") self.context.base_template_path = app_base[-1] if app_base else "templates/base.html" def set_title_with_prefix(self): if (self.context.title_prefix and self.context.title and not self.context.title.startswith(self.context.title_prefix)): self.context.title = '{0} - {1}'.format(self.context.title_prefix, self.context.title) def update_website_context(self): # apply context from hooks update_website_context = frappe.get_hooks('update_website_context') for method in update_website_context: values = frappe.get_attr(method)(self.context) if values: self.context.update(values)
import frappe from frappe.website.doctype.website_settings.website_settings import get_website_settings from frappe.website.page_controllers.web_page import WebPage from frappe.website.website_components.metatags import MetaTags class BaseTemplatePage(WebPage): def init_context(self): self.context = frappe._dict() self.context.update(get_website_settings()) self.context.update(frappe.local.conf.get("website_context") or {}) def add_csrf_token(self, html): if frappe.local.session: csrf_token = frappe.local.session.data.csrf_token return html.replace("<!-- csrf_token -->", f'<script>frappe.csrf_token = "{csrf_token}";</script>') return html def post_process_context(self): self.tags = MetaTags(self.path, self.context).tags self.context.metatags = self.tags self.set_base_template_if_missing() self.set_title_with_prefix() self.update_website_context() # set using frappe.respond_as_web_page if hasattr(frappe.local, 'response') and frappe.local.response.get('context'): self.context.update(frappe.local.response.context) # to be able to inspect the context dict # Use the macro "inspect" from macros.html self.context._context_dict = self.context # context sends us a new template path if self.context.template: self.template_path = self.context.template def set_base_template_if_missing(self): if not self.context.base_template_path: app_base = frappe.get_hooks("base_template") self.context.base_template_path = app_base[-1] if app_base else "templates/base.html" def set_title_with_prefix(self): if (self.context.title_prefix and self.context.title and not self.context.title.startswith(self.context.title_prefix)): self.context.title = '{0} - {1}'.format(self.context.title_prefix, self.context.title) def update_website_context(self): # apply context from hooks update_website_context = frappe.get_hooks('update_website_context') for method in update_website_context: values = frappe.get_attr(method)(self.context) if values: self.context.update(values)
mit
Python
6a7bc9e7dacd30b27b48d37763c47b2419aca2a9
Change the imports to be Python3 compatible
mossberg/pyipinfodb
pyipinfodb/pyipinfodb.py
pyipinfodb/pyipinfodb.py
#!/usr/bin/env python """ Simple python wrapper around the IPInfoDB API. """ import json try: from urllib import urlencode except ImportError: from urllib.parse import urlencode try: import urllib2 except ImportError: import urllib.request as urllib2 import socket class IPInfo() : def __init__(self, apikey): self.apikey = apikey def get_ip_info(self, baseurl, ip=None): """ Same as get_city and get_country, but a baseurl is required. This is for if you want to use a different server that uses the php scripts on ipinfodb.com. """ passdict = {'format': 'json', 'key': self.apikey} if ip: try: # allows user to enter in domain instead of ip passdict['ip'] = socket.gethostbyaddr(ip)[2][0] except socket.herror: # if domain is not found, just use input passdict['ip'] = ip url = baseurl + "?" + urlencode(passdict) urlobj = urllib2.urlopen(url) data = urlobj.read() urlobj.close() datadict = json.loads(data) return datadict def get_country(self, ip=None): """ Gets the location with the context of the country of the given IP. If no IP is given, then the location of the client is given. The timezone option defaults to False, to spare the server some queries. """ baseurl = 'http://api.ipinfodb.com/v3/ip-country/' return self.get_ip_info(baseurl, ip) def get_city(self, ip=None): """ Gets the location with the context of the city of the given IP. If no IP is given, then the location of the client is given. The timezone option defaults to False, to spare the server some queries. """ baseurl = 'http://api.ipinfodb.com/v3/ip-city/' return self.get_ip_info(baseurl, ip)
#!/usr/bin/env python """ Simple python wrapper around the IPInfoDB API. """ import json from urllib import urlencode import urllib2 import socket class IPInfo() : def __init__(self, apikey): self.apikey = apikey def get_ip_info(self, baseurl, ip=None): """ Same as get_city and get_country, but a baseurl is required. This is for if you want to use a different server that uses the php scripts on ipinfodb.com. """ passdict = {'format': 'json', 'key': self.apikey} if ip: try: # allows user to enter in domain instead of ip passdict['ip'] = socket.gethostbyaddr(ip)[2][0] except socket.herror: # if domain is not found, just use input passdict['ip'] = ip url = baseurl + "?" + urlencode(passdict) urlobj = urllib2.urlopen(url) data = urlobj.read() urlobj.close() datadict = json.loads(data) return datadict def get_country(self, ip=None): """ Gets the location with the context of the country of the given IP. If no IP is given, then the location of the client is given. The timezone option defaults to False, to spare the server some queries. """ baseurl = 'http://api.ipinfodb.com/v3/ip-country/' return self.get_ip_info(baseurl, ip) def get_city(self, ip=None): """ Gets the location with the context of the city of the given IP. If no IP is given, then the location of the client is given. The timezone option defaults to False, to spare the server some queries. """ baseurl = 'http://api.ipinfodb.com/v3/ip-city/' return self.get_ip_info(baseurl, ip)
mit
Python
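The try/except import fallback above is the standard way to straddle Python 2 and 3 without depending on `six`. The same shape in isolation, with an illustrative alias name:

```python
try:                                   # Python 2 module layout
    from urllib import urlencode
    import urllib2 as url_request
except ImportError:                    # Python 3 moved both
    from urllib.parse import urlencode
    import urllib.request as url_request

query = urlencode({"format": "json", "key": "abc"})
print(query)                           # format=json&key=abc on either version
```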
305e54c328cf212e01a3af7cec7b940894044e55
Use float, not int for random WPM
nickodell/morse-code
gen_test.py
gen_test.py
import math import numpy import random from demodulate.cfg import * def gen_test_data(): pattern = [1,0,1,1,1,0,0,0,0,0,0,0] # morse code 'A' cycles_per_sample = MORSE_FREQ/SAMPLE_FREQ radians_per_sample = cycles_per_sample * 2 * math.pi WPM = random.uniform(2,20) elements_per_second = WPM * 50.0 / 60.0 samples_per_element = int(SAMPLE_FREQ/elements_per_second) length = samples_per_element * len(pattern) # Empty returns array containing random stuff, so we NEED to overwrite it data = numpy.empty(length, dtype=numpy.float32) for i in xrange(length): keyed = pattern[int(i/samples_per_element)] #keyed = 1 data[i] = 0 if not keyed else (radians_per_sample * i) data = numpy.sin(data) return data
import math import numpy import random from demodulate.cfg import * def gen_test_data(): pattern = [1,0,1,1,1,0,0,0,0,0,0,0] # morse code 'A' cycles_per_sample = MORSE_FREQ/SAMPLE_FREQ radians_per_sample = cycles_per_sample * 2 * math.pi WPM = random.randint(2,20) elements_per_second = WPM * 50.0 / 60.0 samples_per_element = int(SAMPLE_FREQ/elements_per_second) length = samples_per_element * len(pattern) # Empty returns array containing random stuff, so we NEED to overwrite it data = numpy.empty(length, dtype=numpy.float32) for i in xrange(length): keyed = pattern[int(i/samples_per_element)] #keyed = 1 data[i] = 0 if not keyed else (radians_per_sample * i) data = numpy.sin(data) return data
mit
Python
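The switch from `randint` to `uniform` matters because the words-per-minute value feeds a division: an integer WPM quantizes `samples_per_element` to a handful of values, while a float exercises arbitrary timings. In isolation (the `SAMPLE_FREQ` value is an assumption; the record imports it from `demodulate.cfg`):

```python
import random

SAMPLE_FREQ = 44100.0                  # assumed; defined in demodulate.cfg in the record
wpm = random.uniform(2, 20)            # float in [2, 20], not just 19 integer choices
elements_per_second = wpm * 50.0 / 60.0
samples_per_element = int(SAMPLE_FREQ / elements_per_second)
print(wpm, samples_per_element)
```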
dfb1784009549829a9a9bb1b72be51dacd44ec99
Update auth.py
pathakvaidehi2391/WorkSpace,pathakvaidehi2391/WorkSpace
azurecloudify/auth.py
azurecloudify/auth.py
import requests import json import urllib2 from cloudify import ctx import constants def get_token_from_client_credentials(): client_id = ctx.node.properties['client_id'] client_secret = ctx.node.properties['password'] tenant_id = ctx.node.properties['tenant_id'] endpoints = 'https://login.microsoftonline.com/'+tenant_id+'/oauth2/token' payload = { 'grant_type': 'client_credentials', 'client_id': client_id, 'client_secret': client_secret, 'resource': constants.resource, } response = requests.post(endpoints, data=payload).json() return response["access_token"] """ def _generate_credentials(**_): client_id=ctx.node.properties['client_id'] tenant_id=ctx.node.properties['tenant_id'] username=ctx.node.properties['username'] password=ctx.node.properties['password'] url='https://login.microsoftonline.com/'+tenant_id+'/oauth2/token' headers ={"Content-Type":"application/x-www-form-urlencoded"} body = "grant_type=password&username="+username+"&password="+password+"&client_id="+client_id+"&resource=https://management.core.windows.net/" req = Request(method="POST",url=url,data=body) req_prepped = req.prepare() s = Session() res = Response() res = s.send(req_prepped) s=res.content end_of_leader = s.index('access_token":"') + len('access_token":"') start_of_trailer = s.index('"', end_of_leader) token=s[end_of_leader:start_of_trailer] credentials = "Bearer " + token head = {"Content-Type": "application/json", "Authorization": credentials} return head """
import requests import json import urllib2 from cloudify import ctx import constants def get_token_from_client_credentials(): client_id = ctx.node.properties['client_id'] client_secret = ctx.node.properties['password'] tenant_id = ctx.node.properties['tenant_id'] endpoints = 'https://login.microsoftonline.com/'+tenant_id+'/oauth2/token' payload = { 'grant_type': 'client_credentials', 'client_id': client_id, 'client_secret': client_secret, 'resource': constants.resource, } response = requests.post(endpoints, data=payload).json() return response['u'access_token''] """ def _generate_credentials(**_): client_id=ctx.node.properties['client_id'] tenant_id=ctx.node.properties['tenant_id'] username=ctx.node.properties['username'] password=ctx.node.properties['password'] url='https://login.microsoftonline.com/'+tenant_id+'/oauth2/token' headers ={"Content-Type":"application/x-www-form-urlencoded"} body = "grant_type=password&username="+username+"&password="+password+"&client_id="+client_id+"&resource=https://management.core.windows.net/" req = Request(method="POST",url=url,data=body) req_prepped = req.prepare() s = Session() res = Response() res = s.send(req_prepped) s=res.content end_of_leader = s.index('access_token":"') + len('access_token":"') start_of_trailer = s.index('"', end_of_leader) token=s[end_of_leader:start_of_trailer] credentials = "Bearer " + token head = {"Content-Type": "application/json", "Authorization": credentials} return head """
apache-2.0
Python
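With the response key corrected to `access_token`, the returned value is typically attached as a bearer credential, as the record's commented-out block suggests. A hedged usage sketch; the endpoint, api-version, and token value are illustrative only:

```python
import requests

access_token = "eyJ0eXAi..."   # as returned by get_token_from_client_credentials()
head = {
    "Content-Type": "application/json",
    "Authorization": "Bearer " + access_token,
}
resp = requests.get(
    "https://management.azure.com/subscriptions?api-version=2016-06-01",
    headers=head,
)
print(resp.status_code)
```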
89a18ea91fb2d095541510155dcdf94ad76b8374
Fix broken lookdev loader
getavalon/core,mindbender-studio/core,pyblish/pyblish-mindbender,getavalon/core,MoonShineVFX/core,MoonShineVFX/core,mindbender-studio/core
mindbender/maya/loaders/mindbender_look.py
mindbender/maya/loaders/mindbender_look.py
import json from mindbender import api from mindbender.maya import lib, pipeline from maya import cmds class LookLoader(api.Loader): """Specific loader for lookdev""" families = ["mindbender.lookdev"] def process(self, asset, subset, version, representation): fname = representation["path"].format( dirname=version["path"].format(root=api.registered_root()), format=representation["format"] ) namespace = asset["name"] + "_" name = lib.unique_name(subset["name"]) with lib.maintained_selection(): nodes = cmds.file(fname, namespace=namespace, reference=True, returnNewNodes=True) # Containerising pipeline.containerise(name=name, namespace=namespace, nodes=nodes, version=version) # Assign shaders representation = next( (rep for rep in version["representations"] if rep["format"] == ".json"), None) if representation is None: cmds.warning("Look development asset has no relationship data.") else: path = representation["path"].format( dirname=version["path"].format(root=api.registered_root()), format=representation["format"] ) with open(path) as f: relationships = json.load(f) lib.apply_shaders(relationships) return cmds.referenceQuery(nodes[0], referenceNode=True)
import json from mindbender import api from mindbender.maya import lib, pipeline from maya import cmds class LookLoader(api.Loader): """Specific loader for lookdev""" families = ["mindbender.look"] def process(self, asset, subset, version, representation): fname = representation["path"].format( dirname=version["path"].format(root=api.registered_root()), format=representation["format"] ) namespace = asset["name"] + "_" name = lib.unique_name(subset["name"]) with lib.maintained_selection(): nodes = cmds.file(fname, namespace=namespace, reference=True, returnNewNodes=True) # Containerising pipeline.containerise(name=name, namespace=namespace, nodes=nodes, version=version) # Assign shaders representation = next( (rep for rep in version["representations"] if rep["format"] == ".json"), None) if representation is None: cmds.warning("Look development asset has no relationship data.") else: path = representation["path"].format( dirname=version["path"], format=representation["format"] ) with open(path) as f: relationships = json.load(f) lib.apply_shaders(relationships) return cmds.referenceQuery(nodes[0], referenceNode=True)
mit
Python
33fef0560e14f94bab7d74d0c6a62d2016487822
Tidy urls.py
nikdoof/test-auth
app/urls.py
app/urls.py
from django.conf.urls.defaults import * from django.contrib import admin from django.contrib.auth.views import login from django.contrib.staticfiles.urls import staticfiles_urlpatterns from django.conf import settings from utils import installed from registration.views import register from sso.forms import RegistrationFormUniqueEmailBlocked admin.autodiscover() urlpatterns = patterns('', ('', include('registration.backends.default.urls')), (r'^register/$', register, {'backend': 'registration.backends.default.DefaultBackend', 'form_class': RegistrationFormUniqueEmailBlocked}), ('', include('sso.urls')), (r'^eve/', include('eve_api.urls')), (r'^eveapi/', include('eve_proxy.urls')), (r'^api/', include('api.urls')), ) if installed('reddit'): urlpatterns += patterns('', ('', include('reddit.urls')), ) if installed('hr'): urlpatterns += patterns('', (r'^hr/', include('hr.urls')), ) if installed('groups'): urlpatterns += patterns('', (r'^groups/', include('groups.urls')), ) if installed('sentry'): urlpatterns += patterns('', (r'^sentry/', include('sentry.web.urls')), ) if installed('nexus'): import nexus nexus.autodiscover() urlpatterns += patterns('', (r'^nexus/', include(nexus.site.urls)), ) if settings.DEBUG: urlpatterns += staticfiles_urlpatterns()
from django.conf.urls.defaults import * from django.contrib import admin from django.contrib.auth.views import login from django.contrib.staticfiles.urls import staticfiles_urlpatterns from django.conf import settings from utils import installed from registration.views import register from sso.forms import RegistrationFormUniqueEmailBlocked admin.autodiscover() urlpatterns = patterns('', ('', include('registration.backends.default.urls')), (r'^register/$', register, {'backend': 'registration.backends.default.DefaultBackend', 'form_class': RegistrationFormUniqueEmailBlocked}), ('', include('sso.urls')), (r'^eve/', include('eve_api.urls')), (r'^eveapi/', include('eve_proxy.urls')), (r'^api/', include('api.urls')), ) if installed('reddit'): urlpatterns += patterns('', ('', include('reddit.urls')), ) if installed('hr'): urlpatterns += patterns('', (r'^hr/', include('hr.urls')), ) if installed('groups'): urlpatterns += patterns('', (r'^groups/', include('groups.urls')), ) if installed('sentry'): urlpatterns += patterns('', (r'^sentry/', include('sentry.web.urls')), ) if installed('nexus'): import nexus nexus.autodiscover() urlpatterns += patterns('', (r'^nexus/', include(nexus.site.urls)), ) if settings.DEBUG: urlpatterns += staticfiles_urlpatterns()
bsd-3-clause
Python
8a9422f7c323394af04f90a43a078098197076b9
fix small bug in dynamic urls.py
RyFry/leagueofdowning,RyFry/leagueofdowning,RyFry/leagueofdowning,RyFry/leagueofdowning
app/urls.py
app/urls.py
from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^about', views.about, name='about'), url(r'^test', views.test, name='test'), url(r'^champions/$', views.champions), url(r'^champions/.*', views.champion), url(r'^champions/*', views.champions), url(r'^items/$', views.items), url(r'^items/.*', views.item), url(r'^items/*', views.items), url(r'^players/$', views.players), url(r'^players/.*', views.player), url(r'^players/*', views.players), url(r'^.*/$', views.index) ]
from django.conf.urls import url from . import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^about', views.about, name='about'), url(r'^test', views.test, name='test'), url(r'^champions/$', views.champions), url(r'^champions/.*', views.champion), url(r'^champions/*', views.champions), url(r'^items/$', views.items), url(r'^items/.*', views.item), url(r'^items/*', views.items), url(r'^players/$', views.players), url(r'^players/.*', views.balls), url(r'^players/*', views.players), url(r'^.*/$', views.index) ]
mit
Python
2a1b5dbbd3e0c78df76d904602f1c4fcc6157a6b
Clean up imports
rscada/python-mbus,Cougar/python-mbus,neurobat/python-mbus
mbus/MBusHandle.py
mbus/MBusHandle.py
from ctypes import Structure, c_uint8, c_void_p, c_int, c_byte class MBusHandle(Structure): _fields_ = [("fd", c_int), ("max_data_retry", c_int), ("max_search_retry", c_int), ("purge_first_frame", c_byte), ("is_serial", c_uint8), ("internal", c_void_p * 10)] # pointers def __str__(self): return "MBusHandle: XXX"
from ctypes import Structure, c_uint32, c_uint8, c_void_p, c_int, c_byte class MBusHandle(Structure): _fields_ = [("fd", c_int), ("max_data_retry", c_int), ("max_search_retry", c_int), ("purge_first_frame", c_byte), ("is_serial", c_uint8), ("internal", c_void_p * 10)] # pointers def __str__(self): return "MBusHandle: XXX"
bsd-3-clause
Python
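The cleanup above drops `c_uint32` because nothing in `_fields_` references it; a ctypes `Structure` only needs the types it actually lays out. A tiny standalone illustration of the same mechanism, with made-up field names:

```python
from ctypes import Structure, c_int, c_uint8

class Point(Structure):
    _fields_ = [("x", c_int), ("y", c_int), ("flag", c_uint8)]

p = Point(3, 4, 1)          # positional init follows _fields_ order
print(p.x, p.y, p.flag)     # fields read back as plain attributes
```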
cdb3e3872ad0dfa722f9955a7beff38b2cfa3547
remove scheme from requester
emilkjer/django-memorycms,emilkjer/django-memorycms
backend/auth/utils.py
backend/auth/utils.py
import json from django.http import HttpResponse from auth.models import Token def json_response(response_dict, status=200): response = HttpResponse(json.dumps(response_dict), content_type="application/json", status=status) response['Access-Control-Allow-Origin'] = 'memorycms.moome.net' response['Access-Control-Allow-Headers'] = 'Content-Type, Authorization' return response def get_token(request): auth_header = request.META.get('HTTP_AUTHORIZATION', None) if auth_header is not None: tokens = auth_header.split(' ') if len(tokens) == 2 and tokens[0] == 'Token': token = tokens[1] return Token.objects.filter(token=token).first() def token_required(func): def inner(request, *args, **kwargs): if request.method == 'OPTIONS': return func(request, *args, **kwargs) auth_header = request.META.get('HTTP_AUTHORIZATION', None) if auth_header is not None: tokens = auth_header.split(' ') if len(tokens) == 2 and tokens[0] == 'Token': token = tokens[1] token_obj = get_token(request) if token_obj: request.token = token_obj return func(request, *args, **kwargs) else: return json_response({ 'error': 'Token not found' }, status=401) return json_response({ 'error': 'Invalid Header' }, status=401) return inner
import json from django.http import HttpResponse from auth.models import Token def json_response(response_dict, status=200): response = HttpResponse(json.dumps(response_dict), content_type="application/json", status=status) response['Access-Control-Allow-Origin'] = 'http://memorycms.moome.net/' response['Access-Control-Allow-Headers'] = 'Content-Type, Authorization' return response def get_token(request): auth_header = request.META.get('HTTP_AUTHORIZATION', None) if auth_header is not None: tokens = auth_header.split(' ') if len(tokens) == 2 and tokens[0] == 'Token': token = tokens[1] return Token.objects.filter(token=token).first() def token_required(func): def inner(request, *args, **kwargs): if request.method == 'OPTIONS': return func(request, *args, **kwargs) auth_header = request.META.get('HTTP_AUTHORIZATION', None) if auth_header is not None: tokens = auth_header.split(' ') if len(tokens) == 2 and tokens[0] == 'Token': token = tokens[1] token_obj = get_token(request) if token_obj: request.token = token_obj return func(request, *args, **kwargs) else: return json_response({ 'error': 'Token not found' }, status=401) return json_response({ 'error': 'Invalid Header' }, status=401) return inner
mit
Python
2a4bbb19bf32a08e7c398558d39c201f8b089342
change to len
hanabishi/pythoncam
backend/camservice.py
backend/camservice.py
import cherrypy from cammodule import CamModule, get_camera_list, setup_pygame_camera class CamService(object): def __init__(self): self.camera_list = [] setup_pygame_camera() camera_list = get_camera_list() for camera_index, camera_name in enumerate(camera_list): self.camera_list.append(CamModule(camera_name, camera_index)) @cherrypy.expose @cherrypy.tools.json_out() def get_cameras(self): return {"cameraCount": len(get_camera_list())} @cherrypy.expose def get_image(self, cam_index="0", fake="1"): cherrypy.response.headers['Content-Type'] = "image/jpg" return self.camera_list[int(cam_index)].get_bytes()
import cherrypy from cammodule import CamModule, get_camera_list, setup_pygame_camera class CamService(object): def __init__(self): self.camera_list = [] setup_pygame_camera() camera_list = get_camera_list() for camera_index, camera_name in enumerate(camera_list): self.camera_list.append(CamModule(camera_name, camera_index)) @cherrypy.expose @cherrypy.tools.json_out() def get_cameras(self): return {"cameraCount": 1} @cherrypy.expose def get_image(self, cam_index="0", fake="1"): cherrypy.response.headers['Content-Type'] = "image/jpg" return self.camera_list[int(cam_index)].get_bytes()
mit
Python
ae7cc245938b1e02974f9b54830146019ca9c0c1
make imports in __init__ prettier
PeerAssets/pypeerassets
pypeerassets/__init__.py
pypeerassets/__init__.py
from pypeerassets.kutil import Kutil from pypeerassets.provider import * from pypeerassets.__main__ import (deck_parser, find_all_valid_cards, find_all_valid_decks, find_deck, deck_spawn, deck_transfer, get_card_transfers, card_transfer)
from pypeerassets.kutil import Kutil from pypeerassets.provider import * from pypeerassets.__main__ import *
bsd-3-clause
Python
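The record replaces a star import with an explicit name list, which keeps the package namespace auditable. The complementary tool, if a star import must stay, is `__all__` in the source module; the module and names below are hypothetical:

```python
# hypothetical mypackage/__main__.py
__all__ = ["find_deck", "deck_spawn"]   # the only names `from ... import *` exports

def find_deck():
    pass

def deck_spawn():
    pass

def _helper():                          # excluded: not listed in __all__
    pass
```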
3aa36ff6ef79f061158ad57b1f4a251b3eeafd7a
Add virtual shift dealer_btn method
ishikota/PyPokerEngine
pypoker2/engine/table.py
pypoker2/engine/table.py
from pypoker2.engine.card import Card from pypoker2.engine.seats import Seats from pypoker2.engine.deck import Deck class Table: def __init__(self, cheat_deck=None): self.dealer_btn = 0 self.seats = Seats() self.deck = cheat_deck if cheat_deck else Deck() self.__community_card = [] def get_community_card(self): return self.__community_card[::] def add_community_card(self, card): if len(self.__community_card) == 5: raise ValueError(self.__exceed_card_size_msg) self.__community_card.append(card) def reset(self): self.deck.restore() self.__community_card = [] for player in self.seats.players: player.clear_holecard() player.clear_action_histories() player.clear_pay_info() def shift_dealer_btn(self, exec_shift=True): dealer_pos = self.dealer_btn while True: dealer_pos = (dealer_pos + 1) % self.seats.size() if self.seats.players[dealer_pos].is_active(): break if exec_shift: self.dealer_btn = dealer_pos return dealer_pos def serialize(self): community_card = [card.to_id() for card in self.__community_card] return [ self.dealer_btn, Seats.serialize(self.seats), Deck.serialize(self.deck), community_card ] @classmethod def deserialize(self, serial): deck = Deck.deserialize(serial[2]) community_card = [Card.from_id(cid) for cid in serial[3]] table = self(cheat_deck=deck) table.dealer_btn = serial[0] table.seats = Seats.deserialize(serial[1]) table.__community_card = community_card return table __exceed_card_size_msg = "Community card is already full"
from pypoker2.engine.card import Card from pypoker2.engine.seats import Seats from pypoker2.engine.deck import Deck class Table: def __init__(self, cheat_deck=None): self.dealer_btn = 0 self.seats = Seats() self.deck = cheat_deck if cheat_deck else Deck() self.__community_card = [] def get_community_card(self): return self.__community_card[::] def add_community_card(self, card): if len(self.__community_card) == 5: raise ValueError(self.__exceed_card_size_msg) self.__community_card.append(card) def reset(self): self.deck.restore() self.__community_card = [] for player in self.seats.players: player.clear_holecard() player.clear_action_histories() player.clear_pay_info() def shift_dealer_btn(self): while True: self.dealer_btn = (self.dealer_btn + 1) % self.seats.size() if self.seats.players[self.dealer_btn].is_active(): break def serialize(self): community_card = [card.to_id() for card in self.__community_card] return [ self.dealer_btn, Seats.serialize(self.seats), Deck.serialize(self.deck), community_card ] @classmethod def deserialize(self, serial): deck = Deck.deserialize(serial[2]) community_card = [Card.from_id(cid) for cid in serial[3]] table = self(cheat_deck=deck) table.dealer_btn = serial[0] table.seats = Seats.deserialize(serial[1]) table.__community_card = community_card return table __exceed_card_size_msg = "Community card is already full"
mit
Python
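The flag added above (spelled `exec_shift` here; the record's new_contents carried an obvious typo in the parameter name) lets callers compute the next dealer position without committing it: a peek/commit split. The shape in isolation, with illustrative names:

```python
class Cursor:
    def __init__(self, size):
        self.pos, self.size = 0, size

    def advance(self, commit=True):
        nxt = (self.pos + 1) % self.size
        if commit:
            self.pos = nxt
        return nxt

c = Cursor(4)
print(c.advance(commit=False), c.pos)   # 1 0 -> pure lookahead, state untouched
print(c.advance(), c.pos)               # 1 1 -> state actually moves
```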
58a69bf2dd93027f083fe54721847c438f861f10
Fix import of new data after rebase
bert9bert/statsmodels,wkfwkf/statsmodels,kiyoto/statsmodels,yl565/statsmodels,nvoron23/statsmodels,bashtage/statsmodels,yl565/statsmodels,edhuckle/statsmodels,jseabold/statsmodels,phobson/statsmodels,statsmodels/statsmodels,adammenges/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,wdurhamh/statsmodels,jseabold/statsmodels,jstoxrocky/statsmodels,huongttlan/statsmodels,wwf5067/statsmodels,josef-pkt/statsmodels,kiyoto/statsmodels,statsmodels/statsmodels,wdurhamh/statsmodels,nguyentu1602/statsmodels,bavardage/statsmodels,astocko/statsmodels,saketkc/statsmodels,nvoron23/statsmodels,nvoron23/statsmodels,YihaoLu/statsmodels,hainm/statsmodels,wkfwkf/statsmodels,wkfwkf/statsmodels,YihaoLu/statsmodels,statsmodels/statsmodels,adammenges/statsmodels,josef-pkt/statsmodels,cbmoore/statsmodels,nguyentu1602/statsmodels,Averroes/statsmodels,phobson/statsmodels,hlin117/statsmodels,wzbozon/statsmodels,edhuckle/statsmodels,cbmoore/statsmodels,yl565/statsmodels,detrout/debian-statsmodels,bert9bert/statsmodels,edhuckle/statsmodels,nvoron23/statsmodels,josef-pkt/statsmodels,phobson/statsmodels,Averroes/statsmodels,jseabold/statsmodels,bsipocz/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,bzero/statsmodels,bashtage/statsmodels,musically-ut/statsmodels,bsipocz/statsmodels,bert9bert/statsmodels,edhuckle/statsmodels,yl565/statsmodels,bzero/statsmodels,wzbozon/statsmodels,ChadFulton/statsmodels,yarikoptic/pystatsmodels,wwf5067/statsmodels,YihaoLu/statsmodels,rgommers/statsmodels,bavardage/statsmodels,jstoxrocky/statsmodels,astocko/statsmodels,bashtage/statsmodels,DonBeo/statsmodels,adammenges/statsmodels,huongttlan/statsmodels,astocko/statsmodels,saketkc/statsmodels,nvoron23/statsmodels,rgommers/statsmodels,bert9bert/statsmodels,detrout/debian-statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,musically-ut/statsmodels,waynenilsen/statsmodels,bavardage/statsmodels,josef-pkt/statsmodels,alekz112/statsmodels,ChadFulton/statsmodels,yl565/statsmodels,wzbozon/statsmodels,hainm/statsmodels,hainm/statsmodels,phobson/statsmodels,saketkc/statsmodels,gef756/statsmodels,DonBeo/statsmodels,saketkc/statsmodels,bzero/statsmodels,kiyoto/statsmodels,wdurhamh/statsmodels,bsipocz/statsmodels,cbmoore/statsmodels,jseabold/statsmodels,Averroes/statsmodels,bert9bert/statsmodels,wwf5067/statsmodels,kiyoto/statsmodels,alekz112/statsmodels,wdurhamh/statsmodels,rgommers/statsmodels,jstoxrocky/statsmodels,jstoxrocky/statsmodels,kiyoto/statsmodels,nguyentu1602/statsmodels,YihaoLu/statsmodels,hlin117/statsmodels,wzbozon/statsmodels,wdurhamh/statsmodels,adammenges/statsmodels,waynenilsen/statsmodels,bsipocz/statsmodels,nguyentu1602/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,hainm/statsmodels,gef756/statsmodels,edhuckle/statsmodels,wzbozon/statsmodels,yarikoptic/pystatsmodels,musically-ut/statsmodels,waynenilsen/statsmodels,bashtage/statsmodels,detrout/debian-statsmodels,rgommers/statsmodels,phobson/statsmodels,josef-pkt/statsmodels,saketkc/statsmodels,musically-ut/statsmodels,cbmoore/statsmodels,statsmodels/statsmodels,astocko/statsmodels,gef756/statsmodels,bavardage/statsmodels,hlin117/statsmodels,DonBeo/statsmodels,waynenilsen/statsmodels,huongttlan/statsmodels,gef756/statsmodels,alekz112/statsmodels,bavardage/statsmodels,huongttlan/statsmodels,bzero/statsmodels,DonBeo/statsmodels,hlin117/statsmodels
statsmodels/datasets/statecrime/data.py
statsmodels/datasets/statecrime/data.py
#! /usr/bin/env python """Statewide Crime Data""" __docformat__ = 'restructuredtext' COPYRIGHT = """Public domain.""" TITLE = """Statewide Crime Data 2009""" SOURCE = """ All data is for 2009 and was obtained from the American Statistical Abstracts except as indicated below. """ DESCRSHORT = """State crime data 2009""" DESCRLONG = DESCRSHORT #suggested notes NOTE = """ Number of observations: 51 Number of variables: 8 Variable name definitions: state All 50 states plus DC. violent Rate of violent crimes / 100,000 population. Includes murder, forcible rape, robbery, and aggravated assault. Numbers for Illinois and Minnesota do not include forcible rapes. Footnote included with the American Statistical Abstract table reads: "The data collection methodology for the offense of forcible rape used by the Illinois and the Minnesota state Uniform Crime Reporting (UCR) Programs (with the exception of Rockford, Illinois, and Minneapolis and St. Paul, Minnesota) does not comply with national UCR guidelines. Consequently, their state figures for forcible rape and violent crime (of which forcible rape is a part) are not published in this table." murder Rate of murders / 100,000 population. hs_grad Precent of population having graduated from high school or higher. poverty % of individuals below the poverty line white Percent of population that is one race - white only. From 2009 American Community Survey single Calculated from 2009 1-year American Community Survey obtained obtained from Census. Variable is Male householder, no wife present, family household combined with Female household, no husband prsent, family household, divided by the total number of Family households. urban % of population in Urbanized Areas as of 2010 Census. Urbanized Areas are area of 50,000 or more people.""" import numpy as np from statsmodels.datasets import utils as du from os.path import dirname, abspath def load(): """ Load the statecrime data and return a Dataset class instance. Returns ------- Dataset instance: See DATASET_PROPOSAL.txt for more information. """ data = _get_data() ##### SET THE INDICES ##### #NOTE: None for exog_idx is the complement of endog_idx return du.process_recarray(data, endog_idx=2, exog_idx=[7, 4, 3, 5], dtype=float) def load_pandas(): data = _get_data() ##### SET THE INDICES ##### #NOTE: None for exog_idx is the complement of endog_idx return du.process_recarray_pandas(data, endog_idx=2, exog_idx=[7,4,3,5], dtype=float, index_idx=0) def _get_data(): filepath = dirname(abspath(__file__)) ##### EDIT THE FOLLOWING TO POINT TO DatasetName.csv ##### data = np.recfromtxt(open(filepath + '/statecrime.csv', 'rb'), delimiter=",", names=True, dtype=None) return data
#! /usr/bin/env python """Statewide Crime Data""" __docformat__ = 'restructuredtext' COPYRIGHT = """Public domain.""" TITLE = """Statewide Crime Data 2009""" SOURCE = """ All data is for 2009 and was obtained from the American Statistical Abstracts except as indicated below. """ DESCRSHORT = """State crime data 2009""" DESCRLONG = DESCRSHORT #suggested notes NOTE = """ Number of observations: 51 Number of variables: 8 Variable name definitions: state All 50 states plus DC. violent Rate of violent crimes / 100,000 population. Includes murder, forcible rape, robbery, and aggravated assault. Numbers for Illinois and Minnesota do not include forcible rapes. Footnote included with the American Statistical Abstract table reads: "The data collection methodology for the offense of forcible rape used by the Illinois and the Minnesota state Uniform Crime Reporting (UCR) Programs (with the exception of Rockford, Illinois, and Minneapolis and St. Paul, Minnesota) does not comply with national UCR guidelines. Consequently, their state figures for forcible rape and violent crime (of which forcible rape is a part) are not published in this table." murder Rate of murders / 100,000 population. hs_grad Precent of population having graduated from high school or higher. poverty % of individuals below the poverty line white Percent of population that is one race - white only. From 2009 American Community Survey single Calculated from 2009 1-year American Community Survey obtained obtained from Census. Variable is Male householder, no wife present, family household combined with Female household, no husband prsent, family household, divided by the total number of Family households. urban % of population in Urbanized Areas as of 2010 Census. Urbanized Areas are area of 50,000 or more people.""" import numpy as np from statsmodels.tools import datautils as du from os.path import dirname, abspath def load(): """ Load the statecrime data and return a Dataset class instance. Returns ------- Dataset instance: See DATASET_PROPOSAL.txt for more information. """ data = _get_data() ##### SET THE INDICES ##### #NOTE: None for exog_idx is the complement of endog_idx return du.process_recarray(data, endog_idx=2, exog_idx=[7, 4, 3, 5], dtype=float) def load_pandas(): data = _get_data() ##### SET THE INDICES ##### #NOTE: None for exog_idx is the complement of endog_idx return du.process_recarray_pandas(data, endog_idx=2, exog_idx=[7,4,3,5], dtype=float, index_idx=0) def _get_data(): filepath = dirname(abspath(__file__)) ##### EDIT THE FOLLOWING TO POINT TO DatasetName.csv ##### data = np.recfromtxt(open(filepath + '/statecrime.csv', 'rb'), delimiter=",", names=True, dtype=None) return data
bsd-3-clause
Python
ea92aeed4dc606def49df643cadc696fec6452b3
fix docstring (again)
mapbox/geocoding-example,mapbox/geocoding-example,mapbox/geocoding-example,mapbox/geocoding-example
python/mapbox_geocode.py
python/mapbox_geocode.py
import __future__
import os, sys, json

try:
    # python 3
    from urllib.request import urlopen as urlopen
    from urllib.parse import quote_plus as quote_plus
except:
    # python 2
    from urllib import quote_plus as quote_plus
    from urllib2 import urlopen as urlopen


def geocode(mapbox_access_token, query):
    """
    Submit a geocoding query to Mapbox's geocoder.

    Args:
        mapbox_access_token (str): valid Mapbox access token with
            geocoding permissions
        query (str): input text to geocode
    """
    resp = urlopen('https://api.tiles.mapbox.com/v4/geocode/mapbox.places/{query}.json?access_token={token}'.format(query=quote_plus(query), token=mapbox_access_token))
    return json.loads(resp.read().decode('utf-8'))


if __name__ == '__main__':
    token = os.environ.get('MapboxAccessToken', False)
    if not token:
        print('environment variable MapboxAccessToken must be set')
        sys.exit(1)

    # geocode
    result = geocode(token, sys.argv[1])

    # print result
    print(json.dumps(result, indent=2))
import __future__
import os, sys, json

try:
    # python 3
    from urllib.request import urlopen as urlopen
    from urllib.parse import quote_plus as quote_plus
except:
    # python 2
    from urllib import quote_plus as quote_plus
    from urllib2 import urlopen as urlopen


def geocode(mapbox_access_token, query):
    """
    Submit a geocoding query to Mapbox's permanent geocoding endpoint.

    Args:
        mapbox_access_token (str): valid Mapbox access token with
            geocoding permissions
        query (str): input text to geocode
    """
    resp = urlopen('https://api.tiles.mapbox.com/v4/geocode/mapbox.places/{query}.json?access_token={token}'.format(query=quote_plus(query), token=mapbox_access_token))
    return json.loads(resp.read().decode('utf-8'))


if __name__ == '__main__':
    token = os.environ.get('MapboxAccessToken', False)
    if not token:
        print('environment variable MapboxAccessToken must be set')
        sys.exit(1)

    # geocode
    result = geocode(token, sys.argv[1])

    # print result
    print(json.dumps(result, indent=2))
isc
Python
d6ffb7c91d3cfd9b9e0caeec41921ec3ddce6efa
rewrite custom command for django 1.10 compatibility
hddn/studentsdb,hddn/studentsdb,hddn/studentsdb
students/management/commands/stcount.py
students/management/commands/stcount.py
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User

from students.models import Student, Group


class Command(BaseCommand):
    help = 'Prints to console number of students related in database.'
    models = (('student', Student), ('group', Group), ('user', User))

    def add_arguments(self, parser):
        parser.add_argument('model', nargs='+')

    def handle(self, *args, **options):
        for name, model in self.models:
            if name in options['model']:
                self.stdout.write('Number of {}s in database: {:d}'.format(name, model.objects.count()))
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User

from students.models import Student, Group


class Command(BaseCommand):
    args = '<model_name model_name ...>'
    help = 'Prints to console number of students related in database.'
    models = (('student', Student), ('group', Group), ('user', User))

    def handle(self, *args, **options):
        for name, model in self.models:
            if name in args:
                self.stdout.write('Number of {}s in database: {:d}'.format(name, model.objects.count()))
mit
Python
42ad2c26368dfaa19efcc5ea57902857aae3e2cf
fix horizon metrics
klynch/skyline,klynch/skyline,klynch/skyline
src/horizon/protocols.py
src/horizon/protocols.py
from twisted.internet.error import ConnectionDone
from twisted.internet.protocol import DatagramProtocol, ServerFactory
from twisted.protocols.basic import LineOnlyReceiver, Int32StringReceiver
from twisted.python import log

from utils import SafeUnpickler
from cache import MetricCache
from regexlist import WhiteList, BlackList


def emit(metric, value):
    log.msg(metric + " " + value)


class MetricReceiver:
    """ Base class for all metric receiving protocols, handles flow
    control events and connection state logging.
    """

    def connectionMade(self):
        self.peerName = self.transport.getPeer()
        log.msg("%s connection with %s established" % (self.__class__.__name__, self.peerName))

    def connectionLost(self, reason):
        if reason.check(ConnectionDone):
            log.msg("%s connection with %s closed cleanly" % (self.__class__.__name__, self.peerName))
        else:
            log.msg("%s connection with %s lost: %s" % (self.__class__.__name__, self.peerName, reason.value))

    def metricReceived(self, metric, datapoint):
        if BlackList and metric in BlackList:
            emit('skyline.horizon.blacklistMatches', metric)
            return
        if WhiteList and metric not in WhiteList:
            emit('skyline.horizon.whiteListRejects', metric)
            return
        MetricCache.store(metric, datapoint)


class MetricLineReceiver(MetricReceiver, LineOnlyReceiver):
    delimiter = '\n'

    def lineReceived(self, line):
        try:
            metric, value, timestamp = line.strip().split()
            self.metricReceived(metric, (float(timestamp), float(value)))
        except:
            log.msg('invalid line (%s) received from client %s, ignoring' % (line.strip(), self.peerName))


class MetricPickleReceiver(MetricReceiver, Int32StringReceiver):
    MAX_LENGTH = 2 ** 20

    def connectionMade(self):
        MetricReceiver.connectionMade(self)
        ##Use the safe unpickler that comes with carbon rather than standard python pickle/cpickle
        self.unpickler = SafeUnpickler

    def stringReceived(self, data):
        try:
            datapoints = self.unpickler.loads(data)
        except:
            log.msg('invalid pickle received from %s, ignoring' % self.peerName)
            return

        for (metric, datapoint) in datapoints:
            try:
                datapoint = ( float(datapoint[0]), float(datapoint[1]) ) #force proper types
            except:
                continue

            self.metricReceived(metric, datapoint)


class MetricDatagramReceiver(MetricReceiver, DatagramProtocol):
    def datagramReceived(self, data, (host, port)):
        for line in data.splitlines():
            try:
                metric, value, timestamp = line.strip().split()
                self.metricReceived(metric, (float(timestamp), float(value)))
            except:
                log.msg('invalid line (%s) received from %s, ignoring' % (line, host))


class MetricLineFactory(ServerFactory):
    protocol = MetricLineReceiver


class MetricPickleFactory(ServerFactory):
    protocol = MetricPickleReceiver
from twisted.internet.error import ConnectionDone
from twisted.internet.protocol import DatagramProtocol, ServerFactory
from twisted.protocols.basic import LineOnlyReceiver, Int32StringReceiver
from twisted.python import log

from utils import SafeUnpickler
from cache import MetricCache
from regexlist import WhiteList, BlackList


def emit(metric, value):
    log.msg(metric + " " + value)


class MetricReceiver:
    """ Base class for all metric receiving protocols, handles flow
    control events and connection state logging.
    """

    def connectionMade(self):
        self.peerName = self.transport.getPeer()
        log.msg("%s connection with %s established" % (self.__class__.__name__, self.peerName))

    def connectionLost(self, reason):
        if reason.check(ConnectionDone):
            log.msg("%s connection with %s closed cleanly" % (self.__class__.__name__, self.peerName))
        else:
            log.msg("%s connection with %s lost: %s" % (self.__class__.__name__, self.peerName, reason.value))

    def metricReceived(self, metric, datapoint):
        if BlackList and metric in BlackList:
            emit('blacklistMatches', metric)
            return
        if WhiteList and metric not in WhiteList:
            emit('whiteListRejects ', metric)
            return
        MetricCache.store(metric, datapoint)


class MetricLineReceiver(MetricReceiver, LineOnlyReceiver):
    delimiter = '\n'

    def lineReceived(self, line):
        try:
            metric, value, timestamp = line.strip().split()
            self.metricReceived(metric, (float(timestamp), float(value)))
        except:
            log.msg('invalid line (%s) received from client %s, ignoring' % (line.strip(), self.peerName))


class MetricPickleReceiver(MetricReceiver, Int32StringReceiver):
    MAX_LENGTH = 2 ** 20

    def connectionMade(self):
        MetricReceiver.connectionMade(self)
        ##Use the safe unpickler that comes with carbon rather than standard python pickle/cpickle
        self.unpickler = SafeUnpickler

    def stringReceived(self, data):
        try:
            datapoints = self.unpickler.loads(data)
        except:
            log.msg('invalid pickle received from %s, ignoring' % self.peerName)
            return

        for (metric, datapoint) in datapoints:
            try:
                datapoint = ( float(datapoint[0]), float(datapoint[1]) ) #force proper types
            except:
                continue

            self.metricReceived(metric, datapoint)


class MetricDatagramReceiver(MetricReceiver, DatagramProtocol):
    def datagramReceived(self, data, (host, port)):
        for line in data.splitlines():
            try:
                metric, value, timestamp = line.strip().split()
                self.metricReceived(metric, (float(timestamp), float(value)))
            except:
                log.msg('invalid line (%s) received from %s, ignoring' % (line, host))


class MetricLineFactory(ServerFactory):
    protocol = MetricLineReceiver


class MetricPickleFactory(ServerFactory):
    protocol = MetricPickleReceiver
mit
Python
454c7d322af3328279582aef629736b92c87e869
Revert "It seems the mechanism to declare a namespace package changed."
peterjc/backports.lzma,peterjc/backports.lzma
backports/__init__.py
backports/__init__.py
# This file is part of a backport of 'lzma' included with Python 3.3,
# exposed under the namespace of backports.lzma following the conventions
# laid down here: http://pypi.python.org/pypi/backports/1.0
# Backports homepage: http://bitbucket.org/brandon/backports

# A Python "namespace package" http://www.python.org/dev/peps/pep-0382/
# This always goes inside of a namespace package's __init__.py
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
# This file is part of a backport of 'lzma' included with Python 3.3,
# exposed under the namespace of backports.lzma following the conventions
# laid down here: http://pypi.python.org/pypi/backports/1.0
# Backports homepage: http://bitbucket.org/brandon/backports

# A Python "namespace package" http://www.python.org/dev/peps/pep-0382/
# This always goes inside of a namespace package's __init__.py
try:
    import pkg_resources
    pkg_resources.declare_namespace(__name__)
except ImportError:
    import pkgutil
    __path__ = pkgutil.extend_path(__path__, __name__)
bsd-3-clause
Python
d1be7f345529594ba25ed5d0f22e544735a64404
Add a custom admin site header.
qubs/climate-data-api,qubs/climate-data-api,qubs/data-centre,qubs/data-centre
qubs_data_centre/urls.py
qubs_data_centre/urls.py
from django.conf.urls import url, include
from django.contrib import admin

admin.site.site_header = 'QUBS Data Centre Admin'

urlpatterns = [
    url(r'^api/', include('api.urls')),
    url(r'^admin/', admin.site.urls),
]
from django.conf.urls import url, include
from django.contrib import admin

urlpatterns = [
    url(r'^api/', include('api.urls')),
    url(r'^admin/', admin.site.urls),
]
apache-2.0
Python
914f95b8acc84828c8a5aea1138415542b066a62
switch order
tjcsl/director,tjcsl/director,tjcsl/director,tjcsl/director
web3/urls.py
web3/urls.py
"""web3 URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin from django.conf import settings from .apps.authentication import views as auth_views from .apps.users import views as user_views from .apps.error.views import (handle_404_view, handle_500_view, handle_503_view) urlpatterns = [ url('', include('social.apps.django_app.urls', namespace='social')), url(r'^$', auth_views.index_view, name='index'), url('^about$', auth_views.about_view, name='about'), url(r'^login/superuser/$', auth_views.login_view, name='login_superuser'), url(r'^login/$', auth_views.login_view, name='login'), url(r'^logout/$', auth_views.logout_view, name='logout'), url(r'^wsauth$', auth_views.node_auth_view, name='node_auth'), url(r"^user/", include("web3.apps.users.urls")), url(r"^site/", include("web3.apps.sites.urls")), url(r'^vm/', include("web3.apps.vms.urls")), url(r'^admin/', admin.site.urls), url(r'^github_oauth/$', user_views.github_oauth_view) ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ] handler404 = handle_404_view handler500 = handle_500_view handler503 = handle_503_view
"""web3 URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin from django.conf import settings from .apps.authentication import views as auth_views from .apps.users import views as user_views from .apps.error.views import (handle_404_view, handle_500_view, handle_503_view) urlpatterns = [ url('', include('social.apps.django_app.urls', namespace='social')), url(r'^$', auth_views.index_view, name='index'), url('^about$', auth_views.about_view, name='about'), url(r'^login/$', auth_views.login_view, name='login'), url(r'^login/superuser/$', auth_views.login_view, name='login_superuser'), url(r'^logout/$', auth_views.logout_view, name='logout'), url(r'^wsauth$', auth_views.node_auth_view, name='node_auth'), url(r"^user/", include("web3.apps.users.urls")), url(r"^site/", include("web3.apps.sites.urls")), url(r'^vm/', include("web3.apps.vms.urls")), url(r'^admin/', admin.site.urls), url(r'^github_oauth/$', user_views.github_oauth_view) ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ] handler404 = handle_404_view handler500 = handle_500_view handler503 = handle_503_view
mit
Python
b2270d751146ed8f27a0d0cc85a10a15ea28dab3
Fix float to byte conversion.
eliteraspberries/avena
avena/np.py
avena/np.py
#!/usr/bin/env python

import numpy
import sys


_eps = 10.0 * sys.float_info.epsilon

# Map of NumPy array type strings to types
_np_dtypes = {
    'int8': numpy.int8,
    'int16': numpy.int16,
    'int32': numpy.int32,
    'int64': numpy.int64,
    'uint8': numpy.uint8,
    'uint16': numpy.uint16,
    'uint32': numpy.uint32,
    'uint64': numpy.uint64,
    'float32': numpy.float32,
    'float64': numpy.float64,
}

_dtype_bounds = {
    'float32': (0.0, 1.0),
    'float64': (0.0, 1.0),
    'uint8': (0, 255),
}


def from_uint8(array):
    float_array = array.astype(numpy.float32)
    float_array *= 1.0 / 256.0
    return float_array


def to_uint8(array):
    float_array = numpy.around(array * 255.0)
    uint8_array = float_array.astype(numpy.uint8)
    return uint8_array


def clip(array, bounds):
    """Clip the values of an array to the given interval."""
    (min, max) = bounds
    x = array < min + _eps
    y = array > max - _eps
    array[x] = min
    array[y] = max
    return


def normalize(array):
    """Normalize an array to the interval [0,1]."""
    mu = numpy.mean(array)
    rho2 = numpy.std(array)
    min = mu - 1.5 * rho2
    max = mu + 1.5 * rho2
    array -= min
    if max - min > _eps:
        array /= max - min
    return


def peak(array):
    """Return the index of the peak value of an array."""
    return numpy.unravel_index(numpy.argmax(array), array.shape)


def _zeropad(array, size):
    m, n = array.shape
    p, q = size
    z = numpy.zeros((p, q), dtype=array.dtype)
    z[:m, :n] = array
    return z


if __name__ == '__main__':
    pass
#!/usr/bin/env python

import numpy
import sys


_eps = 10.0 * sys.float_info.epsilon

# Map of NumPy array type strings to types
_np_dtypes = {
    'int8': numpy.int8,
    'int16': numpy.int16,
    'int32': numpy.int32,
    'int64': numpy.int64,
    'uint8': numpy.uint8,
    'uint16': numpy.uint16,
    'uint32': numpy.uint32,
    'uint64': numpy.uint64,
    'float32': numpy.float32,
    'float64': numpy.float64,
}

_dtype_bounds = {
    'float32': (0.0, 1.0),
    'float64': (0.0, 1.0),
    'uint8': (0, 255),
}


def from_uint8(array):
    float_array = array.astype(numpy.float32)
    float_array *= 1.0 / 256.0
    return float_array


def to_uint8(array):
    uint8_array = numpy.empty(array.shape, dtype=numpy.uint8)
    numpy.around(array * 255, out=uint8_array)
    return uint8_array


def clip(array, bounds):
    """Clip the values of an array to the given interval."""
    (min, max) = bounds
    x = array < min + _eps
    y = array > max - _eps
    array[x] = min
    array[y] = max
    return


def normalize(array):
    """Normalize an array to the interval [0,1]."""
    mu = numpy.mean(array)
    rho2 = numpy.std(array)
    min = mu - 1.5 * rho2
    max = mu + 1.5 * rho2
    array -= min
    if max - min > _eps:
        array /= max - min
    return


def peak(array):
    """Return the index of the peak value of an array."""
    return numpy.unravel_index(numpy.argmax(array), array.shape)


def _zeropad(array, size):
    m, n = array.shape
    p, q = size
    z = numpy.zeros((p, q), dtype=array.dtype)
    z[:m, :n] = array
    return z


if __name__ == '__main__':
    pass
isc
Python
2b21a07ad1a26f7006809936e5a58e5af710f61b
bump version: 1.0.1
collab-project/django-admin-footer,collab-project/django-admin-footer
admin_footer/__init__.py
admin_footer/__init__.py
# Copyright Collab 2015-2016
# See LICENSE for details.

"""
`django-admin-footer` application.
"""

from __future__ import unicode_literals

#: Application version.
__version__ = (1, 0, 1)


def short_version(version=None):
    """
    Return short application version. For example: `1.0.0`.
    """
    v = version or __version__
    return '.'.join([str(x) for x in v[:3]])


def get_version(version=None):
    """
    Return full version nr, inc. rc, beta etc tags.

    For example: `2.0.0a1`

    :rtype: str
    """
    v = version or __version__
    if len(v) == 4:
        return '{0}{1}'.format(short_version(v), v[3])
    return short_version(v)

#: Full version number.
version = get_version()
# Copyright Collab 2015-2016
# See LICENSE for details.

"""
`django-admin-footer` application.
"""

from __future__ import unicode_literals

#: Application version.
__version__ = (1, 0, 0)


def short_version(version=None):
    """
    Return short application version. For example: `1.0.0`.
    """
    v = version or __version__
    return '.'.join([str(x) for x in v[:3]])


def get_version(version=None):
    """
    Return full version nr, inc. rc, beta etc tags.

    For example: `2.0.0a1`

    :rtype: str
    """
    v = version or __version__
    if len(v) == 4:
        return '{0}{1}'.format(short_version(v), v[3])
    return short_version(v)

#: Full version number.
version = get_version()
mit
Python
2847daea8c3508ee9a71a0513d77e83ef5216e1c
Reduce caching time of sidebar to 3 minutes
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
app/soc/cache/sidebar.py
app/soc/cache/sidebar.py
#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Module contains sidebar memcaching functions.
"""

__authors__ = [
  '"Sverre Rabbelier" <[email protected]>',
  ]


from google.appengine.api import memcache
from google.appengine.api import users

import soc.cache.base


def key(id):
  """Returns the memcache key for the user's sidebar
  """

  return 'sidebar_for_%s' % repr(id)


def get(id, user):
  """Retrieves the sidebar for the specified user from the memcache
  """

  memcache_key = key(id)
  return memcache.get(memcache_key)


def put(sidebar, id, user):
  """Sets the sidebar for the specified user in the memcache

  Args:
    sidebar: the sidebar to be cached
  """

  # Store sidebar for ten minutes since new programs might get added
  retention = 3*60

  memcache_key = key(id)
  memcache.add(memcache_key, sidebar, retention)


def flush(user=None):
  """Removes the sidebar for the current user from the memcache

  Args:
    user: defaults to the current user if not set
  """

  if not user:
    user = users.get_current_user()

  memcache_key = key(user)
  memcache.delete(memcache_key)


# define the cache function
cache = soc.cache.base.getCacher(get, put)
#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Module contains sidebar memcaching functions.
"""

__authors__ = [
  '"Sverre Rabbelier" <[email protected]>',
  ]


from google.appengine.api import memcache
from google.appengine.api import users

import soc.cache.base


def key(id):
  """Returns the memcache key for the user's sidebar
  """

  return 'sidebar_for_%s' % repr(id)


def get(id, user):
  """Retrieves the sidebar for the specified user from the memcache
  """

  memcache_key = key(id)
  return memcache.get(memcache_key)


def put(sidebar, id, user):
  """Sets the sidebar for the specified user in the memcache

  Args:
    sidebar: the sidebar to be cached
  """

  # Store sidebar for ten minutes since new programs might get added
  retention = 10*60

  memcache_key = key(id)
  memcache.add(memcache_key, sidebar, retention)


def flush(user=None):
  """Removes the sidebar for the current user from the memcache

  Args:
    user: defaults to the current user if not set
  """

  if not user:
    user = users.get_current_user()

  memcache_key = key(user)
  memcache.delete(memcache_key)


# define the cache function
cache = soc.cache.base.getCacher(get, put)
apache-2.0
Python
9968247d4a73549f1c5b02abf8976f11662b46f7
Add a default logger. Specifically log repeated regulation node labels
18F/regulations-core,cmc333333/regulations-core,eregs/regulations-core
regcore/settings/base.py
regcore/settings/base.py
"""Base settings file; used by manage.py. All settings can be overridden via local_settings.py""" import os from django.utils.crypto import get_random_string INSTALLED_APPS = [ 'haystack', 'regcore', 'regcore_read', 'regcore_write', 'south' ] SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', get_random_string(50)) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'eregs.db' } } TEST_RUNNER = 'django_nose.runner.NoseTestSuiteRunner' ROOT_URLCONF = 'regcore.urls' DEBUG = True BACKENDS = { 'regulations': 'regcore.db.django_models.DMRegulations', 'layers': 'regcore.db.django_models.DMLayers', 'notices': 'regcore.db.django_models.DMNotices', 'diffs': 'regcore.db.django_models.DMDiffs' } NOSE_ARGS = [ '--with-coverage', '--cover-package=regcore,regcore_read,regcore_write' ] ELASTIC_SEARCH_URLS = [] ELASTIC_SEARCH_INDEX = 'eregs' HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'haystack.backends.solr_backend.SolrEngine', 'URL': 'http://localhost:8983/solr' } } LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'handlers': { 'console': { 'level': 'INFO', 'class': 'logging.StreamHandler', } }, 'loggers': { '': { 'handlers': ['console'], 'level': 'INFO', }, 'django.request': { 'handlers': ['console'], 'propagate': False, 'level': 'ERROR' } } } try: from local_settings import * except ImportError: pass
"""Base settings file; used by manage.py. All settings can be overridden via local_settings.py""" import os from django.utils.crypto import get_random_string INSTALLED_APPS = [ 'haystack', 'regcore', 'regcore_read', 'regcore_write', 'south' ] SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', get_random_string(50)) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'eregs.db' } } TEST_RUNNER = 'django_nose.runner.NoseTestSuiteRunner' ROOT_URLCONF = 'regcore.urls' DEBUG = True BACKENDS = { 'regulations': 'regcore.db.django_models.DMRegulations', 'layers': 'regcore.db.django_models.DMLayers', 'notices': 'regcore.db.django_models.DMNotices', 'diffs': 'regcore.db.django_models.DMDiffs' } NOSE_ARGS = [ '--with-coverage', '--cover-package=regcore,regcore_read,regcore_write' ] ELASTIC_SEARCH_URLS = [] ELASTIC_SEARCH_INDEX = 'eregs' HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'haystack.backends.solr_backend.SolrEngine', 'URL': 'http://localhost:8983/solr' } } try: from local_settings import * except ImportError: pass
cc0-1.0
Python
16eda1aac6183f612c678ae555367113f1326c0a
Mark upcoming release number.
myimages/django-registration,ubernostrum/django-registration
registration/__init__.py
registration/__init__.py
VERSION = (2, 2, 0, 'alpha', 0)


def get_version():
    """
    Returns a PEP 386-compliant version number from VERSION.

    """
    assert len(VERSION) == 5
    assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')

    # Now build the two parts of the version number:
    # main = X.Y[.Z]
    # sub = .devN - for pre-alpha releases
    #     | {a|b|c}N - for alpha, beta and rc releases

    parts = 2 if VERSION[2] == 0 else 3
    main = '.'.join(str(x) for x in VERSION[:parts])

    sub = ''
    if VERSION[3] != 'final':
        mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
        sub = mapping[VERSION[3]] + str(VERSION[4])

    return str(main + sub)
VERSION = (2, 1, 2, 'final', 0)


def get_version():
    """
    Returns a PEP 386-compliant version number from VERSION.

    """
    assert len(VERSION) == 5
    assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')

    # Now build the two parts of the version number:
    # main = X.Y[.Z]
    # sub = .devN - for pre-alpha releases
    #     | {a|b|c}N - for alpha, beta and rc releases

    parts = 2 if VERSION[2] == 0 else 3
    main = '.'.join(str(x) for x in VERSION[:parts])

    sub = ''
    if VERSION[3] != 'final':
        mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
        sub = mapping[VERSION[3]] + str(VERSION[4])

    return str(main + sub)
bsd-3-clause
Python
18e4e457752051dc4d5f57e78e83572638c4fe62
Refactor syncdb replacement. Clone existing schemata if they don't exist at syncdb time.
luzfcb/django-boardinghouse,luzfcb/django-boardinghouse,luzfcb/django-boardinghouse
multi_schema/management/commands/syncdb.py
multi_schema/management/commands/syncdb.py
from django.core.management.commands import syncdb
from django.db import models, connection, transaction

try:
    from south.management.commands import syncdb
except ImportError:
    pass

from ...models import Schema, template_schema

class Command(syncdb.Command):
    def handle_noargs(self, **options):
        # Ensure we have a __template__ schema.
        template_schema.create_schema()

        # Set the search path, so we find created models correctly
        cursor = connection.cursor()
        cursor.execute("SET search_path TO public,__template__;")

        super(Command, self).handle_noargs(**options)

        # Ensure all existing schemata exist (in case we imported them using loaddata or something)
        for schema in Schema.objects.all():
            schema.create_schema()
from django.core.management.commands import syncdb
from django.db import models, connection, transaction

try:
    from south.management.commands import syncdb
except ImportError:
    pass

class Command(syncdb.Command):
    def handle_noargs(self, **options):
        cursor = connection.cursor()

        # Ensure we have a __template__ schema.
        cursor.execute("SELECT schema_name FROM information_schema.schemata WHERE schema_name = '__template__';")
        if not cursor.fetchone():
            cursor.execute("CREATE SCHEMA __template__;")
            transaction.commit_unless_managed()

        # Set the search path, so we find created models correctly
        cursor.execute("SET search_path TO public,__template__;")

        super(Command, self).handle_noargs(**options)
bsd-3-clause
Python
2665aa46702175a0d33ae76cfccdbbbddf42d316
Allow for comments in the sql file that do not start the line.
luzfcb/django-boardinghouse,luzfcb/django-boardinghouse,luzfcb/django-boardinghouse
multi_schema/management/commands/syncdb.py
multi_schema/management/commands/syncdb.py
import os.path

from django.core.management.commands import syncdb
from django.db import models, connection, transaction

try:
    from south.management.commands import syncdb
except ImportError:
    pass

from ...models import Schema, template_schema

class Command(syncdb.Command):
    def handle_noargs(self, **options):
        # Ensure we have the clone_schema() function
        clone_schema_file = os.path.join(os.path.abspath(__file__ + '/../../../'), 'sql', 'clone_schema.sql')
        clone_schema_function = " ".join([x.strip() for x in open(clone_schema_file).readlines() if not x.strip().startswith('--')])
        clone_schema_function = clone_schema_function.replace("'%'", "'%%'")
        cursor = connection.cursor()
        cursor.execute(clone_schema_function)

        # Ensure we have a __template__ schema.
        template_schema.create_schema()

        # Set the search path, so we find created models correctly
        cursor = connection.cursor()
        cursor.execute("SET search_path TO public,__template__;")

        super(Command, self).handle_noargs(**options)

        # Ensure all existing schemata exist (in case we imported them using loaddata or something)
        for schema in Schema.objects.all():
            schema.create_schema()
import os.path

from django.core.management.commands import syncdb
from django.db import models, connection, transaction

try:
    from south.management.commands import syncdb
except ImportError:
    pass

from ...models import Schema, template_schema

class Command(syncdb.Command):
    def handle_noargs(self, **options):
        # Ensure we have the clone_schema() function
        clone_schema_file = os.path.join(os.path.abspath(__file__ + '/../../../'), 'sql', 'clone_schema.sql')
        clone_schema_function = " ".join([x.strip() for x in open(clone_schema_file).readlines() if not x.startswith('--')])
        clone_schema_function = clone_schema_function.replace("'%'", "'%%'")
        cursor = connection.cursor()
        cursor.execute(clone_schema_function)

        # Ensure we have a __template__ schema.
        template_schema.create_schema()

        # Set the search path, so we find created models correctly
        cursor = connection.cursor()
        cursor.execute("SET search_path TO public,__template__;")

        super(Command, self).handle_noargs(**options)

        # Ensure all existing schemata exist (in case we imported them using loaddata or something)
        for schema in Schema.objects.all():
            schema.create_schema()
bsd-3-clause
Python
e2555422c12f0b4cf59d8c636a087eddc3150948
allow CR
ContinuumIO/anaconda-verify,mandeep/conda-verify
anaconda_verify/utils.py
anaconda_verify/utils.py
import sys
import collections

from anaconda_verify.const import MAGIC_HEADERS, DLL_TYPES


def get_object_type(data):
    head = data[:4]
    if head not in MAGIC_HEADERS:
        return None
    lookup = MAGIC_HEADERS.get(head)
    if lookup == 'DLL':
        pos = data.find('PE\0\0')
        if pos < 0:
            return "<no PE header found>"
        i = ord(data[pos + 4]) + 256 * ord(data[pos + 5])
        return "DLL " + DLL_TYPES.get(i)
    elif lookup.startswith('MachO'):
        return lookup
    elif lookup == 'ELF':
        return "ELF" + {'\x01': '32', '\x02': '64'}.get(data[4])


def all_ascii(data):
    for c in data:
        n = ord(c) if sys.version_info[0] == 2 else c
        if not (n in (10, 13) or 32 <= n < 127):
            return False
    return True


class memoized(object):
    """Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        if not isinstance(args, collections.Hashable):
            # uncacheable. a list, for instance.
            # better to not cache than blow up.
            return self.func(*args)
        if args in self.cache:
            return self.cache[args]
        else:
            value = self.func(*args)
            self.cache[args] = value
            return value


if __name__ == '__main__':
    print(sys.version)
    print(all_ascii(b'Hello\x00'), all_ascii(b"Hello World!"))
import sys
import collections

from anaconda_verify.const import MAGIC_HEADERS, DLL_TYPES


def get_object_type(data):
    head = data[:4]
    if head not in MAGIC_HEADERS:
        return None
    lookup = MAGIC_HEADERS.get(head)
    if lookup == 'DLL':
        pos = data.find('PE\0\0')
        if pos < 0:
            return "<no PE header found>"
        i = ord(data[pos + 4]) + 256 * ord(data[pos + 5])
        return "DLL " + DLL_TYPES.get(i)
    elif lookup.startswith('MachO'):
        return lookup
    elif lookup == 'ELF':
        return "ELF" + {'\x01': '32', '\x02': '64'}.get(data[4])


def all_ascii(data):
    for c in data:
        n = ord(c) if sys.version_info[0] == 2 else c
        if not (n == 10 or 32 <= n < 127):
            return False
    return True


class memoized(object):
    """Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        if not isinstance(args, collections.Hashable):
            # uncacheable. a list, for instance.
            # better to not cache than blow up.
            return self.func(*args)
        if args in self.cache:
            return self.cache[args]
        else:
            value = self.func(*args)
            self.cache[args] = value
            return value


if __name__ == '__main__':
    print(sys.version)
    print(all_ascii(b'Hello\x00'), all_ascii(b"Hello World!"))
bsd-3-clause
Python
d4ff0f80f065b6f3efa79a5cf17bc4e81a6bb6f2
Add TODO comment.
ohsu-qin/qipipe
qipipe/staging/__init__.py
qipipe/staging/__init__.py
""" Image processing preparation. The staging package defines the functions used to prepare the study image files for import into XNAT, submission to the TCIA QIN collections and pipeline processing. """ # OHSU - The ohsu module creates the OHSU QIN collections. # TODO - this should be a config item. from . import ohsu
""" Image processing preparation. The staging package defines the functions used to prepare the study image files for import into XNAT, submission to the TCIA QIN collections and pipeline processing. """ # The ohsu module creates the OHSU QIN collections. # TODO - this should be a config item. from . import ohsu
bsd-2-clause
Python
0d0041678b598e623b3479942c3dd4fc23c5ab23
Upgrade Pip
GoogleCloudPlatform/PerfKitBenchmarker,meteorfox/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,meteorfox/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,AdamIsrael/PerfKitBenchmarker,AdamIsrael/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker
perfkitbenchmarker/linux_packages/pip.py
perfkitbenchmarker/linux_packages/pip.py
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Module containing pip installation and cleanup functions.

Uninstalling the pip package will also remove all python packages
added after installation.
"""

from perfkitbenchmarker import vm_util


def _Install(vm):
  """Install pip on the VM."""
  vm.InstallPackages('python-pip')
  vm.RemoteCommand('sudo pip install -U pip')  # Make pip upgrade pip
  vm.RemoteCommand('mkdir -p {0} && pip freeze > {0}/requirements.txt'.format(
      vm_util.VM_TMP_DIR))


def YumInstall(vm):
  """Installs the pip package on the VM."""
  vm.InstallEpelRepo()
  _Install(vm)


def AptInstall(vm):
  """Installs the pip package on the VM."""
  _Install(vm)


def _Uninstall(vm):
  """Uninstalls the pip package on the VM."""
  vm.RemoteCommand('pip freeze | grep --fixed-strings --line-regexp '
                   '--invert-match --file {0}/requirements.txt | '
                   'xargs --no-run-if-empty sudo pip uninstall -y'.format(
                       vm_util.VM_TMP_DIR))


def YumUninstall(vm):
  """Uninstalls the pip package on the VM."""
  _Uninstall(vm)


def AptUninstall(vm):
  """Uninstalls the pip package on the VM."""
  _Uninstall(vm)
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Module containing pip installation and cleanup functions.

Uninstalling the pip package will also remove all python packages
added after installation.
"""

from perfkitbenchmarker import vm_util


def _Install(vm):
  """Install pip on the VM."""
  vm.InstallPackages('python-pip')
  vm.RemoteCommand('mkdir -p {0} && pip freeze > {0}/requirements.txt'.format(
      vm_util.VM_TMP_DIR))


def YumInstall(vm):
  """Installs the pip package on the VM."""
  vm.InstallEpelRepo()
  _Install(vm)


def AptInstall(vm):
  """Installs the pip package on the VM."""
  _Install(vm)


def _Uninstall(vm):
  """Uninstalls the pip package on the VM."""
  vm.RemoteCommand('pip freeze | grep --fixed-strings --line-regexp '
                   '--invert-match --file {0}/requirements.txt | '
                   'xargs --no-run-if-empty sudo pip uninstall -y'.format(
                       vm_util.VM_TMP_DIR))


def YumUninstall(vm):
  """Uninstalls the pip package on the VM."""
  _Uninstall(vm)


def AptUninstall(vm):
  """Uninstalls the pip package on the VM."""
  _Uninstall(vm)
apache-2.0
Python
d25bfd459bfc03ea7a3a84a26d80b9db8036c168
Add new NAMES_TO_EDITIONS mapping
freelawproject/reporters-db
reporters_db/__init__.py
reporters_db/__init__.py
import datetime
import json
import os

import six

from .utils import suck_out_editions, suck_out_names, suck_out_variations_only


# noinspection PyBroadException
def datetime_parser(dct):
    for k, v in dct.items():
        if isinstance(v, six.string_types):
            try:
                dct[k] = datetime.datetime.strptime(v, "%Y-%m-%dT%H:%M:%S")
            except:
                pass
    return dct

db_root = os.path.dirname(os.path.realpath(__file__))

with open(os.path.join(db_root, 'data', 'reporters.json')) as f:
    REPORTERS = json.load(f, object_hook=datetime_parser)

with open(os.path.join(db_root, 'data', 'state_abbreviations.json')) as f:
    STATE_ABBREVIATIONS = json.load(f)

with open(os.path.join(db_root, 'data', 'case_name_abbreviations.json')) as f:
    CASE_NAME_ABBREVIATIONS = json.load(f)

VARIATIONS_ONLY = suck_out_variations_only(REPORTERS)
EDITIONS = suck_out_editions(REPORTERS)
NAMES_TO_EDITIONS = suck_out_names(REPORTERS)
import datetime
import json
import os

import six

from .utils import suck_out_variations_only
from .utils import suck_out_editions


# noinspection PyBroadException
def datetime_parser(dct):
    for k, v in dct.items():
        if isinstance(v, six.string_types):
            try:
                dct[k] = datetime.datetime.strptime(v, "%Y-%m-%dT%H:%M:%S")
            except:
                pass
    return dct

db_root = os.path.dirname(os.path.realpath(__file__))

with open(os.path.join(db_root, 'data', 'reporters.json')) as f:
    REPORTERS = json.load(f, object_hook=datetime_parser)

with open(os.path.join(db_root, 'data', 'state_abbreviations.json')) as f:
    STATE_ABBREVIATIONS = json.load(f)

with open(os.path.join(db_root, 'data', 'case_name_abbreviations.json')) as f:
    CASE_NAME_ABBREVIATIONS = json.load(f)

VARIATIONS_ONLY = suck_out_variations_only(REPORTERS)
EDITIONS = suck_out_editions(REPORTERS)
bsd-2-clause
Python
bffdc451e8dc9df2219158349b60f082ab087a27
add proposal pk to serializer to give votes unique id
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
meinberlin/apps/budgeting/serializers.py
meinberlin/apps/budgeting/serializers.py
from rest_framework import serializers

from adhocracy4.categories.models import Category

from .models import Proposal


class CategoryField(serializers.Field):

    def to_internal_value(self, category):
        if category:
            return Category.objects.get(pk=category)
        else:
            return None

    def to_representation(self, category):
        return {'id': category.pk, 'name': category.name}


class ProposalSerializer(serializers.ModelSerializer):

    creator = serializers.SerializerMethodField()
    comment_count = serializers.SerializerMethodField()
    positive_rating_count = serializers.SerializerMethodField()
    negative_rating_count = serializers.SerializerMethodField()
    category = CategoryField()
    url = serializers.SerializerMethodField()

    class Meta:
        model = Proposal
        fields = ('budget', 'category', 'comment_count', 'created', 'creator',
                  'is_archived', 'name', 'negative_rating_count',
                  'positive_rating_count', 'url', 'pk')
        read_only_fields = ('budget', 'category', 'comment_count', 'created',
                            'creator', 'is_archived', 'name',
                            'negative_rating_count', 'positive_rating_count',
                            'url', 'pk')

    def get_creator(self, proposal):
        return proposal.creator.username

    def get_comment_count(self, proposal):
        if hasattr(proposal, 'comment_count'):
            return proposal.comment_count
        else:
            return 0

    def get_positive_rating_count(self, proposal):
        if hasattr(proposal, 'positive_rating_count'):
            return proposal.positive_rating_count
        else:
            return 0

    def get_negative_rating_count(self, proposal):
        if hasattr(proposal, 'negative_rating_count'):
            return proposal.negative_rating_count
        else:
            return 0

    def get_url(self, proposal):
        return proposal.get_absolute_url()
from rest_framework import serializers

from adhocracy4.categories.models import Category

from .models import Proposal


class CategoryField(serializers.Field):

    def to_internal_value(self, category):
        if category:
            return Category.objects.get(pk=category)
        else:
            return None

    def to_representation(self, category):
        return {'id': category.pk, 'name': category.name}


class ProposalSerializer(serializers.ModelSerializer):

    creator = serializers.SerializerMethodField()
    comment_count = serializers.SerializerMethodField()
    positive_rating_count = serializers.SerializerMethodField()
    negative_rating_count = serializers.SerializerMethodField()
    category = CategoryField()
    url = serializers.SerializerMethodField()

    class Meta:
        model = Proposal
        fields = ('budget', 'category', 'comment_count', 'created', 'creator',
                  'is_archived', 'name', 'negative_rating_count',
                  'positive_rating_count', 'url')
        read_only_fields = ('budget', 'category', 'comment_count', 'created',
                            'creator', 'is_archived', 'name',
                            'negative_rating_count', 'positive_rating_count',
                            'url')

    def get_creator(self, proposal):
        return proposal.creator.username

    def get_comment_count(self, proposal):
        if hasattr(proposal, 'comment_count'):
            return proposal.comment_count
        else:
            return 0

    def get_positive_rating_count(self, proposal):
        if hasattr(proposal, 'positive_rating_count'):
            return proposal.positive_rating_count
        else:
            return 0

    def get_negative_rating_count(self, proposal):
        if hasattr(proposal, 'negative_rating_count'):
            return proposal.negative_rating_count
        else:
            return 0

    def get_url(self, proposal):
        return proposal.get_absolute_url()
agpl-3.0
Python
9df2420f152e48a0e99598220e4560fe25c9fd36
add an argument to TblTreeEntries.__init__()
TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl
AlphaTwirl/HeppyResult/TblTreeEntries.py
AlphaTwirl/HeppyResult/TblTreeEntries.py
# Tai Sakuma <[email protected]> from ..mkdir_p import mkdir_p from ..listToAlignedText import listToAlignedText import os import ROOT ##__________________________________________________________________|| class TblTreeEntries(object): def __init__(self, analyzerName, fileName, treeName, outPath, columnName = 'n'): self.analyzerName = analyzerName self.fileName = fileName self.treeName = treeName self.outPath = outPath self._rows = [['component', columnName]] def begin(self): pass def read(self, component): inputPath = os.path.join(getattr(component, self.analyzerName).path, self.fileName) file = ROOT.TFile.Open(inputPath) tree = file.Get(self.treeName) row = [component.name, tree.GetEntries()] self._rows.append(row) def end(self): f = self._open(self.outPath) f.write(listToAlignedText(self._rows)) self._close(f) def _open(self, path): mkdir_p(os.path.dirname(path)) return open(path, 'w') def _close(self, file): file.close() ##__________________________________________________________________||
# Tai Sakuma <[email protected]> from ..mkdir_p import mkdir_p from ..listToAlignedText import listToAlignedText import os import ROOT ##__________________________________________________________________|| class TblTreeEntries(object): def __init__(self, analyzerName, fileName, treeName, outPath): self.analyzerName = analyzerName self.fileName = fileName self.treeName = treeName self.outPath = outPath self._rows = [['component', 'n']] def begin(self): pass def read(self, component): inputPath = os.path.join(getattr(component, self.analyzerName).path, self.fileName) file = ROOT.TFile.Open(inputPath) tree = file.Get(self.treeName) row = [component.name, tree.GetEntries()] self._rows.append(row) def end(self): f = self._open(self.outPath) f.write(listToAlignedText(self._rows)) self._close(f) def _open(self, path): mkdir_p(os.path.dirname(path)) return open(path, 'w') def _close(self, file): file.close() ##__________________________________________________________________||
bsd-3-clause
Python
0de57c0c14362d2f9c40975326c8cb1bf792e2a0
make compiled dir
deanmao/node-chimera,deanmao/node-chimera,deanmao/node-chimera,deanmao/node-chimera,deanmao/node-chimera,deanmao/node-chimera
binding.gyp
binding.gyp
{
  'targets': [
    {
      'target_name': 'chimera',
      'sources': [
        'src/top.cc',
        'src/cookiejar.cc',
        'src/chimera.cc',
        'src/browser.cc'
      ],
      'conditions': [
        ['OS=="mac"', {
          'include_dirs': [
            'qt_compiled/include',
            'qt_compiled/include/QtCore',
            'qt_compiled/include/QtGui',
            'qt_compiled/include/QtNetwork',
            'qt_compiled/include/QtWebkit'
          ],
          'libraries': [
            '-framework AppKit',
            '../qt_compiled/lib/libQtGui.a',
            '../qt_compiled/lib/libQtCore.a',
            '../qt_compiled/lib/libQtNetwork.a',
            '../qt_compiled/lib/libQtWebKit.a',
            '../qt_compiled/lib/libjscore.a',
            '../qt_compiled/lib/libwebcore.a',
            '../qt_compiled/lib/libQtXml.a'
          ],
        }],
        ['OS=="linux"', {
          'include_dirs': [
            'qt_compiled/include',
            'qt_compiled/include/QtCore',
            'qt_compiled/include/QtGui',
            'qt_compiled/include/QtNetwork',
            'qt_compiled/include/QtWebKit'
          ],
          'libraries': [
            '../deps/openssl/linux/lib/libssl.a',
            '../deps/openssl/linux/lib/libcrypto.a',
            '../qt_compiled/lib/libQtCore.a',
            '../qt_compiled/lib/libQtGui.a',
            '../qt_compiled/lib/libQtXml.a',
            '../qt_compiled/lib/libQtNetwork.a',
            '../qt_compiled/lib/libQtWebKit.a',
            '../qt_compiled/lib/libwebcore.a',
            '../qt_compiled/lib/libjscore.a'
          ],
        }]
      ]
    }
  ]
}
{
  'targets': [
    {
      'target_name': 'chimera',
      'sources': [
        'src/top.cc',
        'src/cookiejar.cc',
        'src/chimera.cc',
        'src/browser.cc'
      ],
      'conditions': [
        ['OS=="mac"', {
          'include_dirs': [
            'qt/include',
            'qt/include/QtCore',
            'qt/include/QtGui',
            'qt/include/QtNetwork',
            'qt/include/QtWebkit'
          ],
          'libraries': [
            '-framework AppKit',
            '../qt/lib/libQtGui.a',
            '../qt/lib/libQtCore.a',
            '../qt/lib/libQtNetwork.a',
            '../qt/lib/libQtWebKit.a',
            '../qt/lib/libjscore.a',
            '../qt/lib/libwebcore.a',
            '../qt/lib/libQtXml.a'
          ],
        }],
        ['OS=="linux"', {
          'include_dirs': [
            'qt/include',
            'qt/include/QtCore',
            'qt/include/QtGui',
            'qt/include/QtNetwork',
            'qt/include/QtWebKit'
          ],
          'libraries': [
            '../deps/openssl/linux/lib/libssl.a',
            '../deps/openssl/linux/lib/libcrypto.a',
            '../qt/lib/libQtCore.a',
            '../qt/lib/libQtGui.a',
            '../qt/lib/libQtXml.a',
            '../qt/lib/libQtNetwork.a',
            '../qt/lib/libQtWebKit.a',
            '../qt/lib/libwebcore.a',
            '../qt/lib/libjscore.a'
          ],
        }]
      ]
    }
  ]
}
mit
Python
20ee95f56033b5a7d9d1e5f022118850b339ace9
remove old ssl
deanmao/node-chimera,deanmao/node-chimera,deanmao/node-chimera,deanmao/node-chimera,deanmao/node-chimera,deanmao/node-chimera
binding.gyp
binding.gyp
{
  'targets': [
    {
      'target_name': 'chimera',
      'sources': [
        'src/top.cc',
        'src/cookiejar.cc',
        'src/chimera.cc',
        'src/browser.cc'
      ],
      'conditions': [
        ['OS=="mac"', {
          'include_dirs': [
            'qt_compiled/include',
            'qt_compiled/include/QtCore',
            'qt_compiled/include/QtGui',
            'qt_compiled/include/QtNetwork',
            'qt_compiled/include/QtWebkit'
          ],
          'libraries': [
            '-framework AppKit',
            '../qt_compiled/lib/libQtGui.a',
            '../qt_compiled/lib/libQtCore.a',
            '../qt_compiled/lib/libQtNetwork.a',
            '../qt_compiled/lib/libQtWebKit.a',
            '../qt_compiled/lib/libjscore.a',
            '../qt_compiled/lib/libwebcore.a',
            '../qt_compiled/lib/libQtXml.a'
          ],
        }],
        ['OS=="linux"', {
          'include_dirs': [
            'qt_compiled/include',
            'qt_compiled/include/QtCore',
            'qt_compiled/include/QtGui',
            'qt_compiled/include/QtNetwork',
            'qt_compiled/include/QtWebKit'
          ],
          'libraries': [
            '../qt_compiled/lib/libQtCore.a',
            '../qt_compiled/lib/libQtGui.a',
            '../qt_compiled/lib/libQtXml.a',
            '../qt_compiled/lib/libQtNetwork.a',
            '../qt_compiled/lib/libQtWebKit.a',
            '../qt_compiled/lib/libwebcore.a',
            '../qt_compiled/lib/libjscore.a'
          ],
        }]
      ]
    }
  ]
}
{
  'targets': [
    {
      'target_name': 'chimera',
      'sources': [
        'src/top.cc',
        'src/cookiejar.cc',
        'src/chimera.cc',
        'src/browser.cc'
      ],
      'conditions': [
        ['OS=="mac"', {
          'include_dirs': [
            'qt_compiled/include',
            'qt_compiled/include/QtCore',
            'qt_compiled/include/QtGui',
            'qt_compiled/include/QtNetwork',
            'qt_compiled/include/QtWebkit'
          ],
          'libraries': [
            '-framework AppKit',
            '../qt_compiled/lib/libQtGui.a',
            '../qt_compiled/lib/libQtCore.a',
            '../qt_compiled/lib/libQtNetwork.a',
            '../qt_compiled/lib/libQtWebKit.a',
            '../qt_compiled/lib/libjscore.a',
            '../qt_compiled/lib/libwebcore.a',
            '../qt_compiled/lib/libQtXml.a'
          ],
        }],
        ['OS=="linux"', {
          'include_dirs': [
            'qt_compiled/include',
            'qt_compiled/include/QtCore',
            'qt_compiled/include/QtGui',
            'qt_compiled/include/QtNetwork',
            'qt_compiled/include/QtWebKit'
          ],
          'libraries': [
            '../deps/openssl/linux/lib/libssl.a',
            '../deps/openssl/linux/lib/libcrypto.a',
            '../qt_compiled/lib/libQtCore.a',
            '../qt_compiled/lib/libQtGui.a',
            '../qt_compiled/lib/libQtXml.a',
            '../qt_compiled/lib/libQtNetwork.a',
            '../qt_compiled/lib/libQtWebKit.a',
            '../qt_compiled/lib/libwebcore.a',
            '../qt_compiled/lib/libjscore.a'
          ],
        }]
      ]
    }
  ]
}
mit
Python
fe290c9f3edc477707e88cb5942ee6c5bd1db568
fix the http backend -- outgoing was still busted
catalpainternational/rapidsms,catalpainternational/rapidsms,eHealthAfrica/rapidsms,peterayeni/rapidsms,peterayeni/rapidsms,ken-muturi/rapidsms,dimagi/rapidsms-core-dev,unicefuganda/edtrac,unicefuganda/edtrac,ehealthafrica-ci/rapidsms,rapidsms/rapidsms-core-dev,dimagi/rapidsms-core-dev,dimagi/rapidsms,unicefuganda/edtrac,catalpainternational/rapidsms,caktus/rapidsms,peterayeni/rapidsms,eHealthAfrica/rapidsms,ken-muturi/rapidsms,catalpainternational/rapidsms,lsgunth/rapidsms,ken-muturi/rapidsms,rapidsms/rapidsms-core-dev,lsgunth/rapidsms,ehealthafrica-ci/rapidsms,caktus/rapidsms,eHealthAfrica/rapidsms,ehealthafrica-ci/rapidsms,dimagi/rapidsms,lsgunth/rapidsms,caktus/rapidsms,lsgunth/rapidsms,peterayeni/rapidsms
lib/rapidsms/backends/http.py
lib/rapidsms/backends/http.py
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4

import BaseHTTPServer, SocketServer
import select
import random
import re
import urllib
import httphandlers as handlers

import rapidsms
from rapidsms.backends.base import BackendBase


class HttpServer (BaseHTTPServer.HTTPServer, SocketServer.ThreadingMixIn):
    def handle_request (self, timeout=1.0):
        # don't block on handle_request
        reads, writes, errors = (self,), (), ()
        reads, writes, errors = select.select(reads, writes, errors, timeout)
        if reads:
            BaseHTTPServer.HTTPServer.handle_request(self)


class Backend(BackendBase):
    def configure(self, host="localhost", port=8080, handler="HttpHandler", **kwargs):
        #module_name = "httphandlers"
        #module = __import__(module_name, {}, {}, [''])
        component_class = getattr(handlers, handler)
        self.handler = component_class
        self.server = HttpServer((host, int(port)), component_class)
        self.type = "HTTP"

        # set this backend in the server instance so it
        # can callback when a message is received
        self.server.backend = self

        # also set it in the handler class so we can callback
        self.handler.backend = self

        # set the slug based on the handler, so we can have multiple
        # http backends
        self._slug = "http_%s" % handler

    def run (self):
        while self.running:
            msg = self.next_message()
            if msg:
                if handlers.msg_store.has_key(msg.connection.identity):
                    handlers.msg_store[msg.connection.identity].append(msg.text)
                else:
                    handlers.msg_store[msg.connection.identity] = []
                    handlers.msg_store[msg.connection.identity].append(msg.text)
            self.server.handle_request()

    def send(self, message):
        self.handler.outgoing(message)
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4

import BaseHTTPServer, SocketServer
import select
import random
import re
import urllib

import httphandlers as handlers

import rapidsms
from rapidsms.backends.base import BackendBase


class HttpServer (BaseHTTPServer.HTTPServer, SocketServer.ThreadingMixIn):
    def handle_request (self, timeout=1.0):
        # don't block on handle_request
        reads, writes, errors = (self,), (), ()
        reads, writes, errors = select.select(reads, writes, errors, timeout)
        if reads:
            BaseHTTPServer.HTTPServer.handle_request(self)


class Backend(BackendBase):
    def configure(self, host="localhost", port=8080, handler="HttpHandler", **kwargs):
        #module_name = "httphandlers"
        #module = __import__(module_name, {}, {}, [''])
        component_class = getattr(handlers, handler)
        self.server = HttpServer((host, int(port)), component_class)
        self.type = "HTTP"
        # set this backend in the server instance so it
        # can callback when a message is received
        self.server.backend = self

    def run (self):
        while self.running:
            msg = self.next_message()
            if msg:
                if handlers.msg_store.has_key(msg.connection.identity):
                    handlers.msg_store[msg.connection.identity].append(msg.text)
                else:
                    handlers.msg_store[msg.connection.identity] = []
                    handlers.msg_store[msg.connection.identity].append(msg.text)
            self.server.handle_request()
bsd-3-clause
Python
ad94ae60f418b6030be12d4e650eac5ddb33df4b
Hide vk_app_id setting in stage settings
sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/Rynda,sarutobi/ritmserdtsa
rynda/Rynda/settings/stage.py
rynda/Rynda/settings/stage.py
# coding: utf-8

import os

from .base import *

DEBUG = TEMPLATE_DEBUG = True

DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'devrynda',
        'USER': 'devrynda',
        'PASSWORD': 'RyndaDeveloper',
        'HOST': 'rynda.org',
        'PORT': '',
    }
}

STATIC_ROOT = os.path.join(get_env_var('STATIC_ROOT'), 'rynda', 'static')

EXTERNAL = True
# VK_APP_ID = get_env_var('vk_app_id')

try:
    from local_stage import *
except:
    pass
# coding: utf-8

import os

from .base import *

DEBUG = TEMPLATE_DEBUG = True

DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'devrynda',
        'USER': 'devrynda',
        'PASSWORD': 'RyndaDeveloper',
        'HOST': 'rynda.org',
        'PORT': '',
    }
}

STATIC_ROOT = os.path.join(get_env_var('STATIC_ROOT'), 'rynda', 'static')

EXTERNAL = True
VK_APP_ID = get_env_var('vk_app_id')

try:
    from local_stage import *
except:
    pass
mit
Python
bf336d99484cc3804f469631b513a927940ada30
Add scan_steps wrapper for scan_nd
NSLS-II-HXN/ipython_ophyd,NSLS-II-HXN/ipython_ophyd
profile_collection/startup/50-scans.py
profile_collection/startup/50-scans.py
# vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans

from bluesky.global_state import get_gs

gs = get_gs()
hxntools.scans.setup()

ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
d2scan = hxntools.scans.d2scan
a2scan = hxntools.scans.a2scan
scan_steps = hxntools.scans.scan_steps

gs.DETS = [zebra, sclr1, merlin1, xspress3, lakeshore2]

gs.TABLE_COLS = ['sclr1_ch2','sclr1_ch3', 'sclr1_ch4', 'sclr1_ch5_calc',
                 'ssx', 'ssy', 'ssz', 't_base', 't_sample', 't_vlens',
                 't_hlens']

# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_Cr'
gs.OVERPLOT = False

gs.BASELINE_DEVICES = [dcm, m1, m2, beamline_status, smll, vmll, hmll, ssa2, zp]
# vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans

from bluesky.global_state import get_gs

gs = get_gs()
hxntools.scans.setup()

ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
d2scan = hxntools.scans.d2scan
a2scan = hxntools.scans.a2scan

gs.DETS = [zebra, sclr1, merlin1, xspress3, lakeshore2]

gs.TABLE_COLS = ['sclr1_ch2','sclr1_ch3', 'sclr1_ch4', 'sclr1_ch5_calc',
                 'ssx', 'ssy', 'ssz', 't_base', 't_sample', 't_vlens',
                 't_hlens']

# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_Cr'
gs.OVERPLOT = False

gs.BASELINE_DEVICES = [smll,vmll, hmll, ssa2, zp]
bsd-2-clause
Python
a292e1fe8ec72355ce2bb3c1f99dd82d6f145438
Add path to homebrew-installed pkgconfig for Mac OS 10.8 (10.9 is symlinked to 10.8) #9
leiyangyou/sharp,cmtt/sharp,mhirsch/sharp,kevinsawicki/sharp,kevinsawicki/sharp,lovell/sharp,brandonaaron/sharp,mcanthony/sharp,lovell/sharp,digital-flowers/sharp-win32,papandreou/sharp,mhirsch/sharp,digital-flowers/sharp-win32,mcanthony/sharp,pporada-gl/sharp,digital-flowers/sharp-win32,papandreou/sharp,mdimitrov/sharp,pporada-gl/sharp,digital-flowers/sharp-win32,mdimitrov/sharp,dtest/sharp,lovell/sharp,mdimitrov/sharp,dtest/sharp,kevinsawicki/sharp,philip8728/sharp,lovell/sharp,papandreou/sharp,dtest/sharp,lovell/sharp,mdimitrov/sharp,pporada-gl/sharp,digital-flowers/sharp-win32,pporada-gl/sharp,chrisriley/sharp,dtest/sharp,philip8728/sharp,chrisriley/sharp,philip8728/sharp,mdimitrov/sharp,digital-flowers/sharp-win32,brandonaaron/sharp,papandreou/sharp,leiyangyou/sharp,mcanthony/sharp,philip8728/sharp,mcanthony/sharp,leiyangyou/sharp,cmtt/sharp,leiyangyou/sharp,cmtt/sharp,dtest/sharp,mhirsch/sharp,digital-flowers/sharp-win32,brandonaaron/sharp,chrisriley/sharp,pporada-gl/sharp,chrisriley/sharp,kevinsawicki/sharp,papandreou/sharp,chrisriley/sharp,leiyangyou/sharp,mhirsch/sharp,philip8728/sharp,cmtt/sharp,kevinsawicki/sharp,brandonaaron/sharp,brandonaaron/sharp,mcanthony/sharp,lovell/sharp
binding.gyp
binding.gyp
{
  'targets': [{
    'target_name': 'sharp',
    'sources': ['src/sharp.cc'],
    'libraries': [
      '<!@(PKG_CONFIG_PATH="/usr/local/Library/ENV/pkgconfig/10.8:/usr/local/lib/pkgconfig:/usr/lib/pkgconfig" pkg-config --libs vips)'
    ],
    'include_dirs': [
      '/usr/local/include/glib-2.0',
      '/usr/local/lib/glib-2.0/include',
      '/usr/include/glib-2.0',
      '/usr/lib/glib-2.0/include',
      '/usr/lib/x86_64-linux-gnu/glib-2.0/include'
    ],
    'cflags': ['-fexceptions', '-pedantic', '-Wall', '-O3'],
    'cflags_cc': ['-fexceptions', '-pedantic', '-Wall', '-O3']
  }]
}
{
  'targets': [{
    'target_name': 'sharp',
    'sources': ['src/sharp.cc'],
    'libraries': [
      '<!@(PKG_CONFIG_PATH="/usr/local/lib/pkgconfig" pkg-config --libs vips)',
      '<!@(PKG_CONFIG_PATH="/usr/lib/pkgconfig" pkg-config --libs vips)'
    ],
    'include_dirs': [
      '/usr/local/include/glib-2.0',
      '/usr/local/lib/glib-2.0/include',
      '/usr/include/glib-2.0',
      '/usr/lib/glib-2.0/include',
      '/usr/lib/x86_64-linux-gnu/glib-2.0/include'
    ],
    'cflags': ['-fexceptions', '-pedantic', '-Wall', '-O3'],
    'cflags_cc': ['-fexceptions', '-pedantic', '-Wall', '-O3']
  }]
}
apache-2.0
Python
88021aeb5e7c4d0f3a50333b3f77624ac718c03c
Use `ASM` mode on Linux in non-glibc environments
Icenium/node-fibers,laverdet/node-fibers,meteor/node-fibers,Icenium/node-fibers,laverdet/node-fibers,meteor/node-fibers,laverdet/node-fibers,meteor/node-fibers,meteor/node-fibers,laverdet/node-fibers,Icenium/node-fibers,Icenium/node-fibers
binding.gyp
binding.gyp
{
  'target_defaults': {
    'default_configuration': 'Release',
    'configurations': {
      'Release': {
        'cflags': [ '-O3' ],
        'xcode_settings': {
          'GCC_OPTIMIZATION_LEVEL': '3',
          'GCC_GENERATE_DEBUGGING_SYMBOLS': 'NO',
        },
        'msvs_settings': {
          'VCCLCompilerTool': {
            'Optimization': 3,
            'FavorSizeOrSpeed': 1,
          },
        },
      }
    },
  },
  'targets': [
    {
      'target_name': 'fibers',
      'sources': [
        'src/fibers.cc',
        'src/coroutine.cc',
        'src/libcoro/coro.c',
        # Rebuild on header changes
        'src/coroutine.h',
        'src/libcoro/coro.h',
      ],
      'cflags!': ['-ansi'],
      'conditions': [
        ['OS == "win"',
          {'defines': ['CORO_FIBER', 'WINDOWS']},
          # else
          {
            'defines': ['USE_CORO', 'CORO_GUARDPAGES=1'],
            'ldflags': ['-pthread'],
          }
        ],
        ['OS == "linux"', {
          'variables': {
            'USE_GLIBC': '<!(ldd --version 2>&1 | head -n 1 | grep -i "glibc" | wc -l)',
          },
          'conditions': [
            ['<(USE_GLIBC) == 1',
              {'defines': ['CORO_UCONTEXT'],},
              # no use glibc
              {'defines': ['CORO_ASM'],}
            ],
          ],
        },
        ],
        ['OS == "solaris" or OS == "sunos" or OS == "freebsd" or OS == "aix"', {'defines': ['CORO_UCONTEXT']}],
        ['OS == "mac"', {'defines': ['CORO_SJLJ']}],
        ['OS == "openbsd"', {'defines': ['CORO_ASM']}],
        ['target_arch == "arm"', {
            # There's been problems getting real fibers working on arm
            'defines': ['CORO_PTHREAD'],
            'defines!': ['CORO_UCONTEXT', 'CORO_SJLJ', 'CORO_ASM'],
          },
        ],
      ],
    },
  ],
}
{
  'target_defaults': {
    'default_configuration': 'Release',
    'configurations': {
      'Release': {
        'cflags': [ '-O3' ],
        'xcode_settings': {
          'GCC_OPTIMIZATION_LEVEL': '3',
          'GCC_GENERATE_DEBUGGING_SYMBOLS': 'NO',
        },
        'msvs_settings': {
          'VCCLCompilerTool': {
            'Optimization': 3,
            'FavorSizeOrSpeed': 1,
          },
        },
      }
    },
  },
  'targets': [
    {
      'target_name': 'fibers',
      'sources': [
        'src/fibers.cc',
        'src/coroutine.cc',
        'src/libcoro/coro.c',
        # Rebuild on header changes
        'src/coroutine.h',
        'src/libcoro/coro.h',
      ],
      'cflags!': ['-ansi'],
      'conditions': [
        ['OS == "win"',
          {'defines': ['CORO_FIBER', 'WINDOWS']},
          # else
          {
            'defines': ['USE_CORO', 'CORO_GUARDPAGES=1'],
            'ldflags': ['-pthread'],
          }
        ],
        ['OS == "linux" or OS == "solaris" or OS == "sunos" or OS == "freebsd" or OS == "aix"', {'defines': ['CORO_UCONTEXT']}],
        ['OS == "mac"', {'defines': ['CORO_SJLJ']}],
        ['OS == "openbsd"', {'defines': ['CORO_ASM']}],
        ['target_arch == "arm"', {
            # There's been problems getting real fibers working on arm
            'defines': ['CORO_PTHREAD'],
            'defines!': ['CORO_UCONTEXT', 'CORO_SJLJ', 'CORO_ASM'],
          },
        ],
      ],
    },
  ],
}
mit
Python
b5df30371e7f975311ed4e783e204a9e38f97b0a
Add OS conditions in binding.gyp file to fix build issues
charliegerard/Epoc.js,charliegerard/Epoc.js,charliegerard/Epoc.js,charliegerard/Epoc.js
binding.gyp
binding.gyp
{ "targets": [ { "target_name": "addon", "sources": [ "addon.cc", "myobject.cc" ], "conditions": [ ['OS=="mac"', { "cflags": [ "-m64" ], "ldflags": [ "-m64" ], "xcode_settings": { "OTHER_CFLAGS": ["-ObjC++"], "ARCHS": [ "x86_64" ] }, "link_settings": { "libraries": [ "/usr/local/lib/EmotivXavier-PREMIUM/libedk.dylib", "/usr/local/lib/EmotivXavier-PREMIUM/libedk.1.dylib", "/usr/local/lib/EmotivXavier-PREMIUM/libedk_ultils_mac.dylib", "/usr/local/lib/EmotivXavier-PREMIUM/libiomp5.dylib" ] } }], ['OS=="linux"', { "cflags": [ "-m64" ], "ldflags": [ "-m64" ], "xcode_settings": { "ARCHS": [ "x86_64" ] }, "link_settings": { "libraries": [ "/usr/local/lib/libedk.so.1", "/usr/local/lib/libhal.so.1", "/usr/local/lib/libedk_utils.so", "/usr/local/lib/libqwt.so.5" ] } }] ] } ] }
{ "targets": [ { "target_name": "addon", "sources": [ "addon.cc", "myobject.cc" ] } ] }
mit
Python
9b9359c06e44fe5a8f5f16f662fcea2ef3e8f18d
Remove delay load hook
trevnorris/node-ofe,trevnorris/node-ofe,trevnorris/node-ofe
binding.gyp
binding.gyp
{ "targets" : [{ "target_name" : "ofe", "sources" : [ "ofe.cc" ], "include_dirs": [ '<!(node -e "require(\'nan\')")' ], "win_delay_load_hook" : "false" }] }
{ "targets" : [{ "target_name" : "ofe", "sources" : [ "ofe.cc" ], "include_dirs": [ '<!(node -e "require(\'nan\')")' ] }] }
mit
Python
a30eb4a1eaa3a9677950c37f273e7ac16cae698f
Change init method
JOHNKYON/DSTC
DSTC2/basic.py
DSTC2/basic.py
# -*- coding:utf-8 -*-
from sklearn.cross_validation import train_test_split

from DSTC2.traindev.scripts import myLogger
from DSTC2.traindev.scripts.model import bp
from traindev.scripts import file_reader
from traindev.scripts import initializer
from traindev.scripts.initializer import Set

__author__ = "JOHNKYON"

global logger

if __name__ == "__main__":
    global logger
    logger = myLogger.myLogger("basic")
    logger.info("Starting basic")
    # Select mode
    dataset = file_reader.get_dataset("dstc2_debug")

    logger.info("token check test begin")
    raw = initializer.raw_initializer(dataset)

    # Build token and dictionary
    token = initializer.token_initializer(raw["input"])
    dictionary = initializer.dictionary_initializer(token)

    # Build input vector
    one_set = Set(token, dictionary, raw["output"])

    input_mtr, output_mtr = bp.bp_initialize(one_set.input_mtr, one_set.output_mtr)

    # get model
    model = bp.bp_builder(one_set.dimension * one_set.sentence_dim, len(one_set.act_dict) * one_set.sentence_dim)

    # train
    X_train, X_test, y_train, y_test = train_test_split(input_mtr, output_mtr, test_size=0.2)
    model.fit(X_train, y_train, batch_size=2, nb_epoch=5)

    # test
    print model.evaluate(X_test, y_test, batch_size=2)
# -*- coding:utf-8 -*-
from sklearn.cross_validation import train_test_split

from DSTC2.traindev.scripts import myLogger
from DSTC2.traindev.scripts.model import bp
from traindev.scripts import file_reader
from traindev.scripts import initializer
from traindev.scripts.initializer import Set

__author__ = "JOHNKYON"

global logger

if __name__ == "__main__":
    global logger
    logger = myLogger.myLogger("basic")
    logger.info("Starting basic")
    # Select mode
    dataset = file_reader.get_dataset("dstc2_debug")

    logger.info("token check test begin")
    raw = initializer.raw_initializer(dataset)

    # Build token and dictionary
    token = initializer.token_initializer(raw["input"])
    dictionary = initializer.dictionary_initializer(token)

    # Build input vector
    one_set = Set(token, dictionary, raw["output"])

    # get model
    model = bp.bp_builder(one_set.dimension * one_set.sentence_dim, len(one_set.act_dict) * one_set.sentence_dim)

    # train
    X_train, X_test, y_train, y_test = train_test_split(one_set.input_mtr, one_set.output_mtr, test_size=0.2)
    model.fit(X_train, y_train, batch_size=2, nb_epoch=5)

    # test
    print model.evaluate(X_test, y_test, batch_size=2)
mit
Python
e36054ab878b10d9e2bc0b21a21d589a16945449
Add -Wno-unused-function to xcode flags
zbjornson/node-bswap,zbjornson/node-bswap,zbjornson/node-bswap
binding.gyp
binding.gyp
{ "targets": [ { "target_name": "bswap", "sources": [ "src/bswap.cc" ], "include_dirs" : [ "<!(node -e \"require('nan')\")" ], "cflags":[ "-march=native", "-falign-loops=32", # See readme; significant improvement for some cases "-Wno-unused-function", # CPU feature detection only used on Win "-Wno-unused-const-variable", # cpuid regs "-Wno-cast-function-type" # https://github.com/nodejs/nan/issues/807 ], "msvs_settings": { "VCCLCompilerTool": { "EnableEnhancedInstructionSet": 3 # /arch:AVX # 0-not set, 1-sse, 2-sse2, 3-avx, 4-ia32, 5-avx2 } }, "xcode_settings": { "OTHER_CPLUSPLUSFLAGS": [ "-march=native", "-Wno-unused-function", # CPU feature detection only used on Win "-Wno-unused-const-variable" ] } } ] }
{ "targets": [ { "target_name": "bswap", "sources": [ "src/bswap.cc" ], "include_dirs" : [ "<!(node -e \"require('nan')\")" ], "cflags":[ "-march=native", "-falign-loops=32", # See readme; significant improvement for some cases "-Wno-unused-function", # CPU feature detection only used on Win "-Wno-unused-const-variable", # cpuid regs "-Wno-cast-function-type" # https://github.com/nodejs/nan/issues/807 ], "msvs_settings": { "VCCLCompilerTool": { "EnableEnhancedInstructionSet": 3 # /arch:AVX # 0-not set, 1-sse, 2-sse2, 3-avx, 4-ia32, 5-avx2 } }, "xcode_settings": { "OTHER_CPLUSPLUSFLAGS": [ "-march=native", "-Wno-unused-const-variable" ] } } ] }
mit
Python
d749361a7ee14b96c2235300b15fbba1222a6a9c
remove comment.
snogcel/bitcore-node-dash,jameswalpole/bitcore-node,fanatid/bitcore-node,pnagurny/bitcoind.js,kleetus/bitcore-node,eXcomm/bitcoind.js,wzrdtales/bitcore-node,bitpay/bitcoind.js,bitpay/bitcoind.js,isghe/bitcore-node,CryptArc/bitcore-node,snogcel/bitcore-node-dash,studio666/bitcoind.js,bitpay/bitcoind.js,pnagurny/bitcoind.js,phplaboratory/psiacore-node,bankonme/bitcoind.js,zcoinrocks/bitcore-node,wzrdtales/bitcore-node,eXcomm/bitcoind.js,eXcomm/bitcoind.js,wzrdtales/bitcore-node,bankonme/bitcoind.js,bitpay/bitcoind.js,kleetus/bitcoind.js,kleetus/bitcore-node,kleetus/bitcoind.js,braydonf/bitcoind.js,studio666/bitcoind.js,studio666/bitcoind.js,fanatid/bitcore-node,jameswalpole/bitcore-node,braydonf/bitcoind.js,pnagurny/bitcoind.js,bitpay/bitcoind.js,isghe/bitcore-node,fanatid/bitcore-node,phplaboratory/psiacore-node,braydonf/bitcore-node,studio666/bitcoind.js,wzrdtales/bitcore-node,kleetus/bitcoind.js,wzrdtales/bitcore-node,bankonme/bitcoind.js,isghe/bitcore-node,eXcomm/bitcoind.js,phplaboratory/psiacore-node,kleetus/bitcoind.js,braydonf/bitcore-node,braydonf/bitcoind.js,CryptArc/bitcore-node,studio666/bitcoind.js,CryptArc/bitcore-node,CryptArc/bitcore-node,pnagurny/bitcoind.js,isghe/bitcore-node,kleetus/bitcoind.js,zcoinrocks/bitcore-node,fanatid/bitcore-node,braydonf/bitcoind.js,phplaboratory/psiacore-node,isghe/bitcore-node,eXcomm/bitcoind.js,jameswalpole/bitcore-node,phplaboratory/psiacore-node,bankonme/bitcoind.js,pnagurny/bitcoind.js,jameswalpole/bitcore-node,braydonf/bitcoind.js,fanatid/bitcore-node,jameswalpole/bitcore-node,CryptArc/bitcore-node,bankonme/bitcoind.js
binding.gyp
binding.gyp
{
  'targets': [{
    'target_name': 'bitcoindjs',
    'variables': {
      'BOOST_INCLUDE': '<!(test -n "$BOOST_INCLUDE"'\
        ' && echo "$BOOST_INCLUDE"'\
        ' || test -e /usr/include/boost && echo /usr/include/boost' \
        ' || echo ./include)',
      'LEVELDB_INCLUDE': '<!(test -n "$LEVELDB_INCLUDE"'\
        ' && echo "$LEVELDB_INCLUDE"'\
        ' || test "$BITCOIN_DIR" && echo "${BITCOIN_DIR}/src/leveldb/include"' \
        ' || echo ./include)',
      'BITCOIN_DIR': '<!(test -n "$BITCOIN_DIR"'\
        ' && echo "$BITCOIN_DIR"'\
        ' || echo "${HOME}/bitcoin")',
      'LIBBITCOIND': '<!(./platform/os.sh)',
    },
    'defines': [
      'ENABLE_WALLET=1',
    ],
    'include_dirs' : [
      '<(BOOST_INCLUDE)',
      '<(LEVELDB_INCLUDE)',
      '<(BITCOIN_DIR)/src',
      '<!(node -e "require(\'nan\')")',
    ],
    'sources': [
      './src/bitcoindjs.cc',
    ],
    'cflags_cc': [
      '-fexceptions',
      '-frtti',
      '-fpermissive',
    ],
    'libraries': [
      '-lboost_system',
      '-lboost_filesystem',
      '-lboost_program_options',
      '-lboost_thread',
      '-lboost_chrono',
      '-lsecp256k1',
      '<(LIBBITCOIND)',
    ]
  }]
}
{
  'targets': [{
    'target_name': 'bitcoindjs',
    'variables': {
      'BOOST_INCLUDE': '<!(test -n "$BOOST_INCLUDE"'\
        ' && echo "$BOOST_INCLUDE"'\
        ' || test -e /usr/include/boost && echo /usr/include/boost' \
        ' || echo ./include)',
      'LEVELDB_INCLUDE': '<!(test -n "$LEVELDB_INCLUDE"'\
        ' && echo "$LEVELDB_INCLUDE"'\
        ' || test "$BITCOIN_DIR" && echo "${BITCOIN_DIR}/src/leveldb/include"' \
        ' || echo ./include)',
      'BITCOIN_DIR': '<!(test -n "$BITCOIN_DIR"'\
        ' && echo "$BITCOIN_DIR"'\
        ' || echo "${HOME}/bitcoin")',
      'LIBBITCOIND': '<!(./platform/os.sh)',
    },
    'defines': [
      'ENABLE_WALLET=1',
    ],
    'include_dirs' : [
      '<(BOOST_INCLUDE)',
      '<(LEVELDB_INCLUDE)',
      '<(BITCOIN_DIR)/src',
      '<!(node -e "require(\'nan\')")',
    ],
    'sources': [
      './src/bitcoindjs.cc',
    ],
    'cflags_cc': [
      '-fexceptions',
      '-frtti',
      '-fpermissive',
    ],
    'libraries': [
      '-lboost_system',
      '-lboost_filesystem',
      '-lboost_program_options',
      '-lboost_thread',
      '-lboost_chrono',
      # XXX NEW
      '-lsecp256k1',
      '<(LIBBITCOIND)',
    ]
  }]
}
mit
Python
539f575832244e426d768b0901113a1e45b25f3f
Modify Python extension setup script
neeraj9/zfor,neeraj9/zfor,chaoslawful/zfor,neeraj9/zfor,neeraj9/zfor,chaoslawful/zfor,chaoslawful/zfor,chaoslawful/zfor,neeraj9/zfor
src/python_zfor/setup.py
src/python_zfor/setup.py
#!/usr/bin/env python
from distutils.core import setup, Extension

zformod = Extension(
    'zfor',
    sources = ['src/zfor.c'],
    include_dirs = ['../libzfor'],
    library_dirs = ['/usr/local/lib', '../libzfor'],
    libraries = ['zfor']
)

setup(
    name = 'zfor',
    version = '0.1',
    description = 'Python zfor binding',
    author = ['Chris Goffinet'],
    author_email = ['[email protected]'],
    packages = [
        'zfor',
    ],
    package_dir = {'zfor' : 'src'},
    ext_modules = [zformod],
)

# vim:ft=python ts=4 sw=4 et
#!/usr/bin/env python
from distutils.core import setup, Extension

zformod = Extension('zfor',
    sources = ['src/zfor.c'],
    library_dirs = ['/usr/local/lib'],
    libraries = ['zfor']
)

setup(name = 'zfor',
    version = '0.1',
    description = 'Python zfor binding',
    author = ['Chris Goffinet'],
    author_email = ['[email protected]'],
    packages = [
        'zfor',
    ],
    package_dir = {'zfor' : 'src'},
    ext_modules = [zformod],
)
bsd-3-clause
Python
e346f70eb34a029642410a92e449915801d9f78f
use relative import
Crossway/antimarkdown,Crossway/antimarkdown
antimarkdown/__init__.py
antimarkdown/__init__.py
# -*- coding: utf-8 -*- """antimarkdown -- convert Markdown to HTML. """ from lxml import html from lxml.builder import E from . import handlers default_safe_tags = set('p blockquote i em strong b u a h1 h2 h3 h4 h5 h6 hr pre code div br img ul ol li span'.split()) default_safe_attrs = set('href src alt style title'.split()) def to_markdown(html_string, safe_tags=None, safe_attrs=None): """Convert the given HTML text fragment to Markdown. """ # out = StringIO() # for f in parse_fragments(html_string, safe_tags=None, safe_attrs=None): # handlers.process_tag_events(f, out) # return normalize(out.getvalue()) return handlers.render(*parse_fragments(html_string, safe_tags)) def parse_fragments(html_string, safe_tags=None, safe_attrs=None): """Parse HTML fragments from the given HTML fragment string. """ for f in html.fragments_fromstring(html_string): cf = clean_fragment(f, safe_tags=safe_tags, safe_attrs=safe_attrs) if cf is not None: yield cf def clean_fragment(subtree, safe_tags=None, safe_attrs=None): """Clean an HTML fragment subtree of unsafe tags and attrs. """ if isinstance(subtree, str): return E('p', subtree) if safe_tags is None: safe_tags = default_safe_tags if safe_attrs is None: safe_attrs = default_safe_attrs if subtree.tag not in safe_tags: if callable(subtree.tag): # A comment... return None p = html.Element('p') p.append(subtree) subtree = p for el in list(subtree.iter()): if el.tag not in safe_tags: el.drop_tag() else: for attr in list(el.attrib.keys()): if attr not in safe_attrs: el.attrib.pop(attr) return subtree
# -*- coding: utf-8 -*- """antimarkdown -- convert Markdown to HTML. """ from lxml import html from lxml.builder import E import handlers default_safe_tags = set('p blockquote i em strong b u a h1 h2 h3 h4 h5 h6 hr pre code div br img ul ol li span'.split()) default_safe_attrs = set('href src alt style title'.split()) def to_markdown(html_string, safe_tags=None, safe_attrs=None): """Convert the given HTML text fragment to Markdown. """ # out = StringIO() # for f in parse_fragments(html_string, safe_tags=None, safe_attrs=None): # handlers.process_tag_events(f, out) # return normalize(out.getvalue()) return handlers.render(*parse_fragments(html_string, safe_tags)) def parse_fragments(html_string, safe_tags=None, safe_attrs=None): """Parse HTML fragments from the given HTML fragment string. """ for f in html.fragments_fromstring(html_string): cf = clean_fragment(f, safe_tags=safe_tags, safe_attrs=safe_attrs) if cf is not None: yield cf def clean_fragment(subtree, safe_tags=None, safe_attrs=None): """Clean an HTML fragment subtree of unsafe tags and attrs. """ if isinstance(subtree, str): return E('p', subtree) if safe_tags is None: safe_tags = default_safe_tags if safe_attrs is None: safe_attrs = default_safe_attrs if subtree.tag not in safe_tags: if callable(subtree.tag): # A comment... return None p = html.Element('p') p.append(subtree) subtree = p for el in list(subtree.iter()): if el.tag not in safe_tags: el.drop_tag() else: for attr in list(el.attrib.keys()): if attr not in safe_attrs: el.attrib.pop(attr) return subtree
mit
Python
0a725db8e8d7f1e73a84fb0d0acc181603e706cb
Refactor test for readability
hyesun03/k-board,kboard/kboard,hyesun03/k-board,guswnsxodlf/k-board,guswnsxodlf/k-board,guswnsxodlf/k-board,kboard/kboard,hyesun03/k-board,kboard/kboard,darjeeling/k-board
kboard/functional_test/test_post_delete.py
kboard/functional_test/test_post_delete.py
from .base import FunctionalTest, login_test_user_with_browser, NotFoundPostError


class DeletePostTest(FunctionalTest):
    @login_test_user_with_browser
    def test_delete_post(self):
        # Jihun moves to the default board to check that post deletion works correctly.
        self.move_to_default_board()

        # He writes a post about 'django' and a post about 'spring'.
        self.add_post(title='django', content='Hello django')
        self.add_post(title='spring', content='Hello spring')

        # Later he decides he doesn't like the 'spring' post and wants to delete it.
        # He clicks the 'spring' post to open its page,
        try:
            self.open_post(title='spring')
        except NotFoundPostError as notFoundPostError:
            self.fail(notFoundPostError.message)

        # then presses the 'Delete' button.
        self.browser.find_element_by_id('id_delete_post_button').click()

        # The 'spring' post is deleted and no longer appears in the list.
        self.assertPostNotIn('spring')

        # The 'django' post is not deleted and remains in place.
        self.assertPostIn('django')
from .base import FunctionalTest, login_test_user_with_browser, NotFoundPostError


class DeletePostTest(FunctionalTest):
    @login_test_user_with_browser
    def test_delete_post(self):
        # Jihun moves to the default board to check that post deletion works correctly.
        self.move_to_default_board()

        # He writes a post about 'django' and a post about 'spring'.
        self.add_post(title='django', content='Hello django')
        self.add_post(title='spring', content='Hello spring')

        # Later he decides he doesn't like the 'spring' post and deletes it.
        # He clicks the 'spring' post to open its page, then presses the 'Delete' button.
        try:
            self.open_post(title='spring')
        except NotFoundPostError as notFoundPostError:
            self.fail(notFoundPostError.message)

        delete_post_button = self.browser.find_element_by_id('id_delete_post_button')
        delete_post_button.click()

        # The 'spring' post is deleted and no longer appears in the list.
        self.assertPostNotIn('spring')

        # The 'django' post is not deleted and remains in place.
        self.assertPostIn('django')
mit
Python
bbd7266a9e228ac111497b12d00ea71b3e0f4f5a
fix imports
xahgmah/edx-proctoring,edx/edx-proctoring,edx/edx-proctoring,edx/edx-proctoring,xahgmah/edx-proctoring,xahgmah/edx-proctoring
edx_proctoring/management/commands/set_attempt_status.py
edx_proctoring/management/commands/set_attempt_status.py
""" Django management command to manually set the attempt status for a user in a proctored exam """ from optparse import make_option from django.core.management.base import BaseCommand from edx_proctoring.models import ProctoredExamStudentAttemptStatus class Command(BaseCommand): """ Django Management command to force a background check of all possible notifications """ option_list = BaseCommand.option_list + ( make_option('-e', '--exam', metavar='EXAM_ID', dest='exam_id', help='exam_id to change'), make_option('-u', '--user', metavar='USER', dest='user_id', help="user_id of user to affect"), make_option('-t', '--to', metavar='TO_STATUS', dest='to_status', help='the status to set'), ) def handle(self, *args, **options): """ Management command entry point, simply call into the signal firiing """ from edx_proctoring.api import ( update_attempt_status, get_exam_by_id ) exam_id = options['exam_id'] user_id = options['user_id'] to_status = options['to_status'] msg = ( 'Running management command to update user {user_id} ' 'attempt status on exam_id {exam_id} to {to_status}'.format( user_id=user_id, exam_id=exam_id, to_status=to_status ) ) print msg if not ProctoredExamStudentAttemptStatus.is_valid_status(to_status): raise Exception('{to_status} is not a valid attempt status!'.format(to_status=to_status)) # get exam, this will throw exception if does not exist, so let it bomb out get_exam_by_id(exam_id) update_attempt_status(exam_id, user_id, to_status) print 'Completed!'
""" Django management command to manually set the attempt status for a user in a proctored exam """ from optparse import make_option from django.core.management.base import BaseCommand from edx_proctoring.api import ( update_attempt_status, get_exam_by_id ) from edx_proctoring.models import ProctoredExamStudentAttemptStatus class Command(BaseCommand): """ Django Management command to force a background check of all possible notifications """ option_list = BaseCommand.option_list + ( make_option('-e', '--exam', metavar='EXAM_ID', dest='exam_id', help='exam_id to change'), make_option('-u', '--user', metavar='USER', dest='user', help="user_id of user to affect"), make_option('-t', '--to', metavar='TO_STATUS', dest='to_status', help='the status to set'), ) def handle(self, *args, **options): """ Management command entry point, simply call into the signal firiing """ exam_id = options['exam_id'] user_id = options['user_id'] to_status = options['to_status'] msg = ( 'Running management command to update user {user_id} ' 'attempt status on exam_id {exam_id} to {to_status}'.format( user_id=user_id, exam_id=exam_id, to_status=to_status ) ) print msg if not ProctoredExamStudentAttemptStatus.is_valid_status(to_status): raise Exception('{to_status} is not a valid attempt status!'.format(to_status=to_status)) # get exam, this will throw exception if does not exist, so let it bomb out get_exam_by_id(exam_id) update_attempt_status(exam_id, user_id, to_status) print 'Completed!'
agpl-3.0
Python
9c0d62d7b08d63b7daf338a16fc34896856aefb2
Add test code for encoding the password in the PostgreSQL URI
frankyrumple/smc,frankyrumple/smc,frankyrumple/smc,frankyrumple/smc
controllers/lms.py
controllers/lms.py
import sys
import os
import subprocess
import urllib

from gluon import current

import paramiko

from ednet.ad import AD
from ednet.canvas import Canvas

from ednet.appsettings import AppSettings

# Needed for remote connection?
auth.settings.allow_basic_login = True
#auth.settings.actions_disabled.append('login')
#auth.settings.allow_basic_login_only = True
#auth.settings.actions.login_url=URL('your_own_error_page')


@auth.requires_membership("Administrators")
def test():
    try:
        canvas_db_pw = str(os.environ["IT_PW"]) + ""
    except KeyError as ex:
        # IT_PW not set?
        canvas_db_pw = "<IT_PW_NOT_SET>"

    db_canvas = None
    err = None
    try:
        db_canvas = DAL('postgres://postgres:' + urllib.quote_plus(canvas_db_pw) + '@postgresql/canvas_production',
                        decode_credentials=True, migrate=False)
    except RuntimeError as ex:
        # Error connecting, move on and return None
        db_canvas = None
        err = str(ex)

    return dict(db_canvas=db_canvas, err=err)


@auth.requires_membership("Administrators")
def credential_student():
    response.view = 'generic.json'
    db = current.db

    key = ""
    msg = ""
    hash = ""
    user_name = ""
    full_name = ""

    # Get the user in question
    if len(request.args) > 0:
        user_name = request.args[0]

    if user_name is not None:
        # First - does the user exist?
        user_exists = False
        rows = db(db.auth_user.username == user_name).select(db.auth_user.id)
        for row in rows:
            user_exists = True

        if user_exists is True:
            key, msg, hash, full_name = Canvas.EnsureStudentAccessToken(user_name)
        else:
            # User doesn't exit!
            msg = "Invalid User!"

    return dict(key=key, msg=msg, hash=hash, full_name=full_name)


def get_firewall_list():
    response.view = 'default/index.json'
    db = current.db

    rs = db(db.ope_laptop_firewall_rules).select(db.ope_laptop_firewall_rules.ALL).as_list()

    return response.json(rs)
import sys
import os
import subprocess

from gluon import current

import paramiko

from ednet.ad import AD
from ednet.canvas import Canvas

from ednet.appsettings import AppSettings

# Needed for remote connection?
auth.settings.allow_basic_login = True
#auth.settings.actions_disabled.append('login')
#auth.settings.allow_basic_login_only = True
#auth.settings.actions.login_url=URL('your_own_error_page')


@auth.requires_membership("Administrators")
def credential_student():
    response.view = 'generic.json'
    db = current.db

    key = ""
    msg = ""
    hash = ""
    user_name = ""
    full_name = ""

    # Get the user in question
    if len(request.args) > 0:
        user_name = request.args[0]

    if user_name is not None:
        # First - does the user exist?
        user_exists = False
        rows = db(db.auth_user.username == user_name).select(db.auth_user.id)
        for row in rows:
            user_exists = True

        if user_exists is True:
            key, msg, hash, full_name = Canvas.EnsureStudentAccessToken(user_name)
        else:
            # User doesn't exit!
            msg = "Invalid User!"

    return dict(key=key, msg=msg, hash=hash, full_name=full_name)


def get_firewall_list():
    response.view = 'default/index.json'
    db = current.db

    rs = db(db.ope_laptop_firewall_rules).select(db.ope_laptop_firewall_rules.ALL).as_list()

    return response.json(rs)
mit
Python
a619f703b2d259877e30d3e1ede11813c014f3ad
Fix the AvailableActionsPrinter to support the new multiplayer action spec.
deepmind/pysc2
pysc2/env/available_actions_printer.py
pysc2/env/available_actions_printer.py
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An env wrapper to print the available actions."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from pysc2.env import base_env_wrapper


class AvailableActionsPrinter(base_env_wrapper.BaseEnvWrapper):
  """An env wrapper to print the available actions."""

  def __init__(self, env):
    super(AvailableActionsPrinter, self).__init__(env)
    self._seen = set()
    self._action_spec = self.action_spec()[0]

  def step(self, *args, **kwargs):
    all_obs = super(AvailableActionsPrinter, self).step(*args, **kwargs)
    for obs in all_obs:
      for avail in obs.observation["available_actions"]:
        if avail not in self._seen:
          self._seen.add(avail)
          self._print(self._action_spec.functions[avail].str(True))
    return all_obs

  def _print(self, s):
    print(s)
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An env wrapper to print the available actions."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from pysc2.env import base_env_wrapper


class AvailableActionsPrinter(base_env_wrapper.BaseEnvWrapper):
  """An env wrapper to print the available actions."""

  def __init__(self, env):
    super(AvailableActionsPrinter, self).__init__(env)
    self._seen = set()
    self._action_spec = self.action_spec()

  def step(self, *args, **kwargs):
    all_obs = super(AvailableActionsPrinter, self).step(*args, **kwargs)
    for obs in all_obs:
      for avail in obs.observation["available_actions"]:
        if avail not in self._seen:
          self._seen.add(avail)
          self._print(self._action_spec.functions[avail].str(True))
    return all_obs

  def _print(self, s):
    print(s)
apache-2.0
Python