commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
b1815075ac1a1697c99a6293c8cc7719060ab9b2 | Add cpuspeed sensor | oandrew/home-assistant,hmronline/home-assistant,emilhetty/home-assistant,molobrakos/home-assistant,ct-23/home-assistant,Julian/home-assistant,srcLurker/home-assistant,GenericStudent/home-assistant,titilambert/home-assistant,ma314smith/home-assistant,ct-23/home-assistant,aoakeson/home-assistant,adrienbrault/home-assistant,sffjunkie/home-assistant,bdfoster/blumate,nugget/home-assistant,sfam/home-assistant,leoc/home-assistant,ewandor/home-assistant,ct-23/home-assistant,turbokongen/home-assistant,tchellomello/home-assistant,devdelay/home-assistant,morphis/home-assistant,persandstrom/home-assistant,florianholzapfel/home-assistant,miniconfig/home-assistant,bdfoster/blumate,philipbl/home-assistant,instantchow/home-assistant,sander76/home-assistant,keerts/home-assistant,robbiet480/home-assistant,Zac-HD/home-assistant,postlund/home-assistant,florianholzapfel/home-assistant,shaftoe/home-assistant,philipbl/home-assistant,jamespcole/home-assistant,mKeRix/home-assistant,srcLurker/home-assistant,robjohnson189/home-assistant,kyvinh/home-assistant,tinloaf/home-assistant,kennedyshead/home-assistant,sdague/home-assistant,turbokongen/home-assistant,instantchow/home-assistant,luxus/home-assistant,florianholzapfel/home-assistant,xifle/home-assistant,DavidLP/home-assistant,w1ll1am23/home-assistant,PetePriority/home-assistant,ewandor/home-assistant,betrisey/home-assistant,badele/home-assistant,HydrelioxGitHub/home-assistant,miniconfig/home-assistant,nugget/home-assistant,Julian/home-assistant,sffjunkie/home-assistant,stefan-jonasson/home-assistant,jabesq/home-assistant,robjohnson189/home-assistant,nkgilley/home-assistant,florianholzapfel/home-assistant,aoakeson/home-assistant,miniconfig/home-assistant,mikaelboman/home-assistant,philipbl/home-assistant,mikaelboman/home-assistant,jnewland/home-assistant,varunr047/homefile,instantchow/home-assistant,jnewland/home-assistant,luxus/home-assistant,DavidLP/home-assistant,srcLurker/h
ome-assistant,oandrew/home-assistant,MungoRae/home-assistant,hexxter/home-assistant,postlund/home-assistant,Smart-Torvy/torvy-home-assistant,leoc/home-assistant,dmeulen/home-assistant,justyns/home-assistant,emilhetty/home-assistant,JshWright/home-assistant,persandstrom/home-assistant,auduny/home-assistant,toddeye/home-assistant,luxus/home-assistant,deisi/home-assistant,tboyce1/home-assistant,Teagan42/home-assistant,jabesq/home-assistant,tinloaf/home-assistant,jaharkes/home-assistant,tboyce1/home-assistant,FreekingDean/home-assistant,jabesq/home-assistant,happyleavesaoc/home-assistant,emilhetty/home-assistant,partofthething/home-assistant,home-assistant/home-assistant,robbiet480/home-assistant,kyvinh/home-assistant,aequitas/home-assistant,Smart-Torvy/torvy-home-assistant,caiuspb/home-assistant,stefan-jonasson/home-assistant,nevercast/home-assistant,jawilson/home-assistant,sander76/home-assistant,HydrelioxGitHub/home-assistant,miniconfig/home-assistant,morphis/home-assistant,coteyr/home-assistant,shaftoe/home-assistant,tchellomello/home-assistant,JshWright/home-assistant,philipbl/home-assistant,nugget/home-assistant,keerts/home-assistant,nevercast/home-assistant,MungoRae/home-assistant,home-assistant/home-assistant,JshWright/home-assistant,Theb-1/home-assistant,qedi-r/home-assistant,keerts/home-assistant,joopert/home-assistant,rohitranjan1991/home-assistant,xifle/home-assistant,sffjunkie/home-assistant,stefan-jonasson/home-assistant,nevercast/home-assistant,fbradyirl/home-assistant,caiuspb/home-assistant,Zyell/home-assistant,mKeRix/home-assistant,eagleamon/home-assistant,mikaelboman/home-assistant,varunr047/homefile,jnewland/home-assistant,leoc/home-assistant,justyns/home-assistant,coteyr/home-assistant,auduny/home-assistant,LinuxChristian/home-assistant,alexmogavero/home-assistant,Zac-HD/home-assistant,lukas-hetzenecker/home-assistant,pschmitt/home-assistant,soldag/home-assistant,balloob/home-assistant,varunr047/homefile,auduny/home-assistant,jamespcole/home-assistan
t,nnic/home-assistant,lukas-hetzenecker/home-assistant,happyleavesaoc/home-assistant,tboyce1/home-assistant,partofthething/home-assistant,shaftoe/home-assistant,rohitranjan1991/home-assistant,betrisey/home-assistant,Theb-1/home-assistant,tboyce021/home-assistant,aronsky/home-assistant,hexxter/home-assistant,hmronline/home-assistant,Danielhiversen/home-assistant,devdelay/home-assistant,eagleamon/home-assistant,oandrew/home-assistant,mikaelboman/home-assistant,robjohnson189/home-assistant,hmronline/home-assistant,deisi/home-assistant,FreekingDean/home-assistant,sfam/home-assistant,MartinHjelmare/home-assistant,LinuxChristian/home-assistant,bdfoster/blumate,toddeye/home-assistant,molobrakos/home-assistant,hmronline/home-assistant,keerts/home-assistant,fbradyirl/home-assistant,tboyce1/home-assistant,w1ll1am23/home-assistant,Zac-HD/home-assistant,open-homeautomation/home-assistant,molobrakos/home-assistant,ct-23/home-assistant,eagleamon/home-assistant,oandrew/home-assistant,fbradyirl/home-assistant,betrisey/home-assistant,aequitas/home-assistant,sfam/home-assistant,open-homeautomation/home-assistant,Julian/home-assistant,sffjunkie/home-assistant,alexmogavero/home-assistant,joopert/home-assistant,Cinntax/home-assistant,Smart-Torvy/torvy-home-assistant,mKeRix/home-assistant,Theb-1/home-assistant,devdelay/home-assistant,balloob/home-assistant,jamespcole/home-assistant,MungoRae/home-assistant,varunr047/homefile,JshWright/home-assistant,leoc/home-assistant,kyvinh/home-assistant,MartinHjelmare/home-assistant,titilambert/home-assistant,soldag/home-assistant,alexmogavero/home-assistant,sdague/home-assistant,MartinHjelmare/home-assistant,emilhetty/home-assistant,MungoRae/home-assistant,PetePriority/home-assistant,ma314smith/home-assistant,jaharkes/home-assistant,jaharkes/home-assistant,persandstrom/home-assistant,nnic/home-assistant,open-homeautomation/home-assistant,badele/home-assistant,ma314smith/home-assistant,HydrelioxGitHub/home-assistant,bdfoster/blumate,coteyr/home-assist
ant,srcLurker/home-assistant,dmeulen/home-assistant,Duoxilian/home-assistant,Zyell/home-assistant,GenericStudent/home-assistant,open-homeautomation/home-assistant,adrienbrault/home-assistant,MungoRae/home-assistant,Duoxilian/home-assistant,happyleavesaoc/home-assistant,caiuspb/home-assistant,DavidLP/home-assistant,LinuxChristian/home-assistant,nkgilley/home-assistant,balloob/home-assistant,Zyell/home-assistant,ewandor/home-assistant,xifle/home-assistant,betrisey/home-assistant,xifle/home-assistant,aoakeson/home-assistant,alexmogavero/home-assistant,happyleavesaoc/home-assistant,kennedyshead/home-assistant,tboyce021/home-assistant,sffjunkie/home-assistant,PetePriority/home-assistant,leppa/home-assistant,morphis/home-assistant,shaftoe/home-assistant,tinloaf/home-assistant,Zac-HD/home-assistant,Duoxilian/home-assistant,aequitas/home-assistant,rohitranjan1991/home-assistant,mikaelboman/home-assistant,bdfoster/blumate,hexxter/home-assistant,mezz64/home-assistant,eagleamon/home-assistant,LinuxChristian/home-assistant,deisi/home-assistant,stefan-jonasson/home-assistant,jaharkes/home-assistant,emilhetty/home-assistant,robjohnson189/home-assistant,morphis/home-assistant,nnic/home-assistant,dmeulen/home-assistant,Teagan42/home-assistant,deisi/home-assistant,devdelay/home-assistant,Smart-Torvy/torvy-home-assistant,aronsky/home-assistant,LinuxChristian/home-assistant,kyvinh/home-assistant,justyns/home-assistant,mKeRix/home-assistant,hexxter/home-assistant,hmronline/home-assistant,varunr047/homefile,ma314smith/home-assistant,leppa/home-assistant,Danielhiversen/home-assistant,ct-23/home-assistant,qedi-r/home-assistant,dmeulen/home-assistant,badele/home-assistant,mezz64/home-assistant,Julian/home-assistant,jawilson/home-assistant,pschmitt/home-assistant,Cinntax/home-assistant,Duoxilian/home-assistant,deisi/home-assistant | homeassistant/components/sensor/cpuspeed.py | homeassistant/components/sensor/cpuspeed.py | """
homeassistant.components.sensor.cpuspeed
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Shows the current CPU speed.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.cpuspeed.html
"""
import logging
from homeassistant.helpers.entity import Entity
REQUIREMENTS = ['py-cpuinfo==0.1.6']
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "CPU speed"
ATTR_VENDOR = 'Vendor ID'
ATTR_BRAND = 'Brand'
ATTR_HZ = 'GHz Advertised'
# pylint: disable=unused-variable
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the CPU speed sensor. """
try:
import cpuinfo # noqa
except ImportError:
_LOGGER.exception(
"Unable to import cpuinfo. "
"Did you maybe not install the 'py-cpuinfo' package?")
return False
add_devices([CpuSpeedSensor(config.get('name', DEFAULT_NAME))])
class CpuSpeedSensor(Entity):
""" A CPU info sensor. """
def __init__(self, name):
self._name = name
self._state = None
self._unit_of_measurement = 'GHz'
self.update()
@property
def name(self):
return self._name
@property
def state(self):
""" Returns the state of the device. """
return self._state
@property
def unit_of_measurement(self):
return self._unit_of_measurement
@property
def state_attributes(self):
""" Returns the state attributes. """
if self.info is not None:
return {
ATTR_VENDOR: self.info['vendor_id'],
ATTR_BRAND: self.info['brand'],
ATTR_HZ: round(self.info['hz_advertised_raw'][0]/10**9, 2)
}
def update(self):
""" Gets the latest data and updates the state. """
from cpuinfo import cpuinfo
self.info = cpuinfo.get_cpu_info()
self._state = round(float(self.info['hz_actual_raw'][0])/10**9, 2)
| mit | Python |
|
43605bd5340374a3a62e91cf544b2ba16edb320e | Add a tf-serving example for KerasBERT. | tombstone/models,tombstone/models,tombstone/models,tombstone/models,tombstone/models,tombstone/models | official/nlp/bert/serving.py | official/nlp/bert/serving.py | # Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Examples of SavedModel export for tf-serving."""
from absl import app
from absl import flags
import tensorflow as tf
from official.nlp.bert import bert_models
from official.nlp.bert import configs
flags.DEFINE_integer("sequence_length", None,
"Sequence length to parse the tf.Example. If "
"sequence_length > 0, add a signature for serialized "
"tf.Example and define the parsing specification by the "
"sequence_length.")
flags.DEFINE_string("bert_config_file", None,
"Bert configuration file to define core bert layers.")
flags.DEFINE_string("model_checkpoint_path", None,
"File path to TF model checkpoint.")
flags.DEFINE_string("export_path", None,
"Destination folder to export the serving SavedModel.")
FLAGS = flags.FLAGS
class BertServing(tf.keras.Model):
"""Bert transformer encoder model for serving."""
def __init__(self, bert_config, name_to_features=None, name="serving_model"):
super(BertServing, self).__init__(name=name)
self.encoder = bert_models.get_transformer_encoder(
bert_config, sequence_length=None)
self.name_to_features = name_to_features
def call(self, inputs):
input_word_ids = inputs["input_ids"]
input_mask = inputs["input_mask"]
input_type_ids = inputs["segment_ids"]
encoder_outputs, _ = self.encoder(
[input_word_ids, input_mask, input_type_ids])
return encoder_outputs
def serve_body(self, input_ids, input_mask=None, segment_ids=None):
if segment_ids is None:
# Requires CLS token is the first token of inputs.
segment_ids = tf.zeros_like(input_ids)
if input_mask is None:
# The mask has 1 for real tokens and 0 for padding tokens.
input_mask = tf.where(
tf.equal(input_ids, 0), tf.zeros_like(input_ids),
tf.ones_like(input_ids))
inputs = dict(
input_ids=input_ids, input_mask=input_mask, segment_ids=segment_ids)
return self.call(inputs)
@tf.function
def serve(self, input_ids, input_mask=None, segment_ids=None):
outputs = self.serve_body(input_ids, input_mask, segment_ids)
# Returns a dictionary to control SignatureDef output signature.
return {"outputs": outputs[-1]}
@tf.function
def serve_examples(self, inputs):
features = tf.io.parse_example(inputs, self.name_to_features)
for key in list(features.keys()):
t = features[key]
if t.dtype == tf.int64:
t = tf.cast(t, tf.int32)
features[key] = t
return self.serve(
features["input_ids"],
input_mask=features["input_mask"] if "input_mask" in features else None,
segment_ids=features["segment_ids"]
if "segment_ids" in features else None)
@classmethod
def export(cls, model, export_dir):
if not isinstance(model, cls):
raise ValueError("Invalid model instance: %s, it should be a %s" %
(model, cls))
signatures = {
"serving_default":
model.serve.get_concrete_function(
input_ids=tf.TensorSpec(
shape=[None, None], dtype=tf.int32, name="inputs")),
}
if model.name_to_features:
signatures[
"serving_examples"] = model.serve_examples.get_concrete_function(
tf.TensorSpec(shape=[None], dtype=tf.string, name="examples"))
tf.saved_model.save(model, export_dir=export_dir, signatures=signatures)
def main(_):
sequence_length = FLAGS.sequence_length
if sequence_length is not None and sequence_length > 0:
name_to_features = {
"input_ids": tf.io.FixedLenFeature([sequence_length], tf.int64),
"input_mask": tf.io.FixedLenFeature([sequence_length], tf.int64),
"segment_ids": tf.io.FixedLenFeature([sequence_length], tf.int64),
}
else:
name_to_features = None
bert_config = configs.BertConfig.from_json_file(FLAGS.bert_config_file)
serving_model = BertServing(
bert_config=bert_config, name_to_features=name_to_features)
checkpoint = tf.train.Checkpoint(model=serving_model.encoder)
checkpoint.restore(FLAGS.model_checkpoint_path
).assert_existing_objects_matched().run_restore_ops()
BertServing.export(serving_model, FLAGS.export_path)
if __name__ == "__main__":
flags.mark_flag_as_required("bert_config_file")
flags.mark_flag_as_required("model_checkpoint_path")
flags.mark_flag_as_required("export_path")
app.run(main)
| apache-2.0 | Python |
|
abd41ea78f2962f0b8b7166f0540727538d56471 | ajoute la state get_object | WalkingMachine/sara_behaviors,WalkingMachine/sara_behaviors | sara_flexbe_states/src/sara_flexbe_states/Wonderland_Get_Object.py | sara_flexbe_states/src/sara_flexbe_states/Wonderland_Get_Object.py | #!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
import requests
import json
from geometry_msgs.msg import Pose, Point
from tf.transformations import quaternion_from_euler
class GetObject(EventState):
'''
Get an
># id int id of the object
># name string name of the object
># color string color of the object
># room string room of the object
># type string category of the object
># expected_pose pose/point expected position of the object
#> object_pose pose the pose of the returned object
#> object_name string name of the object
#> object_color string color of the object
#> object_room string room of the object
#> object_type string category of the object
<= found object found
<= unknown the object is unknown
<= error return when error reading data
'''
def __init__(self):
# See example_state.py for basic explanations.
super(GetObject, self).__init__(outcomes=['found', 'unknown', 'error'],
input_keys=['id', 'name', 'color', 'room', 'type', 'robot_pose'],
output_keys=['id', 'object_pose', 'object_name', 'object_color', 'object_room', 'expected_pose'])
self._index = 0
self._header = {'api-key': 'asdf'}
def execute(self, userdata):
# Generate URL to contact
url = "http://wonderland:8000/api/object/?"
if userdata.id != None:
url += "?id="+userdata.id+"&"
if userdata.name != None:
url += "?name="+userdata.name+"&"
if userdata.name != None:
url += "?color="+userdata.color+"&"
if userdata.name != None:
url += "?room="+userdata.room+"&"
if userdata.name != None:
url += "?type="+userdata.type+"&"
if userdata.expected_pose == None:
Logger.logerr("in "+self.name+", you must give an expected pose or point")
return 'error'
if type(userdata.expected_pose) is Pose:
expX = userdata.expected_pose.position.x
expY = userdata.expected_pose.position.y
expZ = userdata.expected_pose.position.z
elif type(userdata.expected_pose) is Point:
expX = userdata.expected_pose.position.x
expY = userdata.expected_pose.position.y
expZ = userdata.expected_pose.position.z
else:
return 'error'
# try the request
try:
response = requests.get(url, headers=self._header)
except requests.exceptions.RequestException as e:
Logger.logerr(str(e))
return 'error'
# parse parameter json data
data = json.loads(response.content)
if len(data) == 0:
return 'unknown'
# find the nearest object
bestScore = 1000000
best = None
for d in data:
score = ((expX-d['object']['x_position'])**2+(expY-d['object']['y_position'])**2+(expZ-d['object']['z_position'])**2)**0.5
if score < bestScore:
bestScore = score
best = d
# generate the output pose
pose = Pose()
pose.position.x = best['object']['x']
pose.position.y = best['object']['y']
pose.position.z = best['object']['z']
quat = quaternion_from_euler(0, 0, best['object']['t'])
pose.orientation.x = quat[0]
pose.orientation.y = quat[1]
pose.orientation.z = quat[2]
pose.orientation.w = quat[3]
# send the outputs
userdata.object_id = best['object']['id']
userdata.object_pose = pose
userdata.object_name = best['object']['name']
userdata.object_color = best['object']['color']
userdata.object_type = best['object']['type']
userdata.object_category = best['object']['category']
return 'found'
| bsd-3-clause | Python |
|
272031cfbef13a5a3edbf3cf3c6fe5f00608d650 | add test for importcuedmembers command | cuedpc/edpcmentoring,cuedpc/edpcmentoring,cuedpc/edpcmentoring,cuedpc/edpcmentoring | edpcmentoring/cuedmembers/tests/test_managementcommands.py | edpcmentoring/cuedmembers/tests/test_managementcommands.py | import os
import shutil
import tempfile
from django.core.management import call_command
from django.test import TestCase
from ..models import Member
class TemporaryDirectoryTestCase(TestCase):
"""A TestCase which creates a temporary directory for each test whose path
is available as the "tmpdir" attribute.
"""
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tmpdir)
class ImportCUEDMembersTestCase(TemporaryDirectoryTestCase):
def test_csv_import(self):
self.assertEqual(Member.objects.active().count(), 0)
inpath = os.path.join(self.tmpdir, 'input.csv')
with open(inpath, 'w') as f:
f.write(MEMBERS_CSV_1)
call_command('importcuedmembers', inpath)
self.assertEqual(Member.objects.active().count(), 6)
def test_import_deactivates_members(self):
self.assertEqual(Member.objects.active().count(), 0)
inpath = os.path.join(self.tmpdir, 'input.csv')
with open(inpath, 'w') as f:
f.write(MEMBERS_CSV_1)
call_command('importcuedmembers', inpath)
self.assertEqual(Member.objects.active().count(), 6)
inpath = os.path.join(self.tmpdir, 'input.csv')
with open(inpath, 'w') as f:
f.write(MEMBERS_CSV_2)
call_command('importcuedmembers', inpath)
self.assertEqual(Member.objects.active().count(), 5)
self.assertEqual(Member.objects.all().count(), 7)
def test_email_domain(self):
self.assertEqual(Member.objects.active().count(), 0)
inpath = os.path.join(self.tmpdir, 'input.csv')
with open(inpath, 'w') as f:
f.write(MEMBERS_CSV_1)
call_command('importcuedmembers', '-e', 'mailinator.com', inpath)
self.assertEqual(Member.objects.active().count(), 6)
u1 = Member.objects.filter(user__username='test0001').first().user
self.assertEqual(u1.email, '[email protected]')
# Two CSV files with different sets of users
MEMBERS_CSV_1 = '''
crsid,status,surname,fnames,pref_name,room,phone,arrived,start_date,end_date,division,role_course,host_supervisor,research_group
test0001,,Klein,Alexandra Corrina,Alexandra,,,,,,C,,,Materials Engineering
test0002,,Herman,Verna Ibrahim Fletcher,Verna,,,,,,,,,
test0004,,Kihn,Clementine,Clementine,,,,,,C,,,Engineering Design
test0005,,Lindgren,Eric,Eric,,,,,,A,,,Turbomachinery
test0006,,Torphy,Shirleyann Arden Minerva,Minerva,,,,,,,,,
test0008,,Kling,Jorden,Jorden,,,,,,A,,,Turbomachinery
'''.strip()
MEMBERS_CSV_2 = '''
crsid,status,surname,fnames,pref_name,room,phone,arrived,start_date,end_date,division,role_course,host_supervisor,research_group
test0001,,Klein,Alexandra Corrina,Alexandra,,,,,,C,,,Materials Engineering
test0003,,Emmerich,Pleasant,Pleasant,,,,,,A,,,Turbomachinery
test0004,,Kihn,Clementine,Clementine,,,,,,C,,,Engineering Design
test0006,,Torphy,Shirleyann Arden Minerva,Minerva,,,,,,,,,
test0008,,Kling,Jorden,Jorden,,,,,,A,,,Turbomachinery
'''.strip()
| mit | Python |
|
0ba15652a5624cf8fa42f4caf603d84c09a0698b | Add kata: 6 kyu | nkapliev/codewars,nkapliev/codewars,nkapliev/codewars,nkapliev/codewars | 6_kyu/Decode_the_Morse_code.py | 6_kyu/Decode_the_Morse_code.py | # @see: https://www.codewars.com/kata/decode-the-morse-code
def decodeMorse(morseCode):
return ' '.join(
map(lambda m_word: ''.join(
map(lambda m_symbol: MORSE_CODE[m_symbol],
m_word.split())),
morseCode.strip().split(' ')))
| mit | Python |
|
6837bbf2a1816d97b6c517bcb244aa51cf1eb7ba | Create robots_txt.py | HeyIamJames/Crawlers | robots_txt.py | robots_txt.py | import urlib.request
import io
def ger_robots_txt(url):
if url.endswith('/')
path = url
else:
path - url + '/'
# https://reddit.com/
| mit | Python |
|
b36192eec53664f9178bfc4000d89b8ca9be1544 | Add merge migration | Johnetordoff/osf.io,TomBaxter/osf.io,sloria/osf.io,caseyrollins/osf.io,saradbowman/osf.io,leb2dg/osf.io,baylee-d/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,sloria/osf.io,laurenrevere/osf.io,mattclark/osf.io,mfraezz/osf.io,caseyrollins/osf.io,chennan47/osf.io,hmoco/osf.io,brianjgeiger/osf.io,felliott/osf.io,adlius/osf.io,acshi/osf.io,chennan47/osf.io,mattclark/osf.io,acshi/osf.io,brianjgeiger/osf.io,icereval/osf.io,chrisseto/osf.io,adlius/osf.io,hmoco/osf.io,cwisecarver/osf.io,pattisdr/osf.io,acshi/osf.io,monikagrabowska/osf.io,chennan47/osf.io,sloria/osf.io,saradbowman/osf.io,felliott/osf.io,aaxelb/osf.io,aaxelb/osf.io,cslzchen/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,pattisdr/osf.io,baylee-d/osf.io,caseyrollins/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,binoculars/osf.io,cslzchen/osf.io,aaxelb/osf.io,hmoco/osf.io,icereval/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,crcresearch/osf.io,Nesiehr/osf.io,chrisseto/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,icereval/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,cwisecarver/osf.io,chrisseto/osf.io,cslzchen/osf.io,mfraezz/osf.io,cwisecarver/osf.io,leb2dg/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,hmoco/osf.io,chrisseto/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,felliott/osf.io,adlius/osf.io,caneruguz/osf.io,baylee-d/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,erinspace/osf.io,adlius/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,TomBaxter/osf.io,acshi/osf.io,erinspace/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,Nesiehr/osf.io,binoculars/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,crcresearch/osf.io,acshi/osf.io | osf/migrations/0030_merge.py | 
osf/migrations/0030_merge.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-01-24 18:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0029_merge'),
('osf', '0029_externalaccount_date_last_refreshed'),
]
operations = [
]
| apache-2.0 | Python |
|
6abf2f993813142ea685bc48a7a5a266d1905f1a | build indices qsub for bowtie or star with rsem | idbedead/RNA-sequence-tools,idbedead/RNA-sequence-tools,iandriver/RNA-sequence-tools,iandriver/RNA-sequence-tools,iandriver/RNA-sequence-tools,idbedead/RNA-sequence-tools | rsem_build.py | rsem_build.py | #/usr/bin/env python
import commands
import os
from subprocess import call
def write_file(filename, contents):
"""Write the given contents to a text file.
ARGUMENTS
filename (string) - name of the file to write to, creating if it doesn't exist
contents (string) - contents of the file to be written
"""
# Open the file for writing
file = open(filename, 'w')
# Write the file contents
file.write(contents)
# Close the file
file.close()
return
def qsub_submit(command_filename, hold_jobid = None, name = None):
"""Submit the given command filename to the queue.
ARGUMENTS
command_filename (string) - the name of the command file to submit
OPTIONAL ARGUMENTS
hold_jobid (int) - job id to hold on as a prerequisite for execution
RETURNS
jobid (integer) - the jobid
"""
# Form command
command = 'qsub'
if name: command += ' -N %s' % name
if hold_jobid: command += ' -hold_jid %d' % hold_jobid
command += ' %s' % command_filename
# Submit the job and capture output.
import subprocess
print "> " + command
process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
out, err = process.communicate()
print(out)
# Match job id
jobid = out.split(' ')[2]
return int(jobid)
out= '${TMPDIR}'
call_files = ''
path_gtf= '/netapp/home/idriver/Mus_musculus.GRCm38.83.gtf'
path_fa='/netapp/home/idriver/Mus_musculus.GRCm38.dna.primary_assembly.fa'
command = 'rsem-prepare-reference --gtf '+path_gtf+' --bowtie2 -p 6 '+path_fa+' GRCm38'
print command
call('mkdir -p /netapp/home/idriver/rsem_m38', shell=True)
contents = """\
#!/bin/sh
#$ -l arch=linux-x64
#$ -S /bin/bash
#$ -o /netapp/home/idriver/rsem_m38
#$ -e /netapp/home/idriver/error_spc
#$ -cwd
#$ -r y
#$ -j y
#$ -l netapp=40G,scratch=100G,mem_total=100G
#$ -pe smp 6
#$ -R yes
#$ -l h_rt=7:59:00
set echo on
date
hostname
pwd
export PATH=$PATH:${HOME}/bin
PATH=$PATH:/netapp/home/idriver/bin/cufflinks-2.2.1.Linux_x86_64
PATH=$PATH:/netapp/home/idriver/bin/bowtie2-2.2.8
PATH=$PATH:/netapp/home/idriver/bin/samtools-1.2
PATH=$PATH:/netapp/home/idriver/bin/tophat-2.1.0.Linux_x86_64
PATH=$PATH:/netapp/home/idriver/bin/RSEM-1.2.28
PATH=$PATH:/netapp/home/idriver/bin/STAR-STAR_2.4.2a/source
PATH=$PATH:/usr/bin/gunzip
export PATH
alias STAR="/netapp/home/idriver/bin/STAR-STAR_2.4.2a/source/STAR"
echo $PATH
export TMPDIR=/scratch
echo $TMPDIR
cd $TMPDIR
mkdir $TMPDIR/rsem_m38
cd rsem_m38
%(command)s
# Copy the results back to the project directory:
cd $TMPDIR
cp -r rsem_m38 /netapp/home/idriver/rsem_m38
rm -r rsem_m38
date
""" % vars()
filename = 'rsem_build.sh'
write_file(filename, contents)
jobid = qsub_submit(filename, name = 'rsem_build')
print "Submitted. jobid = %d" % jobid
# Write jobid to a file.
import subprocess
process = subprocess.Popen('echo %d > jobids' % jobid, stdout=subprocess.PIPE, shell = True)
out, err = process.communicate()
print(out)
| mit | Python |
|
db41bce3d90cfada9916baa8f9267cd9e6160a94 | Add an example for opening a file. | MrTheodor/pyh5md,khinsen/pyh5md | examples/open_file.py | examples/open_file.py | import numpy as np
import pyh5md
f = pyh5md.H5MD_File('poc.h5', 'r')
at = f.trajectory('atoms')
at_pos = at.data('position')
r = at_pos.v.value
print r
f.f.close()
| bsd-3-clause | Python |
|
2cd57876c72d5c941bcb1ae497df48dbbc943ba9 | Create new package. (#6213) | krafczyk/spack,matthiasdiener/spack,mfherbst/spack,skosukhin/spack,skosukhin/spack,iulian787/spack,iulian787/spack,tmerrick1/spack,iulian787/spack,krafczyk/spack,matthiasdiener/spack,LLNL/spack,iulian787/spack,tmerrick1/spack,mfherbst/spack,LLNL/spack,EmreAtes/spack,krafczyk/spack,EmreAtes/spack,matthiasdiener/spack,EmreAtes/spack,LLNL/spack,LLNL/spack,matthiasdiener/spack,iulian787/spack,tmerrick1/spack,matthiasdiener/spack,skosukhin/spack,tmerrick1/spack,skosukhin/spack,tmerrick1/spack,EmreAtes/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,mfherbst/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,skosukhin/spack | var/spack/repos/builtin/packages/r-forecast/package.py | var/spack/repos/builtin/packages/r-forecast/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RForecast(RPackage):
"""Methods and tools for displaying and analysing univariate time
series forecasts including exponential smoothing via state space
models and automatic ARIMA modelling."""
homepage = "https://cran.r-project.org/package=forecast"
url = "https://cran.r-project.org/src/contrib/forecast_8.2.tar.gz"
list_url = "https://cran.r-project.org/src/contrib/Archive/forecast"
version('8.2', '3ef095258984364c100b771b3c90d15e')
depends_on('r-magrittr', type=('build', 'run'))
depends_on('r-ggplot2', type=('build', 'run'))
depends_on('r-colorspace', type=('build', 'run'))
depends_on('r-nnet', type=('build', 'run'))
depends_on('r-rcpp', type=('build', 'run'))
depends_on('r-fracdiff', type=('build', 'run'))
depends_on('r-tseries', type=('build', 'run'))
depends_on('r-lmtest', type=('build', 'run'))
depends_on('r-zoo', type=('build', 'run'))
depends_on('r-timedate', type=('build', 'run'))
depends_on('r-rcpparmadillo', type=('build', 'run'))
| lgpl-2.1 | Python |
|
10eb703867fd10df543a141837c2a57d1052ba2c | Rename file with correct pattern | ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube | ideascube/conf/kb_civ_babylab.py | ideascube/conf/kb_civ_babylab.py | # -*- coding: utf-8 -*-
"""KoomBook conf"""
from .kb import * # noqa
from django.utils.translation import ugettext_lazy as _
LANGUAGE_CODE = 'fr'
IDEASCUBE_NAME = 'BabyLab'
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'bsfcampus',
},
{
'id': 'khanacademy',
},
{
'id': 'wikistage',
'languages': ['fr']
},
{
'id': 'wikimooc',
'languages': ['fr']
},
{
'id': 'vikidia',
'languages': ['fr']
},
{
'id': 'universcience',
'languages': ['fr']
},
{
'id': 'e-penser',
'languages': ['fr']
},
{
'id': 'deus-ex-silicium',
'languages': ['fr']
},
{
'id': 'cest-pas-sorcier',
},
{
'id': 'wikipedia',
'languages': ['fr']
},
{
'id': 'wikiversity',
'languages': ['fr']
},
{
'id': 'ted',
'sessions': [
('tedxgeneva2014.fr', 'Geneva 2014'),
('tedxlausanne2012.fr', 'Lausanne 2012'),
('tedxlausanne2013.fr', 'Lausanne 2013'),
('tedxlausanne2014.fr', 'Lausanne 2014'),
]
},
]
| agpl-3.0 | Python |
|
f31fcd789254f95b311f4fa4009a04ad919c2027 | add url update migration | masschallenge/django-accelerator,masschallenge/django-accelerator | accelerator/migrations/0049_update_fluent_redirect_url.py | accelerator/migrations/0049_update_fluent_redirect_url.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-04-11 11:35
from __future__ import unicode_literals
from django.db import migrations
from accelerator.sitetree_navigation.sub_navigation import (
create_directory_subnav,
create_events_subnav,
create_home_subnav,
create_judging_subnav,
create_resources_subnav,
create_startup_dashboard_subnav,
delete_directory_subnav,
delete_events_subnav,
delete_home_subnav,
delete_judging_subnav,
delete_resources_subnav,
delete_startup_dashboard_subnav
)
def create_subnav_trees_and_items(apps, schema_editor):
    """Forward migration step: build every sub-navigation tree/item set."""
    builders = (
        create_directory_subnav,
        create_events_subnav,
        create_home_subnav,
        create_judging_subnav,
        create_resources_subnav,
        create_startup_dashboard_subnav,
    )
    for build in builders:
        build()
def delete_subnav_trees_and_items(apps, schema_editor):
    """Reverse migration step: tear down every sub-navigation tree/item set."""
    removers = (
        delete_directory_subnav,
        delete_events_subnav,
        delete_home_subnav,
        delete_judging_subnav,
        delete_resources_subnav,
        delete_startup_dashboard_subnav,
    )
    for remove in removers:
        remove()
class Migration(migrations.Migration):
    # Re-runs the sub-navigation setup introduced in 0048 so that the
    # sitetree URLs are refreshed; reversible via the matching delete step.
    dependencies = [
        ('accelerator', '0048_create_sub_navigation_objects'),
    ]
    operations = [
        migrations.RunPython(
            create_subnav_trees_and_items,
            delete_subnav_trees_and_items),
    ]
| mit | Python |
|
f21ff91cb3c70a150eca68dc03c51577ff343f18 | Solve challenge 8 | HKuz/PythonChallenge | Challenges/chall_8.py | Challenges/chall_8.py | #!/Applications/anaconda/envs/Python3/bin
# Python challenge - 8
# http://www.pythonchallenge.com/pc/def/integrity.html
# http://www.pythonchallenge.com/pc/return/good.html
import bz2
def main():
    """Solve Python Challenge #8 (http://www.pythonchallenge.com/pc/def/integrity.html).

    The page hides two bzip2-compressed byte strings ("un" and "pw") in an
    image-map comment; decompressing them yields the username and password
    for the next page.  Prints both and returns 0.
    """
    credentials = (
        # username ("un")
        b'BZh91AY&SYA\xaf\x82\r\x00\x00\x01\x01\x80\x02\xc0\x02\x00 \x00!\x9ah3M\x07<]\xc9\x14\xe1BA\x06\xbe\x084',
        # password ("pw")
        b'BZh91AY&SY\x94$|\x0e\x00\x00\x00\x81\x00\x03$ \x00!\x9ah3M\x13<]\xc9\x14\xe1BBP\x91\xf08',
    )
    for blob in credentials:
        print(bz2.decompress(blob))
    return 0
# Keywords: huge; file
if __name__ == '__main__':
main()
| mit | Python |
|
96ba88c74a77f3b71ef4a8b51c29013d16e23973 | Create tofu/plugins/MISTRAL/Inputs with empty __init__.py | Didou09/tofu,ToFuProject/tofu,ToFuProject/tofu | tofu/plugins/MISTRAL/Inputs/__init__.py | tofu/plugins/MISTRAL/Inputs/__init__.py | mit | Python |
||
35317e778b2fe1d238e21954df1eac0c5380b00b | Add corpus fetch from database | greenify/zodiacy,greenify/zodiacy | generate_horoscope.py | generate_horoscope.py | #!/usr/bin/env python3
# encoding: utf-8
import argparse
import sqlite3
import sys
"""generate_horoscope.py: Generates horoscopes based provided corpuses"""
__author__ = "Project Zodiacy"
__copyright__ = "Copyright 2015, Project Zodiacy"
_parser = argparse.ArgumentParser(description="Awesome SQLite importer")
_parser.add_argument('-d', '--database', dest='database', required=True, help='sqlite database file')
_parser.add_argument('-s', '--sign', dest='sign', help='zodiac sign to generate', default=None)
_parser.add_argument('-k', '--keyword', dest='keyword', help='keyword for the horoscope', default=None)
_parser.add_argument('-t', '--threshold', dest='threshold', help='minimum count of horoscopes for the given filters', default=10)
def keyword_valid(cursor, keyword, threshold=10):
    """ Checks whether enough horoscopes are present for the keyword """
    # TODO implement
    # NOTE(review): stub -- always succeeds.  `cursor`, `keyword` and
    # `threshold` are unused until the COUNT(*) query is written.
    return True
def get_corpuses(cursor, with_rating=False, zodiac_sign=None, keyword=None):
    """Return a cursor over all horoscopes matching the given filters.

    Args:
        cursor: sqlite3 cursor on a database with a ``horoscopes`` table.
        with_rating: also select the ``rating`` column alongside ``interp``.
        zodiac_sign: optional sign name ('general', 'aries', ...); stored in
            the DB as the stringified ordinal of the sign.
        keyword: optional keyword to filter on.

    Raises:
        ValueError: if ``zodiac_sign`` is given but not a known sign.
    """
    zodiac_signs = dict(zip(['general', 'aries', 'taurus', 'gemini', 'cancer', 'leo', 'virgo', 'libra', 'scorpio', 'sagittarius', 'capricorn', 'aquarius', 'pisces'], range(13)))
    stmt = 'SELECT interp%s from horoscopes' % (',rating' if with_rating else '')
    # Build the WHERE clause incrementally instead of enumerating every
    # filter combination (replaces the previous nested if/else ladder).
    conditions = []
    params = []
    if zodiac_sign is not None:
        if zodiac_sign not in zodiac_signs:
            raise ValueError('Invalid zodiac sign')
        conditions.append('sign=?')
        params.append(str(zodiac_signs[zodiac_sign]))
    if keyword is not None:
        conditions.append('keyword=?')
        params.append(keyword)
    if conditions:
        stmt += ' WHERE ' + ' and '.join(conditions)
    return cursor.execute(stmt, tuple(params))
if __name__ == '__main__':
    args = _parser.parse_args()
    with sqlite3.connect(args.database) as conn:
        # BUG FIX: this previously tested the function object itself
        # ("if not keyword_valid:"), which is always truthy, so the
        # threshold check could never fire.
        if not keyword_valid(conn.cursor(), args.keyword, args.threshold):
            # BUG FIX: stderr must be passed as file=..., otherwise the
            # stream object is printed as a second positional argument.
            print('Not enough horoscopes for the given keyword', file=sys.stderr)
            sys.exit(1)
        # Use the parsed CLI options instead of the hard-coded debug values.
        corpuses = get_corpuses(conn.cursor(), zodiac_sign=args.sign, keyword=args.keyword)
        print(corpuses.fetchone())
| mit | Python |
|
0b03dd638dd5ac3358d89a5538c707d5412b84ae | Add basic network broker state machine | WesleyAC/raft | broker/network.py | broker/network.py | from hypothesis.stateful import GenericStateMachine
class NetworkBroker(GenericStateMachine):
    """
    Broker to coordinate network traffic
    nodes = A map of node ids to node objects.
    network = An adjacency list of what nodes can talk to each other. If a is
        in network[b] than b -> a communcation is allowed. This is a map of
        id type -> set(id type)
    messages = A queue of messages. messages[0] is the head, where messages are
        sent from. Messages are tuples in the form of (from, to, data).
    """
    def __init__(self, nodes):
        self.nodes = nodes
        # Start fully connected: every node may talk to every other node.
        self.network = dict([(i, set(nodes.keys())) for i in nodes.keys()])
        self.messages = []

    def steps(self):
        # TODO: no hypothesis strategies defined yet.
        pass

    def execute_step(self, step):
        """
        Actions:
            DeliverMsg
                If next message is deliverable, deliver it. Otherwise, drop it.
            DropMsg
                Drop the next message.
            DestroyEdge (from, to)
                Destroys the edge from -> to, causing any packets sent along it to be dropped.
            HealEdge (from, to)
                Heal the edge from -> to, allowing packets to be sent along it.
            DuplicateMsg
                Create a copy of the message at the front of the queue
            DelayMsg n
                Push the message at the front of the queue back by n slots
        """
        action, value = step
        if action == "DeliverMsg":
            message = self.messages.pop(0)
            self.nodes[message[1]].recv(message[0], message[2])
        if action == "DropMsg":
            self.messages.pop(0)
        if action == "DestroyEdge":
            # BUG FIX: previously indexed `step` (the (action, value) pair)
            # instead of `value` (the (from, to) edge), so DestroyEdge always
            # looked up the string "DestroyEdge" as a node id.
            self.network[value[0]].remove(value[1])
        if action == "HealEdge":
            # BUG FIX: same `step` vs `value` mix-up as DestroyEdge.
            self.network[value[0]].add(value[1])
        if action == "DuplicateMsg":
            self.messages.insert(0, self.messages[0])
        if action == "DelayMsg":
            self.messages.insert(value, self.messages.pop(0))
| mit | Python |
|
3ab61b1e9cc155868108e658ad7e87fac9569e10 | add run script for bulk loader. | daewon/incubator-s2graph,jongwook/incubator-s2graph,SteamShon/incubator-s2graph,SteamShon/incubator-s2graph,jongwook/incubator-s2graph,daewon/incubator-s2graph,SteamShon/incubator-s2graph | loader/loader.py | loader/loader.py | #!/usr/bin/python
import os, sys, urllib2, urllib
def cleanup(args):
cmd = "hadoop fs -rm -r /tmp/%s" % args["htable_name"]
print cmd
ret = os.system(cmd)
print cmd, "return", ret
return ret
def hfile(args):
    # Launch the Spark job that converts the input edge file into HBase
    # HFiles under /tmp/<htable_name>.  Returns the shell exit status.
    cmd = """spark-submit --class "subscriber.TransferToHFile" \
    --name "TransferToHFile@shon" \
    --conf "spark.task.maxFailures=20" \
    --master yarn-cluster \
    --num-executors %s --driver-memory 1g --executor-memory 2g --executor-cores 1 %s \
    %s /tmp/%s %s %s %s %s %s %s""" % (args["num_executors"], JAR, args["input"], args["htable_name"], args["hbase_zk"], args["htable_name"], args["db_url"], args["max_file_per_region"], args["label_mapping"], args["auto_create_edge"])
    print cmd
    ret = os.system(cmd)
    print cmd, "return", ret
    return ret
def distcp(args):
cmd = "hadoop distcp -overwrite -m %s -bandwidth %s /tmp/%s %s/tmp/%s" % (args["-m"], args["-bandwidth"], args["htable_name"], args["hbase_namenode"], args["htable_name"])
print cmd
ret = os.system(cmd)
print cmd, "return", ret
return ret
def chmod(args):
cmd = "export HADOOP_CONF_DIR=%s; export HADOOP_USER_NAME=hdfs; hadoop fs -chmod -R 777 /tmp/%s" % (args["HADOOP_CONF_DIR"], args["htable_name"])
print cmd
ret = os.system(cmd)
print cmd, "return", ret
return ret
def load(args):
cmd = "export HADOOP_CONF_DIR=%s; export HBASE_CONF_DIR=%s; hbase %s /tmp/%s %s" % (args["HADOOP_CONF_DIR"], args["HBASE_CONF_DIR"], LOADER_CLASS, args["htable_name"], args["htable_name"])
print cmd
ret = os.system(cmd)
print cmd, "return", ret
return ret
def send(msg):
    # Placeholder notifier: currently just echoes the progress message to
    # stdout (could later post to chat/monitoring instead).
    print msg
def run(args):
    """Run the full bulk-load pipeline, stopping at the first failed step."""
    cleanup(args)
    send("[Start]: bulk loader")
    # (step function, label used in progress messages), in execution order.
    pipeline = [
        (hfile, "build hfile"),
        (distcp, "distcp"),
        (chmod, "chmod"),
        (load, "complete bulkload"),
    ]
    for step, label in pipeline:
        status = step(args)
        if status != 0:
            return send("[Failed]: loader %s failed %s" % (label, status))
        send("[Success]: loader %s" % label)
# HBase class invoked by the `hbase` CLI to move completed HFiles into the table.
LOADER_CLASS = "org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles"
# Assembled fat jar containing subscriber.TransferToHFile.
JAR="loader/target/scala-2.10/s2loader-assembly-0.11.0-SNAPSHOT.jar"
# Example/default job parameters -- edit these before running the script.
# The script runs immediately on import (see run(args) below).
args = {
    "HADOOP_CONF_DIR": "hdfs_conf_gasan",
    "HBASE_CONF_DIR": "hbase_conf_gasan",
    "htable_name": "test",
    "hbase_namenode": "hdfs://nameservice:8020",
    "hbase_zk": "localhost",
    "db_url": "jdbc:mysql://localhost:3306/graph_dev",
    "max_file_per_region": 1,
    "label_mapping": "none",
    "auto_create_edge": "false",
    "-m": 1,
    "-bandwidth": 10,
    "num_executors": 2,
    "input": "/user/test.txt"
}
run(args)
| apache-2.0 | Python |
|
c48be39a1f04af887349ef7f19ecea4312425cf9 | initialize for production | kwantopia/shoppley-migrate,kwantopia/shoppley-migrate,kwantopia/shoppley-migrate,kwantopia/shoppley-migrate,kwantopia/shoppley-migrate,kwantopia/shoppley-migrate,kwantopia/shoppley-migrate | shoppley.com/shoppley/apps/offer/management/commands/initialize.py | shoppley.com/shoppley/apps/offer/management/commands/initialize.py | from django.core.management.base import NoArgsCommand
from shoppleyuser.models import Country, Region, City, ZipCode, ShoppleyUser
import os, csv
from googlevoice import Voice
FILE_ROOT = os.path.abspath(os.path.dirname(__file__))
class Command(NoArgsCommand):
def handle_noargs(self, **options):
f = open(FILE_ROOT+"/../../../shoppleyuser/data/US.txt", "r")
zip_reader = csv.reader(f, delimiter="\t")
for row in zip_reader:
country_obj, created = Country.objects.get_or_create(name="United States", code=row[0])
zip_code = row[1]
city = row[2]
region = row[3]
region_code = row[4]
latitude = row[9]
longitude = row[10]
region_obj, created = Region.objects.get_or_create(name=region,
code=region_code, country=country_obj)
city_obj, created = City.objects.get_or_create(name=city, region=region_obj)
zip_obj, created = ZipCode.objects.get_or_create(code=zip_code,
city=city_obj, latitude=latitude, longitude=longitude)
print "done"
| mit | Python |
|
00f3e74387fc7a215af6377cb90555d142b81d74 | Add acoustics module with class AcousticMaterial. | emtpb/pyfds | pyfds/acoustics.py | pyfds/acoustics.py | class AcousticMaterial:
"""Class for specification of acoustic material parameters."""
def __init__(self, sound_velocity, density,
shear_viscosity=0, bulk_viscosity=0,
thermal_conductivity=0, isobaric_heat_cap=1, isochoric_heat_cap=1):
"""Default values for optional parameters create lossless medium."""
self.sound_velocity = sound_velocity
self.density = density
self.shear_viscosity = shear_viscosity
self.bulk_viscosity = bulk_viscosity
self.thermal_conductivity = thermal_conductivity
self.isobaric_heat_cap = isobaric_heat_cap
self.isochoric_heat_cap = isochoric_heat_cap
@property
def absorption_coef(self):
"""This is a helper variable that sums up all losses into a single quantity."""
return (4/3 * self.shear_viscosity + self.bulk_viscosity + self.thermal_conductivity *
(self.isobaric_heat_cap - self.isochoric_heat_cap) /
(self.isobaric_heat_cap * self.isochoric_heat_cap))
| bsd-3-clause | Python |
|
75ffc049d021e88fed37dc009376761661452cbe | Add unit tests for heat.scaling.template | srznew/heat,pratikmallya/heat,dims/heat,gonzolino/heat,jasondunsmore/heat,pshchelo/heat,pshchelo/heat,cryptickp/heat,steveb/heat,takeshineshiro/heat,rdo-management/heat,jasondunsmore/heat,rdo-management/heat,srznew/heat,rh-s/heat,dragorosson/heat,noironetworks/heat,NeCTAR-RC/heat,takeshineshiro/heat,maestro-hybrid-cloud/heat,dims/heat,gonzolino/heat,redhat-openstack/heat,pratikmallya/heat,NeCTAR-RC/heat,ntt-sic/heat,redhat-openstack/heat,miguelgrinberg/heat,noironetworks/heat,cwolferh/heat-scratch,maestro-hybrid-cloud/heat,rh-s/heat,dragorosson/heat,steveb/heat,miguelgrinberg/heat,cryptickp/heat,openstack/heat,cwolferh/heat-scratch,openstack/heat,ntt-sic/heat | heat/tests/test_scaling_template.py | heat/tests/test_scaling_template.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from heat.scaling import template
from heat.common import short_id
from heat.tests.common import HeatTestCase
class ResourceTemplatesTest(HeatTestCase):
    """Unit tests for heat.scaling.template.resource_templates."""

    def setUp(self):
        super(ResourceTemplatesTest, self).setUp()
        # Stub short_id.generate_id so newly created resources get
        # deterministic names: stubbed-id-0, stubbed-id-1, ...
        # NOTE(review): `ids.next` is the Python 2 generator protocol; under
        # Python 3 this would need next(ids).
        ids = ('stubbed-id-%s' % (i,) for i in itertools.count())
        self.patchobject(short_id, 'generate_id').side_effect = ids.next

    def test_create_template(self):
        """
        When creating a template from scratch, an empty list is accepted as
        the "old" resources and new resources are created up to num_resource.
        """
        templates = template.resource_templates([], {'type': 'Foo'}, 2, 0)
        expected = [
            ('stubbed-id-0', {'type': 'Foo'}),
            ('stubbed-id-1', {'type': 'Foo'})]
        self.assertEqual(expected, list(templates))

    def test_replace_template(self):
        """
        If num_replace is the number of old resources, then all of the
        resources will be replaced.
        """
        old_resources = [
            ('old-id-0', {'type': 'Foo'}),
            ('old-id-1', {'type': 'Foo'})]
        templates = template.resource_templates(old_resources, {'type': 'Bar'},
                                                1, 2)
        expected = [('old-id-1', {'type': 'Bar'})]
        self.assertEqual(expected, list(templates))

    def test_replace_some_units(self):
        """
        If the resource definition changes, only the number of replacements
        specified will be made; beyond that, the original templates are used.
        """
        old_resources = [
            ('old-id-0', {'type': 'Foo'}),
            ('old-id-1', {'type': 'Foo'})]
        new_spec = {'type': 'Bar'}
        templates = template.resource_templates(old_resources, new_spec, 2, 1)
        expected = [
            ('old-id-0', {'type': 'Bar'}),
            ('old-id-1', {'type': 'Foo'})]
        self.assertEqual(expected, list(templates))

    def test_growth_counts_as_replacement(self):
        """
        If we grow the template and replace some elements at the same time, the
        number of replacements to perform is reduced by the number of new
        resources to be created.
        """
        spec = {'type': 'Foo'}
        old_resources = [
            ('old-id-0', spec),
            ('old-id-1', spec)]
        new_spec = {'type': 'Bar'}
        templates = template.resource_templates(old_resources, new_spec, 4, 2)
        expected = [
            ('old-id-0', spec),
            ('old-id-1', spec),
            ('stubbed-id-0', new_spec),
            ('stubbed-id-1', new_spec)]
        self.assertEqual(expected, list(templates))

    def test_replace_units_some_already_up_to_date(self):
        """
        If some of the old resources already have the new resource definition,
        then they won't be considered for replacement, and the next resource
        that is out-of-date will be replaced.
        """
        old_resources = [
            ('old-id-0', {'type': 'Bar'}),
            ('old-id-1', {'type': 'Foo'})]
        new_spec = {'type': 'Bar'}
        templates = template.resource_templates(old_resources, new_spec, 2, 1)
        second_batch_expected = [
            ('old-id-0', {'type': 'Bar'}),
            ('old-id-1', {'type': 'Bar'})]
        self.assertEqual(second_batch_expected, list(templates))
| apache-2.0 | Python |
|
3d8ef3b0f31575354f03583a5f053fad6838084d | add `YouCompleteMe` config file. | DO-CV/sara,DO-CV/sara,DO-CV/sara,DO-CV/sara,DO-CV/sara,DO-CV/sara | .ycm_extra_conf.py | .ycm_extra_conf.py | import os
import ycm_core
# These are the compilation flags that will be used in case there's no
# compilation database set (by default, one is not set).
# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
flags = [
'-Wall',
'-Wextra',
'-Werror',
'-Wno-long-long',
'-Wno-variadic-macros',
'-fexceptions',
'-DNDEBUG',
# Do you use C++11 features?
'-std=c++11',
# Do you want C++98 compatible code?
#'-Wc++98-compat',
# ...and the same thing goes for the magic -x option which specifies the
# language that the files to be compiled are written in. This is mostly
# relevant for c++ headers.
# For a C project, you would set this to 'c' instead of 'c++'.
'-x',
'c++',
# Specific includes.
'-DQT_CORE_LIB',
'-DQT_GUI_LIB',
'-DQT_NETWORK_LIB',
'-DQT_QML_LIB',
'-DQT_QUICK_LIB',
'-DQT_SQL_LIB',
'-DQT_WIDGETS_LIB',
'-DQT_XML_LIB',
'-fPIE',
'-I', '/usr/include/qt5/QtCore',
'-I', '/usr/include/qt5/QtGui',
'-I', '/usr/include/qt5/QtWidgets',
'-I', './third-party/gtest/include',
# Project-specific include.
'-I', './src'
]
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# You can get CMake to generate this file for you by adding:
# set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
# to your CMakeLists.txt file.
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
compilation_database_folder = ''
if os.path.exists( compilation_database_folder ):
database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
database = None
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def DirectoryOfThisScript():
    """Return the absolute directory containing this configuration file."""
    return os.path.dirname(os.path.abspath(__file__))
def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
    """Return a copy of *flags* with relative include/sysroot paths anchored
    at *working_directory*.  If no directory is given, flags pass through."""
    if not working_directory:
        return list(flags)
    path_prefixes = ['-isystem', '-I', '-iquote', '--sysroot=']
    absolute_flags = []
    next_is_path = False
    for flag in flags:
        rewritten = flag
        if next_is_path:
            # Previous token was a bare path flag; this token is its argument.
            next_is_path = False
            if not flag.startswith('/'):
                rewritten = os.path.join(working_directory, flag)
        for prefix in path_prefixes:
            if flag == prefix:
                next_is_path = True
                break
            if flag.startswith(prefix):
                # Fused form like "-Iinclude": rewrite the embedded path.
                rewritten = prefix + os.path.join(working_directory,
                                                  flag[len(prefix):])
                break
        if rewritten:
            absolute_flags.append(rewritten)
    return absolute_flags
def IsHeaderFile(filename):
    """True when *filename* carries a C/C++ header extension."""
    return os.path.splitext(filename)[1] in ['.h', '.hxx', '.hpp', '.hh']
def GetCompilationInfoForFile(filename):
    """Look up compile flags for *filename* in the compilation database.

    The compile_commands.json generated by CMake has no entries for headers,
    so for a header we try sibling source files with the same basename and
    use their flags instead.  Returns None when nothing usable is found.
    """
    if not IsHeaderFile(filename):
        return database.GetCompilationInfoForFile(filename)
    basename = os.path.splitext(filename)[0]
    for extension in SOURCE_EXTENSIONS:
        candidate = basename + extension
        if os.path.exists(candidate):
            info = database.GetCompilationInfoForFile(candidate)
            if info.compiler_flags_:
                return info
    return None
def FlagsForFile(filename, **kwargs):
    """YCM entry point: return the compile flags to use for *filename*."""
    if not database:
        # No compilation database: fall back to the static flag list,
        # resolved relative to this script's directory.
        return {
            'flags': MakeRelativePathsInFlagsAbsolute(flags,
                                                      DirectoryOfThisScript()),
            'do_cache': True
        }
    # compilation_info.compiler_flags_ is a "list-like" StringVec, not a list.
    compilation_info = GetCompilationInfoForFile(filename)
    if not compilation_info:
        return None
    final_flags = MakeRelativePathsInFlagsAbsolute(
        compilation_info.compiler_flags_,
        compilation_info.compiler_working_dir_)
    return {
        'flags': final_flags,
        'do_cache': True
    }
| mpl-2.0 | Python |
|
3e723f3419468654c9606b27d2127c94054b4bed | Add YouCompleteMe config for vim autocompletion | AbeelLab/phasm,AbeelLab/phasm | .ycm_extra_conf.py | .ycm_extra_conf.py | # This file is NOT licensed under the GPLv3, which is the license for the rest
# of YouCompleteMe.
#
# Here's the license text for this file:
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org/>
import os
import re
import subprocess
import pybind11
def LoadSystemIncludes():
    """Query clang for its default C++ system include directories.

    Runs ``clang -v -E -x c++ -`` on empty input and parses the
    "#include <...> search starts here" section of its diagnostics.
    Returns a flat list alternating '-isystem' and a path; returns an
    empty list when clang is unavailable or its output cannot be parsed.
    """
    regex = re.compile(r'(?:\#include \<...\> search starts here\:)'
                       r'(?P<list>.*?)(?:End of search list)', re.DOTALL)
    try:
        process = subprocess.Popen(['clang', '-v', '-E', '-x', 'c++', '-'],
                                   stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        # BUG FIX: the pipes are byte streams, so communicate() must be fed
        # bytes -- passing '' raised a TypeError under Python 3.
        process_out, process_err = process.communicate(b'')
    except OSError:
        # clang not installed: degrade gracefully instead of crashing YCM.
        return []
    output = (process_out + process_err).decode('utf-8')
    match = re.search(regex, output)
    if match is None:
        # FIX: previously an unguarded .group() raised AttributeError here.
        return []
    includes = []
    for p in match.group('list').split('\n'):
        p = p.strip()
        if len(p) > 0 and p.find('(framework directory)') < 0:
            includes.append('-isystem')
            includes.append(p)
    return includes
# These are the compilation flags that will be used in case there's no
# compilation database set (by default, one is not set).
# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
flags = [
'-Wall',
'-Wextra',
'-Werror',
'-Wno-long-long',
'-fexceptions',
'-x',
'c++',
'-std=c++14',
'-stdlib=libc++',
'-I',
pybind11.get_include(),
'-I',
pybind11.get_include(True),
'-I',
'vendor/'
]
systemIncludes = LoadSystemIncludes()
flags = flags + systemIncludes
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# You can get CMake to generate this file for you by adding:
# set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
# to your CMakeLists.txt file.
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
database = None
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def DirectoryOfThisScript():
    """Absolute path of the directory holding this file."""
    here = os.path.abspath(__file__)
    return os.path.dirname(here)
def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
    """Rewrite relative paths inside compiler *flags* so they are absolute
    with respect to *working_directory*; other flags are left untouched."""
    if not working_directory:
        return list(flags)
    result = []
    pending_path = False
    prefixes = ['-isystem', '-I', '-iquote', '--sysroot=']
    for token in flags:
        updated = token
        if pending_path:
            pending_path = False
            if not token.startswith('/'):
                updated = os.path.join(working_directory, token)
        for pre in prefixes:
            if token == pre:
                # Bare flag: the NEXT token is the path to rewrite.
                pending_path = True
                break
            if token.startswith(pre):
                # Fused flag like "-Ivendor/": rewrite the tail in place.
                updated = pre + os.path.join(working_directory, token[len(pre):])
                break
        if updated:
            result.append(updated)
    return result
def FlagsForFile(filename, **kwargs):
    """YCM entry point: the same static flag set is used for every file."""
    return {
        'flags': MakeRelativePathsInFlagsAbsolute(flags, DirectoryOfThisScript())
    }
| mit | Python |
|
b25a172cd89e8811e5cb38414bdf86ef5a5afaee | fix ABC for py2.7 | cfelton/rhea,cfelton/rhea,NickShaffner/rhea,NickShaffner/rhea | rhea/system/cso.py | rhea/system/cso.py |
from __future__ import absolute_import
from abc import ABCMeta, abstractclassmethod
from myhdl import Signal, SignalType, always_comb
class ControlStatusBase(object):
    """ Base class for control and status classes

    Many complex digital block have control and status interfaces.
    The base class is the base class for the specific control and
    status objects (typically ``ControlStatus``) in a block, the
    control-status-objects (CSO) can be used to dynamically
    interact with the block from other blocks, statically configure,
    or assign to a register-file that can be accessed from a
    memory-mapped bus.
    """
    # Python 2 metaclass hook ("fix ABC for py2.7"); Python 3 ignores this
    # attribute, so abstract-method enforcement only applies under Python 2.
    __metaclass__ = ABCMeta

    def __init__(self):
        self._isstatic = False

    @property
    def isstatic(self):
        return self._isstatic

    @isstatic.setter
    def isstatic(self, val):
        self._isstatic = val

    def get_config_bits(self):
        """Return {attribute name: initial value} for every configuration
        signal (a SignalType marked ``config``) that is not driven."""
        attrs = vars(self)
        cfgbits = {}
        for k, v in attrs.items():
            if isinstance(v, SignalType) and v.config and not v.driven:
                cfgbits[k] = v.initial_value
        return cfgbits

    @abstractclassmethod
    def default_assign(self):
        """ A myhdl.block that assigns the control-status defaults.

        For synthesis tools without initial-value support this returns
        generators keeping the default signal values; the tool will warn
        that the signals are stuck at a value - this is desired.

        Returns:
            myhdl generators
        """
        # BUG FIX: `raise NotImplemented` raised a TypeError (NotImplemented
        # is a comparison constant, not an exception class).
        raise NotImplementedError

    def get_register_file(self):
        """ get the register-file for this control-status object"""
        # @todo: this function currently lives in memmap.regfile
        # @todo: return build_register_file(self)
        return None

    @abstractclassmethod
    def get_generators(self):
        """ get any hardware logic associated with the cso"""
        return None
def assign_config(sig, val):
    """
    Arguments:
        sig (Signal): The signals to be assigned to a constant value
        val (int): The constant value
    """
    # Dummy signal marked as externally driven so the converter keeps it.
    keep = Signal(bool(0))
    keep.driven = 'wire'
    @always_comb
    def beh_assign():
        # NOTE(review): both branches yield `val`; the conditional seems to
        # exist only so `keep` is referenced and the generator survives
        # MyHDL conversion -- confirm before simplifying to `sig.next = val`.
        sig.next = val if keep else val
    return beh_assign
|
from __future__ import absolute_import
from abc import ABCMeta, abstractclassmethod
from myhdl import Signal, SignalType, always_comb
class ControlStatusBase(metaclass=ABCMeta):
    """Base class for control-status objects (CSOs) of a digital block."""

    def __init__(self):
        # True when the CSO is configured statically rather than driven.
        self._isstatic = False

    @property
    def isstatic(self):
        return self._isstatic

    @isstatic.setter
    def isstatic(self, val):
        self._isstatic = val

    def get_config_bits(self):
        """Return {attribute name: initial value} for every undriven
        configuration signal (a SignalType marked ``config``)."""
        attrs = vars(self)
        cfgbits = {}
        for k, v in attrs.items():
            if isinstance(v, SignalType) and v.config and not v.driven:
                cfgbits[k] = v.initial_value
        return cfgbits

    @abstractclassmethod
    def default_assign(self):
        """Return generators that hold the control-status defaults."""
        # BUG FIX: `raise NotImplemented` raised a TypeError (NotImplemented
        # is a comparison constant, not an exception class).
        raise NotImplementedError

    def get_register_file(self):
        """ get the register-file for this control-status object"""
        # @todo: this function currently lives in memmap.regfile
        # @todo: return build_register_file(self)
        return None

    @abstractclassmethod
    def get_generators(self):
        """ get any hardware logic associated with the cso"""
        return None
def assign_config(sig, val):
    """
    Arguments:
        sig (Signal): The signals to be assigned to a constant value
        val (int): The constant value
    """
    # Dummy signal marked as externally driven so the converter keeps it.
    keep = Signal(bool(0))
    keep.driven = 'wire'
    @always_comb
    def beh_assign():
        # NOTE(review): both branches yield `val`; the conditional appears to
        # exist only so `keep` is referenced and the generator survives
        # MyHDL conversion -- confirm before simplifying to `sig.next = val`.
        sig.next = val if keep else val
    return beh_assign
| mit | Python |
c0ab9b755b4906129988348b2247452b6dfc157f | Add a module to set the "display name" of a dedicated server | synthesio/infra-ovh-ansible-module | plugins/modules/dedicated_server_display_name.py | plugins/modules/dedicated_server_display_name.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
from ansible.module_utils.basic import AnsibleModule
__metaclass__ = type
DOCUMENTATION = '''
---
module: dedicated_server_display_name
short_description: Modify the server display name in ovh manager
description:
- Modify the server display name in ovh manager, to help you find your server with your own naming
author: Synthesio SRE Team
requirements:
- ovh >= 0.5.0
options:
service_name:
required: true
description: The service name
display_name:
required: true
description: The display name to set
'''
EXAMPLES = '''
synthesio.ovh.display_name
service_name: "{{ ovhname }}"
display_name: "{{ ansible_hostname }}"
delegate_to: localhost
'''
RETURN = ''' # '''
from ansible_collections.synthesio.ovh.plugins.module_utils.ovh import ovh_api_connect, ovh_argument_spec
try:
from ovh.exceptions import APIError
HAS_OVH = True
except ImportError:
HAS_OVH = False
def run_module():
    """Set the OVH manager display name for a dedicated server.

    Looks up the service id for `service_name` and PUTs the new
    `display_name` through the /service endpoint.  Exits via
    module.exit_json / module.fail_json (Ansible semantics: both
    terminate the process).
    """
    module_args = ovh_argument_spec()
    module_args.update(dict(
        display_name=dict(required=True),
        service_name=dict(required=True)
    ))
    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )
    client = ovh_api_connect(module)
    display_name = module.params['display_name']
    service_name = module.params['service_name']
    if module.check_mode:
        # Dry run: report what would happen and exit without calling the API.
        module.exit_json(msg="display_name has been set to {} ! - (dry run mode)".format(display_name), changed=True)
    try:
        # serviceInfos gives us the numeric serviceId needed by /service/<id>.
        result = client.get('/dedicated/server/%s/serviceInfos' % service_name)
    except APIError as api_error:
        return module.fail_json(msg="Failed to call OVH API: {0}".format(api_error))
    service_id = result["serviceId"]
    resource = {
        "resource": {
            'displayName': display_name,
            'name': service_name}}
    try:
        client.put(
            '/service/%s' % service_id,
            **resource
        )
        # NOTE(review): "succesfully" typo is in the runtime message;
        # left untouched here since callers/tests may match on it.
        module.exit_json(
            msg="displayName succesfully set to {} for {} !".format(display_name, service_name),
            changed=True)
    except APIError as api_error:
        return module.fail_json(msg="Failed to call OVH API: {0}".format(api_error))
def main():
    """Module entry point."""
    run_module()


if __name__ == '__main__':
    main()
| mit | Python |
|
ffdee2f18d5e32c2d0b4f4eb0cebe8b63ee555f7 | Document tools/mac/dump-static-initializers.py more. | M4sse/chromium.src,patrickm/chromium.src,patrickm/chromium.src,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,Chilledheart/chromium,fujunwei/chromium-crosswalk,littlstar/chromium.src,anirudhSK/chromium,Chilledheart/chromium,dednal/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,patrickm/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,dednal/chromium.src,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,Jonekee/chromium.src,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,axinging/chromium-crosswalk,ChromiumWebApps/chromium,Jonekee/chromium.src,chuan9/chromium-crosswalk,ltilve/chromium,axinging/chromium-crosswalk,Just-D/chromium-1,dednal/chromium.src,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,M4sse/chromium.src,Fireblend/chromium-crosswal
k,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,dednal/chromium.src,Just-D/chromium-1,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,littlstar/chromium.src,littlstar/chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk,ltilve/chromium,ondra-novak/chromium.src,anirudhSK/chromium,jaruba/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,jaruba/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk,dushu1203/chromium.src,Chilledheart/chromium,krieger-od/nwjs_chromium.src,patrickm/chromium.src,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,ondra-novak/chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,markYoungH
/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,ltilve/chromium,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,dushu1203/chromium.src,Jonekee/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,littlstar/chromium.src,M4sse/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,Chilledheart/chromium,dednal/chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,dushu1203/chromium.src,ChromiumWebApps/chromium,ltilve/chromium,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,ondra-novak/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,ltilve/chromium,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,ChromiumWebApps/chromium,anirudhSK/chromium,Jonekee/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,M4sse/chromium.src,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,littlstar/chromium.src,krieger-od/
nwjs_chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,jaruba/chromium.src,chuan9/chromium-crosswalk,dushu1203/chromium.src,Fireblend/chromium-crosswalk,anirudhSK/chromium,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,anirudhSK/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,M4sse/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk | tools/mac/dump-static-initializers.py | tools/mac/dump-static-initializers.py | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Dumps a list of files with static initializers. Use with release builds.
Usage:
tools/mac/dump-static-initializers.py out/Release/Chromium\ Framework.framework.dSYM/Contents/Resources/DWARF/Chromium\ Framework
Do NOT use mac_strip_release=0 or component=shared_library if you want to use
this script.
"""
import optparse
import re
import subprocess
import sys

# The regexes below parse the symbol-table listing emitted by `dsymutil -s`.
# Matches for example:
# [ 1] 000001ca 64 (N_SO ) 00 0000 0000000000000000 'test.cc'
dsymutil_file_re = re.compile("N_SO.*'([^']*)'")
# Matches for example:
# [ 2] 000001d2 66 (N_OSO ) 00 0001 000000004ed856a0 '/Volumes/MacintoshHD2/src/chrome-git/src/test.o'
dsymutil_o_file_re = re.compile("N_OSO.*'([^']*)'")
# Matches for example:
# [ 8] 00000233 24 (N_FUN ) 01 0000 0000000000001b40 '__GLOBAL__I_s'
# [185989] 00dc69ef 26 (N_STSYM ) 02 0000 00000000022e2290 '__GLOBAL__I_a'
# __GLOBAL__I_* symbols mark static initializers.
dsymutil_re = re.compile(r"(?:N_FUN|N_STSYM).*\s[0-9a-f]*\s'__GLOBAL__I_")
def ParseDsymutil(binary):
    """Given a binary, prints source and object filenames for files with
    static initializers.

    Streams `dsymutil -s <binary>` output, tracking the most recently
    seen source (N_SO) and object (N_OSO) file names, and prints that
    pair whenever a __GLOBAL__I_* static-initializer symbol appears.
    """
    child = subprocess.Popen(['dsymutil', '-s', binary], stdout=subprocess.PIPE)
    for line in child.stdout:
        file_match = dsymutil_file_re.search(line)
        if file_match:
            # N_SO entry: remember the current source file.
            current_filename = file_match.group(1)
        else:
            o_file_match = dsymutil_o_file_re.search(line)
            if o_file_match:
                # N_OSO entry: remember the current object file.
                current_o_filename = o_file_match.group(1)
            else:
                match = dsymutil_re.search(line)
                if match:
                    # Static initializer found: report the file pair seen
                    # most recently before this symbol.  Assumes dsymutil
                    # always emits N_SO/N_OSO before the symbol, otherwise
                    # these names would be unbound — TODO confirm.
                    print current_filename
                    print current_o_filename
                    print
def main():
    """Dump static initializers for the binary named on the command line."""
    parser = optparse.OptionParser(usage='%prog filename')
    _opts, args = parser.parse_args()
    if len(args) != 1:
        # parser.error() exits the process; the return is kept for parity.
        parser.error('missing filename argument')
        return 1
    ParseDsymutil(args[0])
    return 0


if __name__ == '__main__':
    sys.exit(main())
| #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import re
import subprocess
import sys

# Regexes for parsing the symbol-table listing produced by `dsymutil -s`.
# Matches for example:
# [ 1] 000001ca 64 (N_SO ) 00 0000 0000000000000000 'test.cc'
dsymutil_file_re = re.compile("N_SO.*'([^']*)'")
# Matches for example:
# [ 2] 000001d2 66 (N_OSO ) 00 0001 000000004ed856a0 '/Volumes/MacintoshHD2/src/chrome-git/src/test.o'
dsymutil_o_file_re = re.compile("N_OSO.*'([^']*)'")
# Matches for example:
# [ 8] 00000233 24 (N_FUN ) 01 0000 0000000000001b40 '__GLOBAL__I_s'
# [185989] 00dc69ef 26 (N_STSYM ) 02 0000 00000000022e2290 '__GLOBAL__I_a'
# __GLOBAL__I_* symbols mark static initializers.
dsymutil_re = re.compile(r"(?:N_FUN|N_STSYM).*\s[0-9a-f]*\s'__GLOBAL__I_")
def ParseDsymutil(binary):
    """Given a binary, prints source and object filenames for files with
    static initializers.

    Reads `dsymutil -s <binary>` output line by line, remembering the
    latest N_SO (source file) and N_OSO (object file) names, and prints
    that pair each time a __GLOBAL__I_* static-initializer symbol occurs.
    """
    child = subprocess.Popen(['dsymutil', '-s', binary], stdout=subprocess.PIPE)
    for line in child.stdout:
        file_match = dsymutil_file_re.search(line)
        if file_match:
            # Track the current source file (N_SO entry).
            current_filename = file_match.group(1)
        else:
            o_file_match = dsymutil_o_file_re.search(line)
            if o_file_match:
                # Track the current object file (N_OSO entry).
                current_o_filename = o_file_match.group(1)
            else:
                match = dsymutil_re.search(line)
                if match:
                    # Static initializer: print the last-seen file pair.
                    # Assumes N_SO/N_OSO always precede the symbol in the
                    # listing — TODO confirm.
                    print current_filename
                    print current_o_filename
                    print
def main():
    """Parse the command line and run ParseDsymutil on the given binary."""
    parser = optparse.OptionParser(usage='%prog filename')
    opts, args = parser.parse_args()
    if len(args) != 1:
        # parser.error() exits the process; this return is never reached.
        parser.error('missing filename argument')
        return 1
    binary = args[0]
    ParseDsymutil(binary)
    return 0


if '__main__' == __name__:
    sys.exit(main())
| bsd-3-clause | Python |
8fbc5877fa97b6b8df621ff7afe7515b501660fc | Convert string to camel case | SelvorWhim/competitive,SelvorWhim/competitive,SelvorWhim/competitive,SelvorWhim/competitive | LeetCode/ConvertStringToCamelCase.py | LeetCode/ConvertStringToCamelCase.py | def to_camel_case(text):
if len(text) < 2:
return text
capped_camel = "".join([word.title() for word in text.replace('-','_').split('_')])
return capped_camel if text[0].isupper() else capped_camel[0].lower()+capped_camel[1:]
| unlicense | Python |
|
6dd1545ae9ff3ac10586144494f763bcc1bea1d8 | Add script to verify that image files exist for every actual_result checksum | HalCanary/skia-hc,TeamExodus/external_skia,sigysmund/platform_external_skia,wildermason/external_skia,Hikari-no-Tenshi/android_external_skia,AsteroidOS/android_external_skia,zhaochengw/platform_external_skia,AOSPU/external_chromium_org_third_party_skia,suyouxin/android_external_skia,Fusion-Rom/external_chromium_org_third_party_skia,UBERMALLOW/external_skia,spezi77/android_external_skia,mmatyas/skia,google/skia,Fusion-Rom/android_external_skia,nox/skia,xzzz9097/android_external_skia,AOSP-YU/platform_external_skia,Omegaphora/external_chromium_org_third_party_skia,Omegaphora/external_chromium_org_third_party_skia,w3nd1go/android_external_skia,CyanogenMod/android_external_chromium_org_third_party_skia,sudosurootdev/external_skia,TeslaOS/android_external_skia,noselhq/skia,chenlian2015/skia_from_google,Android-AOSP/external_skia,ench0/external_chromium_org_third_party_skia,nvoron23/skia,larsbergstrom/skia,xzzz9097/android_external_skia,vvuk/skia,UBERMALLOW/external_skia,AOSPB/external_skia,MyAOSP/external_chromium_org_third_party_skia,akiss77/skia,FusionSP/android_external_skia,google/skia,Omegaphora/external_chromium_org_third_party_skia,MinimalOS/external_chromium_org_third_party_skia,RadonX-ROM/external_skia,MinimalOS/external_skia,OneRom/external_skia,geekboxzone/lollipop_external_skia,HalCanary/skia-hc,NamelessRom/android_external_skia,ominux/skia,mydongistiny/external_chromium_org_third_party_skia,zhaochengw/platform_external_skia,VentureROM-L/android_external_skia,samuelig/skia,mmatyas/skia,mydongistiny/external_chromium_org_third_party_skia,timduru/platform-external-skia,timduru/platform-external-skia,jtg-gg/skia,aosp-mirror/platform_external_skia,DiamondLovesYou/skia-sys,VRToxin-AOSP/android_external_skia,pcwalton/skia,InfinitiveOS/external_skia,pacerom/external_skia,temasek/android_external_skia,nox/skia,CyanogenMod/android_external_chromium_
org_third_party_skia,OptiPop/external_chromium_org_third_party_skia,TeamEOS/external_skia,Tesla-Redux/android_external_skia,MinimalOS-AOSP/platform_external_skia,aosp-mirror/platform_external_skia,wildermason/external_skia,Jichao/skia,DesolationStaging/android_external_skia,MonkeyZZZZ/platform_external_skia,MarshedOut/android_external_skia,MinimalOS/android_external_skia,xin3liang/platform_external_chromium_org_third_party_skia,houst0nn/external_skia,Purity-Lollipop/platform_external_skia,Android-AOSP/external_skia,Fusion-Rom/android_external_skia,HalCanary/skia-hc,AsteroidOS/android_external_skia,VRToxin-AOSP/android_external_skia,AOSPU/external_chromium_org_third_party_skia,RadonX-ROM/external_skia,MyAOSP/external_chromium_org_third_party_skia,nox/skia,DiamondLovesYou/skia-sys,Plain-Andy/android_platform_external_skia,MinimalOS/external_skia,suyouxin/android_external_skia,AndroidOpenDevelopment/android_external_skia,Hikari-no-Tenshi/android_external_skia,F-AOSP/platform_external_skia,mydongistiny/android_external_skia,VentureROM-L/android_external_skia,tmpvar/skia.cc,shahrzadmn/skia,chenlian2015/skia_from_google,YUPlayGodDev/platform_external_skia,Plain-Andy/android_platform_external_skia,sudosurootdev/external_skia,Hikari-no-Tenshi/android_external_skia,chenlian2015/skia_from_google,geekboxzone/lollipop_external_skia,AOSPU/external_chromium_org_third_party_skia,geekboxzone/lollipop_external_skia,mozilla-b2g/external_skia,MyAOSP/external_chromium_org_third_party_skia,VRToxin-AOSP/android_external_skia,OptiPop/external_skia,nfxosp/platform_external_skia,xzzz9097/android_external_skia,nox/skia,amyvmiwei/skia,ominux/skia,HealthyHoney/temasek_SKIA,TeslaProject/external_skia,boulzordev/android_external_skia,geekboxzone/mmallow_external_skia,akiss77/skia,MarshedOut/android_external_skia,TeslaProject/external_skia,Fusion-Rom/external_chromium_org_third_party_skia,mozilla-b2g/external_skia,mmatyas/skia,F-AOSP/platform_external_skia,Jichao/skia,Infinitive-OS/platform_exter
nal_skia,MinimalOS/android_external_chromium_org_third_party_skia,UBERMALLOW/external_skia,chenlian2015/skia_from_google,geekboxzone/mmallow_external_skia,PAC-ROM/android_external_skia,FusionSP/android_external_skia,SlimSaber/android_external_skia,DesolationStaging/android_external_skia,zhaochengw/platform_external_skia,GladeRom/android_external_skia,OptiPop/external_chromium_org_third_party_skia,MyAOSP/external_chromium_org_third_party_skia,MinimalOS/android_external_chromium_org_third_party_skia,Hikari-no-Tenshi/android_external_skia,mydongistiny/android_external_skia,mmatyas/skia,chenlian2015/skia_from_google,Hybrid-Rom/external_skia,Pure-Aosp/android_external_skia,pcwalton/skia,mydongistiny/android_external_skia,aospo/platform_external_skia,larsbergstrom/skia,Euphoria-OS-Legacy/android_external_skia,android-ia/platform_external_skia,NamelessRom/android_external_skia,Omegaphora/external_chromium_org_third_party_skia,rubenvb/skia,YUPlayGodDev/platform_external_skia,Fusion-Rom/android_external_skia,android-ia/platform_external_skia,TeamEOS/external_skia,ominux/skia,BrokenROM/external_skia,nfxosp/platform_external_skia,DiamondLovesYou/skia-sys,AOSPA-L/android_external_skia,byterom/android_external_skia,larsbergstrom/skia,TeamTwisted/external_skia,chenlian2015/skia_from_google,MinimalOS/android_external_skia,TeamTwisted/external_skia,google/skia,ominux/skia,wildermason/external_skia,jtg-gg/skia,vvuk/skia,google/skia,Fusion-Rom/android_external_skia,BrokenROM/external_skia,xzzz9097/android_external_skia,mmatyas/skia,Pure-Aosp/android_external_skia,mydongistiny/android_external_skia,rubenvb/skia,Euphoria-OS-Legacy/android_external_skia,aosp-mirror/platform_external_skia,YUPlayGodDev/platform_external_skia,YUPlayGodDev/platform_external_skia,DesolationStaging/android_external_skia,Plain-Andy/android_platform_external_skia,scroggo/skia,HalCanary/skia-hc,rubenvb/skia,Plain-Andy/android_platform_external_skia,aosp-mirror/platform_external_skia,VentureROM-L/android_external
_skia,sigysmund/platform_external_skia,Omegaphora/external_chromium_org_third_party_skia,VentureROM-L/android_external_skia,w3nd1go/android_external_skia,MinimalOS/external_skia,FusionSP/external_chromium_org_third_party_skia,AsteroidOS/android_external_skia,aospo/platform_external_skia,sombree/android_external_skia,MonkeyZZZZ/platform_external_skia,mmatyas/skia,ench0/external_skia,Pure-Aosp/android_external_skia,sombree/android_external_skia,spezi77/android_external_skia,UBERMALLOW/external_skia,MIPS/external-chromium_org-third_party-skia,mydongistiny/external_chromium_org_third_party_skia,ominux/skia,MinimalOS-AOSP/platform_external_skia,vanish87/skia,noselhq/skia,fire855/android_external_skia,samuelig/skia,todotodoo/skia,VRToxin-AOSP/android_external_skia,houst0nn/external_skia,Plain-Andy/android_platform_external_skia,TeamEOS/external_chromium_org_third_party_skia,GladeRom/android_external_skia,MinimalOS-AOSP/platform_external_skia,houst0nn/external_skia,Purity-Lollipop/platform_external_skia,Khaon/android_external_skia,AOSP-YU/platform_external_skia,TeamExodus/external_skia,Samsung/skia,AOSPA-L/android_external_skia,Asteroid-Project/android_external_skia,scroggo/skia,timduru/platform-external-skia,android-ia/platform_external_skia,android-ia/platform_external_skia,mydongistiny/external_chromium_org_third_party_skia,tmpvar/skia.cc,invisiblek/android_external_skia,NamelessRom/android_external_skia,ctiao/platform-external-skia,Pure-Aosp/android_external_skia,akiss77/skia,sudosurootdev/external_skia,mmatyas/skia,Samsung/skia,Omegaphora/external_skia,scroggo/skia,OneRom/external_skia,DARKPOP/external_chromium_org_third_party_skia,HalCanary/skia-hc,MarshedOut/android_external_skia,InfinitiveOS/external_skia,temasek/android_external_skia,pcwalton/skia,MarshedOut/android_external_skia,spezi77/android_external_skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,temasek/android_external_skia,vanish87/skia,OneRom/external_skia,geekboxzone/lollipop_external_ch
romium_org_third_party_skia,NamelessRom/android_external_skia,noselhq/skia,Asteroid-Project/android_external_skia,scroggo/skia,MinimalOS/external_chromium_org_third_party_skia,SlimSaber/android_external_skia,ench0/external_skia,Infusion-OS/android_external_skia,samuelig/skia,OptiPop/external_skia,Asteroid-Project/android_external_skia,Hybrid-Rom/external_skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,CyanogenMod/android_external_chromium_org_third_party_skia,TeamEOS/external_chromium_org_third_party_skia,FusionSP/external_chromium_org_third_party_skia,wildermason/external_skia,todotodoo/skia,wildermason/external_skia,AsteroidOS/android_external_skia,TeamEOS/external_skia,Asteroid-Project/android_external_skia,geekboxzone/mmallow_external_skia,MinimalOS/external_skia,OptiPop/external_skia,MinimalOS-AOSP/platform_external_skia,InfinitiveOS/external_skia,invisiblek/android_external_skia,noselhq/skia,android-ia/platform_external_chromium_org_third_party_skia,Tesla-Redux/android_external_skia,TeamBliss-LP/android_external_skia,Jichao/skia,HalCanary/skia-hc,SlimSaber/android_external_skia,YUPlayGodDev/platform_external_skia,Fusion-Rom/external_chromium_org_third_party_skia,amyvmiwei/skia,nfxosp/platform_external_skia,MinimalOS/android_external_skia,android-ia/platform_external_chromium_org_third_party_skia,DiamondLovesYou/skia-sys,F-AOSP/platform_external_skia,android-ia/platform_external_chromium_org_third_party_skia,vanish87/skia,BrokenROM/external_skia,MonkeyZZZZ/platform_external_skia,nvoron23/skia,Jichao/skia,MinimalOS-AOSP/platform_external_skia,Tesla-Redux/android_external_skia,chenlian2015/skia_from_google,google/skia,todotodoo/skia,ench0/external_chromium_org_third_party_skia,MonkeyZZZZ/platform_external_skia,TeslaProject/external_skia,xin3liang/platform_external_chromium_org_third_party_skia,sudosurootdev/external_skia,Euphoria-OS-Legacy/android_external_skia,Fusion-Rom/external_chromium_org_third_party_skia,AOSPA-L/android_external_skia,pacero
m/external_skia,OptiPop/external_chromium_org_third_party_skia,Hikari-no-Tenshi/android_external_skia,Igalia/skia,CyanogenMod/android_external_chromium_org_third_party_skia,MonkeyZZZZ/platform_external_skia,Fusion-Rom/android_external_skia,TeamTwisted/external_skia,MinimalOS/external_chromium_org_third_party_skia,aospo/platform_external_skia,nox/skia,byterom/android_external_skia,BrokenROM/external_skia,Fusion-Rom/android_external_skia,ench0/external_chromium_org_third_party_skia,MinimalOS/android_external_skia,Infinitive-OS/platform_external_skia,nvoron23/skia,geekboxzone/mmallow_external_skia,scroggo/skia,MinimalOS/android_external_chromium_org_third_party_skia,Android-AOSP/external_skia,timduru/platform-external-skia,suyouxin/android_external_skia,PAC-ROM/android_external_skia,aospo/platform_external_skia,OptiPop/external_chromium_org_third_party_skia,Khaon/android_external_skia,suyouxin/android_external_skia,MonkeyZZZZ/platform_external_skia,Fusion-Rom/external_chromium_org_third_party_skia,rubenvb/skia,nfxosp/platform_external_skia,mydongistiny/external_chromium_org_third_party_skia,Omegaphora/external_skia,temasek/android_external_skia,geekboxzone/mmallow_external_skia,byterom/android_external_skia,DARKPOP/external_chromium_org_third_party_skia,mozilla-b2g/external_skia,Jichao/skia,MinimalOS/android_external_chromium_org_third_party_skia,sudosurootdev/external_skia,qrealka/skia-hc,todotodoo/skia,YUPlayGodDev/platform_external_skia,FusionSP/external_chromium_org_third_party_skia,tmpvar/skia.cc,AndroidOpenDevelopment/android_external_skia,OptiPop/external_chromium_org_third_party_skia,geekboxzone/mmallow_external_skia,AOSPB/external_skia,TeslaOS/android_external_skia,TeamBliss-LP/android_external_skia,tmpvar/skia.cc,Infinitive-OS/platform_external_skia,Asteroid-Project/android_external_skia,Omegaphora/external_skia,HealthyHoney/temasek_SKIA,Omegaphora/external_skia,OneRom/external_skia,todotodoo/skia,MyAOSP/external_chromium_org_third_party_skia,VRToxin-AOSP/and
roid_external_skia,noselhq/skia,sombree/android_external_skia,Android-AOSP/external_skia,nfxosp/platform_external_skia,mozilla-b2g/external_skia,rubenvb/skia,invisiblek/android_external_skia,sombree/android_external_skia,GladeRom/android_external_skia,shahrzadmn/skia,AOSPU/external_chromium_org_third_party_skia,Android-AOSP/external_skia,HalCanary/skia-hc,OptiPop/external_skia,MarshedOut/android_external_skia,OneRom/external_skia,w3nd1go/android_external_skia,larsbergstrom/skia,VentureROM-L/android_external_skia,w3nd1go/android_external_skia,Khaon/android_external_skia,boulzordev/android_external_skia,TeamTwisted/external_skia,qrealka/skia-hc,GladeRom/android_external_skia,TeamTwisted/external_skia,MinimalOS/android_external_skia,Euphoria-OS-Legacy/android_external_skia,ench0/external_chromium_org_third_party_skia,MinimalOS-AOSP/platform_external_skia,vanish87/skia,larsbergstrom/skia,aosp-mirror/platform_external_skia,timduru/platform-external-skia,TeslaOS/android_external_skia,sombree/android_external_skia,samuelig/skia,MIPS/external-chromium_org-third_party-skia,sigysmund/platform_external_skia,mydongistiny/external_chromium_org_third_party_skia,boulzordev/android_external_skia,OneRom/external_skia,Fusion-Rom/external_chromium_org_third_party_skia,AOSP-YU/platform_external_skia,xzzz9097/android_external_skia,MarshedOut/android_external_skia,codeaurora-unoffical/platform-external-skia,mydongistiny/android_external_skia,Tesla-Redux/android_external_skia,tmpvar/skia.cc,suyouxin/android_external_skia,TeamExodus/external_skia,codeaurora-unoffical/platform-external-skia,android-ia/platform_external_skia,jtg-gg/skia,byterom/android_external_skia,TeamBliss-LP/android_external_skia,OptiPop/external_skia,vanish87/skia,ctiao/platform-external-skia,geekboxzone/mmallow_external_skia,mmatyas/skia,wildermason/external_skia,sigysmund/platform_external_skia,BrokenROM/external_skia,rubenvb/skia,geekboxzone/lollipop_external_skia,Infusion-OS/android_external_skia,AOSPA-L/android_ext
ernal_skia,vanish87/skia,wildermason/external_skia,TeamEOS/external_skia,TeamEOS/external_skia,MinimalOS/android_external_chromium_org_third_party_skia,Omegaphora/external_chromium_org_third_party_skia,MonkeyZZZZ/platform_external_skia,InfinitiveOS/external_skia,Hikari-no-Tenshi/android_external_skia,wildermason/external_skia,InfinitiveOS/external_skia,AOSP-YU/platform_external_skia,akiss77/skia,DiamondLovesYou/skia-sys,ench0/external_chromium_org_third_party_skia,vvuk/skia,FusionSP/android_external_skia,F-AOSP/platform_external_skia,AOSP-YU/platform_external_skia,boulzordev/android_external_skia,FusionSP/android_external_skia,nox/skia,Purity-Lollipop/platform_external_skia,vvuk/skia,google/skia,shahrzadmn/skia,MinimalOS-AOSP/platform_external_skia,AOSPA-L/android_external_skia,VentureROM-L/android_external_skia,HalCanary/skia-hc,ctiao/platform-external-skia,sigysmund/platform_external_skia,Hybrid-Rom/external_skia,HealthyHoney/temasek_SKIA,PAC-ROM/android_external_skia,sombree/android_external_skia,AOSP-YU/platform_external_skia,AOSPA-L/android_external_skia,Samsung/skia,scroggo/skia,tmpvar/skia.cc,Infinitive-OS/platform_external_skia,Samsung/skia,VRToxin-AOSP/android_external_skia,Infinitive-OS/platform_external_skia,mydongistiny/android_external_skia,TeslaOS/android_external_skia,larsbergstrom/skia,BrokenROM/external_skia,android-ia/platform_external_chromium_org_third_party_skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,houst0nn/external_skia,RadonX-ROM/external_skia,pacerom/external_skia,geekboxzone/lollipop_external_skia,pcwalton/skia,AndroidOpenDevelopment/android_external_skia,F-AOSP/platform_external_skia,MarshedOut/android_external_skia,ench0/external_skia,InfinitiveOS/external_skia,todotodoo/skia,mmatyas/skia,Samsung/skia,zhaochengw/platform_external_skia,DesolationStaging/android_external_skia,VRToxin-AOSP/android_external_skia,scroggo/skia,todotodoo/skia,Fusion-Rom/android_external_skia,MinimalOS/android_external_skia,temasek/android_e
xternal_skia,shahrzadmn/skia,MyAOSP/external_chromium_org_third_party_skia,TeamBliss-LP/android_external_skia,pcwalton/skia,larsbergstrom/skia,AndroidOpenDevelopment/android_external_skia,Euphoria-OS-Legacy/android_external_skia,android-ia/platform_external_chromium_org_third_party_skia,UBERMALLOW/external_skia,OptiPop/external_skia,noselhq/skia,FusionSP/external_chromium_org_third_party_skia,TeamExodus/external_skia,xin3liang/platform_external_chromium_org_third_party_skia,Android-AOSP/external_skia,codeaurora-unoffical/platform-external-skia,VentureROM-L/android_external_skia,TeamTwisted/external_skia,larsbergstrom/skia,Fusion-Rom/android_external_skia,MarshedOut/android_external_skia,BrokenROM/external_skia,Tesla-Redux/android_external_skia,fire855/android_external_skia,mozilla-b2g/external_skia,TeamExodus/external_skia,DARKPOP/external_chromium_org_third_party_skia,rubenvb/skia,temasek/android_external_skia,BrokenROM/external_skia,F-AOSP/platform_external_skia,ench0/external_skia,OneRom/external_skia,fire855/android_external_skia,nvoron23/skia,amyvmiwei/skia,HealthyHoney/temasek_SKIA,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,Khaon/android_external_skia,TeamBliss-LP/android_external_skia,MinimalOS/external_skia,YUPlayGodDev/platform_external_skia,Igalia/skia,DiamondLovesYou/skia-sys,TeamEOS/external_chromium_org_third_party_skia,AsteroidOS/android_external_skia,boulzordev/android_external_skia,UBERMALLOW/external_skia,TeamExodus/external_skia,Hikari-no-Tenshi/android_external_skia,android-ia/platform_external_chromium_org_third_party_skia,pcwalton/skia,Android-AOSP/external_skia,OptiPop/external_skia,FusionSP/external_chromium_org_third_party_skia,DARKPOP/external_chromium_org_third_party_skia,AndroidOpenDevelopment/android_external_skia,aosp-mirror/platform_external_skia,ctiao/platform-external-skia,OptiPop/external_chromium_org_third_party_skia,Pure-Aosp/android_external_skia,amyvmiwei/skia,RadonX-ROM/external_skia,google/skia,nvoron
23/skia,VRToxin-AOSP/android_external_skia,geekboxzone/lollipop_external_skia,RadonX-ROM/external_skia,UBERMALLOW/external_skia,NamelessRom/android_external_skia,xzzz9097/android_external_skia,samuelig/skia,temasek/android_external_skia,Pure-Aosp/android_external_skia,byterom/android_external_skia,DiamondLovesYou/skia-sys,samuelig/skia,codeaurora-unoffical/platform-external-skia,CyanogenMod/android_external_chromium_org_third_party_skia,MIPS/external-chromium_org-third_party-skia,Pure-Aosp/android_external_skia,fire855/android_external_skia,android-ia/platform_external_chromium_org_third_party_skia,AOSPU/external_chromium_org_third_party_skia,MIPS/external-chromium_org-third_party-skia,Plain-Andy/android_platform_external_skia,Tesla-Redux/android_external_skia,sudosurootdev/external_skia,ench0/external_chromium_org_third_party_skia,invisiblek/android_external_skia,VRToxin-AOSP/android_external_skia,vanish87/skia,w3nd1go/android_external_skia,qrealka/skia-hc,AsteroidOS/android_external_skia,MinimalOS-AOSP/platform_external_skia,vanish87/skia,Asteroid-Project/android_external_skia,fire855/android_external_skia,TeamExodus/external_skia,larsbergstrom/skia,nfxosp/platform_external_skia,vvuk/skia,nox/skia,nvoron23/skia,PAC-ROM/android_external_skia,tmpvar/skia.cc,ominux/skia,android-ia/platform_external_chromium_org_third_party_skia,amyvmiwei/skia,Fusion-Rom/external_chromium_org_third_party_skia,CyanogenMod/android_external_chromium_org_third_party_skia,shahrzadmn/skia,TeslaProject/external_skia,MinimalOS/android_external_chromium_org_third_party_skia,mozilla-b2g/external_skia,jtg-gg/skia,Purity-Lollipop/platform_external_skia,pcwalton/skia,fire855/android_external_skia,CyanogenMod/android_external_chromium_org_third_party_skia,ench0/external_skia,samuelig/skia,MinimalOS/external_chromium_org_third_party_skia,android-ia/platform_external_skia,aospo/platform_external_skia,DARKPOP/external_chromium_org_third_party_skia,qrealka/skia-hc,Tesla-Redux/android_external_skia,omin
ux/skia,amyvmiwei/skia,Jichao/skia,qrealka/skia-hc,boulzordev/android_external_skia,TeslaOS/android_external_skia,FusionSP/android_external_skia,HealthyHoney/temasek_SKIA,mozilla-b2g/external_skia,Euphoria-OS-Legacy/android_external_skia,AOSPB/external_skia,Infusion-OS/android_external_skia,sudosurootdev/external_skia,RadonX-ROM/external_skia,HalCanary/skia-hc,ctiao/platform-external-skia,RadonX-ROM/external_skia,jtg-gg/skia,mydongistiny/android_external_skia,GladeRom/android_external_skia,sigysmund/platform_external_skia,SlimSaber/android_external_skia,TeamExodus/external_skia,TeslaProject/external_skia,akiss77/skia,invisiblek/android_external_skia,MonkeyZZZZ/platform_external_skia,zhaochengw/platform_external_skia,NamelessRom/android_external_skia,noselhq/skia,Pure-Aosp/android_external_skia,TeamBliss-LP/android_external_skia,vvuk/skia,MonkeyZZZZ/platform_external_skia,boulzordev/android_external_skia,TeslaOS/android_external_skia,vanish87/skia,pacerom/external_skia,TeamEOS/external_chromium_org_third_party_skia,HalCanary/skia-hc,Omegaphora/external_chromium_org_third_party_skia,xzzz9097/android_external_skia,aosp-mirror/platform_external_skia,ench0/external_skia,MinimalOS/external_skia,invisiblek/android_external_skia,pacerom/external_skia,ench0/external_skia,PAC-ROM/android_external_skia,w3nd1go/android_external_skia,Igalia/skia,spezi77/android_external_skia,aospo/platform_external_skia,rubenvb/skia,suyouxin/android_external_skia,shahrzadmn/skia,ctiao/platform-external-skia,MinimalOS/external_chromium_org_third_party_skia,MinimalOS/android_external_chromium_org_third_party_skia,timduru/platform-external-skia,aospo/platform_external_skia,shahrzadmn/skia,PAC-ROM/android_external_skia,shahrzadmn/skia,jtg-gg/skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,MIPS/external-chromium_org-third_party-skia,xin3liang/platform_external_chromium_org_third_party_skia,Hybrid-Rom/external_skia,vvuk/skia,Jichao/skia,nfxosp/platform_external_skia,Igalia/skia,Minima
lOS-AOSP/platform_external_skia,AndroidOpenDevelopment/android_external_skia,GladeRom/android_external_skia,GladeRom/android_external_skia,FusionSP/external_chromium_org_third_party_skia,F-AOSP/platform_external_skia,w3nd1go/android_external_skia,AOSP-YU/platform_external_skia,HealthyHoney/temasek_SKIA,ctiao/platform-external-skia,OptiPop/external_chromium_org_third_party_skia,mydongistiny/external_chromium_org_third_party_skia,Infinitive-OS/platform_external_skia,UBERMALLOW/external_skia,zhaochengw/platform_external_skia,houst0nn/external_skia,codeaurora-unoffical/platform-external-skia,Jichao/skia,akiss77/skia,geekboxzone/lollipop_external_skia,ominux/skia,TeslaOS/android_external_skia,AOSPA-L/android_external_skia,TeamBliss-LP/android_external_skia,google/skia,xin3liang/platform_external_chromium_org_third_party_skia,mydongistiny/external_chromium_org_third_party_skia,aosp-mirror/platform_external_skia,RadonX-ROM/external_skia,byterom/android_external_skia,houst0nn/external_skia,AOSPB/external_skia,TeamEOS/external_chromium_org_third_party_skia,Omegaphora/external_skia,mozilla-b2g/external_skia,Jichao/skia,NamelessRom/android_external_skia,AOSPB/external_skia,YUPlayGodDev/platform_external_skia,Igalia/skia,qrealka/skia-hc,android-ia/platform_external_skia,jtg-gg/skia,TeamTwisted/external_skia,Infusion-OS/android_external_skia,noselhq/skia,nox/skia,Euphoria-OS-Legacy/android_external_skia,Infinitive-OS/platform_external_skia,DARKPOP/external_chromium_org_third_party_skia,MyAOSP/external_chromium_org_third_party_skia,nvoron23/skia,ench0/external_chromium_org_third_party_skia,Omegaphora/external_chromium_org_third_party_skia,codeaurora-unoffical/platform-external-skia,google/skia,Plain-Andy/android_platform_external_skia,w3nd1go/android_external_skia,PAC-ROM/android_external_skia,MIPS/external-chromium_org-third_party-skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,UBERMALLOW/external_skia,TeslaProject/external_skia,MIPS/external-chromium_org-third_
party-skia,amyvmiwei/skia,Purity-Lollipop/platform_external_skia,TeamTwisted/external_skia,Khaon/android_external_skia,akiss77/skia,OneRom/external_skia,timduru/platform-external-skia,AsteroidOS/android_external_skia,tmpvar/skia.cc,YUPlayGodDev/platform_external_skia,Infinitive-OS/platform_external_skia,MIPS/external-chromium_org-third_party-skia,DesolationStaging/android_external_skia,F-AOSP/platform_external_skia,OneRom/external_skia,temasek/android_external_skia,akiss77/skia,MyAOSP/external_chromium_org_third_party_skia,mydongistiny/android_external_skia,nfxosp/platform_external_skia,Igalia/skia,sigysmund/platform_external_skia,vvuk/skia,suyouxin/android_external_skia,houst0nn/external_skia,Hybrid-Rom/external_skia,byterom/android_external_skia,DesolationStaging/android_external_skia,MinimalOS/android_external_skia,Fusion-Rom/external_chromium_org_third_party_skia,android-ia/platform_external_skia,ominux/skia,SlimSaber/android_external_skia,TeslaProject/external_skia,Samsung/skia,spezi77/android_external_skia,qrealka/skia-hc,boulzordev/android_external_skia,Hybrid-Rom/external_skia,sudosurootdev/external_skia,AOSPB/external_skia,SlimSaber/android_external_skia,Omegaphora/external_skia,Asteroid-Project/android_external_skia,samuelig/skia,Infusion-OS/android_external_skia,NamelessRom/android_external_skia,fire855/android_external_skia,ench0/external_skia,AOSP-YU/platform_external_skia,TeamEOS/external_skia,Igalia/skia,sombree/android_external_skia,DARKPOP/external_chromium_org_third_party_skia,MinimalOS/external_skia,OptiPop/external_skia,Purity-Lollipop/platform_external_skia,AOSPA-L/android_external_skia,Igalia/skia,geekboxzone/lollipop_external_skia,TeamEOS/external_chromium_org_third_party_skia,codeaurora-unoffical/platform-external-skia,akiss77/skia,invisiblek/android_external_skia,Purity-Lollipop/platform_external_skia,MinimalOS/android_external_chromium_org_third_party_skia,AOSPU/external_chromium_org_third_party_skia,aospo/platform_external_skia,Infinitive-
OS/platform_external_skia,DARKPOP/external_chromium_org_third_party_skia,Purity-Lollipop/platform_external_skia,zhaochengw/platform_external_skia,boulzordev/android_external_skia,qrealka/skia-hc,amyvmiwei/skia,Samsung/skia,AOSPB/external_skia,TeslaOS/android_external_skia,Infusion-OS/android_external_skia,Euphoria-OS-Legacy/android_external_skia,spezi77/android_external_skia,AOSPU/external_chromium_org_third_party_skia,noselhq/skia,MinimalOS/external_chromium_org_third_party_skia,MinimalOS/android_external_skia,VentureROM-L/android_external_skia,Omegaphora/external_skia,codeaurora-unoffical/platform-external-skia,geekboxzone/mmallow_external_skia,invisiblek/android_external_skia,rubenvb/skia,tmpvar/skia.cc,byterom/android_external_skia,todotodoo/skia,Khaon/android_external_skia,MarshedOut/android_external_skia,InfinitiveOS/external_skia,Asteroid-Project/android_external_skia,shahrzadmn/skia,FusionSP/external_chromium_org_third_party_skia,geekboxzone/mmallow_external_skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,MinimalOS/external_skia,Omegaphora/external_skia,Infusion-OS/android_external_skia,ench0/external_chromium_org_third_party_skia,FusionSP/android_external_skia,pacerom/external_skia,geekboxzone/lollipop_external_chromium_org_third_party_skia,vvuk/skia,nvoron23/skia,TeamEOS/external_chromium_org_third_party_skia,google/skia,scroggo/skia,TeamExodus/external_skia,Hybrid-Rom/external_skia,pcwalton/skia,FusionSP/android_external_skia,pcwalton/skia,sigysmund/platform_external_skia,PAC-ROM/android_external_skia,MinimalOS/external_chromium_org_third_party_skia,nfxosp/platform_external_skia,DesolationStaging/android_external_skia,TeamEOS/external_chromium_org_third_party_skia,HealthyHoney/temasek_SKIA,MinimalOS/external_chromium_org_third_party_skia,AsteroidOS/android_external_skia,xzzz9097/android_external_skia,SlimSaber/android_external_skia,DesolationStaging/android_external_skia,SlimSaber/android_external_skia,AOSP-YU/platform_external_skia,Infin
itiveOS/external_skia,todotodoo/skia,Hybrid-Rom/external_skia,AndroidOpenDevelopment/android_external_skia,sombree/android_external_skia,TeamTwisted/external_skia,PAC-ROM/android_external_skia,Khaon/android_external_skia,Samsung/skia,fire855/android_external_skia,xin3liang/platform_external_chromium_org_third_party_skia,xin3liang/platform_external_chromium_org_third_party_skia,TeamEOS/external_skia,zhaochengw/platform_external_skia,AOSPB/external_skia,AOSPB/external_skia,w3nd1go/android_external_skia,Tesla-Redux/android_external_skia,OptiPop/external_chromium_org_third_party_skia,Infusion-OS/android_external_skia,nvoron23/skia,Khaon/android_external_skia,nox/skia,pacerom/external_skia,GladeRom/android_external_skia,HealthyHoney/temasek_SKIA,rubenvb/skia,Hikari-no-Tenshi/android_external_skia,TeslaProject/external_skia,FusionSP/external_chromium_org_third_party_skia | tools/verify_images_for_gm_results.py | tools/verify_images_for_gm_results.py | #!/usr/bin/python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Look through skia-autogen, searching for all checksums which should have
corresponding files in Google Storage, and verify that those files exist. """
import json
import posixpath
import re
import subprocess
import sys
# Root of the skia-autogen SVN repository holding actual GM results.
AUTOGEN_URL = 'http://skia-autogen.googlecode.com/svn/gm-actual'
# Root of the Google Storage bucket holding the GM image files.
GS_URL = 'gs://chromium-skia-gm/gm'
# Matches "<testname>_<config>.png".  Raw string so the backslashes reach the
# regex engine intact, and the dot is escaped so it only matches a literal
# '.' (the original pattern let it match any character).
TEST_NAME_PATTERN = re.compile(r'(\S+)_(\S+)\.png')
def FileNameToGSURL(filename, hash_type, hash_value):
  """ Convert a file name given in a checksum file to the URL of the
  corresponding image file in Google Storage.

  filename: string; the file name to convert. Takes the form specified by
      TEST_NAME_PATTERN.
  hash_type: string; the type of the checksum.
  hash_value: string; the checksum itself.

  Raises an Exception when the file name does not match TEST_NAME_PATTERN.
  """
  # re.match returns None on a non-matching name; the original code called
  # .group(1) on it directly, which crashed with AttributeError instead of
  # raising the intended, descriptive exception (the 'if not test_name'
  # check was unreachable, since a matched (\S+) group is never empty).
  match = TEST_NAME_PATTERN.match(filename)
  if not match:
    raise Exception('Invalid test name for file: %s' % filename)
  test_name = match.group(1)
  return '%s/%s/%s/%s.png' % (GS_URL, hash_type, test_name, hash_value)
def FindURLSInJSON(json_file, gs_urls):
  """ Extract Google Storage URLs from a JSON file in svn, adding them to the
  gs_urls dictionary.

  json_file: string; URL of the JSON file.
  gs_urls: dict; maps each Google Storage URL to the list of JSON files
      which reference it.
  """
  raw_json = subprocess.check_output(['svn', 'cat', json_file])
  parsed = json.loads(raw_json)
  # Only the 'actual-results' section is of interest.
  for section_name in ('actual-results',):
    section = parsed[section_name]
    for result_type in section:
      results = section[result_type]
      if not results:
        continue
      for result_name in results.keys():
        hash_type, hash_value = results[result_name]
        gs_url = FileNameToGSURL(result_name, hash_type, str(hash_value))
        # Accumulate every referencing JSON file under its URL.
        gs_urls.setdefault(gs_url, []).append(json_file)
def _FindJSONFiles(url, json_files):
  """ Helper function for FindJsonFiles. Recursively explore the repository,
  adding JSON files to a list.

  url: string; URL of the repository (or subdirectory thereof) to explore.
  json_files: list to which JSON file urls will be added.
  """
  # NOTE(review): calling wait() while stdout is a PIPE can deadlock if
  # "svn ls" emits more output than the pipe buffer holds -- confirm that
  # directory listings stay small, or call communicate() first.
  proc = subprocess.Popen(['svn', 'ls', url], stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
  if proc.wait() != 0:
    raise Exception('Failed to list svn directory.')
  output = proc.communicate()[0].splitlines()
  subdirs = []
  for item in output:
    # "svn ls" marks directories with a trailing separator ("/").
    if item.endswith(posixpath.sep):
      subdirs.append(item)
    elif item.endswith('.json'):
      json_files.append(posixpath.join(url, item))
    else:
      print 'Warning: ignoring %s' % posixpath.join(url, item)
  # Recurse into subdirectories after the current listing is processed.
  for subdir in subdirs:
    _FindJSONFiles(posixpath.join(url, subdir), json_files)
def FindJSONFiles(url):
  """ Recursively explore the given repository and return a list of the JSON
  files it contains.

  url: string; URL of the repository to explore.
  """
  print 'Searching for JSON files in %s' % url
  json_files = []
  # _FindJSONFiles appends into json_files as it recurses.
  _FindJSONFiles(url, json_files)
  return json_files
def FindURLs(url):
  """ Find Google Storage URLs inside of JSON files in the given repository.
  Returns a dictionary whose keys are Google Storage URLs and values are lists
  of the JSON files which reference them.

  url: string; URL of the repository to explore.

  Example output:
  { 'gs://chromium-skia-gm/gm/sometest/12345.png': [
      'http://skia-autogen.googlecode.com/svn/gm-actual/base-macmini/Test-Mac10.6-MacMini4.1-GeForce320M-x86-Debug/base-macmini/actual-results.json',
      'http://skia-autogen.googlecode.com/svn/gm-actual/base-macmini-10_8/Test-Mac10.8-MacMini4.1-GeForce320M-x86-Debug/base-macmini-10_8/actual-results.json',
    ]
  }
  """
  gs_urls = {}
  # Several JSON files may reference the same image URL; FindURLSInJSON
  # accumulates them all into the shared dict.
  for json_file in FindJSONFiles(url):
    print 'Looking for checksums in %s' % json_file
    FindURLSInJSON(json_file, gs_urls)
  return gs_urls
def VerifyURL(url):
  """ Verify that the given URL exists.

  url: string; the Google Storage URL of the image file in question.

  Returns True when "gsutil ls" succeeds for the URL, False otherwise.
  """
  proc = subprocess.Popen(['gsutil', 'ls', url], stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
  # Drain stdout before checking the exit status: calling wait() alone with
  # a PIPE attached can deadlock if the child fills the pipe buffer.
  proc.communicate()
  return proc.returncode == 0
def VerifyURLs(urls):
  """ Verify that each of the given URLs exists. Return a list of which URLs do
  not exist.

  urls: dictionary; URLs of the image files in question, mapped to the list
      of JSON files that reference them (used for the warning message).
  """
  print 'Verifying that images exist for URLs...'
  missing = []
  # iterkeys: this script targets Python 2.
  for url in urls.iterkeys():
    if not VerifyURL(url):
      print 'Missing: %s, referenced by: \n %s' % (url, '\n '.join(urls[url]))
      missing.append(url)
  return missing
def Main():
  """ Verify that an image exists in Google Storage for every checksum
  referenced by the JSON files in skia-autogen.  Returns 1 (suitable as a
  process exit code) if any are missing. """
  urls = FindURLs(AUTOGEN_URL)
  missing = VerifyURLs(urls)
  if missing:
    print 'Found %d Missing files.' % len(missing)
    return 1
  # Falls through (returns None -> exit status 0) when nothing is missing.


if __name__ == '__main__':
  sys.exit(Main())
| bsd-3-clause | Python |
|
98abb69d2c5cd41e9cdf9decc1180fe35112bc28 | Add initial base for the feed handler | tobbez/lys-reader | backend/feed_daemon.py | backend/feed_daemon.py | import feedparser
import psycopg2
import sys
import configparser
import logging
class FeedHandler():
    """Fetches the RSS/Atom feeds listed in the lysr database, stores any
    new entries, and adjusts each feed's polling interval as it goes."""

    def __init__(self):
        # interpolation=None so config values may contain literal '%'.
        self.config = configparser.ConfigParser(interpolation=None)
        self.config.read(('config.ini',))
        logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
        self.con = None
        try:
            self.con = psycopg2.connect(
                database=self.config.get('database', 'database'),
                user=self.config.get('database', 'user'),
                password=self.config.get('database', 'password'),
                host=self.config.get('database', 'host'),
                # 'async' became a reserved keyword in Python 3.7; psycopg2
                # accepts 'async_' as an equivalent spelling.
                async_=False)
        except psycopg2.OperationalError as e:
            # NOTE(review): self.con stays None on failure; the methods
            # below would then crash on self.con.cursor() -- confirm that
            # is the intended behaviour.
            logging.error('Database: {}'.format(str(e).split('\n')[0]))

    def update_feed(self, feed_id, feed_url=None):
        """Fetch one feed and insert entries whose guid is not stored yet.

        feed_id: primary key of the feed in lysr_feed.
        feed_url: optional; looked up from the database when omitted.
        """
        if feed_url is None:  # idiom fix: was '== None'
            cur = self.con.cursor()
            cur.execute('SELECT url FROM lysr_feed WHERE id=%s', (feed_id,))
            self.con.commit()
            feed_url = cur.fetchone()[0]
        logging.info('Updating feed {}: {}'.format(feed_id, feed_url))
        feed = feedparser.parse(feed_url)
        new_entries = 0
        # Fixed: the original used 'is 200', which compares object identity
        # and only works by accident of CPython's small-int caching.
        if feed.status == 200:
            try:
                cur = self.con.cursor()
                for entry in feed.entries:
                    # Bad HTML is removed by default :D
                    cur.execute('SELECT id FROM lysr_feed_entry WHERE feed = %s AND guid = %s', (feed_id, entry.link))
                    self.con.commit()
                    if cur.rowcount == 0:  # fixed: was 'is 0'
                        new_entries += 1
                        cur.execute('INSERT INTO lysr_feed_entry (feed, guid, content, title) VALUES (%s, %s, %s, %s)',
                            (feed_id, entry.link, entry.description, entry.title))
                        self.con.commit()
                logging.info('Fetched feed {}, {} new entries found'.format(feed_id, new_entries))
            except Exception as e:
                logging.error('Database: {}'.format(str(e).split('\n')[0]))
        else:
            logging.info('Failed to fetch feed {}, status {}'.format(feed_id, feed.status))
        # Record the poll; back off (double the interval) when nothing new
        # appeared, otherwise refresh last_update.
        cur = self.con.cursor()
        cur.execute('UPDATE lysr_feed SET last_check=NOW() WHERE id=%s', (feed_id,))
        self.con.commit()
        if new_entries:
            cur.execute('UPDATE lysr_feed SET last_update=NOW() WHERE id=%s', (feed_id,))
        else:
            cur.execute('UPDATE lysr_feed SET update_interval=2*update_interval WHERE id=%s', (feed_id,))
        self.con.commit()

    def parse_feeds(self):
        """Poll every feed whose update interval has elapsed."""
        cur = self.con.cursor()
        cur.execute('SELECT id, url FROM lysr_feed WHERE NOW() > last_check + update_interval')
        self.con.commit()
        for feed in cur:
            self.update_feed(*feed)
def main(args):
    """Entry point: create a handler and poll all due feeds once."""
    handler = FeedHandler()
    handler.parse_feeds()


if __name__ == '__main__':
    main(sys.argv)
| isc | Python |
|
c684ab17fc83242ee32db4b4c4bf57a7798acae4 | Add ordering prefix | cprogrammer1994/ModernGL,cprogrammer1994/ModernGL,cprogrammer1994/ModernGL | examples/00_empty_window.py | examples/00_empty_window.py | import ModernGL
from ModernGL.ext.examples import run_example
class Example:
    """Minimal ModernGL example: a window cleared to a flat blue color."""

    def __init__(self, wnd):
        self.wnd = wnd
        self.ctx = ModernGL.create_context()

    def render(self):
        ctx = self.ctx
        # Track the window's current viewport each frame, then clear.
        ctx.viewport = self.wnd.viewport
        ctx.clear(0.2, 0.4, 0.7)


run_example(Example)
| mit | Python |
|
35a683738f00a67b88f26fdc2453a29777fe7f82 | Add raw outputter | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/output/raw.py | salt/output/raw.py | '''
Print out the raw python data, the original outputter
'''
def ouput(data):
    '''
    Rather basic: print the raw Python data structure, the original
    outputter behavior.

    data: the object to display.
    '''
    print(data)


# NOTE(review): the function name looks like a typo of "output", which is
# the name outputter loaders conventionally look up; expose it under the
# expected name while keeping the original for backward compatibility.
output = ouput
| apache-2.0 | Python |
|
ad284dfe63b827aaa1ca8d7353e1bf1a54ea4fdf | Change arduino board from first example from mega to nano | Scheik/ROS-Workspace,Scheik/ROS-Groovy-Workspace,Scheik/ROS-Workspace,Scheik/ROS-Groovy-Workspace | src/arduino_sourcecodes/src/arduino_serial_nodes/connect_arduino_nano1.py | src/arduino_sourcecodes/src/arduino_serial_nodes/connect_arduino_nano1.py | #!/usr/bin/env python
#####################################################################
# Software License Agreement (BSD License)
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__author__ = "[email protected] (Michael Ferguson)"
import rospy
from rosserial_python import SerialClient, RosSerialServer
import multiprocessing
import sys
if __name__=="__main__":
    rospy.init_node("serial_node_arduinoNano1")
    rospy.loginfo("ROS Serial Python Node")
    # Defaults: first USB serial adapter, rosserial's usual baud rate.
    port_name = rospy.get_param('~port','/dev/ttyUSB0')
    baud = int(rospy.get_param('~baud','57600'))
    # TODO: should these really be global?
    tcp_portnum = int(rospy.get_param('/rosserial_embeddedlinux/tcp_port', '11411'))
    fork_server = rospy.get_param('/rosserial_embeddedlinux/fork_server', False)
    # TODO: do we really want command line params in addition to parameter server params?
    # Command-line overrides: argv[1] = port name, argv[2] = TCP port.
    sys.argv = rospy.myargv(argv=sys.argv)
    if len(sys.argv) == 2 :
        port_name = sys.argv[1]
    if len(sys.argv) == 3 :
        tcp_portnum = int(sys.argv[2])
    if port_name == "tcp" :
        # TCP mode: accept rosserial clients over a socket instead of serial.
        server = RosSerialServer(tcp_portnum, fork_server)
        rospy.loginfo("Waiting for socket connections on port %d" % tcp_portnum)
        try:
            server.listen()
        except KeyboardInterrupt:
            rospy.loginfo("got keyboard interrupt")
        finally:
            rospy.loginfo("Shutting down")
            # Reap any per-connection child processes spawned by the server.
            for process in multiprocessing.active_children():
                rospy.loginfo("Shutting down process %r", process)
                process.terminate()
                process.join()
            rospy.loginfo("All done")
    else : # Use serial port
        rospy.loginfo("Connecting to %s at %d baud" % (port_name,baud) )
        client = SerialClient(port_name, baud)
        try:
            client.run()
        except KeyboardInterrupt:
            pass
| bsd-3-clause | Python |
|
07528bd828c28a18f3118481d1cdb9cf1287fd0b | Revert "don't track django.wsgi". It is part of the documentation. | marineam/nagcat,marineam/nagcat,marineam/nagcat | railroad/sample/django.wsgi | railroad/sample/django.wsgi | # Copyright 2010 ITA Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys

# Describes the location of our Django configuration file. Unless you move the
# settings file this default should be fine
os.environ['DJANGO_SETTINGS_MODULE'] = 'railroad.settings'

# These should correspond to the paths of your railroad and nagcat
# installation
sys.path.append('/var/lib/nagcat/railroad')
sys.path.append('/var/lib/nagcat/python')

import django.core.handlers.wsgi

# WSGI entry point: the object that mod_wsgi (or any WSGI server) looks up.
application = django.core.handlers.wsgi.WSGIHandler()
| apache-2.0 | Python |
|
e7dd12377a5f3a46019c5244de08a5cfc00f44db | add Anscombe's Quartet example | percyfal/bokeh,saifrahmed/bokeh,bsipocz/bokeh,srinathv/bokeh,schoolie/bokeh,rothnic/bokeh,ahmadia/bokeh,carlvlewis/bokeh,PythonCharmers/bokeh,dennisobrien/bokeh,mutirri/bokeh,aavanian/bokeh,paultcochrane/bokeh,ericdill/bokeh,msarahan/bokeh,jakirkham/bokeh,daodaoliang/bokeh,khkaminska/bokeh,ChristosChristofidis/bokeh,msarahan/bokeh,bokeh/bokeh,rothnic/bokeh,bokeh/bokeh,jplourenco/bokeh,xguse/bokeh,stonebig/bokeh,timothydmorton/bokeh,khkaminska/bokeh,PythonCharmers/bokeh,KasperPRasmussen/bokeh,aiguofer/bokeh,gpfreitas/bokeh,maxalbert/bokeh,draperjames/bokeh,eteq/bokeh,ericdill/bokeh,muku42/bokeh,ptitjano/bokeh,dennisobrien/bokeh,mindriot101/bokeh,canavandl/bokeh,khkaminska/bokeh,matbra/bokeh,birdsarah/bokeh,CrazyGuo/bokeh,gpfreitas/bokeh,bsipocz/bokeh,awanke/bokeh,josherick/bokeh,awanke/bokeh,aavanian/bokeh,aiguofer/bokeh,canavandl/bokeh,aavanian/bokeh,philippjfr/bokeh,ptitjano/bokeh,azjps/bokeh,percyfal/bokeh,rothnic/bokeh,stuart-knock/bokeh,phobson/bokeh,justacec/bokeh,jplourenco/bokeh,schoolie/bokeh,timsnyder/bokeh,Karel-van-de-Plassche/bokeh,abele/bokeh,DuCorey/bokeh,ahmadia/bokeh,bokeh/bokeh,jakirkham/bokeh,DuCorey/bokeh,draperjames/bokeh,awanke/bokeh,philippjfr/bokeh,paultcochrane/bokeh,roxyboy/bokeh,rs2/bokeh,bokeh/bokeh,clairetang6/bokeh,jplourenco/bokeh,ChristosChristofidis/bokeh,josherick/bokeh,birdsarah/bokeh,draperjames/bokeh,rs2/bokeh,aavanian/bokeh,msarahan/bokeh,ericmjl/bokeh,eteq/bokeh,carlvlewis/bokeh,canavandl/bokeh,srinathv/bokeh,sahat/bokeh,Karel-van-de-Plassche/bokeh,DuCorey/bokeh,schoolie/bokeh,muku42/bokeh,akloster/bokeh,deeplook/bokeh,ericmjl/bokeh,philippjfr/bokeh,dennisobrien/bokeh,stonebig/bokeh,carlvlewis/bokeh,maxalbert/bokeh,jakirkham/bokeh,tacaswell/bokeh,muku42/bokeh,xguse/bokeh,stuart-knock/bokeh,percyfal/bokeh,eteq/bokeh,birdsarah/bokeh,quasiben/bokeh,dennisobrien/bokeh,alan-unravel/bokeh,stonebig/bokeh,phobson/bokeh,ericmjl/bokeh,deeplook/bokeh,laurent-george
/bokeh,sahat/bokeh,timothydmorton/bokeh,bsipocz/bokeh,sahat/bokeh,jplourenco/bokeh,abele/bokeh,srinathv/bokeh,ericdill/bokeh,mutirri/bokeh,roxyboy/bokeh,justacec/bokeh,bsipocz/bokeh,akloster/bokeh,phobson/bokeh,stuart-knock/bokeh,schoolie/bokeh,stonebig/bokeh,caseyclements/bokeh,josherick/bokeh,paultcochrane/bokeh,Karel-van-de-Plassche/bokeh,Karel-van-de-Plassche/bokeh,rs2/bokeh,philippjfr/bokeh,ChinaQuants/bokeh,KasperPRasmussen/bokeh,quasiben/bokeh,gpfreitas/bokeh,rhiever/bokeh,PythonCharmers/bokeh,timsnyder/bokeh,percyfal/bokeh,caseyclements/bokeh,clairetang6/bokeh,azjps/bokeh,laurent-george/bokeh,akloster/bokeh,laurent-george/bokeh,lukebarnard1/bokeh,DuCorey/bokeh,mutirri/bokeh,alan-unravel/bokeh,ChinaQuants/bokeh,ptitjano/bokeh,lukebarnard1/bokeh,ChinaQuants/bokeh,timothydmorton/bokeh,azjps/bokeh,awanke/bokeh,rhiever/bokeh,ahmadia/bokeh,tacaswell/bokeh,ericmjl/bokeh,schoolie/bokeh,alan-unravel/bokeh,caseyclements/bokeh,mindriot101/bokeh,jakirkham/bokeh,almarklein/bokeh,abele/bokeh,KasperPRasmussen/bokeh,srinathv/bokeh,quasiben/bokeh,rothnic/bokeh,dennisobrien/bokeh,evidation-health/bokeh,daodaoliang/bokeh,paultcochrane/bokeh,ericdill/bokeh,bokeh/bokeh,xguse/bokeh,almarklein/bokeh,mutirri/bokeh,evidation-health/bokeh,mindriot101/bokeh,laurent-george/bokeh,carlvlewis/bokeh,stuart-knock/bokeh,CrazyGuo/bokeh,htygithub/bokeh,josherick/bokeh,matbra/bokeh,KasperPRasmussen/bokeh,CrazyGuo/bokeh,akloster/bokeh,philippjfr/bokeh,Karel-van-de-Plassche/bokeh,timsnyder/bokeh,saifrahmed/bokeh,rs2/bokeh,tacaswell/bokeh,justacec/bokeh,justacec/bokeh,aiguofer/bokeh,maxalbert/bokeh,roxyboy/bokeh,jakirkham/bokeh,aiguofer/bokeh,evidation-health/bokeh,lukebarnard1/bokeh,ChristosChristofidis/bokeh,ptitjano/bokeh,phobson/bokeh,azjps/bokeh,daodaoliang/bokeh,birdsarah/bokeh,ahmadia/bokeh,gpfreitas/bokeh,caseyclements/bokeh,percyfal/bokeh,CrazyGuo/bokeh,evidation-health/bokeh,eteq/bokeh,ChinaQuants/bokeh,satishgoda/bokeh,satishgoda/bokeh,PythonCharmers/bokeh,draperjames/bokeh,daodaoliang/
bokeh,htygithub/bokeh,saifrahmed/bokeh,htygithub/bokeh,almarklein/bokeh,timothydmorton/bokeh,saifrahmed/bokeh,ptitjano/bokeh,phobson/bokeh,matbra/bokeh,clairetang6/bokeh,ChristosChristofidis/bokeh,maxalbert/bokeh,xguse/bokeh,khkaminska/bokeh,roxyboy/bokeh,matbra/bokeh,clairetang6/bokeh,aavanian/bokeh,aiguofer/bokeh,msarahan/bokeh,satishgoda/bokeh,muku42/bokeh,canavandl/bokeh,rs2/bokeh,timsnyder/bokeh,azjps/bokeh,KasperPRasmussen/bokeh,lukebarnard1/bokeh,timsnyder/bokeh,draperjames/bokeh,DuCorey/bokeh,abele/bokeh,alan-unravel/bokeh,rhiever/bokeh,ericmjl/bokeh,tacaswell/bokeh,mindriot101/bokeh,satishgoda/bokeh,rhiever/bokeh,deeplook/bokeh,htygithub/bokeh,deeplook/bokeh | examples/glyphs/anscombe.py | examples/glyphs/anscombe.py |
import os
import numpy as np
import pandas as pd
from bokeh.objects import (
ColumnDataSource, GlyphRenderer, Grid, GridPlot, LinearAxis, Plot, Range1d
)
from bokeh.glyphs import Circle, Line
from bokeh import session
from StringIO import StringIO
# Anscombe's quartet: four small x/y datasets with near-identical summary
# statistics but very different shapes.
# NOTE(review): pd.read_fwf below is configured for 8-character-wide columns
# (widths=[8]*8); the spacing of this literal must match that layout --
# confirm the columns are padded correctly.
data = """
I I II II III III IV IV 5
x y x y x y x y 5
10.0 8.04 10.0 9.14 10.0 7.46 8.0 6.58
8.0 6.95 8.0 8.14 8.0 6.77 8.0 5.76
13.0 7.58 13.0 8.74 13.0 12.74 8.0 7.71
9.0 8.81 9.0 8.77 9.0 7.11 8.0 8.84
11.0 8.33 11.0 9.26 11.0 7.81 8.0 8.47
14.0 9.96 14.0 8.10 14.0 8.84 8.0 7.04
6.0 7.24 6.0 6.13 6.0 6.08 8.0 5.25
4.0 4.26 4.0 3.10 4.0 5.39 19.0 12.50
12.0 10.84 12.0 9.13 12.0 8.15 8.0 5.56
7.0 4.82 7.0 7.26 7.0 6.42 8.0 7.91
5.0 5.68 5.0 4.74 5.0 5.73 8.0 6.89
"""
# Lines 2-3 of the literal form a two-level column header (dataset, axis).
quartet = pd.read_fwf(StringIO(data), widths=[8]*8, header=[1,2], tupleize_cols=False)

# One flat column per dataset/axis so each subplot can pick its pair by name.
circles_source = ColumnDataSource(
    data = dict(
        xi = quartet['I']['x'],
        yi = quartet['I']['y'],
        xii = quartet['II']['x'],
        yii = quartet['II']['y'],
        xiii = quartet['III']['x'],
        yiii = quartet['III']['y'],
        xiv = quartet['IV']['x'],
        yiv = quartet['IV']['y'],
    )
)

# Reference line y = 3 + 0.5*x, shared by all four panels.
x = np.linspace(0,10, 10)
y = 3 + 0.5 * x
lines_source = ColumnDataSource(data=dict(x=x, y=y))

# All four panels share the same 0-10 data ranges.
xdr = Range1d(start=0, end=10)
ydr = Range1d(start=0, end=10)
def make_plot(title, xname, yname):
    """Build one 400x400 panel of the quartet.

    Returns (plot, objects) where objects is the tuple of renderers, axes
    and grids created for the plot, so the caller can register every object
    with the session.
    """
    plot = Plot(x_range=xdr, y_range=ydr,
                data_sources=[lines_source, circles_source],
                title=title, width=400, height=400)

    xaxis = LinearAxis(plot=plot, dimension=0, location="bottom", axis_line_alpha=0)
    yaxis = LinearAxis(plot=plot, dimension=1, location="left", axis_line_alpha=0)
    xgrid = Grid(plot=plot, dimension=0)
    ygrid = Grid(plot=plot, dimension=1)

    # The shared reference line, drawn first (behind the scatter points).
    fit_glyph = Line(x='x', y='y', line_color="navy", line_width=2, line_alpha=0.5)
    line_renderer = GlyphRenderer(data_source=lines_source, xdata_range=xdr,
                                  ydata_range=ydr, glyph=fit_glyph)
    plot.renderers.append(line_renderer)

    # This panel's scatter points, selected by column name.
    scatter_glyph = Circle(x=xname, y=yname, radius=6, fill_color="orange",
                           line_color="orange", fill_alpha=0.5)
    circle_renderer = GlyphRenderer(data_source=circles_source, xdata_range=xdr,
                                    ydata_range=ydr, glyph=scatter_glyph)
    plot.renderers.append(circle_renderer)

    return plot, (line_renderer, circle_renderer, xaxis, yaxis, xgrid, ygrid)
# Build the four panels, then arrange them in a 2x2 grid.
I, objsI = make_plot('I', 'xi', 'yi')
II, objsII = make_plot('II', 'xii', 'yii')
III, objsIII = make_plot('III', 'xiii', 'yiii')
IV, objsIV = make_plot('IV', 'xiv', 'yiv')
grid = GridPlot(children=[[I, II], [III, IV]])

# Register every object with the session and write a standalone HTML file.
sess = session.HTMLFileSession("anscombe.html")
sess.add(lines_source, circles_source, xdr, ydr)
sess.add(*(objsI + objsII + objsIII + objsIV))
sess.add(grid, I, II, III, IV)
sess.plotcontext.children.append(grid)
sess.save(js="relative", css="relative", rootdir=os.path.abspath("."))

# Best-effort: open the result in a browser, but never fail the script if
# that is not possible (e.g. a headless environment).
try:
    import webbrowser
    webbrowser.open("file://" + os.path.abspath("anscombe.html"))
except:
    pass
| bsd-3-clause | Python |
|
2d65862d77338dc503e34f389de1dc3bc553b6cd | Add DomainCaseRuleRun to admin site | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/data_interfaces/admin.py | corehq/apps/data_interfaces/admin.py | from django.contrib import admin
from corehq.apps.data_interfaces.models import DomainCaseRuleRun
class DomainCaseRuleRunAdmin(admin.ModelAdmin):
    """Admin configuration for browsing case-rule runs per domain."""

    # Columns shown on the changelist page for each run.
    list_display = [
        'domain',
        'started_on',
        'finished_on',
        'status',
        'cases_checked',
        'num_updates',
        'num_closes',
        'num_related_updates',
        'num_related_closes',
    ]
    # Allow searching runs by project (domain) name.
    search_fields = [
        'domain',
    ]
    # Newest runs first.
    ordering = ['-started_on']


admin.site.register(DomainCaseRuleRun, DomainCaseRuleRunAdmin)
| bsd-3-clause | Python |
|
017f276bb9544578417444c34ce2c04d87bb5852 | Fix zds #323 | zestedesavoir/Python-ZMarkdown,Situphen/Python-ZMarkdown,zestedesavoir/Python-ZMarkdown,Situphen/Python-ZMarkdown,zestedesavoir/Python-ZMarkdown,Situphen/Python-ZMarkdown | markdown/extensions/emoticons.py | markdown/extensions/emoticons.py | # Emoticon extension for python-markdown
# Original version :
# https://gist.github.com/insin/815656/raw/a68516f1ffc03df465730b3ddef6de0a11b7e9a5/mdx_emoticons.py
#
# Patched by cgabard for supporting newer python-markdown version and extend for support multi-extensions
import re
import markdown
from markdown.inlinepatterns import Pattern
from markdown.util import etree
class EmoticonExtension(markdown.Extension):
    """Markdown extension that replaces emoticon symbols with <img> tags."""

    def __init__ (self, configs):
        # Default configuration; any user-supplied configs override it below.
        self.config = {
            'EMOTICONS': [{
                ":)" : "test.png",
            }, 'A mapping from emoticon symbols to image names.'],
        }
        # iteritems: this module targets Python 2.
        for key, value in configs.iteritems() :
            self.config[key][0] = value

    def extendMarkdown(self, md, md_globals):
        self.md = md
        # Alternation of all configured emoticons, regex-escaped.  The
        # lookaheads constrain the match's surroundings relative to word
        # characters / line boundaries, so emoticons glued to word text
        # (e.g. the tail of a word) are not replaced.
        EMOTICON_RE = r'(?=(^|\W))(?P<emoticon>%s)(?=(\W|$))' % '|'.join(
            [re.escape(emoticon) for emoticon in self.getConfig('EMOTICONS').keys()])
        md.inlinePatterns.add('emoticons', EmoticonPattern(EMOTICON_RE, self),">not_strong")
class EmoticonPattern(Pattern):
    """Inline pattern that renders a matched emoticon as an <img> element."""

    def __init__ (self, pattern, emoticons):
        Pattern.__init__(self, pattern)
        # Keep a reference to the extension to reach its EMOTICONS config.
        self.emoticons = emoticons

    def handleMatch(self, m):
        emoticon = m.group('emoticon')
        # src comes from the configured symbol->image mapping; the symbol
        # itself becomes the alt text.
        el = etree.Element('img')
        el.set('src', '%s' % (self.emoticons.getConfig('EMOTICONS')[emoticon],))
        el.set('alt', emoticon)
        return el
def makeExtension(configs=None) :
    """Entry point used by python-markdown to instantiate the extension.

    configs may be omitted: EmoticonExtension.__init__ iterates over it,
    so substitute an empty mapping for None instead of crashing.
    """
    return EmoticonExtension(configs=configs or {})
| # Emoticon extension for python-markdown
# Original version :
# https://gist.github.com/insin/815656/raw/a68516f1ffc03df465730b3ddef6de0a11b7e9a5/mdx_emoticons.py
#
# Patched by cgabard for supporting newer python-markdown version and extend for support multi-extensions
import re
import markdown
from markdown.inlinepatterns import Pattern
from markdown.util import etree
class EmoticonExtension(markdown.Extension):
    """Markdown extension that replaces emoticon symbols with <img> tags.

    Note: the pattern built below has no surrounding-context lookaheads,
    so emoticon sequences embedded inside other text are matched as well.
    """

    def __init__ (self, configs):
        # Default configuration; any user-supplied configs override it below.
        self.config = {
            'EMOTICONS': [{
                ":)" : "test.png",
            }, 'A mapping from emoticon symbols to image names.'],
        }
        # iteritems: this module targets Python 2.
        for key, value in configs.iteritems() :
            self.config[key][0] = value

    def extendMarkdown(self, md, md_globals):
        self.md = md
        # Plain alternation of all configured emoticons, regex-escaped.
        EMOTICON_RE = '(?P<emoticon>%s)' % '|'.join(
            [re.escape(emoticon) for emoticon in self.getConfig('EMOTICONS').keys()])
        md.inlinePatterns.add('emoticons', EmoticonPattern(EMOTICON_RE, self),">not_strong")


class EmoticonPattern(Pattern):
    """Inline pattern that renders a matched emoticon as an <img> element."""

    def __init__ (self, pattern, emoticons):
        Pattern.__init__(self, pattern)
        # Keep a reference to the extension to reach its EMOTICONS config.
        self.emoticons = emoticons

    def handleMatch(self, m):
        emoticon = m.group('emoticon')
        el = etree.Element('img')
        el.set('src', '%s' % (self.emoticons.getConfig('EMOTICONS')[emoticon],))
        el.set('alt', emoticon)
        return el


def makeExtension(configs=None) :
    """Entry point used by python-markdown to instantiate the extension."""
    return EmoticonExtension(configs=configs)
| bsd-3-clause | Python |
56ee8843c355ffa56f7c2583d8d524e1ecfd29c3 | Create __init__.py | ShifuML/pyshifu,ShifuML/pyshifu | module/submodule/tests/__init__.py | module/submodule/tests/__init__.py | apache-2.0 | Python |
||
5e1d5644b2279b31191870b4a8099f3f6f31e851 | Enable admin for Project, Platform, Dataset | nguyenduchien1994/django-ncharts,nguyenduchien1994/django-ncharts,nguyenduchien1994/django-ncharts,nguyenduchien1994/django-ncharts,nguyenduchien1994/django-ncharts | ncharts/admin.py | ncharts/admin.py | from django.contrib import admin
from ncharts.models import Project, Platform, Dataset
class ProjectAdmin(admin.ModelAdmin):
    # Stock ModelAdmin behaviour; customize list_display etc. here later.
    pass


class PlatformAdmin(admin.ModelAdmin):
    pass


class DatasetAdmin(admin.ModelAdmin):
    pass


# Expose the three models in the Django admin site.
admin.site.register(Project,ProjectAdmin)
admin.site.register(Platform,PlatformAdmin)
admin.site.register(Dataset,DatasetAdmin)
| bsd-2-clause | Python |
|
23808a3d65db23163969aeb08adaa29f6403e720 | Fix a test | RianFuro/vint,Kuniwak/vint,Kuniwak/vint,RianFuro/vint | test/lib/lint/policy/test_abstract_policy.py | test/lib/lint/policy/test_abstract_policy.py | import unittest
from lib.lint.policy.abstract_policy import AbstractPolicy
class ConcretePolicy(AbstractPolicy):
    """Minimal concrete policy used as a fixture for the report tests."""

    def __init__(self):
        super().__init__()
        self.description = 'Found something invalid'
        self.reference = 'http://example.com'
        self.level = 0
class TestAbstractPolicy(unittest.TestCase):
    """Unit tests for AbstractPolicy and its violation-report format."""

    def test_listen_node_types(self):
        # The abstract base listens to no node types by default.
        policy = AbstractPolicy()
        self.assertEqual(policy.listen_node_types(), [])

    def test_create_violation_report(self):
        # Sample parser position data attached to a node.
        pos = {
            'col': 3,
            'i': 24,
            'lnum': 3,
        }
        node = {'pos': pos}
        env = {'path': 'path/to/file.vim'}
        # The report nests path/line/column under 'position'.
        expected_violation = {
            'name': 'ConcretePolicy',
            'level': 0,
            'description': 'Found something invalid',
            'reference': 'http://example.com',
            'position': {
                'column': 3,
                'line': 3,
                'path': 'path/to/file.vim',
            },
        }
        policy = ConcretePolicy()
        self.assertEqual(
            policy.create_violation_report(node, env),
            expected_violation)


if __name__ == '__main__':
    unittest.main()
| import unittest
from lib.lint.policy.abstract_policy import AbstractPolicy
class ConcretePolicy(AbstractPolicy):
    """Minimal concrete policy used as a fixture for the report tests."""

    def __init__(self):
        super().__init__()
        self.description = 'Found something invalid'
        self.reference = 'http://example.com'
        self.level = 0
class TestAbstractPolicy(unittest.TestCase):
    """Unit tests for AbstractPolicy (earlier flat report layout)."""

    def test_listen_node_types(self):
        policy = AbstractPolicy()
        self.assertEqual(policy.listen_node_types(), [])

    def test_create_violation_report(self):
        pos = {
            'col': 3,
            'i': 24,
            'lnum': 3,
        }
        env = {'path': 'path/to/file.vim'}
        # Flat layout: 'path' at the top level and the raw pos dict reused.
        expected_violation = {
            'name': 'ConcretePolicy',
            'level': 0,
            'description': 'Found something invalid',
            'reference': 'http://example.com',
            'path': 'path/to/file.vim',
            'position': pos,
        }
        policy = ConcretePolicy()
        self.assertEqual(
            policy.create_violation_report(pos, env),
            expected_violation)


if __name__ == '__main__':
    unittest.main()
| mit | Python |
e1c6f344e804f0d972dbc685b9492a126d74a7d3 | Create new management app | usingnamespace/usingnamespace | usingnamespace/management/__init__.py | usingnamespace/management/__init__.py | from pyramid.config import Configurator
from pyramid.session import SignedCookieSessionFactory
from pyramid.settings import asbool
from pyramid.wsgi import wsgiapp2
# Option specs for this sub-application: (name, conversion callable, default).
default_settings = (
    ('route_path', str, '/management'),
    ('domain', str, ''),
)


# Adapted from pyramid_debugtoolbar.
def parse_settings(settings):
    """Return the prefixed management options extracted from *settings*,
    applying defaults and type conversion for any option not present."""
    prefix = 'usingnamespace.management.'
    return {
        prefix + name: convert(settings.get(prefix + name, default))
        for name, convert, default in default_settings
    }
def includeme(config):
    """Pyramid include hook: parse settings and mount the management
    sub-application under the configured route path."""
    # Go parse the settings
    settings = parse_settings(config.registry.settings)
    # Update the config
    config.registry.settings.update(settings)
    # Create the application
    application = make_application(config.registry.settings, config.registry)
    # Add the API route; restrict it to the configured domain when one is set.
    # NOTE(review): indentation reconstructed -- the predicate registration is
    # assumed to belong inside the domain check; confirm against the repo.
    route_kw = {}
    if config.registry.settings['usingnamespace.management.domain'] != '':
        route_kw['is_management_domain'] = config.registry.settings['usingnamespace.management.domain']
        config.add_route_predicate('is_management_domain', config.maybe_dotted('.predicates.route.Management'))
    config.add_route('usingnamespace.management',
                     config.registry.settings['usingnamespace.management.route_path'] + '/*subpath',
                     **route_kw)
    # Add the API view: the wrapped WSGI app serves everything under the route.
    config.add_view(wsgiapp2(application), route_name='usingnamespace.management')
def make_application(settings, parent_registry):
    """Build the standalone management Pyramid WSGI application.

    *parent_registry* is stored on the child registry so the sub-app can
    reach the enclosing application's configuration.
    """
    config = Configurator()
    config.registry.settings.update(settings)
    config.registry.parent_registry = parent_registry
    config.include('pyramid_mako')
    # Create the session factory, we are using the stock one
    _session_factory = SignedCookieSessionFactory(
        settings['pyramid.secret.session'],
        httponly=True,
        max_age=864000
    )
    config.set_session_factory(_session_factory)
    config.include('..security')
    config.add_static_view('static', 'usingnamespace:static/', cache_max_age=3600)

    def is_management(request):
        # NOTE(review): this checks for route name
        # 'usingnamespace.management.main', but the route added below is
        # named 'management' -- possible mismatch; confirm.
        if request.matched_route is not None and request.matched_route.name == 'usingnamespace.management.main':
            return True
        return False

    # Exposed as the reified request property `request.is_management`.
    config.add_request_method(callable=is_management, name='is_management', reify=True)
    config.add_subscriber_predicate('is_management', config.maybe_dotted('.predicates.subscriber.IsManagement'))
    config.add_route('management',
                     '/*traverse',
                     factory='.traversal.Root',
                     use_global_views=False,
                     )
    config.scan('.views')
    config.scan('.subscribers')
    return config.make_wsgi_app()
def main(global_config, **settings):
    # Placeholder PasteDeploy entry point; the management app is mounted
    # via includeme()/make_application() instead. Intentionally empty.
    pass
| isc | Python |
|
4fbb9ca1b055b040214c82dc307f69793947b800 | Add handler for syncing wallets to server | habibmasuro/omniwallet,VukDukic/omniwallet,dexX7/omniwallet,FuzzyBearBTC/omniwallet,Nevtep/omniwallet,OmniLayer/omniwallet,arowser/omniwallet,FuzzyBearBTC/omniwallet,ripper234/omniwallet,achamely/omniwallet,FuzzyBearBTC/omniwallet,arowser/omniwallet,OmniLayer/omniwallet,OmniLayer/omniwallet,maran/omniwallet,maran/omniwallet,curtislacy/omniwallet,achamely/omniwallet,achamely/omniwallet,VukDukic/omniwallet,dexX7/omniwallet,dexX7/omniwallet,maran/omniwallet,habibmasuro/omniwallet,habibmasuro/omniwallet,arowser/omniwallet,Nevtep/omniwallet,Nevtep/omniwallet,Nevtep/omniwallet,VukDukic/omniwallet,habibmasuro/omniwallet,achamely/omniwallet,curtislacy/omniwallet,OmniLayer/omniwallet,curtislacy/omniwallet,ripper234/omniwallet,ripper234/omniwallet | api/sync_wallet.py | api/sync_wallet.py | import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
    """Dispatch a wallet-sync request.

    Returns a ``(json_response, error)`` tuple where exactly one element
    is ``None``: the JSON body on success, the error string on failure.
    """
    # 'in' instead of the Python-2-only dict.has_key().
    if 'type' not in request_dict:
        return (None, 'No field type in response dict ' + str(request_dict))
    # print() call form works identically under Python 2 and 3.
    print(request_dict)
    req_type = request_dict['type'][0].upper()
    if req_type == "SYNCWALLET":
        response_data = syncWallets(request_dict['masterWallets'][0])
    else:
        return (None, req_type + ' is not supported')
    response = {'status': 'OK', 'data': response_data}
    return (json.dumps(response), None)
def syncWallets(master_wallets_json):
    """Write each wallet in the JSON-encoded list to DATADIR/wallets/<uuid>.json."""
    master_wallets = json.loads(master_wallets_json)
    print master_wallets
    for wallet in master_wallets:
        uuid = wallet['uuid']
        # NOTE(review): uuid comes from client input and is interpolated
        # into a filesystem path unsanitised -- potential path traversal;
        # confirm upstream validation.
        filename = data_dir_root + '/wallets/' + uuid + '.json'
        with open(filename, 'w') as f:
            json.dump(wallet, f)
    return "OK"
def sync_wallet_handler(environ, start_response):
    """WSGI entry point: delegate to the shared general_handler with our
    response builder."""
    return general_handler(environ, start_response, sync_wallet_response)
| agpl-3.0 | Python |
|
eefa26090a4ff8fc23908afa83c87c2d54568929 | add pager duty sample alert plugin, closes #249 | mozilla/MozDef,DarkPrince304/MozDef,jeffbryner/MozDef,triplekill/MozDef,serbyy/MozDef,DarkPrince304/MozDef,mpurzynski/MozDef,triplekill/MozDef,gsssrao/MozDef,jeffbryner/MozDef,netantho/MozDef,gdestuynder/MozDef,eXcomm/MozDef,eXcomm/MozDef,mozilla/MozDef,DarkPrince304/MozDef,netantho/MozDef,serbyy/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,eXcomm/MozDef,ameihm0912/MozDef,gdestuynder/MozDef,526avijitgupta/MozDef,gsssrao/MozDef,gdestuynder/MozDef,gsssrao/MozDef,526avijitgupta/MozDef,triplekill/MozDef,jeffbryner/MozDef,ameihm0912/MozDef,mozilla/MozDef,526avijitgupta/MozDef,Phrozyn/MozDef,gsssrao/MozDef,ameihm0912/MozDef,Phrozyn/MozDef,ameihm0912/MozDef,serbyy/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,mozilla/MozDef,netantho/MozDef,serbyy/MozDef,DarkPrince304/MozDef,526avijitgupta/MozDef,eXcomm/MozDef,mpurzynski/MozDef,gdestuynder/MozDef,netantho/MozDef,jeffbryner/MozDef,triplekill/MozDef | alerts/plugins/pagerDutyTriggerEvent.py | alerts/plugins/pagerDutyTriggerEvent.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Jeff Bryner [email protected]
import requests
import json
import os
import sys
from configlib import getConfig, OptionParser
class message(object):
    """MozDef alert plugin that forwards matching alerts to PagerDuty."""

    def __init__(self):
        '''
        takes an incoming alert
        and uses it to trigger an event using
        the pager duty event api
        '''
        # Alert categories this plugin registers for.
        self.registration = ['bro']
        self.priority = 2
        # set my own conf file
        # relative path to the rest index.py file
        self.configfile = './plugins/pagerDutyTriggerEvent.conf'
        self.options = None
        if os.path.exists(self.configfile):
            sys.stdout.write('found conf file {0}\n'.format(self.configfile))
            self.initConfiguration()

    def initConfiguration(self):
        # Load plugin options from the conf file via configlib.
        myparser = OptionParser()
        # setup self.options by sending empty list [] to parse_args
        (self.options, args) = myparser.parse_args([])
        # fill self.options with plugin-specific options
        # change this to your default zone for when it's not specified
        self.options.serviceKey = getConfig('serviceKey', 'APIKEYHERE', self.configfile)

    def onMessage(self, message):
        # here is where you do something with the incoming alert message
        # (the parameter shadows the class name 'message'; kept for API
        # compatibility)
        if 'summary' in message.keys() :
            print message['summary']
            headers = {
                'Content-type': 'application/json',
            }
            # PagerDuty generic events API v1 trigger payload.
            payload = json.dumps({
                "service_key": "{0}".format(self.options.serviceKey),
                "incident_key": "bro",
                "event_type": "trigger",
                "description": "{0}".format(message['summary']),
                "client": "mozdef",
                "client_url": "http://mozdef.rocks",
                "details": message['events']
            })
            r = requests.post(
                'https://events.pagerduty.com/generic/2010-04-15/create_event.json',
                headers=headers,
                data=payload,
            )
            print r.status_code
            print r.text
        # you can modify the message if needed
        # plugins registered with lower (>2) priority
        # will receive the message and can also act on it
        # but even if not modified, you must return it
return message | mpl-2.0 | Python |
|
32ea116ff172da3e7f0eeb7d9dea6b9a0378be08 | Add persistance | jmercier/Codebench | persistance.py | persistance.py | import numpy as np
import os
from itertools import izip
# File names used by saveSplines()/loadSplines() for each component of
# the persisted spline fits.
T_FILE = "t.npy"
C_FILE = "c.npy"
K_FILE = "k.npy"
U_FILE = "u.npy"
FP_FILE = "fp.npy"
IER_FILE = "ier.npy"
MSG_FILE = "msg.txt"
def saveSplines(directory, splines):
    """Persist a list of spline fit results under *directory*.

    Each element of *splines* has the shape ``(((t, c, k), u), fp, ier, msg)``
    (presumably scipy splprep full output -- confirm).  The numeric parts
    are written as .npy arrays; messages go to MSG_FILE, one per line.
    """
    # Bug fix: the original began by unpacking splines[0] into throwaway
    # variables; that was dead code and raised IndexError for an empty
    # list.  The loop below handles the empty case naturally.
    tlst = []
    clst = []
    klst = []
    ulst = []
    fplst = []
    ierlst = []
    msglst = []
    for ((t, c, k), u), fp, ier, msg in splines:
        tlst.append(t)
        clst.append(c)
        klst.append(k)
        ulst.append(u)
        fplst.append(fp)
        ierlst.append(ier)
        msglst.append(msg + '\n')
    tarr = np.array(tlst)
    carr = np.array(clst)
    karr = np.array(klst)
    uarr = np.array(ulst)
    fparr = np.array(fplst)
    ierarr = np.array(ierlst)
    np.save(os.path.join(directory, T_FILE), tarr)
    np.save(os.path.join(directory, C_FILE), carr)
    np.save(os.path.join(directory, K_FILE), karr)
    np.save(os.path.join(directory, U_FILE), uarr)
    np.save(os.path.join(directory, FP_FILE), fparr)
    np.save(os.path.join(directory, IER_FILE), ierarr)
    with open(os.path.join(directory, MSG_FILE), 'w') as f:
        f.writelines(msglst)
def loadSplines(directory):
    """Load spline fit results previously written by saveSplines().

    Returns a list of ``(([t, c, k], u), fp, ier, msg)`` tuples.
    """
    tarr = np.load(os.path.join(directory, T_FILE))
    carr = np.load(os.path.join(directory, C_FILE))
    karr = np.load(os.path.join(directory, K_FILE))
    uarr = np.load(os.path.join(directory, U_FILE))
    fparr = np.load(os.path.join(directory, FP_FILE))
    ierarr = np.load(os.path.join(directory, IER_FILE))
    with open(os.path.join(directory, MSG_FILE)) as f:
        msglst = f.readlines()
    # izip is Python-2 itertools; zips the parallel arrays back into tuples.
    return [(([t, c, k], u), fp, ier, msg) for t, c, k, u, fp, ier, msg in izip(tarr, carr, karr, uarr, fparr, ierarr, msglst)]
| mit | Python |
|
a34318312199e6dab8ca3db92f247f0bda369e17 | Add missing testcase file | wger-project/wger,rolandgeider/wger,rolandgeider/wger,kjagoo/wger_stark,wger-project/wger,wger-project/wger,DeveloperMal/wger,kjagoo/wger_stark,kjagoo/wger_stark,rolandgeider/wger,petervanderdoes/wger,rolandgeider/wger,DeveloperMal/wger,DeveloperMal/wger,DeveloperMal/wger,kjagoo/wger_stark,petervanderdoes/wger,wger-project/wger,petervanderdoes/wger,petervanderdoes/wger | exercises/tests/testcase.py | exercises/tests/testcase.py | # This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.test import TestCase
class WorkoutManagerTestCase(TestCase):
    """Base test case that loads the user and exercise fixtures."""

    fixtures = ['tests-user-data', 'test-exercises', ]

    def user_login(self, user='admin'):
        """Login the user, by default as 'admin'
        """
        # The test fixtures use the convention password == username twice.
        self.client.login(username=user, password='%(user)s%(user)s' % {'user': user})

    def user_logout(self):
        """Visit the logout page
        """
        self.client.logout()
| agpl-3.0 | Python |
|
b36ef2473c70ff16f4033f24ffc69a3b30b0ce26 | add edit-prowjobs.py | cjwagner/test-infra,dims/test-infra,monopole/test-infra,fejta/test-infra,kubernetes/test-infra,monopole/test-infra,cjwagner/test-infra,kubernetes/test-infra,fejta/test-infra,cblecker/test-infra,kubernetes/test-infra,BenTheElder/test-infra,michelle192837/test-infra,cjwagner/test-infra,cblecker/test-infra,kubernetes/test-infra,BenTheElder/test-infra,monopole/test-infra,BenTheElder/test-infra,monopole/test-infra,kubernetes/test-infra,jessfraz/test-infra,fejta/test-infra,michelle192837/test-infra,cjwagner/test-infra,fejta/test-infra,BenTheElder/test-infra,fejta/test-infra,cjwagner/test-infra,fejta/test-infra,michelle192837/test-infra,jessfraz/test-infra,cblecker/test-infra,cblecker/test-infra,michelle192837/test-infra,jessfraz/test-infra,jessfraz/test-infra,kubernetes/test-infra,cblecker/test-infra,jessfraz/test-infra,michelle192837/test-infra,BenTheElder/test-infra,dims/test-infra,cblecker/test-infra,jessfraz/test-infra,monopole/test-infra,dims/test-infra,michelle192837/test-infra,monopole/test-infra,BenTheElder/test-infra,dims/test-infra,dims/test-infra,dims/test-infra,cjwagner/test-infra | experiment/edit-prowjobs.py | experiment/edit-prowjobs.py | #!/usr/bin/env python3
# Copyright 2021 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Edit prowjobs en-masse by round-tripping them through ruamel.yaml
This is not intended for general usage, because:
- not all jobs can or should be edited
- many jobs have different formatting, and we're not at a point where
we can enforce formatting standards, so this is almost guaranteed
to introduce formatting change noise
- the idea is to manually edit this file with the specific edit to be
done, rather that developing a general purpose language to do this
"""
import re
import argparse
import glob
from os import path, walk
import ruamel.yaml
# Prow files that will be ignored (matched against the file basename in main()).
EXCLUDED_JOB_CONFIGS = [
    # Ruamel won't be able to successfully dump fejta-bot-periodics
    # See https://bitbucket.org/ruamel/yaml/issues/258/applying-json-patch-breaks-comment
    "fejta-bot-periodics.yaml",
    # generated.yaml is generated by generate_tests.py, and will be overwritten.
    "generated.yaml",
]

# A hilariously large line length to ensure we never line-wrap
MAX_WIDTH = 2000000000
def setup_yaml():
    """Configure a round-trip ruamel.yaml parser that preserves quotes
    and comments and never line-wraps output."""
    # Setup the ruamel.yaml parser
    yaml = ruamel.yaml.YAML(typ='rt')
    yaml.preserve_quotes = True
    # Indentation conventions observed per config subtree:
    # GoogleCloudPlatform/ - no yaml.indent
    # bazelbuild/ - no yaml.indent
    # cadvisor/ - no yaml.indent
    # containerd/ - no yaml.indent
    # image-pushing/ - yaml.indent(mapping=2, sequence=4, offset=2)
    # kubernetes/ - yaml.indent(mapping=2) seems to cause the least change
    # kubernetes-client - TBD
    # kubernetes-csi - TBD
    # kubernetes-sigs - TBD
    yaml.indent(mapping=2, sequence=4, offset=2)
    yaml.width = MAX_WIDTH
    return yaml
def edit_job_config(yaml, prow_job_file_name):
    """Round-trip one prow job file through *yaml*, applying edit() to
    every job accepted by should_edit(), then write it back in place.

    Config layout:
      presubmits/postsubmits -> <repository> -> [job, ...]
      periodics              -> [job, ...]
    """
    with open(prow_job_file_name, "r") as job_fp:
        prow_config = yaml.load(job_fp)

    def should_edit(job):
        # Placeholder predicate -- customize for each edit campaign.
        return job["name"] == "a-specific-job-to-edit"

    def edit(job):
        # Placeholder mutation -- customize for each edit campaign.
        return job

    # Collect every job list once instead of repeating the same nested
    # loop for presubmits and postsubmits.
    job_lists = []
    for section in ("presubmits", "postsubmits"):
        job_lists.extend(prow_config.get(section, {}).values())
    if "periodics" in prow_config:
        job_lists.append(prow_config["periodics"])

    for jobs in job_lists:
        for job in jobs:
            if should_edit(job):
                edit(job)

    # Dump the (possibly modified) config back over the original file.
    with open(prow_job_file_name, "w") as job_fp:
        yaml.dump(prow_config, job_fp)
        job_fp.truncate()
def main(prow_job_dir):
    """Apply edit_job_config() to every non-excluded YAML file found
    recursively under *prow_job_dir*."""
    yaml = setup_yaml()
    for job_file in glob.glob(f'{prow_job_dir}/**/*.yaml', recursive=True):
        if path.basename(job_file) in EXCLUDED_JOB_CONFIGS:
            continue
        try:
            print(f'editing {job_file}')
            edit_job_config(yaml, job_file)
        # A bare 'except:' would also swallow KeyboardInterrupt/SystemExit;
        # catch Exception and surface the reason instead of hiding it.
        except Exception as exc:
            print(f'ERROR: could not edit {job_file}: {exc}')
if __name__ == '__main__':
    # CLI entry point: edit every prow job config under --prow-job-dir.
    PARSER = argparse.ArgumentParser(
        description='Does things to prowjob configs')
    PARSER.add_argument(
        '--prow-job-dir',
        default='../config/jobs',
        help='Path to Prow Job Directory')
    ARGS = PARSER.parse_args()
    main(ARGS.prow_job_dir)
| apache-2.0 | Python |
|
ac8c78682e77d77be44910c36057e0217477b0a4 | Test OAI endpoint model | WSULib/combine,WSULib/combine,WSULib/combine,WSULib/combine | tests/test_models/test_oai_endpoint.py | tests/test_models/test_oai_endpoint.py | from django.test import TestCase
from core.models import OAIEndpoint
class OAIEndpointTestCase(TestCase):
    """Tests for the OAIEndpoint model's string and dict representations."""

    @classmethod
    def setUpTestData(cls):
        # One shared endpoint instance and attribute dict for every test.
        cls.attributes = {
            'name': 'Test OAI Endpoint',
            'endpoint': 'http://oai.example.com',
            'verb': 'ListRecords',
            'metadataPrefix': 'mods',
            'scope_type': 'setList',
            'scope_value': 'someset, anotherset'
        }
        cls.oai_endpoint = OAIEndpoint(**cls.attributes)
        cls.oai_endpoint.save()

    def test_str(self):
        self.assertEqual('OAI endpoint: Test OAI Endpoint', format(OAIEndpointTestCase.oai_endpoint))
def test_as_dict(self):
as_dict = OAIEndpointTestCase.oai_endpoint.as_dict()
for k, v in OAIEndpointTestCase.attributes.items():
self.assertEqual(as_dict[k], v) | mit | Python |
|
613a0056e12a28232542aaf561831d276868e413 | Add parametric map generator, good for wrinkles | roboticslab-uc3m/xgnitive,roboticslab-uc3m/xgnitive,roboticslab-uc3m/xgnitive | programs/kinbody-creator/openraveMapGenerator.py | programs/kinbody-creator/openraveMapGenerator.py | #!/usr/bin/python
#import lxml.etree
#import lxml.builder
from lxml import etree
#E = lxml.builder.ElementMaker()
#KINBODY=E.KinBody
#BODY=E.Body
#GEOM=E.Geom
#EXTENTS=E.Extents
#TRANSLATION=E.Translation
#DIFUSSECOLOR=E.diffuseColor
# User variables
nX = 3           # grid cells along X
nY = 2           # grid cells along Y
boxHeight = 1.0
resolution = 2.0 # Just to make similar to MATLAB [pixel/meter]
meterPerPixel = 1 / resolution # [meter/pixel]

# Program
# OpenRAVE box extents are half-sizes, hence the divisions by two.
Ez = boxHeight / 2.0 # Box size is actually double the extent
Ex = meterPerPixel / 2.0
Ey = meterPerPixel / 2.0

KinBody = etree.Element("KinBody", name="map")
for iY in range(nY):
    # print "iY:",iY
    for iX in range(nX):
        # print "* iX:",iX
        #-- Add E___ to each to force begin at 0,0,0 (centered by default)
        x = Ex + iX*meterPerPixel
        y = Ey + iY*meterPerPixel
        z = Ez # Add this to raise to floor level (centered by default)
        Number = iX + (iY * nX)
        #Create pixel
        Body = etree.SubElement(KinBody, "Body", name="square"+str(Number), type="static")
        Geom = etree.SubElement(Body, "Geom", type="box")
        Extents = etree.SubElement(Geom, "Extents").text= str(Ex)+" "+ str(Ey)+" "+str(Ez)
        Translation = etree.SubElement(Geom, "Translation").text= str(x)+" "+str(y)+" "+str(z)
        DifusseColor = etree.SubElement(Geom, "diffuseColor").text= ".5 .5 .5"

# Commented-out lxml.builder alternative, kept for reference:
'''
the_doc = KINBODY(
BODY(
GEOM(
EXTENTS("0.001 0.115 0.065"),
TRANSLATION("0.6 "+ "-0.8 0.32"),
DIFUSSECOLOR(".5 .5 .5"),
type="box",
),
name="square"+str(i), type="static"
),
name="wall",
)
'''
myStr = etree.tostring(KinBody, pretty_print=True)
# Use a context manager so the output file is closed even if write() fails.
with open('map.kinbody.xml', 'w') as outFile:
    outFile.write(myStr)
| lgpl-2.1 | Python |
|
668c28fd55daa93e0024e14e7137f78919e93e2c | Add python client script | chuckmitchell/basement-weather-server,chuckmitchell/basement-weather-server,chuckmitchell/basement-weather-server | docs/basement_weather.py | docs/basement_weather.py | #!/usr/bin/python
import sys
import commands
import Adafruit_DHT
import twitter
import requests
import json
# Timestamp in Atlantic Canada local time (the Python-2-only `commands` module).
date = commands.getoutput('TZ=":Canada/Atlantic" date')

#Get temp and humidity
# Presumably a DHT11 sensor on GPIO pin 4 -- confirm wiring.
humidity, temperature = Adafruit_DHT.read_retry(11, 4)
message = 'Temp: {0:0.1f} C Humidity: {1:0.1f} %'.format(temperature, humidity)

#send to basementweather API
url = 'https://basementweather.herokuapp.com/readings.json'
payload = {'temperature': '{0:0.1f}'.format(temperature), 'humidity': '{0:0.1f}'.format(humidity)}
headers = {'content-type': 'application/json'}
r = requests.post(url, data=json.dumps(payload), headers=headers)

#send to twitter
# NOTE(review): real-looking Twitter API credentials are committed in
# source here -- they should be rotated and moved to configuration.
api = twitter.Api(consumer_key="QeT4mgIqGqAi6y7sKEgkcR8HQ",
                  consumer_secret="zM2dFpIk3YojKBdlZOwTCC82tEP3RxffLZG6MQJQwTBeckG8Pk",
                  access_token_key="771330006068830209-4QTn99ThbM6V2DT0hxlNymQOLykbmMM",
                  access_token_secret="akoFlkoNgov5aDJrmkCJTtSqQgvs2Q2Phl0rjVgVjh9Zi")
status = api.PostUpdate(message+" "+date)
print "%s just posted: %s" % (status.user.name, status.text)
|
5a221296e9e7cc59e4fe4c85b178db06c1376f13 | Add product streamfield migrations | pgovers/oscar-wagtail-demo,pgovers/oscar-wagtail-demo | demo/apps/catalogue/migrations/0012_auto_20160617_1115.py | demo/apps/catalogue/migrations/0012_auto_20160617_1115.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import wagtail.wagtailcore.fields
import wagtail.wagtailcore.blocks
import wagtail.wagtailimages.blocks
class Migration(migrations.Migration):
    """Adds a StreamField ``body`` to Category and re-declares its
    ``name`` field.  Auto-generated by Django; do not hand-edit.
    """

    dependencies = [
        ('catalogue', '0011_auto_20160616_1335'),
    ]

    operations = [
        migrations.AddField(
            model_name='category',
            name='body',
            field=wagtail.wagtailcore.fields.StreamField([(b'heading', wagtail.wagtailcore.blocks.CharBlock(classname=b'full title')), (b'paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), (b'image', wagtail.wagtailimages.blocks.ImageChooserBlock())]),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='category',
            name='name',
            field=models.CharField(max_length=255, verbose_name='Name', db_index=True),
        ),
    ]
| mit | Python |
|
a9b35aff92c099aa52ce9e1ca1cb0df169a54ef5 | Add author to header. | juhasch/euroscipy_proceedings,mikaem/euroscipy_proceedings,euroscipy/euroscipy_proceedings,mjklemm/euroscipy_proceedings,euroscipy/euroscipy_proceedings,dotsdl/scipy_proceedings,michaelpacer/scipy_proceedings,chendaniely/scipy_proceedings,mikaem/euroscipy_proceedings,Stewori/euroscipy_proceedings,springcoil/euroscipy_proceedings,sbenthall/scipy_proceedings,sbenthall/scipy_proceedings,katyhuff/scipy_proceedings,mwcraig/scipy_proceedings,helgee/euroscipy_proceedings,chendaniely/scipy_proceedings,euroscipy/euroscipy_proceedings,mjklemm/euroscipy_proceedings,SepidehAlassi/euroscipy_proceedings,katyhuff/scipy_proceedings,mikaem/euroscipy_proceedings,juhasch/euroscipy_proceedings,sbenthall/scipy_proceedings,Stewori/euroscipy_proceedings,michaelpacer/scipy_proceedings,mwcraig/scipy_proceedings,helgee/euroscipy_proceedings,mjklemm/euroscipy_proceedings,dotsdl/scipy_proceedings,mwcraig/scipy_proceedings,springcoil/euroscipy_proceedings,springcoil/euroscipy_proceedings,dotsdl/scipy_proceedings,katyhuff/scipy_proceedings,Stewori/euroscipy_proceedings,helgee/euroscipy_proceedings,michaelpacer/scipy_proceedings,SepidehAlassi/euroscipy_proceedings,SepidehAlassi/euroscipy_proceedings,chendaniely/scipy_proceedings,juhasch/euroscipy_proceedings | publisher/writer.py | publisher/writer.py | __all__ = ['writer']
import docutils.core as dc
import docutils.writers
from docutils import nodes
from docutils.writers.latex2e import (Writer, LaTeXTranslator,
PreambleCmds)
class Translator(LaTeXTranslator):
    """LaTeX translator that collects author docinfo fields and emits
    them via a generated \\author{} title block."""

    def __init__(self, *args, **kwargs):
        LaTeXTranslator.__init__(self, *args, **kwargs)

    # Handle author declarations
    current_field = ''

    # NOTE(review): as class attributes these lists are shared across all
    # Translator instances -- confirm that is intended.
    author_names = []
    author_institutions = []
    author_emails = []

    def visit_docinfo(self, node):
        pass

    def depart_docinfo(self, node):
        pass

    def visit_author(self, node):
        # Record the author name; SkipNode suppresses default LaTeX output.
        self.author_names.append(self.encode(node.astext()))
        raise nodes.SkipNode

    def depart_author(self, node):
        pass

    def visit_classifier(self, node):
        pass

    def depart_classifier(self, node):
        pass

    def visit_field_name(self, node):
        # Remember which docinfo field the next field body belongs to.
        self.current_field = node.astext()
        raise nodes.SkipNode

    def visit_field_body(self, node):
        text = self.encode(node.astext())
        if self.current_field == 'email':
            self.author_emails.append(text)
        elif self.current_field == 'institution':
            self.author_institutions.append(text)
        self.current_field = ''
        raise nodes.SkipNode

    def depart_field_body(self, node):
        raise nodes.SkipNode

    def depart_document(self, node):
        LaTeXTranslator.depart_document(self, node)
        # Title is hard-coded for now; authors are joined into one \author{}.
        doc_title = '\\title{Test 1 2 3}'
        doc_title += '\\author{%s}' % ', '.join(self.author_names)
        doc_title += '\\maketitle'
        self.body_pre_docinfo = [doc_title]


# Module-level writer instance wired to the custom translator.
writer = Writer()
writer.translator_class = Translator
| __all__ = ['writer']
import docutils.core as dc
import docutils.writers
from docutils import nodes
from docutils.writers.latex2e import (Writer, LaTeXTranslator,
PreambleCmds)
class Translator(LaTeXTranslator):
    """LaTeX translator that folds institution fields into \\thanks{}
    notes on the author stack."""

    def __init__(self, *args, **kwargs):
        LaTeXTranslator.__init__(self, *args, **kwargs)

    # Handle author declarations
    current_field = ''

    def visit_docinfo(self, node):
        pass

    def depart_docinfo(self, node):
        pass

    def visit_author(self, node):
        # NOTE(review): self.author_stack is not initialised in this class
        # -- presumably provided elsewhere; confirm.
        self.author_stack.append([self.encode(node.astext())])
        raise nodes.SkipNode

    def depart_author(self, node):
        pass

    def visit_classifier(self, node):
        pass

    def depart_classifier(self, node):
        pass

    def visit_field_name(self, node):
        # Remember which docinfo field the next field body belongs to.
        self.current_field = node.astext()
        raise nodes.SkipNode

    def visit_field_body(self, node):
        if self.current_field == 'email':
            pass
        elif self.current_field == 'institution':
            institute = '\\thanks{%s}' % self.encode(node.astext())
            self.author_stack[-1].append(institute)
        self.current_field = ''
        raise nodes.SkipNode

    def depart_field_body(self, node):
        raise nodes.SkipNode

    def depart_document(self, node):
        LaTeXTranslator.depart_document(self, node)
        # Hard-coded title/author placeholder.
        doc_title = r'\title{Test 1 2 3}\author{Me}\maketitle'
        self.body_pre_docinfo = [doc_title]


# Module-level writer instance wired to the custom translator.
writer = Writer()
writer.translator_class = Translator
| bsd-2-clause | Python |
6fb3c87f0f9b238eab71df6880568005e3a2b461 | add DuckDuckGo Instant Answer plugin | Rouji/Yui,Rj48/ircbot | plugins/ddg.py | plugins/ddg.py | # coding=utf-8
import json
import urllib.request
@yui.threaded
@yui.command('duckduckgo', 'ddg')
def ddg(argv):
    '''Returns the Instant Answer for a given query. Usage: ddg -lang <query>'''
    # quote() canonically lives in urllib.parse; the original used
    # urllib.request.quote, an undocumented re-export.
    from urllib.parse import quote

    lang = 'en_US'
    if len(argv) < 1:
        return

    # argv[0] is the command itself; check if a language flag was given.
    argv = argv[1:]
    if len(argv) > 1 and argv[0].startswith('-'):
        lang = argv[0][1:]
        argv = argv[1:]

    q = quote(' '.join(argv).encode('utf-8'))
    url = f'https://api.duckduckgo.com/?q={q}&format=json&no_html=1&skip_disambig=1&no_redirect=1'
    h = {'Accept-Language': lang}
    req = urllib.request.Request(url, headers=h)
    with urllib.request.urlopen(req) as r:
        js = json.loads(r.read().decode('utf-8'))

    Type = js.get('Type')
    AbstractText = js.get('AbstractText')
    AbstractURL = js.get('AbstractURL')
    Heading = js.get('Heading')
    Answer = js.get('Answer')
    Redirect = js.get('Redirect')

    reply = 'No results.'
    if Type == 'D' or Type == 'C':  # disambiguation or category
        reply = f'{Heading}: {AbstractURL}'
    elif Type == 'A':  # article
        reply = f'{Heading}: {AbstractText} - {AbstractURL}'
    elif Type == 'E':  # exclusive, e.g. calc/conversion and redirects
        if type(Answer) is str and Answer != '':
            reply = Answer
        elif type(Answer) is dict and 'result' in Answer:
            reply = Answer['result']
        elif Redirect != '':
            reply = f'Redirect: {Redirect}'
    return reply
| mit | Python |
|
e8576b67f8a3778c43a85e24d1cbc0e1985fe2ca | Add float.is_integer() example | devlights/try-python | trypython/basic/builtinfunc/float01.py | trypython/basic/builtinfunc/float01.py | """
組み込みクラス float のサンプルです.
float.is_integer() について
"""
from trypython.common.commoncls import SampleBase
from trypython.common.commonfunc import pr
class Sample(SampleBase):
    """Demonstrates float.is_integer()."""

    def exec(self):
        num = 1.00
        pr('type(num)', type(num))
        pr('is_integer', num.is_integer())  # ==> True (value is representable as an integer)
        pr('int()', int(num))
        num = 1.05
        pr('is_integer', num.is_integer())  # ==> False (value is not representable as an integer)
        pr('int()', int(num))
def go():
    """Create a Sample instance and run the demonstration."""
    Sample().exec()


if __name__ == '__main__':
    go()
| mit | Python |
|
3b41e3bcc721d47ce5a7314af2ac4d9006598164 | add word class | alexbhandari/webParser,alexbhandari/webParser,alexbhandari/webParser | pythonfiles/word.py | pythonfiles/word.py |
# Contains models for storing words and data returned from parsing websites
#
class word(object):
    """Model for a single word parsed from a website.

    Plain value holder: name, part of speech, definition, the context it
    was found in, its sub-word forms, an occurrence count and a rating.
    """

    def __init__(self, name=None, part_of_speech=None, definition=None,
                 context=None, subwords=None, count=None, rating=None):
        self.name = name
        self.part_of_speech = part_of_speech
        self.definition = definition
        self.context = context
        self.subwords = subwords
        self.count = count
        self.rating = rating

    def __str__(self):
        return self.name

    def get_name(self):
        return self.name

    def get_part_of_speech(self):
        return self.part_of_speech

    def get_definition(self):
        return self.definition

    def get_context(self):
        return self.context

    def get_forms(self):
        # Bug fix: the original returned self.forms, an attribute that is
        # never assigned anywhere, so this always raised AttributeError.
        # The word forms are stored in self.subwords.
        return self.subwords

    def get_count(self):
        return self.count

    def get_rating(self):
        return self.rating

    def set_count(self, value):
        self.count = value

    def incr_count(self, value):
        # NOTE(review): raises TypeError if count was left at its None
        # default -- callers appear to set the count first; confirm.
        self.count += value
| apache-2.0 | Python |
|
4cedd5cf992c180da7d778a9a8adc225a7d8c496 | test various odd characters in the filenames, including some UTF-8 | owncloud/smashbox,switch-ch/smashbox,mrow4a/smashbox,nickv-oc/smashbox,mrow4a/smashbox,cernbox/smashbox,cernbox/smashbox,mrow4a/smashbox,owncloud/smashbox,labkode/smashbox,cernbox/smashbox,owncloud/smashbox,labkode/smashbox,nickv-oc/smashbox,switch-ch/smashbox,cernbox/smashbox,labkode/smashbox,switch-ch/smashbox,nickv-oc/smashbox,labkode/smashbox | lib/test_filenames.py | lib/test_filenames.py | from smashbox.utilities import *
from smashbox.utilities.hash_files import count_files
__doc__ = """ Test various characters in the file names.
bug #104648: add UTF-8 escaping in PROPFIND response body (https://savannah.cern.ch/bugs/?104648)
Notes:
- unescaped % characters in propfind response crashes csync
"""
# Size of each created test file, in kB (configurable via smashbox config).
filesizeKB = int(config.get('filenames_filesizeKB',1))

# Characters the sync client refuses to sync;
# see: mirall/csync/src/csync_exclude.c
charsets_excluded_from_sync = {
    'backslash' : '\\',
    'colon' : ':',
    'questionmark' : '?',
    'asterisk' : '*',
    'doublequote' : '"',
    'greater' : '>',
    'smaller' : '<',
    'pipe' : '|'
}
@add_worker
def creator(step):
    """Worker 1: create files whose names contain odd characters, sync
    repeatedly, and check that no file is lost locally."""
    reset_owncloud_account()
    reset_rundir()

    step(1,'create initial content and sync')

    d = make_workdir()

    # 'space' in each pattern is replaced by the character under test.
    namepatterns = [
        "space1 testfile.dat",
        "space2testfile .dat",
        " space3testfile .dat",
        "space4testfile.dat ",
        "space5testfile. dat",
        " space6 testfile . dat ",
        " "
    ]

    charsets = { 'space' : ' ',
                 'plus' : '+',
                 'underscore' : '_',
                 'moscicki' : '\xc5\x9b', # some UTF-8 unicode character...
                 'singlequote' : "'"
    }

    # Also create files with the characters csync excludes from sync.
    charsets.update(charsets_excluded_from_sync)

    filenames = []

    for c in charsets:
        for n in namepatterns:
            nn = n.replace('space', "_"+c+"_").replace(' ',charsets[c])
            #print nn
            filenames.append(nn)
            createfile(os.path.join(d,nn),'1',count=filesizeKB,bs=1000)

    # generic charsets -- let's take a hammer and test (almost) all ANSI characters
    # we don't test for the forward slash
    char_range = range(32,47)+range(58,65)+range(91,97)+range(123,127)
    #char_range.remove(37) #to see the tests to completion temporarily remove this character as it crashes csync
    #char_range=[]

    for i in char_range:
        for n in namepatterns:
            nn = n.replace('space','_chr'+str(i)+'_').replace(' ',chr(i))
            if nn == '.': # skip this
                continue
            filenames.append(nn)
            createfile(os.path.join(d,nn),'1',count=filesizeKB,bs=1000)

    files_1 = os.listdir(d)
    N = count_files(d)

    # Share the created state with the propagator worker.
    shared = reflection.getSharedObject()
    shared['files_1'] = files_1
    shared['N'] = N

    for i in range(3): # 2 is enough but 3 is better ;-)
        list_files(d)
        run_ocsync(d)

    error_check(count_files(d) == N, "some files lost!")

    files_2 = os.listdir(d)
    for fn in set(files_1)-set(files_2):
        error_check(False, "the file has disappeared: %s"%repr(fn))
@add_worker
def propagator(step):
    """Worker 2: sync down after the creator and verify that every syncable
    file (no excluded characters in its name) was propagated."""
    step(2,'check propagation of files')
    workdir = make_workdir()
    shared_state = reflection.getSharedObject()
    uploaded = shared_state['files_1']
    # Drop every name containing a character the client excludes from sync;
    # only the remaining files are expected to reach this worker.
    unsyncable = charsets_excluded_from_sync.values()
    expected_files = []
    for name in uploaded:
        if all(ch not in unsyncable for ch in name):
            expected_files.append(name)
    logger.info("expected %d files to be propagated (excluding the ones with unsyncable characters %s)",len(expected_files),repr(unsyncable))
    run_ocsync(workdir)
    N2 = count_files(workdir)
    present = set(os.listdir(workdir))
    for missing in set(expected_files) - present:
        error_check(False, "the file has not been propagated: %s"%repr(missing))
| agpl-3.0 | Python |
|
6ac6202837cade41b1dad5af9a474be171255993 | Check that Constant is given Moments instance, not class | jluttine/bayespy,bayespy/bayespy,SalemAmeen/bayespy,fivejjs/bayespy | bayespy/inference/vmp/nodes/constant.py | bayespy/inference/vmp/nodes/constant.py | ################################################################################
# Copyright (C) 2011-2012,2014 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
import numpy as np
from .node import Node, Moments
class Constant(Node):
    r"""
    Node for presenting constant values.

    The node wraps arrays into proper node type.
    """

    def __init__(self, moments, x, **kwargs):
        """Wrap array `x` as a constant node whose moments are computed by `moments`.

        `moments` must be a Moments *instance*; passing the class itself is a
        common mistake and is rejected explicitly.
        """
        if not isinstance(moments, Moments):
            # Guard with isinstance(moments, type) first: plain
            # issubclass(garbage, Moments) would raise a confusing
            # "arg 1 must be a class" TypeError for non-class arguments.
            if isinstance(moments, type) and issubclass(moments, Moments):
                raise ValueError("Give moments as an object instance instead of a class")
            raise TypeError("moments must be a Moments instance")
        self._moments = moments
        x = np.asanyarray(x)
        # Compute moments
        self.u = self._moments.compute_fixed_moments(x)
        # Dimensions of the moments
        dims = self._moments.compute_dims_from_values(x)
        # Resolve plates: the leading axes of the first moment, excluding the
        # variable's own dimensions.
        D = len(dims[0])
        if D > 0:
            plates = np.shape(self.u[0])[:-D]
        else:
            plates = np.shape(self.u[0])
        # Parent constructor
        super().__init__(dims=dims, plates=plates, **kwargs)

    def _get_id_list(self):
        """
        Returns the stochastic ID list.

        This method is used to check that same stochastic nodes are not direct
        parents of a node several times. It is only valid if there are
        intermediate stochastic nodes.

        To put it another way: each ID corresponds to one factor q(..) in the
        posterior approximation. Different IDs mean different factors, thus they
        mean independence. The parents must have independent factors.

        Stochastic nodes should return their unique ID. Deterministic nodes
        should return the IDs of their parents. Constant nodes should return
        empty list of IDs.
        """
        return []

    def get_moments(self):
        """Return the precomputed fixed moments of the wrapped value."""
        return self.u

    def set_value(self, x):
        """Replace the wrapped value; the moment shapes must stay unchanged."""
        x = np.asanyarray(x)
        # Validate the new moments *before* mutating self.u so a shape error
        # does not leave the node holding mismatched moments.
        new_u = self._moments.compute_fixed_moments(x)
        for (ui_old, ui_new) in zip(self.u, new_u):
            if np.shape(ui_new) != np.shape(ui_old):
                raise ValueError("Incorrect shape for the array")
        self.u = new_u

    def lower_bound_contribution(self, gradient=False, **kwargs):
        # Deterministic functions are delta distributions so the lower bound
        # contribution is zero.
        return 0
| ################################################################################
# Copyright (C) 2011-2012,2014 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
import numpy as np
from .node import Node
class Constant(Node):
    r"""
    Node for presenting constant values.

    The node wraps arrays into proper node type.
    """

    def __init__(self, moments, x, **kwargs):
        # `moments` computes the fixed moments and dims of the wrapped array.
        # NOTE(review): a Moments *class* (instead of an instance) is not
        # rejected here and only fails later with a confusing error.
        self._moments = moments
        x = np.asanyarray(x)
        # Compute moments
        self.u = self._moments.compute_fixed_moments(x)
        # Dimensions of the moments
        dims = self._moments.compute_dims_from_values(x)
        # Resolve plates: leading axes of the first moment, excluding the
        # variable's own dimensions.
        D = len(dims[0])
        if D > 0:
            plates = np.shape(self.u[0])[:-D]
        else:
            plates = np.shape(self.u[0])
        # Parent constructor
        super().__init__(dims=dims, plates=plates, **kwargs)

    def _get_id_list(self):
        """
        Returns the stochastic ID list.

        This method is used to check that same stochastic nodes are not direct
        parents of a node several times. It is only valid if there are
        intermediate stochastic nodes.

        To put it another way: each ID corresponds to one factor q(..) in the
        posterior approximation. Different IDs mean different factors, thus they
        mean independence. The parents must have independent factors.

        Stochastic nodes should return their unique ID. Deterministic nodes
        should return the IDs of their parents. Constant nodes should return
        empty list of IDs.
        """
        return []

    def get_moments(self):
        """Return the precomputed fixed moments of the wrapped value."""
        return self.u

    def set_value(self, x):
        """Replace the wrapped value; the moment shapes must not change."""
        x = np.asanyarray(x)
        shapes = [np.shape(ui) for ui in self.u]
        # NOTE(review): self.u is mutated before validation, so a shape error
        # below leaves the node holding the mismatched moments.
        self.u = self._moments.compute_fixed_moments(x)
        for (i, shape) in enumerate(shapes):
            if np.shape(self.u[i]) != shape:
                raise ValueError("Incorrect shape for the array")

    def lower_bound_contribution(self, gradient=False, **kwargs):
        # Deterministic functions are delta distributions so the lower bound
        # contribution is zero.
        return 0
| mit | Python |
435004cebce00510db3bf36ae21b2cbf37020f32 | add petpvc specs file | mick-d/nipype,FCP-INDI/nipype,FCP-INDI/nipype,mick-d/nipype,carolFrohlich/nipype,FCP-INDI/nipype,carolFrohlich/nipype,carolFrohlich/nipype,FCP-INDI/nipype,sgiavasis/nipype,mick-d/nipype,sgiavasis/nipype,carolFrohlich/nipype,mick-d/nipype,sgiavasis/nipype,sgiavasis/nipype | nipype/interfaces/tests/test_auto_PETPVC.py | nipype/interfaces/tests/test_auto_PETPVC.py | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ...testing import assert_equal
from ..petpvc import PETPVC
def test_PETPVC_inputs():
    """Verify PETPVC input trait metadata against the expected map.

    NOTE: this file is generated by tools/checkspecs.py (see header);
    regenerate it rather than hand-editing when the interface changes.
    """
    # Expected trait-name -> metadata mapping for the PETPVC input spec.
    input_map = dict(alpha=dict(argstr='-a %.4f',
    ),
    args=dict(argstr='%s',
    ),
    debug=dict(argstr='-d',
    usedefault=True,
    ),
    environ=dict(nohash=True,
    usedefault=True,
    ),
    fwhm_x=dict(argstr='-x %.4f',
    mandatory=True,
    ),
    fwhm_y=dict(argstr='-y %.4f',
    mandatory=True,
    ),
    fwhm_z=dict(argstr='-z %.4f',
    mandatory=True,
    ),
    ignore_exception=dict(nohash=True,
    usedefault=True,
    ),
    in_file=dict(argstr='-i %s',
    mandatory=True,
    ),
    mask_file=dict(argstr='-m %s',
    mandatory=True,
    ),
    n_deconv=dict(argstr='-k %d',
    ),
    n_iter=dict(argstr='-n %d',
    ),
    out_file=dict(argstr='-o %s',
    genfile=True,
    hash_files=False,
    ),
    pvc=dict(argstr='-p %s',
    mandatory=True,
    ),
    stop_crit=dict(argstr='-a %.4f',
    ),
    terminal_output=dict(nohash=True,
    ),
    )
    inputs = PETPVC.input_spec()

    # Yield one comparison per (trait, metadata key) pair (nose-style test).
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_PETPVC_outputs():
    """Verify PETPVC output trait metadata (auto-generated; do not hand-edit)."""
    output_map = dict(out_file=dict(),
    )
    outputs = PETPVC.output_spec()

    # Yield one comparison per (trait, metadata key) pair (nose-style test).
    for key, metadata in list(output_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
| bsd-3-clause | Python |
|
99e531ec0e86d7c1d34de154cc49584821f85904 | Make all metavars uppercased | lukw00/powerline,prvnkumar/powerline,wfscheper/powerline,seanfisk/powerline,firebitsbr/powerline,xfumihiro/powerline,firebitsbr/powerline,IvanAli/powerline,IvanAli/powerline,bezhermoso/powerline,bartvm/powerline,s0undt3ch/powerline,darac/powerline,russellb/powerline,s0undt3ch/powerline,EricSB/powerline,s0undt3ch/powerline,bezhermoso/powerline,xfumihiro/powerline,lukw00/powerline,russellb/powerline,seanfisk/powerline,DoctorJellyface/powerline,darac/powerline,EricSB/powerline,QuLogic/powerline,S0lll0s/powerline,junix/powerline,Luffin/powerline,blindFS/powerline,IvanAli/powerline,Luffin/powerline,dragon788/powerline,DoctorJellyface/powerline,Liangjianghao/powerline,wfscheper/powerline,cyrixhero/powerline,cyrixhero/powerline,prvnkumar/powerline,areteix/powerline,cyrixhero/powerline,wfscheper/powerline,QuLogic/powerline,kenrachynski/powerline,xxxhycl2010/powerline,DoctorJellyface/powerline,blindFS/powerline,Liangjianghao/powerline,bezhermoso/powerline,seanfisk/powerline,S0lll0s/powerline,xfumihiro/powerline,Luffin/powerline,Liangjianghao/powerline,blindFS/powerline,dragon788/powerline,junix/powerline,junix/powerline,lukw00/powerline,kenrachynski/powerline,bartvm/powerline,russellb/powerline,areteix/powerline,xxxhycl2010/powerline,S0lll0s/powerline,areteix/powerline,prvnkumar/powerline,bartvm/powerline,darac/powerline,xxxhycl2010/powerline,QuLogic/powerline,EricSB/powerline,firebitsbr/powerline,kenrachynski/powerline,dragon788/powerline | powerline/commands/config.py | powerline/commands/config.py | # vim:fileencoding=utf-8:noet
from __future__ import (division, absolute_import, print_function)
import argparse
import powerline.bindings.config as config
class StrFunction(object):
    """Callable wrapper that renders as a fixed display name.

    Used so argparse ``choices`` show a readable action name while the
    parsed value remains the callable that implements the action.
    """

    def __init__(self, function, name=None):
        # Fall back to the wrapped function's own name when none is given.
        self.name = name or function.__name__
        self.function = function

    def __call__(self, *args, **kwargs):
        # Bug fix: propagate the wrapped function's return value instead of
        # silently discarding it (e.g. config.uses returns a result).
        return self.function(*args, **kwargs)

    def __str__(self):
        return self.name
# Subcommand action name -> implementing callable.  StrFunction makes each
# callable render as its action name in argparse help/choices output.
TMUX_ACTIONS = {
    'source': StrFunction(config.source_tmux_files, 'source'),
}

SHELL_ACTIONS = {
    'command': StrFunction(config.shell_command, 'command'),
    'uses': StrFunction(config.uses),
}
class ConfigArgParser(argparse.ArgumentParser):
    """ArgumentParser that rejects invocations lacking a subcommand."""

    def parse_args(self, *args, **kwargs):
        namespace = super(ConfigArgParser, self).parse_args(*args, **kwargs)
        if hasattr(namespace, 'function'):
            return namespace
        # In Python-3* `powerline-config` (without arguments) raises
        # AttributeError. I have not found any standard way to display same
        # error message as in Python-2*.
        self.error('too few arguments')
def get_argparser(ArgumentParser=ConfigArgParser):
    """Construct the argument parser for the powerline-config script."""
    parser = ArgumentParser(description='Script used to obtain powerline configuration.')
    subparsers = parser.add_subparsers()

    tmux = subparsers.add_parser('tmux', help='Tmux-specific commands')
    tmux.add_argument(
        'function',
        metavar='ACTION',
        choices=tuple(TMUX_ACTIONS.values()),
        type=TMUX_ACTIONS.get,
        help='If action is `source\' then version-specific tmux configuration files are sourced.'
    )

    shell = subparsers.add_parser('shell', help='Shell-specific commands')
    shell.add_argument(
        'function',
        metavar='ACTION',
        choices=tuple(SHELL_ACTIONS.values()),
        type=SHELL_ACTIONS.get,
        help='If action is `command\' then preferred powerline command is output, if it is `uses\' then powerline-config script will exit with 1 if specified component is disabled and 0 otherwise.',
    )
    shell.add_argument(
        'component',
        nargs='?',
        metavar='COMPONENT',
        choices=('tmux', 'prompt'),
    )
    shell.add_argument(
        '-s', '--shell',
        metavar='SHELL',
        help='Shell for which query is run',
    )
    return parser
| # vim:fileencoding=utf-8:noet
from __future__ import (division, absolute_import, print_function)
import argparse
import powerline.bindings.config as config
class StrFunction(object):
    # Wraps a callable so str() yields a fixed display name; keeps argparse
    # `choices` readable while parsing directly to callables.

    def __init__(self, function, name=None):
        # Fall back to the wrapped function's own name when none is given.
        self.name = name or function.__name__
        self.function = function

    def __call__(self, *args, **kwargs):
        # NOTE(review): the wrapped function's return value is discarded here.
        self.function(*args, **kwargs)

    def __str__(self):
        return self.name
# Subcommand action name -> implementing callable, wrapped in StrFunction so
# argparse help/choices display readable names.
TMUX_ACTIONS = {
    'source': StrFunction(config.source_tmux_files, 'source'),
}

SHELL_ACTIONS = {
    'command': StrFunction(config.shell_command, 'command'),
    'uses': StrFunction(config.uses),
}
class ConfigArgParser(argparse.ArgumentParser):
    # ArgumentParser that rejects invocations lacking a subcommand.

    def parse_args(self, *args, **kwargs):
        ret = super(ConfigArgParser, self).parse_args(*args, **kwargs)
        if not hasattr(ret, 'function'):
            # In Python-3* `powerline-config` (without arguments) raises
            # AttributeError. I have not found any standard way to display same
            # error message as in Python-2*.
            self.error('too few arguments')
        return ret
def get_argparser(ArgumentParser=ConfigArgParser):
    """Construct the argument parser for the powerline-config script."""
    parser = ArgumentParser(description='Script used to obtain powerline configuration.')
    subparsers = parser.add_subparsers()
    tmux_parser = subparsers.add_parser('tmux', help='Tmux-specific commands')
    # type= maps the action name back to its StrFunction callable.
    tmux_parser.add_argument(
        'function',
        choices=tuple(TMUX_ACTIONS.values()),
        metavar='action',
        type=(lambda v: TMUX_ACTIONS.get(v)),
        help='If action is `source\' then version-specific tmux configuration files are sourced.'
    )
    shell_parser = subparsers.add_parser('shell', help='Shell-specific commands')
    shell_parser.add_argument(
        'function',
        choices=tuple(SHELL_ACTIONS.values()),
        type=(lambda v: SHELL_ACTIONS.get(v)),
        metavar='action',
        help='If action is `command\' then preferred powerline command is output, if it is `uses\' then powerline-config script will exit with 1 if specified component is disabled and 0 otherwise.',
    )
    shell_parser.add_argument(
        'component',
        nargs='?',
        choices=('tmux', 'prompt'),
        metavar='component',
    )
    shell_parser.add_argument(
        '-s', '--shell',
        metavar='SHELL',
        help='Shell for which query is run',
    )
    return parser
| mit | Python |
c94653382a5a8aa2ec9af94f3aaa02ed5cae9b4c | Create test_send_sms.py | nksheridan/elephantAI,nksheridan/elephantAI | test_send_sms.py | test_send_sms.py | import serial
from curses import ascii
# since we need ascii code from CTRL-Z
import time
# here we are testing sending an SMS via virtual serial port ttyUSB0 that was created by a USB serial modem
phonenumber = #enter phone number to send SMS to e.g. "+441234123123"
SMS = "here's your SMS!"
ser = serial.Serial('/dev/ttyUSB0', 460800, timeout=1)
# 460800 is baud rate, ttyUSB0 is virtual serial port we are sending to
ser.write("AT\r\n")
# send AT to the ttyUSB0 virtual serial port
line = ser.readline()
print(line)
# what did we get back from AT command? Should be OK
ser.write("AT+CMGF=1\r\n")
# send AT+CMGF=1 so setting up for SMS followed by CR
line = ser.readline()
print(line)
# what did we get back from that AT command?
ser.write('AT+CMGS="%s"\r\n' %phonenumber)
# send AT+CMGS then CR, then phonenumber variable
ser.write(SMS)
# send the SMS variable after we sent the CR
ser.write(ascii.ctrl('z'))
# send a CTRL-Z after the SMS variable using ascii library
time.sleep(10)
# wait 10 seconds
print ser.readline()
print ser.readline()
print ser.readline()
print ser.readline()
# what did we get back after we tried AT_CMGS=phonenumber followed
# by <CR> , then SMS variable, then <CTRL-Z> ascii code??
| mit | Python |
|
5e723223b5206e6e92a6140f184d41f1abfdbb3d | Create candy-crush.py | tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode | Python/candy-crush.py | Python/candy-crush.py | # Time: O((R * C)^2)
# Space: O(1)
# This question is about implementing a basic elimination algorithm for Candy Crush.
#
# Given a 2D integer array board representing the grid of candy,
# different positive integers board[i][j] represent different types of candies.
# A value of board[i][j] = 0 represents that the cell at position (i, j) is empty.
# The given board represents the state of the game following the player's move.
# Now, you need to restore the board to a stable state by crushing candies according to the following rules:
#
# If three or more candies of the same type are adjacent vertically or horizontally,
# "crush" them all at the same time - these positions become empty.
#
# After crushing all candies simultaneously,
# if an empty space on the board has candies on top of itself,
# then these candies will drop until they hit a candy or bottom at the same time.
# (No new candies will drop outside the top boundary.)
#
# After the above steps, there may exist more candies that can be crushed.
# If so, you need to repeat the above steps.
# If there does not exist more candies that can be crushed (ie. the board is stable),
# then return the current board.
# You need to perform the above rules until the board becomes stable, then return the current board.
#
# Example 1:
# Input:
# board =
# [[110,5,112,113,114],[210,211,5,213,214],[310,311,3,313,314],[410,411,412,5,414],[5,1,512,3,3],[610,4,1,613,614],[710,1,2,713,714],[810,1,2,1,1],[1,1,2,2,2],[4,1,4,4,1014]]
# Output:
# [[0,0,0,0,0],[0,0,0,0,0],[0,0,0,0,0],[110,0,0,0,114],[210,0,0,0,214],[310,0,0,113,314],[410,0,0,213,414],[610,211,112,313,614],[710,311,412,613,714],[810,411,512,713,1014]]
# Note:
# The length of board will be in the range [3, 50].
# The length of board[i] will be in the range [3, 50].
# Each board[i][j] will initially start as an integer in the range [1, 2000].
class Solution(object):
    def candyCrush(self, board):
        """
        :type board: List[List[int]]
        :rtype: List[List[int]]

        Repeatedly crush every horizontal/vertical run of three or more equal
        candies and apply gravity until the board is stable.  Candies that are
        scheduled to be crushed are marked by negating their value, so both
        scan directions can be checked in the same pass via abs().  The board
        is modified in place and returned.  Time: O((R*C)^2), Space: O(1).

        Portability fix: use range() instead of the Python-2-only xrange();
        with boards of at most 50x50 the materialized lists are negligible and
        the code now runs on both Python 2 and 3.
        """
        R, C = len(board), len(board[0])
        changed = True

        while changed:
            changed = False

            # Mark horizontal runs of three or more equal, non-empty candies.
            for r in range(R):
                for c in range(C-2):
                    if abs(board[r][c]) == abs(board[r][c+1]) == abs(board[r][c+2]) != 0:
                        board[r][c] = board[r][c+1] = board[r][c+2] = -abs(board[r][c])
                        changed = True

            # Mark vertical runs of three or more equal, non-empty candies.
            for r in range(R-2):
                for c in range(C):
                    if abs(board[r][c]) == abs(board[r+1][c]) == abs(board[r+2][c]) != 0:
                        board[r][c] = board[r+1][c] = board[r+2][c] = -abs(board[r][c])
                        changed = True

            # Gravity: compact the surviving (still positive) candies to the
            # bottom of each column and zero-fill everything above them.
            for c in range(C):
                i = R-1
                for r in reversed(range(R)):
                    if board[r][c] > 0:
                        board[i][c] = board[r][c]
                        i -= 1
                for r in reversed(range(i+1)):
                    board[r][c] = 0

        return board
| mit | Python |
|
e262d176ecd7d8871a9e06ebc542cf473acf0925 | Add migration for transnational weights | Code4SA/gmmp,Code4SA/gmmp,Code4SA/gmmp | reports/migrations/0004_transnational_weights.py | reports/migrations/0004_transnational_weights.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django_countries import countries
def populate_weights(apps, schema_editor):
    """Create one Weights row per (country, media type) from COUNTRY_WEIGHTS.

    Uses the historical model state supplied by the migration framework and
    targets the database the migration is being applied to.
    """
    Weights = apps.get_model("reports", "Weights")
    db_alias = schema_editor.connection.alias
    for item in COUNTRY_WEIGHTS:
        # Work on a copy so the module-level COUNTRY_WEIGHTS data is not
        # mutated by the pop() below.
        entry = dict(item)
        country = entry.pop('Country')
        # items() instead of the Python-2-only iteritems() keeps the
        # migration runnable under both Python 2 and 3.
        for media_type, weight in entry.items():
            # create() already persists the row; no extra save() needed.
            Weights.objects.using(db_alias).create(
                country=country,
                media_type=media_type,
                weight=weight)
def backwards(apps, schema_editor):
    # Intentional no-op: the forward data load is not reversed.
    pass
class Migration(migrations.Migration):
    # Data migration: loads the transnational ('T1') media weights.

    dependencies = [
        ('reports', '0003_indonesia-weights'),
    ]

    operations = [
        migrations.RunPython(
            populate_weights,
            backwards,
        ),
    ]
# Weight data loaded by populate_weights.  'Country' value 'T1' denotes the
# transnational pseudo-country; every media type is weighted 1.
COUNTRY_WEIGHTS= [
    {'Country': 'T1',
     'Internet': '1',
     'Print': '1',
     'Radio': '1',
     'Television': '1',
     'Twitter': '1'}]
| apache-2.0 | Python |
|
6ff99aa939bf07d18595507efeca6ada7fc267a5 | add a test thing for wilk | rcbops/opencenter,rcbops/opencenter | tools/ast-env.py | tools/ast-env.py | #!/usr/bin/env python
# Ad-hoc developer harness: exercises the roush Solver against a local
# sqlite database and checks that a plan round-trips through from_plan().
if __name__ == '__main__':
    import os
    import sys
    import copy
    import json

    sys.path.append(os.path.dirname(__file__))

    import logging
    logging.basicConfig(level=logging.DEBUG)

    import roush.db.database
    from roush.db import api as db_api
    from roush.db.database import init_db
    from sqlalchemy.orm import sessionmaker, create_session, scoped_session
    from sqlalchemy.ext.declarative import declarative_base
    from roushclient.client import RoushEndpoint
    from roush.webapp.ast import FilterBuilder, FilterTokenizer
    from roush.webapp.solver import Solver

    ep = RoushEndpoint()
    init_db('sqlite:///roush.db')
    # NOTE(review): 'engine' is not defined anywhere in this script, so the
    # lambda below would raise NameError the first time the session is used.
    db_session = scoped_session(lambda: create_session(autocommit=False,
                                                       autoflush=False,
                                                       bind=engine))
    Base = declarative_base()
    Base.query = db_session.query_property()

    ##########################
    # Quieten the very chatty AST logger.
    ast_logger = logging.getLogger('roush.webapp.ast')
    ast_logger.setLevel(logging.WARNING)

    expr1 = 'facts.woof = "goober"'
    expr2 = 'facts.arf = "woof"'

    api = db_api.api_from_models()
    # Solve for node 4 with one fact constraint, then rebuild the solver from
    # the produced plan and check the plans agree.
    solver = Solver(api, 4, ['facts.ostype="hi"'])
    solved, requires_input, plan = solver.solve()
    print 'Solver plan: %s' % plan
    solver_from_plan = Solver.from_plan(api, 4,
                                        ['facts.ostype="hi"'],
                                        plan)
    new_plan = solver_from_plan.plan()
    print 'Solver plan: %s' % new_plan
    # NOTE(review): '%' binds tighter than '==', so this prints the result of
    # comparing the *formatted string* with plan — parentheses likely intended.
    print 'plans identical: %s' % new_plan == plan
    print plan
    print new_plan
    print json.dumps(solver_from_plan.adventure(), sort_keys=True, indent=4)

    # foo = FilterBuilder(FilterTokenizer(),
    #                     'nodes: "test" in union(facts.blah, "test")')
    # root_node = foo.build()
    # print 'expression: %s' % root_node.to_s()
    # print 'inverted: %s' % root_node.invert()
|
b0577ce3b8b162ce3702430b189905f9beaae8d5 | Add script to clean up all FD phone and fax numbers. | FireCARES/firecares,FireCARES/firecares,FireCARES/firecares,FireCARES/firecares,FireCARES/firecares | firecares/firestation/management/commands/cleanup_phonenumbers.py | firecares/firestation/management/commands/cleanup_phonenumbers.py | from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
from phonenumber_field.modelfields import PhoneNumber
import re
"""
This command is for cleaning up every phone and fax number in the
database. It removes all non-numeric characters, such as parenthesis,
hyphens, spaces, etc. It also removes prefixed 1s These numbers should
be made human-readable on the client side.
"""
def cleanNumber(no1):
    """Return *no1* reduced to bare digits, without a single leading US '1'.

    Strips parentheses, hyphens, spaces, '+' and any other non-numeric
    characters; the client side is expected to re-format for display.
    """
    digits = re.sub('[^0-9]', '', no1)
    return digits[1:] if digits.startswith("1") else digits
class Command(BaseCommand):
    # Normalises every FireDepartment HQ phone/fax number to bare digits.

    def handle(self, *args, **kwargs):
        """Strip formatting from all HQ phone and fax numbers and save each FD."""
        print("Don't worry, it always takes this long.")
        for fd in FireDepartment.objects.all():
            # If the FD has a phone number, clean it up
            # ("Invalid Input" is the phonenumber_field marker for unparseable data)
            if fd.headquarters_phone and not fd.headquarters_phone.raw_input == "Invalid Input":
                newPhone = cleanNumber(fd.headquarters_phone.raw_input)
                print(newPhone)
                fd.headquarters_phone = newPhone
            # If the FD has a fax number, clean it up
            if fd.headquarters_fax and not fd.headquarters_fax.raw_input == "Invalid Input":
                newFax = cleanNumber(fd.headquarters_fax.raw_input)
                print(newFax)
                fd.headquarters_fax = newFax
            # Save and continue to the next FD (if any)
            fd.save()
        print("Completed successfully!")
| mit | Python |
|
370fa8682b09de76af36129817e412270c8206c8 | add a new mgmt command for data analysis: compute the intersections of every pair of boundaries | datamade/represent-boundaries,opencorato/represent-boundaries,opencorato/represent-boundaries,datamade/represent-boundaries,opencorato/represent-boundaries,datamade/represent-boundaries | boundaries/management/commands/compute_intersections.py | boundaries/management/commands/compute_intersections.py | from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.gis.geos import MultiPolygon, Polygon
from boundaries.models import BoundarySet, Boundary
class Command(BaseCommand):
    help = 'Create a report of the area of intersection of every pair of boundaries from two boundary sets specified by their slug.'
    args = 'boundaryset1 boundaryset1'

    def handle(self, *args, **options):
        """Print a whitespace-separated report row per intersecting boundary pair."""
        if len(args) < 2:
            print "Specify two boundaryset slugs."
            return

        bset_a = BoundarySet.objects.get(slug=args[0])
        bset_b = BoundarySet.objects.get(slug=args[1])

        # Header row of the report.
        print bset_a.slug, "area_1", bset_b.slug, "area_2", "area_intersection", "pct_of_1", "pct_of_2"

        # For each boundary in the first set...
        for a_slug in bset_a.boundaries.order_by("slug").values_list('slug', flat=True):
            a_bdry = bset_a.boundaries.get(slug=a_slug)
            a_area = a_bdry.shape.area

            # Find each intersecting boundary in the second set...
            for b_bdry in bset_b.boundaries\
                .filter(shape__intersects=a_bdry.shape):
                geometry = a_bdry.shape.intersection(b_bdry.shape)
                int_area = geometry.area
                if geometry.empty: continue

                b_area = b_bdry.shape.area

                # Skip overlaps that are less than .1% of the area of either of the shapes.
                # These are probably not true overlaps.
                if int_area/a_area < .001 or int_area/b_area < .001:
                    continue

                print a_slug, a_area, b_bdry.slug, b_area, int_area, int_area/a_area, int_area/b_area
| mit | Python |
|
7a75174716ecbc0757c84f29ebbb3fd309521936 | add management command to fire all repeaters in a domain | qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/cleanup/management/commands/fire_repeaters.py | corehq/apps/cleanup/management/commands/fire_repeaters.py | import datetime
from django.core.management.base import BaseCommand, CommandError
from corehq.apps.receiverwrapper.models import RepeatRecord
from dimagi.utils.post import simple_post
class Command(BaseCommand):
    args = '<domain>'
    help = 'Fire all repeaters in a domain.'

    def handle(self, *args, **options):
        """Fire and save every repeat record due within the next year."""
        if len(args) == 1:
            domain = args[0]
        else:
            raise CommandError('Usage: %s\n%s' % (self.args, self.help))

        # A due_before one year out effectively selects all pending records.
        next_year = datetime.datetime.now() + datetime.timedelta(days=365)
        records = RepeatRecord.all(domain=domain, due_before=next_year)
        for record in records:
            record.fire(post_fn=simple_post)
            record.save()
            print '{} {}'.format(record._id, 'successful' if record.succeeded else 'failed')
| bsd-3-clause | Python |
|
d5d3fcfb331c1486acbfb004705b94b1923a0db8 | Add code to dump features into libsvm file format | erfannoury/SuperEdge,erfannoury/SuperEdge,erfannoury/SuperEdge,erfannoury/SuperEdge,erfannoury/SuperEdge | Codes/SuperEdge/SuperEdge/dump_libsvm.py | Codes/SuperEdge/SuperEdge/dump_libsvm.py | import numpy as np
from datetime import datetime
from sklearn.datasets import dump_svmlight_file
import os.path as path
def main():
    """Dump the cached feature/label memmaps to a libsvm-format text file."""
    cache_path = 'largecache/'
    feat_name = 'feat.dat'
    lbl_name = 'lbl.dat'
    # Feature vector length per sample; must match how feat.dat was written.
    feat_len = 4224 #1088

    now = datetime.now()
    # Labels are one uint8 per sample; their count fixes the number of rows
    # expected in the float32 feature memmap.
    lbl_memmap = np.memmap(path.join(cache_path, lbl_name), dtype='uint8', mode='r')
    feat_memmap = np.memmap(path.join(cache_path, feat_name), dtype='float32', mode='r', shape=(lbl_memmap.shape[0], feat_len))
    print 'loading dataset took ', (datetime.now() - now)
    now = datetime.now()
    print 'starting dumping feature files to libsvm format'
    dump_svmlight_file(feat_memmap, lbl_memmap, 'largecache/data.train.txt')

if __name__ == '__main__':
    main()
|
269f1b743583609ecdc7658e35073db9b985634c | rename http.py | hhatto/poyonga | examples/http_example.py | examples/http_example.py | from poyonga.client import Groonga
# Connect to a local Groonga server over HTTP (default endpoint) and run a
# few commands, printing each response's status and body.
g = Groonga()
cmds = [("status", {}),
        ("log_level", {"level": "warning"}),
        #("table_create", {"name": "Site", "flags": "TABLE_HASH_KEY"}),
        ("select", {"table": "Site"})]
for cmd, kwargs in cmds:
    ret = g.call(cmd, **kwargs)
    print(ret.status)
    print(ret.body)
    print("*" * 40)
| mit | Python |
|
fef9b29a9072ce6bc184592c240558ba01fbeb6b | Copy to github | timhughes/git-credential-libsecret | git-credential-libsecret.py | git-credential-libsecret.py | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2016 Tim Hughes <[email protected]>
#
# Distributed under terms of the MIT license.
"""
Handles storing and providing usernames and passwords to Git using libsecret.
"""
import os
import sys
if __name__ == '__main__':
githelper = __import__(os.path.splitext(os.path.basename(__file__))[0])
raise SystemExit(githelper.main(sys.argv))
import sys
import argparse
from urllib.parse import urlparse
import gi
gi.require_version('Secret', '1')
from gi.repository import Secret
# libsecret schema describing how git credentials are keyed in the keyring:
# one stored item per (protocol, host, path, username) attribute tuple.
GIT_CREDENTIALS_SCHEMA = Secret.Schema.new("org.timhughes.git.Credentials.",
    Secret.SchemaFlags.NONE,
    {
        "protocol": Secret.SchemaAttributeType.STRING,
        "host": Secret.SchemaAttributeType.STRING,
        "path": Secret.SchemaAttributeType.STRING,
        "username": Secret.SchemaAttributeType.STRING,
    }
)
def main(argv):
    """Dispatch to get/store/reject based on the git-credential verb in argv."""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()
    parser_get = subparsers.add_parser('get', help='get help')
    parser_get.set_defaults(func=get)
    parser_store = subparsers.add_parser('store', help='store shelp')
    parser_store.set_defaults(func=store)
    parser_reject = subparsers.add_parser('reject', help='reject help')
    parser_reject.set_defaults(func=reject)
    args = parser.parse_args(argv[1:])
    # Subcommands are optional in Python 3 argparse, so `func` may be absent.
    if hasattr(args, 'func'):
        try:
            args.func()
        except KeyboardInterrupt:
            print('Interrupted')
            sys.exit(0)
def get_attributes():
attributes = {}
for line in sys.stdin:
key, var = line.partition("=")[::2]
if key == "\n":
break
if key in ['protocol','host','path','username','password','url']:
if key == 'url':
o = urlparse(var.strip())
if o.scheme:
attributes['protocol'] = o.scheme
if o.netloc:
attributes['host'] = o.netloc
if o.path:
attributes['path'] = o.path
if o.username:
attributes['username'] = o.username
if o.password:
attributes['password'] = o.password
else:
attributes[key.strip()] = var.strip()
if len(attributes) > 0:
return attributes
else:
return
def get():
    """Handle `git credential get`: look up a stored secret and print it."""
    attributes = get_attributes()
    # The keyring lookup must not be keyed on any password git supplied.
    if 'password' in attributes:
        del attributes['password']
    password = Secret.password_lookup_sync(
        GIT_CREDENTIALS_SCHEMA,
        attributes,
        None
    )
    if password:
        # Re-read the full item so username etc. can be reported back to git.
        secret_item = find_secret_item(attributes)
        print('protocol=%s' % secret_item['protocol'])
        print('host=%s' % secret_item['host'])
        print('username=%s' % secret_item['username'])
        print('password=%s' % secret_item['password'])
def store():
    """Handle `git credential store`: save the supplied credentials in the keyring."""
    attributes = get_attributes()
    if 'password' in attributes:
        # The password is the secret itself, not a lookup attribute.
        password = attributes['password']
        del attributes['password']
    else:
        # Nothing to store without a password.
        sys.exit(1)
    Secret.password_store_sync(
        GIT_CREDENTIALS_SCHEMA,
        attributes,
        Secret.COLLECTION_DEFAULT,
        "%s://%s@%s" %(attributes['protocol'], attributes['username'], attributes['host'] ),
        password,
        None
    )
def reject():
    """Handle `git credential erase`: remove the matching stored secret."""
    attributes = get_attributes()
    # Match on the identifying attributes only, never the password.
    if 'password' in attributes:
        del attributes['password']
    Secret.password_clear_sync(
        GIT_CREDENTIALS_SCHEMA,
        attributes,
        None
    )
def find_secret_item(attributes):
    """Return the first keyring item matching `attributes` as a dict,
    including its decoded secret under the 'password' key."""
    service = Secret.Service.get_sync(Secret.ServiceFlags.LOAD_COLLECTIONS)
    collection = Secret.Collection.for_alias_sync(service,Secret.COLLECTION_DEFAULT,Secret.CollectionFlags.LOAD_ITEMS,None)
    # NOTE(review): assumes at least one match; [0] raises IndexError otherwise.
    item = collection.search_sync(GIT_CREDENTIALS_SCHEMA,attributes,Secret.SearchFlags.LOAD_SECRETS,None)[0]
    item.load_secret_sync()
    ret_attributes = item.get_attributes()
    ret_attributes['password'] = item.get_secret().get().decode('utf-8')
    return ret_attributes
| mit | Python |
|
95b304d2f0a9dc851926506795310f96c3312682 | Add SQL example. | yask123/rockstar,ghuntley/rockstar,GrimDerp/rockstar,georgemarshall/rockstar,danielbruns-wf/rockstar,RobertWang/rockstar,Hitman666/rockstar,jessiejea/rockstar,dkfiresky/rockstar,monsterwater/rockstar,ActuallyACat/rockstar,intermezzo-fr/rockstar,bechurch/rockstar,tyrchen/rockstar,jehb/rockstar,Endika/rockstar,haosdent/rockstar,allengaller/rockstar,gokaygurcan/rockstar,danialgoodwin/rockstar,jrajath94/RockStar,yamamushi/rockstar-1,varunparkhe/rockstar,ak2703/rockstar,avinassh/rockstar,freakynit/rockstar,clakech/rockstar | examples/sql_rockstar.py | examples/sql_rockstar.py | from RockStar import RockStar
sql_code = "SELECT 'Hello World!';"
rock_it_bro = RockStar(days=400, file_name='hello_world.sql', code=sql_code)
rock_it_bro.make_me_a_rockstar()
| mit | Python |
|
d5cf05e40b638afbf12fd95cf721a22ad0f3281d | Create appointments.py | Programmeerclub-WLG/Agenda-App | backend/appointments.py | backend/appointments.py | apache-2.0 | Python |
||
b017c43c9bf46cd2e1fd7380904a1b022b6930ba | Create __init__.py | ant6/beprof,DataMedSci/beprof,grzanka/beprof,DataMedSci/beprof,ant6/beprof | beprof/__init__.py | beprof/__init__.py | pass
| mit | Python |
|
bb462e78221344d71a2605d4af042e5037db3e79 | add colorize.py script | richzhang/colorization,richzhang/colorization,richzhang/colorization | colorize.py | colorize.py | import numpy as np
import os
import skimage.color as color
import matplotlib.pyplot as plt
import scipy.ndimage.interpolation as sni
import caffe
import argparse
def parse_args():
    """Parse command-line options for the colorization script."""
    ap = argparse.ArgumentParser(description='iColor: deep interactive colorization')
    ap.add_argument('-img_in', dest='img_in', type=str,
                    help='grayscale image to read in')
    ap.add_argument('-img_out', dest='img_out', type=str,
                    help='colorized image to save off')
    ap.add_argument('--gpu', dest='gpu', type=int, default=0, help='gpu id')
    ap.add_argument('--prototxt', dest='prototxt', type=str,
                    default='./models/colorization_deploy_v2.prototxt',
                    help='prototxt filepath')
    ap.add_argument('--caffemodel', dest='caffemodel', type=str,
                    default='./models/colorization_release_v2.caffemodel',
                    help='caffemodel filepath')
    return ap.parse_args()
if __name__ == '__main__':
    args = parse_args()

    caffe.set_mode_gpu()
    caffe.set_device(args.gpu)

    # Select desired model
    net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST)

    (H_in,W_in) = net.blobs['data_l'].data.shape[2:] # get input shape
    (H_out,W_out) = net.blobs['class8_ab'].data.shape[2:] # get output shape

    pts_in_hull = np.load('./resources/pts_in_hull.npy') # load cluster centers
    net.params['class8_ab'][0].data[:,:,0,0] = pts_in_hull.transpose((1,0)) # populate cluster centers as 1x1 convolution kernel
    # print 'Annealed-Mean Parameters populated'

    # load the original image
    img_rgb = caffe.io.load_image(args.img_in)

    img_lab = color.rgb2lab(img_rgb) # convert image to lab color space
    img_l = img_lab[:,:,0] # pull out L channel
    (H_orig,W_orig) = img_rgb.shape[:2] # original image size

    # create grayscale version of image (just for displaying)
    # NOTE(review): img_rgb_bw is computed but never used or saved below.
    img_lab_bw = img_lab.copy()
    img_lab_bw[:,:,1:] = 0
    img_rgb_bw = color.lab2rgb(img_lab_bw)

    # resize image to network input size
    img_rs = caffe.io.resize_image(img_rgb,(H_in,W_in)) # resize image to network input size
    img_lab_rs = color.rgb2lab(img_rs)
    img_l_rs = img_lab_rs[:,:,0]

    net.blobs['data_l'].data[0,0,:,:] = img_l_rs-50 # subtract 50 for mean-centering
    net.forward() # run network

    # Predicted ab channels are produced at network resolution, then
    # upsampled and recombined with the original full-resolution L channel.
    ab_dec = net.blobs['class8_ab'].data[0,:,:,:].transpose((1,2,0)) # this is our result
    ab_dec_us = sni.zoom(ab_dec,(1.*H_orig/H_out,1.*W_orig/W_out,1)) # upsample to match size of original image L
    img_lab_out = np.concatenate((img_l[:,:,np.newaxis],ab_dec_us),axis=2) # concatenate with original image L
    img_rgb_out = (255*np.clip(color.lab2rgb(img_lab_out),0,1)).astype('uint8') # convert back to rgb

    plt.imsave(args.img_out, img_rgb_out)
| bsd-2-clause | Python |
|
753388550e4ea7a8b09ddb22189021be3585a5e5 | Clean PlexUpdate plugin | randybias/beets,Freso/beets,lengtche/beets,asteven/beets,m-urban/beets,imsparsh/beets,moodboom/beets,mosesfistos1/beetbox,jmwatte/beets,jackwilsdon/beets,mathstuf/beets,PierreRust/beets,tima/beets,mathstuf/beets,lightwang1/beets,ibmibmibm/beets,sadatay/beets,arabenjamin/beets,kareemallen/beets,tima/beets,mathstuf/beets,mried/beets,pkess/beets,multikatt/beets,marcuskrahl/beets,xsteadfastx/beets,randybias/beets,Andypsamp/CODjunit,YetAnotherNerd/beets,beetbox/beets,lightwang1/beets,multikatt/beets,sadatay/beets,drm00/beets,ibmibmibm/beets,moodboom/beets,Andypsamp/CODjunit,Kraymer/beets,marcuskrahl/beets,sampsyo/beets,Kraymer/beets,jcoady9/beets,untitaker/beets,untitaker/beets,SusannaMaria/beets,kelvinhammond/beets,shamangeorge/beets,drm00/beets,andremiller/beets,asteven/beets,kareemallen/beets,YetAnotherNerd/beets,jackwilsdon/beets,m-urban/beets,LordSputnik/beets,lightwang1/beets,sadatay/beets,tima/beets,diego-plan9/beets,asteven/beets,jmwatte/beets,arabenjamin/beets,Kraymer/beets,shamangeorge/beets,swt30/beets,beetbox/beets,YetAnotherNerd/beets,Freso/beets,jcoady9/beets,ruippeixotog/beets,dfc/beets,kelvinhammond/beets,Andypsamp/CODfinalJUNIT,kelvinhammond/beets,lengtche/beets,shanemikel/beets,mosesfistos1/beetbox,diego-plan9/beets,randybias/beets,moodboom/beets,randybias/beets,pkess/beets,gabrielaraujof/beets,shamangeorge/beets,lightwang1/beets,LordSputnik/beets,shamangeorge/beets,beetbox/beets,mried/beets,sadatay/beets,diego-plan9/beets,untitaker/beets,Freso/beets,gabrielaraujof/beets,Andypsamp/CODfinalJUNIT,kareemallen/beets,Andypsamp/CODjunit,YetAnotherNerd/beets,imsparsh/beets,multikatt/beets,pkess/beets,Kraymer/beets,mried/beets,beetbox/beets,artemutin/beets,Dishwishy/beets,mathstuf/beets,ruippeixotog/beets,lengtche/beets,sampsyo/beets,m-urban/beets,ibmibmibm/beets,pkess/beets,parapente/beets,xsteadfastx/beets,swt30/beets,swt30/beets,marcuskrahl/beets,xsteadfastx/beets,marcuskrahl/beets,shanemik
el/beets,mosesfistos1/beetbox,swt30/beets,PierreRust/beets,Andypsamp/CODfinalJUNIT,gabrielaraujof/beets,shanemikel/beets,MyTunesFreeMusic/privacy-policy,m-urban/beets,ruippeixotog/beets,artemutin/beets,SusannaMaria/beets,jmwatte/beets,madmouser1/beets,mosesfistos1/beetbox,drm00/beets,ttsda/beets,parapente/beets,moodboom/beets,andremiller/beets,parapente/beets,lengtche/beets,PierreRust/beets,LordSputnik/beets,jcoady9/beets,xsteadfastx/beets,MyTunesFreeMusic/privacy-policy,asteven/beets,artemutin/beets,Andypsamp/CODfinalJUNIT,Dishwishy/beets,drm00/beets,arabenjamin/beets,madmouser1/beets,dfc/beets,Dishwishy/beets,jackwilsdon/beets,madmouser1/beets,jackwilsdon/beets,kelvinhammond/beets,diego-plan9/beets,LordSputnik/beets,arabenjamin/beets,sampsyo/beets,multikatt/beets,madmouser1/beets,SusannaMaria/beets,ttsda/beets,kareemallen/beets,parapente/beets,dfc/beets,gabrielaraujof/beets,jcoady9/beets,mried/beets,MyTunesFreeMusic/privacy-policy,untitaker/beets,Andypsamp/CODjunit,MyTunesFreeMusic/privacy-policy,ibmibmibm/beets,andremiller/beets,Dishwishy/beets,ttsda/beets,artemutin/beets,tima/beets,sampsyo/beets,Andypsamp/CODjunit,imsparsh/beets,Freso/beets,dfc/beets,PierreRust/beets,ttsda/beets,jmwatte/beets,ruippeixotog/beets,SusannaMaria/beets,imsparsh/beets,Andypsamp/CODfinalJUNIT,shanemikel/beets | beetsplug/plexupdate.py | beetsplug/plexupdate.py | """Updates an Plex library whenever the beets library is changed.
Put something like the following in your config.yaml to configure:
plex:
host: localhost
port: 32400
"""
import requests
from urlparse import urljoin
import xml.etree.ElementTree as ET
from beets import config
from beets.plugins import BeetsPlugin
def get_music_section(host, port):
    """Return the Plex section key of the library titled 'Music'.

    Returns None when no such section exists on the server.
    """
    url = urljoin('http://{0}:{1}'.format(host, port), 'library/sections')

    # Fetch the XML section listing and pick out the music section's key.
    response = requests.get(url)
    for directory in ET.fromstring(response.text).findall('Directory'):
        if directory.get('title') == 'Music':
            return directory.get('key')
    return None
def update_plex(host, port):
    """Ask the Plex server to rescan its music library.

    Returns the response object of the refresh request.
    """
    # Resolve the music section first, then hit its refresh endpoint.
    endpoint = 'library/sections/{0}/refresh'.format(get_music_section(host, port))
    return requests.get(urljoin('http://{0}:{1}'.format(host, port), endpoint))
class PlexUpdate(BeetsPlugin):
    """Beets plugin that triggers a Plex library rescan after DB changes."""

    def __init__(self):
        super(PlexUpdate, self).__init__()

        # Adding defaults.
        config['plex'].add({
            u'host': u'localhost',
            u'port': 32400})

        self.register_listener('database_change', self.listen_for_db_change)

    def listen_for_db_change(self, lib):
        """Listens for a beets db change and registers the update for the end."""
        # cli_exit is only hooked once a change actually happened, so the
        # refresh request is sent only when something was modified.
        self.register_listener('cli_exit', self.update)

    def update(self, lib):
        """When the client exits, try to send a refresh request to the Plex
        server.
        """
        self._log.info('Updating Plex library...')

        # Try to send update request.
        try:
            update_plex(
                config['plex']['host'].get(),
                config['plex']['port'].get())
            self._log.info('... started.')

        except requests.exceptions.RequestException:
            self._log.warning('Update failed.')
| """Updates an Plex library whenever the beets library is changed.
Put something like the following in your config.yaml to configure:
plex:
host: localhost
port: 32400
"""
import requests
from urlparse import urljoin
import xml.etree.ElementTree as ET
from beets import config
from beets.plugins import BeetsPlugin
# Module-level flag set on any beets database change so that the Plex
# refresh runs at most once, right before beets exits.
database_changed = False
def get_music_section(host, port):
    """Getting the section key for the music library in Plex.

    Returns None when no section titled 'Music' exists.
    """
    api_endpoint = 'library/sections'
    url = urljoin('http://{0}:{1}'.format(host, port), api_endpoint)

    # Sends request.
    r = requests.get(url)

    # Parse xml tree and extract music section key.
    tree = ET.fromstring(r.text)
    for child in tree.findall('Directory'):
        if child.get('title') == 'Music':
            return child.get('key')
def update_plex(host, port):
    """Sends a request to the Plex api to start a library refresh.

    Returns the requests response object of the refresh call.
    """
    # Getting section key and build url.
    section_key = get_music_section(host, port)
    api_endpoint = 'library/sections/{0}/refresh'.format(section_key)
    url = urljoin('http://{0}:{1}'.format(host, port), api_endpoint)

    # Sends request and returns requests object.
    r = requests.get(url)
    return r
class PlexUpdate(BeetsPlugin):
    """Beets plugin holding the Plex connection settings (host/port)."""

    def __init__(self):
        super(PlexUpdate, self).__init__()

        # Adding defaults.
        config['plex'].add({
            u'host': u'localhost',
            u'port': 32400})
@PlexUpdate.listen('database_change')
def listen_for_db_change(lib=None):
    """Listens for a beets db change and sets the global database_changed
    flag to True.
    """
    global database_changed
    database_changed = True
@PlexUpdate.listen('cli_exit')
def update(lib=None):
    """When the client exits and the database_changed flag is True,
    try to send a refresh request to the Plex server.
    """
    if database_changed:
        print('Updating Plex library...')

        # Try to send update request.
        try:
            update_plex(
                config['plex']['host'].get(),
                config['plex']['port'].get())
            print('... started.')

        except requests.exceptions.RequestException:
            print('Update failed.')
| mit | Python |
e5bdbfb075bf95b7834f2db8c7d6ffa69cb03bc0 | Create convnet_basic.py | AdityaSoni19031997/Machine-Learning,AdityaSoni19031997/Machine-Learning | Classifying_datasets/statoil/convnet_basic.py | Classifying_datasets/statoil/convnet_basic.py | adi
| mit | Python |
|
6f3bb73dd387fd73022a20c3a45adf05213339cf | add new package (#20109) | iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/py-rosinstall-generator/package.py | var/spack/repos/builtin/packages/py-rosinstall-generator/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class PyRosinstallGenerator(PythonPackage):
"""A tool for generating rosinstall files."""
homepage = "https://wiki.ros.org/rosinstall_generator"
url = "https://pypi.io/packages/source/r/rosinstall-generator/rosinstall_generator-0.1.22.tar.gz"
version('0.1.22', sha256='22d22599cd3f08a1f77fb2b1d9464cc8062ede50752a75564d459fcf5447b8c5')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-rospkg', type=('build', 'run'))
depends_on('py-pyyaml', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
| lgpl-2.1 | Python |
|
0cbb114a70bcbef266c6df776681d1c664d8bdae | Create superClock.py | jimfinoc/superClock | superClock.py | superClock.py | #This is My Nest but it will slowly be converted to superClock!
import urllib
import urllib2
import sys
import json
import time
# Make sure your higher level directory has the JSON file called passwordFile.json
# The file should contain the information in the JSON format. See below for an example
# {"username": "[email protected]", "password": "yourSuperSecretPassword!!!"}
# all temps from the Nest site are stored in degrees Celsius
# Load the Nest account credentials from the JSON file described above.
# Using a with-statement closes the file handle promptly instead of
# leaking it for the lifetime of the script (the original never closed it).
with open('../passwordFile.json') as fileData:
    usernameAndPassword = json.load(fileData)
#print "username:" + str(usernameAndPassword['username'])
#print "password:" + str(usernameAndPassword['password'])
def c_to_f(c):
    """Convert a temperature from degrees Celsius to degrees Fahrenheit."""
    fahrenheit = c * 9.0 / 5.0 + 32.0
    return fahrenheit
class Nest:
    """Thin Python 2 client for the (unofficial) Nest thermostat web API."""

    def __init__(self, username, password, serial=None, index=0):
        self.username = username
        self.password = password
        self.serial = serial  # thermostat serial; resolved from index if None
        self.index = index    # which of the account's thermostats to use

    def loads(self, res):
        # Decode a JSON response; very old json modules expose read()
        # instead of loads().
        if hasattr(json, "loads"):
            res = json.loads(res)
        else:
            res = json.read(res)
        return res

    def login(self):
        """Authenticate and remember the transport URL, token and user id."""
        data = urllib.urlencode({"username": self.username, "password": self.password})
        req = urllib2.Request("https://home.nest.com/user/login",
                              data,
                              {"user-agent":"Nest/1.1.0.10 CFNetwork/548.0.4"})
        res = urllib2.urlopen(req).read()
        res = self.loads(res)
        self.transport_url = res["urls"]["transport_url"]
        self.access_token = res["access_token"]
        self.userid = res["userid"]

    def get_status(self):
        """Fetch the full status blob; resolve the device serial if needed."""
        req = urllib2.Request(self.transport_url + "/v2/mobile/user." + self.userid,
                              headers={"user-agent":"Nest/1.1.0.10 CFNetwork/548.0.4",
                                       "Authorization":"Basic " + self.access_token,
                                       "X-nl-user-id": self.userid,
                                       "X-nl-protocol-version": "1"})
        res = urllib2.urlopen(req).read()
        res = self.loads(res)
        self.structure_id = res["structure"].keys()[0]
        if (self.serial is None):
            # Look up this account's index-th device and keep its serial.
            self.device_id = res["structure"][self.structure_id]["devices"][self.index]
            self.serial = self.device_id.split(".")[1]
        self.status = res

    def show_status(self):
        """Print every shared + device field for this thermostat, sorted by key."""
        shared = self.status["shared"][self.serial]
        device = self.status["device"][self.serial]
        allvars = shared
        allvars.update(device)
        for k in sorted(allvars.keys()):
            print k + "."*(32-len(k)) + ":", allvars[k]
# This assumes you have two Nest Thermostats. If you have more than 2, the number, index, after "None"
# below will increment accordingly. If you only have one, it should just be 0. You have to create an object
# for each nest thermostat. You could also specify the thermostats by serial number instead of the index.
print"My Nest Data"
n0 = Nest(usernameAndPassword['username'],usernameAndPassword['password'], None, 0) #Downstairs
n1 = Nest(usernameAndPassword['username'],usernameAndPassword['password'], None, 1) #Upstairs
print " Logging On"
n1.login()
n0.login()
print " Getting Status"
n1.get_status()
n0.get_status()
print""
# The Nest site reports temperatures in Celsius; convert for display.
print "Upstairs Temperature"
print c_to_f(n1.status["shared"][n1.serial]["current_temperature"])
print "Upstairs Humidity"
print n1.status["device"][n1.serial]["current_humidity"]
print ""
print "Downstairs Temperature"
print c_to_f(n0.status["shared"][n0.serial]["current_temperature"])
print "Downstairs Humidity"
print n0.status["device"][n0.serial]["current_humidity"]
| apache-2.0 | Python |
|
a2516d28c86fd23efcb893e59de42b33526bfe6f | Add a Python Tkinter example showing how to map a scale widget. | davidhernon/libmapper,johnty/libmapper,davidhernon/libmapper,radarsat1/libmapper,malloch/libmapper,malloch/libmapper,radarsat1/libmapper,libmapper/libmapper,radarsat1/libmapper,davidhernon/libmapper-admin2,johnty/libmapper,johnty/libmapper,malloch/libmapper,johnty/libmapper,radarsat1/libmapper,radarsat1/libmapper,libmapper/libmapper,davidhernon/libmapper-admin2,davidhernon/libmapper,malloch/libmapper,davidhernon/libmapper,davidhernon/libmapper,johnty/libmapper,davidhernon/libmapper-admin2,malloch/libmapper,libmapper/libmapper,davidhernon/libmapper-admin2,malloch/libmapper,libmapper/libmapper,libmapper/libmapper,davidhernon/libmapper-admin2,libmapper/libmapper | swig/tkgui.py | swig/tkgui.py | #!/usr/bin/env python
import Tkinter
import sys
import mapper
def on_gui_change(x):
    """Tk scale callback: forward the new slider value on the mapper output."""
    # print 'on_gui_change',x,x.__class__
    sig_out.update_scalar(int(x))
def on_mapper_change(sig, x):
    """libmapper input callback: reflect the incoming value on the Tk slider."""
    # print 'on_mapper_change', x, x.__class__
    w.set(int(x))
dev = mapper.device("tkgui", 9000)

# One input and one output signal, both named /signal0: incoming values
# drive the GUI slider, and slider moves are sent back out.
sig_in = mapper.signal(1, "/signal0", None, 'f', on_mapper_change)
dev.register_input(sig_in)
sig_out = mapper.signal(1, "/signal0", None, 'f', lambda x: x)
dev.register_output(sig_out)

master = Tkinter.Tk()
master.title("libmapper Python GUI demo")

w = Tkinter.Scale(master, from_=0, to=100, label='signal0',
                  orient=Tkinter.HORIZONTAL, length=300,
                  command=on_gui_change)
w.pack()

def do_poll():
    # Service the mapper device periodically from inside the Tk event loop.
    dev.poll(20)
    master.after(5, do_poll)

do_poll()
master.mainloop()
| lgpl-2.1 | Python |
|
99578401585435c08aed6f29e090fbde955423fd | Create good2d.py | hpssjellis/easy-tensorflow-on-cloud9,hpssjellis/easy-tensorflow-on-cloud9,hpssjellis/easy-tensorflow-on-cloud9 | rocksetta-examples/good2d.py | rocksetta-examples/good2d.py | import tensorflow as tf
import tensorflow as tf
import numpy as np

# Create 100 phony x, y data points in NumPy, y = x * 0.1 + 0.3
x_data = np.random.rand(100).astype("float32")
y_data = x_data * 0.1 + 0.3

# Try to find values for W and b that compute y_data = W * x_data + b
# (We know that W should be 0.1 and b 0.3, but Tensorflow will
# figure that out for us.)
W = tf.Variable(tf.random_uniform([1], -1.0, 1.0))
b = tf.Variable(tf.zeros([1]))
y = W * x_data + b

# Minimize the mean squared errors.
loss = tf.reduce_mean(tf.square(y - y_data))
optimizer = tf.train.GradientDescentOptimizer(0.5)
train = optimizer.minimize(loss)

# Before starting, initialize the variables. We will 'run' this first.
init = tf.initialize_all_variables()

# Launch the graph.
sess = tf.Session()
sess.run(init)

# Fit the line.  `range` replaces the Python-2-only `xrange`: the script
# already uses print() calls, so this keeps it runnable on both Python 2
# and Python 3 with identical behavior.
for step in range(201):
    sess.run(train)
    if step % 20 == 0:
        print(step, sess.run(W), sess.run(b))

# Learns best fit is W: [0.1], b: [0.3]
| mit | Python |
|
35e51f55adfbe8383bfb3d34688eb7aee89a8351 | add shuffle_by_year script, to toss messages into yearly folders | rtucker/imap2maildir,rtucker/imap2maildir | shuffle_by_year.py | shuffle_by_year.py | #!/usr/bin/env python
# Q&D script to sort mail into subfolders by year.
# Reduces the burden upon the filesystem gnomes.
DIRPATH = "/stor0/backups/imapbak/rtucker/Fastmail-rey_fmgirl_com"
import email
import mailbox
import imap2maildir
import sys
import time
import os
def main():
    """Move every un-foldered message into a Maildir subfolder named after
    its year, updating the imap2maildir bookkeeping database as it goes.
    """
    db = imap2maildir.open_sql_session(DIRPATH + "/.imap2maildir.sqlite")
    mbox = mailbox.Maildir(DIRPATH, False)

    try:
        counter = 0
        c = db.cursor()
        for result in db.execute("select mailfile,folder from seenmessages where folder is null or folder = ''"):
            key = result[0]
            msg = mbox.get_message(key)
            # Prefer the Date: header; fall back to the maildir delivery time.
            year = None
            if 'Date' in msg:
                ttup = email.utils.parsedate(msg['Date'])
                if ttup:
                    year = ttup[0]
            if year is None:
                tstamp = msg.get_date()
                year = time.gmtime(tstamp).tm_year
                print(key + " has no valid Date header; going with " + str(year))
            # Copy into the per-year folder first, then update the database
            # row and remove the original only after the copy succeeded.
            ybox = mbox.add_folder(str(year))
            ybox.lock()
            newkey = ybox.add(msg)
            ybox.flush()
            ybox.unlock()
            c.execute("update seenmessages set mailfile = ?, folder = ? where mailfile = ?", (newkey, year, key))
            mbox.lock()
            mbox.discard(key)
            mbox.flush()
            mbox.unlock()
            print("moved " + key + " to " + str(year) + "/" + newkey)
            counter += 1
            # Commit in batches of 25 to bound lost work on interruption.
            if counter % 25 == 0:
                print("committing db")
                db.commit()
                sys.stdout.flush()
            # A .STOP file in the cwd requests a graceful early exit.
            if os.path.exists(".STOP"):
                print("stop requested")
                os.unlink(".STOP")
                break
    finally:
        mbox.unlock()
        db.commit()

if __name__ == "__main__":
    main()
| mit | Python |
|
7423de8d2222e81693fe380a6b4c8638a96a9db8 | Create urls.py | arbin/pysoldev,arbin/pysoldev,arbin/pysoldev | urls.py | urls.py | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from pysoldev import settings
admin.autodiscover()

urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    url(r'^$', 'pysoldev.views.home', name='home'),
    # NOTE(review): this regex (r'^$') duplicates the 'home' pattern above;
    # assuming only the first match wins, 'index' is unreachable -- verify.
    url(r'^$', 'pysoldev.app.views.index', name='index'),
)

if settings.DEBUG:
    # Serve uploaded media straight from Django during development only.
    urlpatterns += patterns('',
        (r'^media/(?P<path>.*)$', 'django.views.static.serve',
            {'document_root': settings.MEDIA_ROOT}),
    )
| bsd-2-clause | Python |
|
0079676729fa8023bea93fcf03bb48c4ff24a495 | add partition | dragonwolverines/DataStructures,dragonwolverines/DataStructures,dragonwolverines/DataStructures | resource-4/combinatorics/integer-partitions/partition1.py | resource-4/combinatorics/integer-partitions/partition1.py | # counting partitions
# Memo table for partition1: maps (n, k) -> count.  The original recursion
# recomputed the same subproblems exponentially often; caching turns it
# into an O(n^2)-entry dynamic program with identical results.
_partition1_cache = {}

def partition1(n, k=-1):
    """Count integer partitions of n.

    With the default k == -1, returns the total number of partitions of n.
    Otherwise returns the number of partitions of n whose largest part is
    exactly k (0 when n < k).
    """
    key = (n, k)
    if key in _partition1_cache:
        return _partition1_cache[key]
    if k == -1:
        # Sum over every possible largest part.
        result = sum(partition1(n, i) for i in range(1, n + 1))
    elif n < k:
        result = 0
    elif n == 0 or n == 1:
        result = 1
    elif k == 1 or n == k:
        result = 1
    else:
        # Remove one part of size k; the rest has largest part i <= k.
        result = sum(partition1(n - k, i) for i in range(1, min(k, n - k) + 1))
    _partition1_cache[key] = result
    return result
| bsd-2-clause | Python |
|
a787ceea91abf1c0fbeb3b97e063d3ec1aa61b57 | Create implement-rand10-using-rand7.py | tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015 | Python/implement-rand10-using-rand7.py | Python/implement-rand10-using-rand7.py | # Time: O(1)
# Space: O(1)
# Given a function rand7 which generates a uniform random integer in the range 1 to 7,
# write a function rand10 which generates a uniform random integer in the range 1 to 10.
#
# Do NOT use system's Math.random().
#
# Example 1:
#
# Input: 1
# Output: [7]
# Example 2:
#
# Input: 2
# Output: [8,4]
# Example 3:
#
# Input: 3
# Output: [8,1,10]
#
# Note:
#
# rand7 is predefined.
# Each testcase has one argument: n, the number of times that rand10 is called.
#
# Follow up:
# - What is the expected value for the number of calls to rand7() function?
# - Could you minimize the number of calls to rand7()?
#
# The rand7() API is already defined for you.
# def rand7():
# @return a random integer in the range 1 to 7
class Solution(object):
    def rand10(self):
        """
        :rtype: int
        """
        # Rejection sampling: two rand7() draws give a uniform value in
        # [0, 48]; only the first 40 outcomes are kept so that each of
        # 1..10 is equally likely (acceptance probability 40/49).
        while True:
            draw = (rand7() - 1) * 7 + (rand7() - 1)
            if draw >= 40:
                continue
            return draw % 10 + 1
| mit | Python |
|
b659dd572bd92e10bde8899540792bdb26529a45 | add qdb_test | mequanta/z-dyno,mequanta/z-dyno | scripts/qdb_test.py | scripts/qdb_test.py | from qdb import set_trace, RemoteCommandManager
def f():
    # Tiny function to step into from the debugger; the (unused) local
    # gives the remote session a variable to inspect while stopped here.
    in_f = True
    return 'getting out of f'
def main():
    # Suspend execution and hand control to a remote qdb command manager
    # listening on localhost:8001, under the session uuid 'qdb'.
    set_trace(
        uuid='qdb',
        host='localhost',
        port=8001,
        cmd_manager=RemoteCommandManager(),
    )
    mutable_object = {}
    print 'Hello world!'
    f()
    print mutable_object
if __name__ == '__main__':
main() | mit | Python |
|
5c02d7ccb9721e0b02cdd31edc36be095a4568e2 | test the Meta and the MetaMixin classes | samgiles/slumber,ministryofjustice/slumber,s-block/slumber,zongxiao/slumber,CloudNcodeInc/slumber,IAlwaysBeCoding/slumber,futurice/slumber,IAlwaysBeCoding/More,jannon/slumber,infoxchange/slumber | tests/meta.py | tests/meta.py | import unittest
import slumber
class MetaTestCase(unittest.TestCase):
def test_init_kwargs_to_attributes(self):
m = slumber.Meta(item1="test", item2=41, item3="example")
self.assertEqual(m.item1, "test")
self.assertEqual(m.item2, 41)
self.assertEqual(m.item3, "example")
class MetaMixinTestCase(unittest.TestCase):
def test_init_kwargs_to_meta(self):
class MetaMixinTest(slumber.MetaMixin, object):
class Meta:
item1 = None
item2 = None
item3 = None
mmt = MetaMixinTest(item1="test", item2=41, item3="example")
self.assertTrue(hasattr(mmt, "_meta"))
self.assertTrue(isinstance(mmt._meta, slumber.Meta))
self.assertEqual(mmt._meta.item1, "test")
self.assertEqual(mmt._meta.item2, 41)
self.assertEqual(mmt._meta.item3, "example")
def test_meta_to_meta_defaults(self):
class MetaMixinTest(slumber.MetaMixin, object):
class Meta:
item1 = None
item2 = None
item3 = None
mmt = MetaMixinTest()
self.assertTrue(hasattr(mmt, "_meta"))
self.assertTrue(isinstance(mmt._meta, slumber.Meta))
self.assertEqual(mmt._meta.item1, None)
self.assertEqual(mmt._meta.item2, None)
self.assertEqual(mmt._meta.item3, None)
def test_meta_to_meta_defaults_with_init_kwargs(self):
class MetaMixinTest(slumber.MetaMixin, object):
class Meta:
item1 = None
item2 = None
item3 = None
mmt = MetaMixinTest(item2=41)
self.assertTrue(hasattr(mmt, "_meta"))
self.assertTrue(isinstance(mmt._meta, slumber.Meta))
self.assertEqual(mmt._meta.item1, None)
self.assertEqual(mmt._meta.item2, 41)
self.assertEqual(mmt._meta.item3, None)
| bsd-2-clause | Python |
|
4c3c9c6929ebc3f439ccf3bb7d3696f484b154bc | Add missing noop-migrations for PositiveIntegerField | Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet | karspexet/ticket/migrations/0017_positive_integers_20180322_2056.py | karspexet/ticket/migrations/0017_positive_integers_20180322_2056.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2018-03-22 19:56
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Switch price/amount columns to PositiveIntegerField (non-negative)."""

    dependencies = [
        ('ticket', '0016_add_voucher_note_20180213_2307'),
    ]

    operations = [
        # Discount amounts remain bounded to [100, 5000] by validators.
        migrations.AlterField(
            model_name='discount',
            name='amount',
            field=models.PositiveIntegerField(validators=[django.core.validators.MinValueValidator(100), django.core.validators.MaxValueValidator(5000)]),
        ),
        migrations.AlterField(
            model_name='reservation',
            name='ticket_price',
            field=models.PositiveIntegerField(),
        ),
        migrations.AlterField(
            model_name='reservation',
            name='total',
            field=models.PositiveIntegerField(),
        ),
        migrations.AlterField(
            model_name='ticket',
            name='price',
            field=models.PositiveIntegerField(),
        ),
        migrations.AlterField(
            model_name='voucher',
            name='amount',
            field=models.PositiveIntegerField(help_text='Rabatt i SEK'),
        ),
    ]
| mit | Python |
|
af508daaf016b824c7518a36f9b92f571f0f65af | Implement management command for creating demo records of balance history (NC-842) | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | nodeconductor/structure/management/commands/init_balance_history.py | nodeconductor/structure/management/commands/init_balance_history.py | from datetime import timedelta
from django.core.management.base import BaseCommand
from django.utils import timezone
from nodeconductor.structure.models import BalanceHistory
from nodeconductor.structure.models import Customer
class Command(BaseCommand):
    help = """ Initialize demo records of balance history """

    def handle(self, *args, **options):
        # Seed ten history points per customer, one per day going back in
        # time, with amounts 100, 110, ..., 190.
        # NOTE(review): assumes BalanceHistory.created accepts an explicit
        # value (i.e. it is not auto_now_add) -- confirm against the model.
        self.stdout.write('Creating demo records of balance history for all customers')
        for customer in Customer.objects.all():
            for i in range(10):
                BalanceHistory.objects.create(customer=customer,
                                              created=timezone.now() - timedelta(days=i),
                                              amount=100 + i * 10)
        self.stdout.write('... Done')
| mit | Python |
|
7f1883275e9aa0b489de99947db7daf87f616ed4 | solve k58 | WatsonDNA/nlp100,WatsonDNA/nlp100,wtsnjp/nlp100,wtsnjp/nlp100 | chap06/k58.py | chap06/k58.py | #
# usage: python k58.py {file name} {number}
#
import sys
from xml.etree import ElementTree as ET
def get_tuple(nll, dll):
    """Build [predicate, subject, object] triples, one list per match.

    nll and dll are parallel per-sentence lists of [governor, dependent]
    pairs for nsubj and dobj relations respectively.  For every governor
    that has both a subject and an object, the first-seen subject and
    object are used (matching the original list-scan behaviour), but with
    O(1) dict lookups instead of repeated O(n) scans, and a deterministic
    output order (order of first appearance in the nsubj list) instead of
    arbitrary set-iteration order.
    """
    triples = []
    for nl, dl in zip(nll, dll):
        # setdefault keeps the FIRST dependent seen for each governor.
        subjects = {}
        for gov, dep in nl:
            subjects.setdefault(gov, dep)
        objects = {}
        for gov, dep in dl:
            objects.setdefault(gov, dep)
        for gov, subj in subjects.items():
            if gov in objects:
                triples.append([gov, subj, objects[gov]])
    return triples
if __name__ == '__main__':
    # Parse a CoreNLP XML file and print tab-separated
    # predicate/subject/object triples from its collapsed dependencies.
    fn = sys.argv[1]
    root = ET.parse(fn).getroot()
    cdl = [d for d in root.findall('document/sentences/*/dependencies')
           if d.get('type') == 'collapsed-dependencies']
    # Per sentence: [governor, dependent] pairs of nsubj and dobj relations.
    nll = [[[n.find('governor').text, n.find('dependent').text]
            for n in e.findall('*[@type="nsubj"]')]
           for e in cdl]
    dll = [[[d.find('governor').text, d.find('dependent').text]
            for d in e.findall('*[@type="dobj"]')]
           for e in cdl]
    for t in get_tuple(nll, dll):
        print('\t'.join(t))
| unlicense | Python |
|
a9d0a3dcc5221adbca5142a0cd7548ef874afd45 | add script to make matrix | nishio/learning_alloy,nishio/learning_alloy,nishio/learning_alloy | matrix/build_maxrix.py | matrix/build_maxrix.py | WIDTH = 10
HEIGHT = 10
HORIZONTAL_NEXT = "hnext"
VERTICAL_NEXT = "vnext"
BASE = "Cell"
print """
abstract sig %(BASE)s {
%(HORIZONTAL_NEXT)s: lone %(BASE)s,
%(VERTICAL_NEXT)s: lone %(BASE)s
}
""" % globals()
for x in range(WIDTH):
for y in range(HEIGHT):
print "one sig Cell_%d_%d extends %s {}" % (x, y, BASE)
# fact
print "fact matrix_adj {"
for x in range(WIDTH):
for y in range(HEIGHT - 1):
next = y + 1
print " Cell_%(x)d_%(y)d.%(VERTICAL_NEXT)s = Cell_%(x)d_%(next)d" % globals()
print " no Cell_%(x)d_%(next)d.%(VERTICAL_NEXT)s" % globals()
for y in range(HEIGHT):
for x in range(WIDTH - 1):
next = x + 1
print " Cell_%(x)d_%(y)d.%(HORIZONTAL_NEXT)s = Cell_%(next)d_%(y)d" % globals()
print " no Cell_%(next)d_%(y)d.%(HORIZONTAL_NEXT)s" % globals()
print "}"
| mit | Python |
|
98cbb29d008fc7abf1a066d9ecf7b3399395aefe | add users api | fkmclane/MCP,fkmclane/MCP,fkmclane/MCP,fkmclane/MCP | mcp/interface/users.py | mcp/interface/users.py | import json
from mcp import users
from mcp.interface import common
class UsersHandler(common.AuthorizedHandler):
    # Collection endpoint: GET returns the names of all known users.
    def forbidden(self):
        # NOTE(review): unconditionally True -- presumably access is then
        # granted only by an admin override in AuthorizedHandler; confirm.
        return True

    def do_get(self):
        return 200, json.dumps(list(iter(users.user_db)))
class UserHandler(common.AuthorizedHandler):
    """Base handler for requests that target a single user entry."""

    def __init__(self, request, response, groups):
        common.AuthorizedHandler.__init__(self, request, response, groups)
        # groups[0] is the user name captured by the user_base URL regex.
        self.userentry = users.get(self.groups[0])

    def forbidden(self):
        # A user may only access their own entry.
        return self.user.name != self.userentry.name
class UserInfoHandler(UserHandler):
    def do_get(self):
        # Serialize the user entry's fields as the JSON response body.
        return 200, json.dumps({'name': self.userentry.name, 'key': self.userentry.key, 'admin': self.userentry.admin, 'active': self.userentry.active, 'servers': self.userentry.servers})
# URL routing: the collection lives at /users/, a single user at
# /users/<name> where <name> must match the users.users_allowed pattern.
users_base = '/users/'
user_base = users_base + '(' + users.users_allowed + ')'

routes = {users_base: UsersHandler, user_base: UserInfoHandler}
| mit | Python |
|
6e165165974f99a30ba6dce528d97c03152eeab6 | Add tracegraph script | rix0rrr/tracegraph | tracegraph.py | tracegraph.py | #!/usr/bin/python
import sys
import re
class Host(object):
    """One traceroute hop: its name/address plus observed round-trip times."""

    def __init__(self, step, name, ip):
        self.step = step
        self.name = name
        self.ip = ip
        self.pings = []

    def add_ping(self, ping):
        """Record one round-trip time (milliseconds)."""
        self.pings.append(ping)

    def max_ping(self):
        """Largest recorded ping, or 0 when nothing was recorded."""
        if not self.pings:
            return 0
        return max(self.pings)

    def min_ping(self):
        """Smallest recorded ping, or +infinity when nothing was recorded."""
        if not self.pings:
            return float('inf')
        return min(self.pings)
class Step(object):
    # Container for the hosts answering at one traceroute hop.
    # NOTE(review): appears unused in this script -- the main code builds
    # Host objects directly via to_host().
    def __init__(self):
        self.hosts = []
def to_host(line):
    """Parse one line of traceroute output into a Host.

    Handles an optional leading hop number, leading '*' timeout markers,
    an optional host name and address token, and a trailing list of ping
    times (numbers interleaved with 'ms' tokens and '*' placeholders).
    """
    parts = re.split(r'\s+', line.strip())

    # Optional hop number; '' when the line is a continuation line.
    step = ''
    if parts[0].isdigit():
        step = int(parts[0])
        parts = parts[1:]

    # Skip '*' entries (timed-out probes) before the host name.
    x = 0
    while x < len(parts) and parts[x] == '*':
        x += 1

    # Next two tokens, when present, are the host name and its address.
    name = '?'
    ip = '?'
    if x < len(parts):
        name = parts[x]
        x += 1
    if x < len(parts):
        ip = parts[x]
        x += 1

    # Remaining numeric tokens are round-trip times in milliseconds.
    pings = [float(t) for t in parts[x:] if t != 'ms' and t != '*']

    host = Host(step, name, ip)
    for ping in pings:
        host.add_ping(ping)
    return host
def barplot(host, scale):
    """Render an 'o---o' bar spanning min..max ping, scaled to characters."""
    if not host.pings:
        return ''
    lo = int(host.min_ping() * scale)
    hi = int(host.max_ping() * scale)
    bar = ' ' * max(0, lo - 1) + 'o'
    if hi > lo:
        bar += '-' * (hi - lo - 1) + 'o'
    return bar
def rchop_to(s, l):
    """Truncate s on the left to at most l characters, marking the cut
    with a leading '...' (the ellipsis counts toward the limit)."""
    if len(s) <= l:
        return s
    return '...' + s[3 - l:]
# Either run traceroute ourselves (args given) or read its output from stdin.
if len(sys.argv) > 1:
    # Pass arguments to traceroute
    import subprocess
    #lines = subprocess.check_output(['traceroute'] + sys.argv[1:]).splitlines()
    p = subprocess.Popen(['traceroute'] + sys.argv[1:], stdout=subprocess.PIPE, stderr=None)
    lines = []
    # Echo each line as it arrives while also collecting it for the graph.
    for line in iter(p.stdout.readline,''):
        lines.append(line.rstrip())
        print line.rstrip()
else:
    print 'Reading from stdin.'
    lines = sys.stdin.read().splitlines()
    print 'traceroute:'
    print '\n'.join(lines)

print ''
print 'tracegraph:'

hosts = list(to_host(line) for line in lines)
max_ping = max(h.max_ping() for h in hosts)

# Scale the slowest ping to a WIDTH-character bar.
WIDTH = 60
scale = float(WIDTH) / max_ping

for host in hosts:
    print '%2s %-20s | %s' % (host.step, rchop_to(host.name, 20), barplot(host, scale))

# Axis line: 0ms at the left edge, the maximum at the right.
print 25 * ' ' + ' 0ms' + (WIDTH - 8) * ' ' + ' %.1fms' % max_ping
| mit | Python |
|
8bc3c2c82b1b8f7e4582fe401c05c4c4f34cc345 | create structure for python | Southampton-Maritime-Robotics/autonomous-sailing-robot,smaria/autonomous-sailing-robot | src/boat_servo_sail/setup.py | src/boat_servo_sail/setup.py | ## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
## http://docs.ros.org/api/catkin/html/howto/format2/installing_python.html
## pdf download of the page in sources folder:
## docs-ros_installing_python.pdf
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup

# fetch values from package.xml (name, version, etc.) so they are not
# duplicated here; catkin invokes this setup.py itself.
setup_args = generate_distutils_setup(
    packages=['boat_servo_sail'],
    package_dir={'': 'src'})

setup(**setup_args)
| bsd-2-clause | Python |
|
dc3df810c3c6ffea429e43ec0f8e6f006a9c1c6f | Create tensorCase.py | saurabhrathor/Tensorflow_Practice | tensorCase.py | tensorCase.py | import tensorflow as tf
sess = tf.InteractiveSession()

# tf.cond: pick add or subtract depending on which random scalar is larger.
x = tf.random_uniform([])
y = tf.random_uniform([])
out1 = tf.cond(tf.greater(x,y), lambda:tf.add(x,y), lambda:(tf.subtract(x,y)))
# NOTE(review): each .eval() is a separate session run, so x, y and out1
# are likely re-sampled independently here; use one sess.run([x, y, out1])
# to see consistent values -- verify against the TF1 semantics.
print(x.eval(), y.eval(), out1.eval())

# tf.case: multi-branch dispatch with a default when neither predicate holds.
x = tf.random_uniform([],-1,1)
y = tf.random_uniform([],-1,1)
def f1(): return tf.cast(tf.add(x,y), tf.float32)
def f2(): return tf.cast(tf.subtract(x,y), tf.float32)
def f3(): return tf.cast(tf.constant(0), tf.float32)
out2 = tf.case({tf.less(x, y):f2, tf.greater(x,y):f1}, default=f3)
print(x.eval(), y.eval(), out2.eval())
| bsd-2-clause | Python |
|
6584d9e71c82097f65f316bf85b2f019350cfa58 | Add run_aiotest.py | overcastcloud/aioeventlet | run_aiotest.py | run_aiotest.py | import aioeventlet
import aiotest.run
import eventlet
config = aiotest.TestConfig()
config.new_event_pool_policy = aioeventlet.EventLoopPolicy
config.sleep = eventlet.sleep
aiotest.run.main(config)
| apache-2.0 | Python |
|
50983c0a6d18e1ec8fcaed076f3c82b5935fe913 | Solve problem 23 | mazayus/ProjectEuler | problem023.py | problem023.py | #!/usr/bin/env python3
from itertools import *
def divisors(n):
for d in takewhile(lambda d: d * d <= n, count(1)):
if n % d == 0:
yield d
if n // d != d:
yield n // d
def is_abundant_number(n):
return n < sum(divisors(n)) - n
all_abundant = set(filter(is_abundant_number, range(1, 28123+1)))
print(sum(n for n in range(1, 28123+1)
if not any(n-d in all_abundant for d in all_abundant)))
| mit | Python |
|
8db65dc2a6a99a0e6287b12f7bfdcd423a62e515 | Add test running harness | hexahedria/gated-graph-transformer-network,hexahedria/gated-graph-transformer-network | run_harness.py | run_harness.py | import sys
import os
import subprocess
import shutil
import shlex
import collections
from babi_train import TrainExitStatus
from graceful_interrupt import GracefulInterruptHandler
TaskSpec = collections.namedtuple("TaskSpec", ["task_name", "variant_name", "run_params"])
def run(tasks_dir, output_dir, base_params, specs):
base_params_split = shlex.split(base_params)
for spec in specs:
print("### Task {} ({}) ###".format(spec.task_name, spec.variant_name))
run_params_split = shlex.split(spec.run_params)
task_folder_train = os.path.join(tasks_dir, "{}_train".format(spec.task_name))
if not os.path.isdir(task_folder_train):
print("Train directory doesn't exist. Parsing text file...")
textfile = task_folder_train + ".txt"
subprocess.run(["python3","babi_graph_parse.py",textfile], check=True)
task_folder_valid = os.path.join(tasks_dir, "{}_valid".format(spec.task_name))
if not os.path.isdir(task_folder_valid):
print("Validation directory doesn't exist. Parsing text file...")
textfile = task_folder_valid + ".txt"
subprocess.run(["python3","babi_graph_parse.py",textfile], check=True)
task_output_dir = os.path.join(output_dir, spec.task_name, spec.variant_name)
if not os.path.isdir(task_output_dir):
os.makedirs(task_output_dir)
completed_file = os.path.join(task_output_dir, "completed.txt")
if os.path.exists(completed_file):
print("Task is already completed! Skipping...")
continue
stdout_fn = os.path.join(task_output_dir, "stdout.txt")
all_params = [task_folder_train] + run_params_split + base_params_split
all_params.extend(["--outputdir", task_output_dir])
all_params.extend(["--validation", task_folder_valid])
all_params.extend(["--set-exit-status"])
all_params.extend(["--resume-auto"])
with open(stdout_fn, 'a', 1) as stdout_file:
proc = subprocess.Popen(all_params, stdout=stdout_file, stderr=subprocess.STDOUT)
with GracefulInterruptHandler():
returncode = proc.wait()
task_status = TrainExitStatus(returncode)
if task_status == TrainExitStatus.accuracy_success:
print("SUCCESS! Reached desired accuracy.")
with open(completed_file,'w') as f:
f.write("SUCCESS\n")
elif task_status == TrainExitStatus.reached_update_limit:
print("FAIL! Reached update limit without attaining desired accuracy.")
with open(completed_file,'w') as f:
f.write("FAIL_UPDATE_LIMIT\n")
elif task_status == TrainExitStatus.error:
print("Got an error; skipping for now. See {} for details.".format(stdout_fn))
elif task_status == TrainExitStatus.nan_loss:
print("NaN loss detected; skipping for now.")
elif task_status == TrainExitStatus.interrupted:
print("Process was interrupted! Stopping now")
break
| mit | Python |
|
a423b0eb6277c260a16dc11f37088e496964cc0c | Create count-neighbours.py | aureooms/checkio | home/count-neighbours.py | home/count-neighbours.py | def checkio ( data ) :
if len( data ) < 10 : return False
if not any( c in data for c in "abcdefghijklmnopqrstuvwxyz" ) : return False
if not any( c in data for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ" ) : return False
if not any( c in data for c in "1234567890" ) : return False
return True
#Some hints
#Just check all conditions
if __name__ == '__main__':
#These "asserts" using only for self-checking and not necessary for auto-testing
assert checkio('A1213pokl') == False, "1st example"
assert checkio('bAse730onE4') == True, "2nd example"
assert checkio('asasasasasasasaas') == False, "3rd example"
assert checkio('QWERTYqwerty') == False, "4th example"
assert checkio('123456123456') == False, "5th example"
assert checkio('QwErTy911poqqqq') == True, "6th example"
| agpl-3.0 | Python |
|
557d21ffbbbf5cb8a452d6bc0b4c013daf8eabdc | Add new migration | GoogleCloudPlatform/avocano,GoogleCloudPlatform/avocano,GoogleCloudPlatform/avocano,GoogleCloudPlatform/avocano | server/store/migrations/0003_alter_testimonial_reviewer_location.py | server/store/migrations/0003_alter_testimonial_reviewer_location.py | # Generated by Django 4.1.3 on 2022-11-14 02:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("store", "0002_initial"),
]
operations = [
migrations.AlterField(
model_name="testimonial",
name="reviewer_location",
field=models.CharField(max_length=100),
),
]
| apache-2.0 | Python |
|
0df48d7159841f66cc4e8dac8e9a52727c69091e | add testcase for settings() | 9nix00/cliez | test/model.py | test/model.py | # -*- coding: utf-8 -*-
import unittest
from cliez.conf import settings, Settings
class Model(object):
config_none = settings()
def __init__(self):
self.config = settings()
pass
pass
class ModelTestCase(unittest.TestCase):
def setUp(self):
Settings.bind('cliez.conf')
pass
def test_ok(self):
a = Model()
self.assertEqual(None, a.config_none)
self.assertEqual(None, a.config.PACKAGE_ROOT)
pass
pass
| mit | Python |
|
bb0e8032d325d2fd015a53d4513d632d12e8afb3 | Create pset2part3.py | somabc/6.00.1x,freckleboy/6.00.1x | pset2part3.py | pset2part3.py | balance = 270472
annualInterestRate = 0.21
# test case 1
# balance = 320000
# annualInterestRate = 0.2
# Lowest Payment: 29157.09
# test case 2
# balance = 999999
# annualInterestRate = 0.18
# Lowest Payment: 90325.03
epsilon = 0.01
lower = balance / 12
upper = balance * ((1 + annualInterestRate / 12.0) ** 12) / 12.0
ans = (lower + upper) / 2.0
def yearEndBalance(monthlyPayment):
'''
Calculates year end balance given a monthly payment
as an argument. monthlyPayment can be int or float '''
myBalance = balance
for m in range(12):
interest = (myBalance - monthlyPayment) * annualInterestRate / 12.0
myBalance = myBalance + interest - monthlyPayment
return myBalance
while abs(yearEndBalance(ans)) >= epsilon:
# print("lower = " + str(lower) + " upper = " + str(upper) + " ans = " + str(ans))
if yearEndBalance(ans) < 0:
upper = ans
else:
lower = ans
ans = (lower + upper) / 2.0
print ("Lowest Payment: " + str(round(ans, 2)))
| mit | Python |
|
3f64c94f51698ab69fc1996d73a04bde58da9052 | Update build script. | qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv | client/scripts/build-plugin.py | client/scripts/build-plugin.py | import os, sys
import ue4config
import ue4util, gitutil, shutil, uploadutil
plugin_version = gitutil.get_short_version('.')
plugin_output_folder = os.path.abspath('./unrealcv-%s' % plugin_version)
plugin_output_folder = ue4util.get_real_path(plugin_output_folder)
def build_plugin():
UAT_script = ue4config.conf['UATScript']
if not os.path.isfile(UAT_script):
print('Can not find Automation Script of UE4 %s' % UAT_script)
print('Please set UnrealEnginePath in ue4config.py correctly first')
return False
else:
if gitutil.is_dirty(os.path.abspath('.')):
print 'Error: uncommited changes of this repo exist'
return False
plugin_file = os.path.abspath('../../UnrealCV.uplugin')
plugin_file = ue4util.get_real_path(plugin_file)
UAT_script = UAT_script.replace(' ', '\ ')
cmd = '%s BuildPlugin -plugin=%s -package=%s -rocket -targetplatforms=Win64+Linux' % (UAT_script, plugin_file, plugin_output_folder)
print(cmd)
os.system(cmd)
# Clean up intermediate files
intermediate_folder = os.path.join(plugin_output_folder, 'Intermediate')
print 'Delete intermediate folder %s' % intermediate_folder
shutil.rmtree(intermediate_folder)
return True
def output_plugin(output_conf):
type = output_conf['Type']
upload_handlers = dict(
scp = uploadutil.upload_scp,
s3 = uploadutil.upload_s3,
)
upload_handlers[type](output_conf, [plugin_output_folder], '.')
if __name__ == '__main__':
if build_plugin():
output_confs = ue4config.conf['PluginOutput']
for conf in output_confs:
print conf['Type']
output_plugin(conf)
| import os, sys
import ue4config
import ue4util, gitutil, shutil, uploadutil
plugin_version = gitutil.get_short_version('.')
plugin_output_folder = os.path.abspath('./unrealcv-%s' % plugin_version)
def build_plugin():
UAT_script = ue4config.conf['UATScript']
if not os.path.isfile(UAT_script):
print('Can not find Automation Script of UE4 %s' % UAT_script)
print('Please set UnrealEnginePath in ue4config.py correctly first')
return False
else:
if gitutil.is_dirty(os.path.abspath('.')):
print 'Error: uncommited changes of this repo exist'
return False
plugin_file = os.path.abspath('../../UnrealCV.uplugin')
plugin_file = ue4util.get_real_path(plugin_file)
UAT_script = UAT_script.replace(' ', '\ ')
cmd = '%s BuildPlugin -plugin=%s -package=%s -rocket -targetplatforms=Win64+Linux' % (UAT_script, plugin_file, plugin_output_folder)
print(cmd)
os.system(cmd)
# Clean up intermediate files
intermediate_folder = os.path.join(plugin_output_folder, 'Intermediate')
print 'Delete intermediate folder %s' % intermediate_folder
shutil.rmtree(intermediate_folder)
return True
def output_plugin(output_conf):
type = output_conf['Type']
upload_handlers = dict(
scp = uploadutil.upload_scp,
s3 = uploadutil.upload_s3,
)
upload_handlers[type](output_conf, [plugin_output_folder], '.')
if __name__ == '__main__':
if build_plugin():
output_confs = ue4config.conf['PluginOutput']
for conf in output_confs:
print conf['Type']
output_plugin(conf)
| mit | Python |
1765ac3a12ea2a56b4e25e05cf1f1b531de5b2cf | Add External Temperature Probe from OpenWeather | mattcongy/piprobe | pyexternal.py | pyexternal.py | #!/usr/bin/env python
# Get External Temperature from OpenWeatherMap
# External informations are :
# - temperature
# - humidity
# - pressure
# - precipitation volume (each 3h)
import urllib.request
import json
import pyowm
from datetime import datetime
from pyserial import pySerial
from imports.pyTemperature import pyTemperature
BASE_URL = "http://api.openweathermap.org/data/2.5/weather?q="
DEFAULT_CITY = "Meyreuil, France"
API_KEY = "4ca5e2bebb63f72d4cc5564300cf68d5"
class py_external(object):
def __init__(self):
super(py_external, self).__init__()
self.pyTemperature = None
def getDataAPI(self):
owm = pyowm.OWM(API_KEY)
#observation = owm.weather_at_place(DEFAULT_CITY,'accurate')
observation = owm.weather_at_id(2994068)
print(observation)
if observation is not None:
w = observation.get_weather()
w_temp = w.get_temperature(unit='celsius')
w_hum = w.get_humidity()
w_pres = w.get_pressure()
w_prec = w.get_rain()
#print(w_prec)
l = observation.get_location()
#print(l.get_ID())
#print(l.get_name())
#print(l.get_lat())
#print(l.get_lon())
#pyTemperature Constructor (self, date = datetime.now(), temp=None,pressure=None,humidity=None,precicipationVol=None):
dateNow = datetime.now()
self.pyTemperature = pyTemperature(dateNow,w_temp['temp'],w_pres['press'],w_hum)
#print("Temperature at pyExternal")
#self.pyTemperature.printTemperature()
def getPyTemperature(self):
return self.pyTemperature
def setDate(self,newDate):
self.date = newDate
def setPressure(self,newPressure):
self.pressure = newPressure
def setHumidity(self,newHumidity):
self.humidity = newHumidity
| mit | Python |
|
9a37f573aa985b3bd876c2df4c3f83670fddab42 | add script to set script permissions | SublimeText/NSIS | chmod.py | chmod.py | import os, stat, sublime, sublime_plugin
# Configuration
pkg = 'NSIS'
script = sublime.packages_path() + '/' + pkg + '/nsis_build.sh'
def plugin_loaded():
from package_control import events
# chmod +x <script>
if (events.install(pkg) or events.post_upgrade(pkg)) and os.name is 'posix' or 'mac':
st = os.stat(script)
os.chmod(script, st.st_mode | stat.S_IEXEC) | apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.