Dataset schema (field: type, observed value range):
- commit: string, fixed length 40
- subject: string, length 4 to 1.73k
- repos: string, length 5 to 127k
- old_file: string, length 2 to 751
- new_file: string, length 2 to 751
- new_contents: string, length 1 to 8.98k
- old_contents: string, length 0 to 6.59k
- license: string, 13 distinct values
- lang: string, 23 distinct values
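For orientation, here is a minimal sketch of one record of this schema as a Python dict, using values from the first row below; the validation helper and the EXPECTED_FIELDS set are illustrative additions, not part of the dataset.

# Illustrative record layout for the schema above; values taken from the
# first row below, long fields elided.
EXPECTED_FIELDS = {'commit', 'subject', 'repos', 'old_file', 'new_file',
                   'new_contents', 'old_contents', 'license', 'lang'}

record = {
    'commit': 'e660953c1df2dc9de6b3038e4ddb1d77768b2b51',  # 40-char hash
    'subject': 'Correct pyhande dependencies (broken for some time)',
    'repos': 'hande-qmc/hande',            # comma-separated fork list
    'old_file': 'tools/pyhande/setup.py',
    'new_file': 'tools/pyhande/setup.py',
    'new_contents': '...',                 # file after the commit
    'old_contents': '...',                 # file before the commit
    'license': 'lgpl-2.1',
    'lang': 'Python',
}

assert set(record) == EXPECTED_FIELDS
assert len(record['commit']) == 40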
e660953c1df2dc9de6b3038e4ddb1d77768b2b51
Correct pyhande dependencies (broken for some time)
hande-qmc/hande
tools/pyhande/setup.py
tools/pyhande/setup.py
from distutils.core import setup

setup(
    name='pyhande',
    version='0.1',
    author='HANDE developers',
    packages=('pyhande',),
    license='Modified BSD license',
    description='Analysis framework for HANDE calculations',
    long_description=open('README.rst').read(),
    install_requires=['numpy', 'scipy', 'pandas', 'pyblock', 'matplotlib'],
)
from distutils.core import setup

setup(
    name='pyhande',
    version='0.1',
    author='HANDE developers',
    packages=('pyhande',),
    license='Modified BSD license',
    description='Analysis framework for HANDE calculations',
    long_description=open('README.rst').read(),
    requires=['numpy', 'pandas (>= 0.13)', 'pyblock',],
)
lgpl-2.1
Python
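The fix above swaps distutils' declarative `requires` metadata for `install_requires`. As a side note, `install_requires` is honored by setuptools rather than plain distutils, whose own `requires` field is metadata only and never installs anything; a minimal sketch of the working pattern follows, with placeholder package names.

# Minimal sketch, assuming setuptools is available; plain distutils
# silently ignores install_requires.
from setuptools import setup

setup(
    name='example-package',   # placeholder name
    version='0.1',
    install_requires=['numpy', 'scipy', 'pandas'],
)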
b277ca357728010c9d763c95cc459540821802c0
Update dice loss
analysiscenter/dataset
dataset/models/tf/losses/__init__.py
dataset/models/tf/losses/__init__.py
""" Contains custom losses """ import tensorflow as tf from ..layers import flatten def dice(targets, predictions, weights=1.0, label_smoothing=0, scope=None, loss_collection=tf.GraphKeys.LOSSES, reduction=tf.losses.Reduction.SUM_BY_NONZERO_WEIGHTS): """ Dice coefficient Parameters ---------- targets : tf.Tensor tensor with target values predictions : tf.Tensor tensor with predicted logits Returns ------- Tensor of the same type as targets. If reduction is NONE, this has the same shape as targets; otherwise, it is scalar. """ e = 1e-6 predictions = tf.sigmoid(predictions) axis = tuple(range(1, targets.shape.ndims)) if label_smoothing > 0: num_classes = targets.shape[-1] targets = targets * (1 - label_smoothing) + label_smoothing / num_classes intersection = tf.reduce_sum(targets * predictions, axis=axis) targets = tf.reduce_sum(targets, axis=axis) predictions = tf.reduce_sum(predictions, axis=axis) loss = -(2. * intersection + e) / (targets + predictions + e) loss = tf.losses.compute_weighted_loss(loss, weights, scope, loss_collection, reduction) return loss
""" Contains custom losses """ import tensorflow as tf from ..layers import flatten def dice(targets, predictions): """ Dice coefficient Parameters ---------- targets : tf.Tensor tensor with target values predictions : tf.Tensor tensor with predicted values Returns ------- average loss : tf.Tensor with a single element """ e = 1e-6 intersection = flatten(targets * predictions) loss = -tf.reduce_mean((2. * intersection + e) / (flatten(targets) + flatten(predictions) + e)) return loss
apache-2.0
Python
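To make the dice update above concrete, here is a small NumPy check of the soft-Dice quantity both versions compute, on toy binary masks; this is an illustration only, not part of the dataset row.

import numpy as np

# toy 2x2 binary masks
targets = np.array([[1., 1.], [0., 0.]])
preds = np.array([[1., 0.], [0., 0.]])

e = 1e-6
intersection = (targets * preds).sum()
dice = (2. * intersection + e) / (targets.sum() + preds.sum() + e)
print(dice)  # ~0.6667, i.e. 2*|A∩B| / (|A| + |B|) = 2*1 / (2 + 1)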
4c9b47052c2c66671230f33ea84459e02b3b2f06
Update Unit_Testing2.py
ZachMG/unit_testing
Unit_Testing2.py
Unit_Testing2.py
from unit_testing import *
import unittest


class UnitTests(unittest.TestCase):
    def setUp(self):
        print('setUp()...')
        self.hash1 = Hash('1234')
        self.email1 = Email('[email protected]')

    def test(self):
        print('testing hash...')
        self.assertEqual(self.hash1, self.hash1)  # failed
        self.assertNotEqual(self.hash1, Hash('123'))
        self.assertRaises(InvalidPassword, Hash, '1 ')  # failed
        # self.assertEqual(length of Hash for two different passwords)
        print('testing email...')
        self.assertEqual(str(self.email1), '[email protected]')
        self.assertRaises(InvalidEmail, Email, '@@')  # failed
        self.assertRaises(InvalidEmail, Email, '@gmail.com')  # failed
        print('testing social...')
        self.assertRaises(InvalidSocial, SS, '123456789')
        self.assertRaises(InvalidSocial, SS, '1234-567-89')  # failed
        self.assertRaises(InvalidSocial, SS, '-')  # failed
        self.assertRaises(InvalidSocial, SS, '1234-')  # failed

    def tearDown(self):
        print('tearDown()...')
        del self.hash1
        del self.hash2
        del self.hash3
        del self.email1
from unit_testing import *
import unittest


class UnitTests(unittest.TestCase):
    def setUp(self):
        print('setUp()...')
        self.hash1 = Hash('1234')
        self.hash2 = Hash('1234')
        self.hash3 = Hash('123')
        self.email1 = Email('P@V')

    def test(self):
        print('testing hash...')
        self.assertEqual(self.hash1, self.hash2)  # failed
        self.assertNotEqual(self.hash1, self.hash3)
        self.assertRaises(InvalidPassword, Hash, '1')
        print('testing email...')
        self.assertEqual(str(self.email1), 'P@V')
        self.assertRaises(InvalidEmail, Email, 'thing')
        self.assertRaises(InvalidEmail, Email, '@gmail.com')  # failed
        print('testing social...')
        self.assertRaises(InvalidSocial, SS, '123456789')
        self.assertRaises(InvalidSocial, SS, '1234-567-89')  # failed
        self.assertRaises(InvalidSocial, SS, '-')  # failed
        self.assertRaises(InvalidSocial, SS, '1234')

    def tearDown(self):
        print('tearDown()...')
        del self.hash1
        del self.hash2
        del self.hash3
        del self.email1
mit
Python
19def5d347a725b8200f1d29e1863e3d702bdc04
hide some test fixtures from `spec`
tek/amino
unit/case_spec.py
unit/case_spec.py
from amino import ADT
from amino.case import CaseRec, Term
from amino.test.spec_spec import Spec


class _Num(ADT['_Num']):
    pass


class _Int(_Num):
    def __init__(self, i: int) -> None:
        self.i = i


class _Float(_Num):
    def __init__(self, f: float) -> None:
        self.f = f


class _Prod(_Num):
    def __init__(self, p: int) -> None:
        self.p = p


class _rec(CaseRec[int, int], alg=_Num):
    def __init__(self, base: int) -> None:
        self.base = base

    def _int(self, n: _Int) -> int:
        return self(_Prod(self.base * n.i))

    def _float(self, n: _Float) -> int:
        return self(_Int(int(n)))

    def _prod(self, n: _Prod) -> int:
        return Term(n.p + 7)


class CaseSpec(Spec):
    def _rec(self) -> None:
        r = _rec(5)(_Int(6)).eval()
        r.should.equal(37)


__all__ = ('CaseSpec',)
from amino import ADT
from amino.case import CaseRec, Term
from amino.test.spec_spec import Spec


class Num(ADT['Num']):
    pass


class Int(Num):
    def __init__(self, i: int) -> None:
        self.i = i


class Float(Num):
    def __init__(self, f: float) -> None:
        self.f = f


class Prod(Num):
    def __init__(self, p: int) -> None:
        self.p = p


class rec(CaseRec[int, int], alg=Num):
    def __init__(self, base: int) -> None:
        self.base = base

    def int(self, n: Int) -> int:
        return self(Prod(self.base * n.i))

    def float(self, n: Float) -> int:
        return self(Int(int(n)))

    def prod(self, n: Prod) -> int:
        return Term(n.p + 7)


class CaseSpec(Spec):
    def test(self) -> None:
        r = rec(5)(Int(6)).eval()
        r.should.equal(37)


__all__ = ('CaseSpec',)
mit
Python
4089730950d6005e257c20e6926000073fd41b33
Enable Tensor equality for 2.0
tensorflow/tensorflow,cxxgtxy/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,renyi533/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,arborh/tensorflow,gunan/tensorflow,aldian/tensorflow,Intel-tensorflow/tensorflow,sarvex/tensorflow,aam-at/tensorflow,frreiss/tensorflow-fred,davidzchen/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,adit-chandra/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,ppwwyyxx/tensorflow,xzturn/tensorflow,chemelnucfin/tensorflow,annarev/tensorflow
tensorflow/python/compat/v2_compat.py
tensorflow/python/compat/v2_compat.py
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Switching v2 features on and off."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python import tf2
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import control_flow_v2_toggles
from tensorflow.python.ops import variable_scope
from tensorflow.python.util.tf_export import tf_export


@tf_export(v1=["enable_v2_behavior"])
def enable_v2_behavior():
  """Enables TensorFlow 2.x behaviors.

  This function can be called at the beginning of the program (before `Tensors`,
  `Graphs` or other structures have been created, and before devices have been
  initialized. It switches all global behaviors that are different between
  TensorFlow 1.x and 2.x to behave as intended for 2.x.

  This function is called in the main TensorFlow `__init__.py` file, user should
  not need to call it, except during complex migrations.
  """
  # TF2 behavior is enabled if either 1) enable_v2_behavior() is called or
  # 2) the TF2_BEHAVIOR=1 environment variable is set. In the latter case,
  # the modules below independently check if tf2.enabled().
  tf2.enable()
  ops.enable_eager_execution()
  tensor_shape.enable_v2_tensorshape()  # Also switched by tf2
  variable_scope.enable_resource_variables()
  ops.enable_tensor_equality()
  # Enables TensorArrayV2 and control flow V2.
  control_flow_v2_toggles.enable_control_flow_v2()


@tf_export(v1=["disable_v2_behavior"])
def disable_v2_behavior():
  """Disables TensorFlow 2.x behaviors.

  This function can be called at the beginning of the program (before `Tensors`,
  `Graphs` or other structures have been created, and before devices have been
  initialized. It switches all global behaviors that are different between
  TensorFlow 1.x and 2.x to behave as intended for 1.x.

  User can call this function to disable 2.x behavior during complex migrations.
  """
  tf2.disable()
  ops.disable_eager_execution()
  tensor_shape.disable_v2_tensorshape()  # Also switched by tf2
  variable_scope.disable_resource_variables()
  ops.disable_tensor_equality()
  # Disables TensorArrayV2 and control flow V2.
  control_flow_v2_toggles.disable_control_flow_v2()
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Switching v2 features on and off."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python import tf2
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import control_flow_v2_toggles
from tensorflow.python.ops import variable_scope
from tensorflow.python.util.tf_export import tf_export


@tf_export(v1=["enable_v2_behavior"])
def enable_v2_behavior():
  """Enables TensorFlow 2.x behaviors.

  This function can be called at the beginning of the program (before `Tensors`,
  `Graphs` or other structures have been created, and before devices have been
  initialized. It switches all global behaviors that are different between
  TensorFlow 1.x and 2.x to behave as intended for 2.x.

  This function is called in the main TensorFlow `__init__.py` file, user should
  not need to call it, except during complex migrations.
  """
  # TF2 behavior is enabled if either 1) enable_v2_behavior() is called or
  # 2) the TF2_BEHAVIOR=1 environment variable is set. In the latter case,
  # the modules below independently check if tf2.enabled().
  tf2.enable()
  ops.enable_eager_execution()
  tensor_shape.enable_v2_tensorshape()  # Also switched by tf2
  variable_scope.enable_resource_variables()
  # Enables TensorArrayV2 and control flow V2.
  control_flow_v2_toggles.enable_control_flow_v2()


@tf_export(v1=["disable_v2_behavior"])
def disable_v2_behavior():
  """Disables TensorFlow 2.x behaviors.

  This function can be called at the beginning of the program (before `Tensors`,
  `Graphs` or other structures have been created, and before devices have been
  initialized. It switches all global behaviors that are different between
  TensorFlow 1.x and 2.x to behave as intended for 1.x.

  User can call this function to disable 2.x behavior during complex migrations.
  """
  tf2.disable()
  ops.disable_eager_execution()
  tensor_shape.disable_v2_tensorshape()  # Also switched by tf2
  variable_scope.disable_resource_variables()
  # Disables TensorArrayV2 and control flow V2.
  control_flow_v2_toggles.disable_control_flow_v2()
apache-2.0
Python
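The only functional difference between the two versions above is the added `ops.enable_tensor_equality()` / `ops.disable_tensor_equality()` pair. A short sketch of the behavior this toggles, assuming a TF 1.x build that ships `tf.compat.v1.enable_v2_behavior`:

import tensorflow as tf

tf.compat.v1.enable_v2_behavior()  # includes enable_tensor_equality() per the diff

a = tf.constant([1, 2, 3])
b = tf.constant([1, 0, 3])
# with tensor equality enabled, == compares elementwise instead of by identity
print(a == b)  # tf.Tensor([ True False  True], shape=(3,), dtype=bool)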
e39c2e0c3dae39ee380a98a1aa662d14d1a1191e
Add new keyfile
Code4SA/mma-dexter
dexter/config/celeryconfig.py
dexter/config/celeryconfig.py
from celery.schedules import crontab

# uses AWS creds from the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env variables
BROKER_URL = 'sqs://'
BROKER_TRANSPORT_OPTIONS = {
    'region': 'eu-west-1',
    'polling_interval': 15 * 1,
    'queue_name_prefix': 'mma-dexter-',
    'visibility_timeout': 3600*12,
}

# all our tasks can by retried if the worker fails
CELERY_ACKS_LATE = True

CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TIMEZONE = 'Africa/Johannesburg'
CELERY_ENABLE_UTC = True

CELERYBEAT_SCHEDULE = {
    'fetch-yesterdays-feeds': {
        'schedule': crontab(hour=2, minute=0),
        'task': 'dexter.tasks.fetch_yesterdays_feeds',
    },
    'back-process-feeds': {
        'schedule': crontab(hour=11, minute=0),
        'task': 'dexter.tasks.back_process_feeds',
    },
    'fetch_yesterdays_feeds_rerun': {
        'schedule': crontab(hour=15, minute=0),
        'task': 'dexter.tasks.back_process_feeds',
    },
    # 'backfill-taxonomies': {
    #     'schedule': crontab(hour=21, minute=0),
    #     'task': 'dexter.tasks.backfill_taxonomies',
    # },
}
from celery.schedules import crontab

# uses AWS creds from the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env variables
BROKER_URL = 'sqs://'
BROKER_TRANSPORT_OPTIONS = {
    'region': 'eu-west-1',
    'polling_interval': 15 * 1,
    'queue_name_prefix': 'mma-dexter-',
    'visibility_timeout': 3600*12,
}

# all our tasks can by retried if the worker fails
CELERY_ACKS_LATE = True

CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TIMEZONE = 'Africa/Johannesburg'
CELERY_ENABLE_UTC = True

CELERYBEAT_SCHEDULE = {
    'fetch-yesterdays-feeds': {
        'schedule': crontab(hour=1, minute=0),
        'task': 'dexter.tasks.fetch_yesterdays_feeds',
    },
    'back-process-feeds': {
        'schedule': crontab(hour=11, minute=0),
        'task': 'dexter.tasks.back_process_feeds',
    },
    'fetch_yesterdays_feeds_rerun': {
        'schedule': crontab(hour=12, minute=0),
        'task': 'dexter.tasks.back_process_feeds',
    },
    # 'backfill-taxonomies': {
    #     'schedule': crontab(hour=21, minute=0),
    #     'task': 'dexter.tasks.backfill_taxonomies',
    # },
}
apache-2.0
Python
c4f7f2025a6089ec0ddcb190eaf4c020804b384b
make the call to core commands more explicit
eugenesvk/StatusBarExtendedF,kek91/StatusBarExtended
toggleselection/__init__.py
toggleselection/__init__.py
# Override commands that toggle item selection to automatically compute and instantly display
# combined filesize for selected files and the number of selected folders/files
from fman import DirectoryPaneCommand, DirectoryPaneListener, load_json, save_json, PLATFORM
from core.commands.util import is_hidden
from fman.url import splitscheme
import json

from statusbarextended import StatusBarExtended


class _CorePaneCommand(DirectoryPaneCommand):  # copy from core/commands/__init__.py
    def select_all(self):
        self.pane.select_all()

    def deselect(self):
        self.pane.clear_selection()

    def move_cursor_down(self, toggle_selection=False):
        self.pane.move_cursor_down(toggle_selection)

    def move_cursor_up(self, toggle_selection=False):
        self.pane.move_cursor_up(toggle_selection)

    def move_cursor_page_up(self, toggle_selection=False):
        self.pane.move_cursor_page_up(toggle_selection)

    def move_cursor_page_down(self, toggle_selection=False):
        self.pane.move_cursor_page_down(toggle_selection)

    def move_cursor_home(self, toggle_selection=False):
        self.pane.move_cursor_home(toggle_selection)

    def move_cursor_end(self, toggle_selection=False):
        self.pane.move_cursor_end(toggle_selection)


class CommandEmpty():  # to avoid duplicate command execution (and "return '', args" hangs)
    def __call__(self):
        pass


class SelectionOverride(DirectoryPaneListener):
    def on_command(self, command_name, args):
        # NOTE: the condition of this first branch is missing from the source
        # record; 'toggle_selection' is a reconstruction from context
        if command_name == 'toggle_selection':
            self.show_selected_files()
            return 'command_empty', args
        elif command_name in ('select_all', 'deselect'):
            getattr(_CorePaneCommand, command_name)(self)
            self.show_selected_files()
            return 'command_empty', args
        elif command_name in (  # commands that can pass a 'toggle_selection' argument
                'move_cursor_down', 'move_cursor_up',
                'move_cursor_page_down', 'move_cursor_page_up',
                'move_cursor_home', 'move_cursor_end'):
            getattr(_CorePaneCommand, command_name)(self, args)
            self.show_selected_files()
            return 'command_empty', args

    def show_selected_files(self):
        statusBarExtendedEnabled = load_json('StatusBarExtended.json')
        if statusBarExtendedEnabled:
            statusBarExtendedEnabledJson = json.loads(statusBarExtendedEnabled)
            if statusBarExtendedEnabledJson['enabled'] == True:
                StatusBarExtended.show_selected_files(self)
# Override commands that toggle item selection to automatically compute and instantly display
# combined filesize for selected files and the number of selected folders/files
from fman import DirectoryPaneListener, load_json
import json

from statusbarextended import StatusBarExtended


class CommandEmpty():  # to avoid duplicate command execution (and "return '', args" hangs)
    def __call__(self):
        pass


class SelectionOverride(DirectoryPaneListener):
    def on_command(self, command_name, args):
        if command_name in ('select_all'):  # def ^A
            self.pane.select_all()
            self.show_selected_files()
            return 'command_empty', args
        elif command_name in ('deselect'):  # def ^D
            self.pane.clear_selection()
            self.show_selected_files()
            return 'command_empty', args
        elif command_name in (  # commands that can pass a 'toggle_selection' argument
                'move_cursor_down', 'move_cursor_up',
                'move_cursor_page_down', 'move_cursor_page_up',
                'move_cursor_home', 'move_cursor_end'):
            if args.get('toggle_selection'):  # select item → update statusbar → pass False arg
                file_under_cursor = self.pane.get_file_under_cursor()
                if file_under_cursor:
                    self.pane.toggle_selection(file_under_cursor)
                    self.show_selected_files()
                new_args = dict(args)
                new_args['toggle_selection'] = False
                return command_name, new_args

    def show_selected_files(self):
        statusBarExtendedEnabled = load_json('StatusBarExtended.json')
        if statusBarExtendedEnabled:
            statusBarExtendedEnabledJson = json.loads(statusBarExtendedEnabled)
            if statusBarExtendedEnabledJson['enabled'] == True:
                StatusBarExtended.show_selected_files(self)
mit
Python
9a4e2f88eba716ef607b8c476509cac5e58475f7
Update mapper_lowercase.py
dragoon/kilogram
mapreduce/filter/mapper_lowercase.py
mapreduce/filter/mapper_lowercase.py
#!/usr/bin/env python
import sys

# Open just for read
dbpediadb = set(open('dbpedia_labels.txt').read().splitlines())
dbpediadb_lower = set(x.lower() for x in open('dbpedia_labels.txt').read().splitlines())

for line in sys.stdin:
    # remove leading and trailing whitespace
    line = line.strip()
    # split the line into words
    ngram, num = line.split('\t')

    if ngram in dbpediadb:
        print '%s\t%s|--|%s' % (ngram.lower(), ngram.replace(' ', '_'), num)
    if ngram.lower() in dbpediadb_lower:
        print '%s\t%s|--|%s' % (ngram.lower(), 'lower', num)
#!/usr/bin/env python
import sys

# Open just for read
dbpediadb = set(open('dbpedia_labels.txt').read().splitlines())
dbpediadb_lower = set(x.lower() for x in open('dbpedia_labels.txt').read().splitlines())

for line in sys.stdin:
    # remove leading and trailing whitespace
    line = line.strip()
    # split the line into words
    ngram, num = line.split('\t')

    if ngram in dbpediadb:
        print '%s\t%s|--|%s' % (ngram.lower(), ngram.replace(' ', '_'), num)
    if ngram in dbpediadb_lower:
        print '%s\t%s|--|%s' % (ngram.lower(), 'lower', num)
apache-2.0
Python
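The one-token change above (`ngram` to `ngram.lower()`) makes the second lookup case-insensitive against the pre-lowered label set; a toy illustration, separate from the dataset row:

labels = {'Paris', 'New York'}
labels_lower = {x.lower() for x in labels}

ngram = 'Paris'
print(ngram in labels_lower)          # False: raw ngram, old behaviour
print(ngram.lower() in labels_lower)  # True: lowered lookup, updated behaviour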
84ce27775b7e04955a15a0eb1e277db3e447b81f
fix SlidingCloth
Aiacos/DevPyLib
mayaLib/rigLib/utils/slidingCloth.py
mayaLib/rigLib/utils/slidingCloth.py
__author__ = 'Lorenzo Argentieri'

import pymel.core as pm
from mayaLib.rigLib.utils import skin
from mayaLib.rigLib.utils import deform


class SlidingCloth():
    def __init__(self, mainSkinGeo, proxySkinGeo, mainClothGeo, proxyClothGeo, rigModelGrp=None):
        """
        Setup Sliding Cloth deformation
        :param mainSkinGeo: str
        :param proxySkinGeo: str
        :param mainClothGeo: str
        :param proxyClothGeo: str
        """
        if mainSkinGeo and mainClothGeo:
            self.mainSkinGeo = pm.ls(mainSkinGeo)[0]
            self.mainClothGeo = pm.ls(mainClothGeo)[0]
        else:
            print 'No valid Geo!'

        if proxySkinGeo:
            self.proxySkinGeo = pm.ls(proxySkinGeo)[0]
        else:
            print 'Make Skin proxy Geo!'

        if proxyClothGeo:
            self.proxyClothGeo = pm.ls(proxyClothGeo)[0]
        else:
            print 'Make Cloth proxy GEO!'

        # setup skin proxy geo
        skin.copyBind(self.mainSkinGeo, self.proxySkinGeo)

        # setup cloth proxy geo
        skin.copyBind(self.mainSkinGeo, self.proxyClothGeo)

        cMuscleDeformer = deform.cMuscleSystemDeformer(self.proxyClothGeo)
        cMuscleDeformer.enableRelax.set(1)
        cMuscleDeformer.relaxCompress.set(10)
        cMuscleDeformer.enableSmooth.set(1)

        shrinkWrapDeformer = deform.shrinkWrapDeformer(self.proxyClothGeo, self.proxySkinGeo)
        shrinkWrapDeformer.shapePreservationEnable.set(1)

        polySmoothDeformer = pm.polySmooth(self.proxyClothGeo)[0]

        # wrap main Cloth Geo
        wrapDeformer = deform.wrapDeformer(self.mainClothGeo, self.proxyClothGeo)
        baseObj = pm.listConnections(wrapDeformer.basePoints, source=True)[0]
        if rigModelGrp:
            pm.parent(baseObj, rigModelGrp)

        # save attribute
        self.baseObj = baseObj

    def getWrapBaseObj(self):
        return self.baseObj
__author__ = 'Lorenzo Argentieri'

import pymel.core as pm
from mayaLib.rigLib.utils import skin
from mayaLib.rigLib.utils import deform


class SlidingCloth():
    def __init__(self, mainSkinGeo, proxySkinGeo, mainClothGeo, proxyClothGeo):
        """
        Setup Sliding Cloth deformation
        :param mainSkinGeo: str
        :param proxySkinGeo: str
        :param mainClothGeo: str
        :param proxyClothGeo: str
        """
        if mainSkinGeo and mainClothGeo:
            self.mainSkinGeo = pm.ls(mainSkinGeo)[0]
            self.mainClothGeo = pm.ls(mainClothGeo)[0]
        else:
            print 'No valid Geo!'

        if proxySkinGeo:
            self.proxySkinGeo = pm.ls(proxySkinGeo)[0]
        else:
            print 'Make Skin proxy Geo!'

        if proxyClothGeo:
            self.proxyClothGeo = pm.ls(proxyClothGeo)[0]
        else:
            print 'Make Cloth proxy GEO!'

        # setup skin proxy geo
        skin.copyBind(self.mainSkinGeo, self.proxySkinGeo)

        # setup cloth proxy geo
        skin.copyBind(self.mainSkinGeo, self.proxyClothGeo)

        cMuscleDeformer = deform.cMuscleSystemDeformer(self.proxyClothGeo)
        cMuscleDeformer.enableRelax.set(1)
        cMuscleDeformer.relaxCompress.set(10)
        cMuscleDeformer.enableSmooth.set(1)

        shrinkWrapDeformer = deform.shrinkWrapDeformer(self.proxyClothGeo, self.proxySkinGeo)
        shrinkWrapDeformer.shapePreservationEnable.set(1)

        polySmoothDeformer = pm.polySmooth(self.proxyClothGeo)[0]

        # wrap main Cloth Geo
        wrapDeformer = deform.wrapDeformer(self.mainClothGeo, self.proxyClothGeo)
agpl-3.0
Python
f7060b65464b24bb16a8cf4704c68fa1348d655c
bump version
RaitoBezarius/crossbar,w1z2g3/crossbar,GoodgameStudios/crossbar,NinjaMSP/crossbar,erhuabushuo/crossbar
crossbar/crossbar/__init__.py
crossbar/crossbar/__init__.py
###############################################################################
##
##  Copyright (C) 2011-2014 Tavendo GmbH
##
##  This program is free software: you can redistribute it and/or modify
##  it under the terms of the GNU Affero General Public License, version 3,
##  as published by the Free Software Foundation.
##
##  This program is distributed in the hope that it will be useful,
##  but WITHOUT ANY WARRANTY; without even the implied warranty of
##  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
##  GNU Affero General Public License for more details.
##
##  You should have received a copy of the GNU Affero General Public License
##  along with this program. If not, see <http://www.gnu.org/licenses/>.
##
###############################################################################

__doc__ = """
Crossbar.io - Polyglot application router.

For more information, please visit:

  * Documentation: https://github.com/crossbario/crossbar/wiki
  * Homepage: http://crossbar.io/
  * Source code: https://github.com/crossbario/crossbar

Open-source licensed under the GNU Affero General Public License version 3.
Created by Tavendo GmbH. Get in contact at http://tavendo.com
"""

__version__ = "0.9.4-3"
###############################################################################
##
##  Copyright (C) 2011-2014 Tavendo GmbH
##
##  This program is free software: you can redistribute it and/or modify
##  it under the terms of the GNU Affero General Public License, version 3,
##  as published by the Free Software Foundation.
##
##  This program is distributed in the hope that it will be useful,
##  but WITHOUT ANY WARRANTY; without even the implied warranty of
##  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
##  GNU Affero General Public License for more details.
##
##  You should have received a copy of the GNU Affero General Public License
##  along with this program. If not, see <http://www.gnu.org/licenses/>.
##
###############################################################################

__doc__ = """
Crossbar.io - Polyglot application router.

For more information, please visit:

  * Documentation: https://github.com/crossbario/crossbar/wiki
  * Homepage: http://crossbar.io/
  * Source code: https://github.com/crossbario/crossbar

Open-source licensed under the GNU Affero General Public License version 3.
Created by Tavendo GmbH. Get in contact at http://tavendo.com
"""

__version__ = "0.9.4-2"
agpl-3.0
Python
c0e09993facdd76e7b1dfbab97285464f83980bb
Update version
thismachinechills/cast_convert
cast_convert/__init__.py
cast_convert/__init__.py
#!/usr/bin/env python3
__version__ = '0.1.7.17'

from .cmd import cmd as command
from .watch import *
from . import *
from .convert import *
from .media_info import *

import click


@click.command(help="Print version")
def version():
    print(__version__)


command.add_command(version)
#!/usr/bin/env python3
__version__ = '0.1.7.11'

from .cmd import cmd as command
from .watch import *
from . import *
from .convert import *
from .media_info import *

import click


@click.command(help="Print version")
def version():
    debug_print(__version__)


command.add_command(version)
agpl-3.0
Python
b5cd4ff2b02151bca966c53b80dbea8911a7a6b2
Upgrade celery.utils.encoding from kombu
ask/celery,cbrepo/celery
celery/utils/encoding.py
celery/utils/encoding.py
""" celery.utils.encoding ==================== Utilities to encode text, and to safely emit text from running applications without crashing with the infamous :exc:`UnicodeDecodeError` exception. """ from __future__ import absolute_import import sys import traceback __all__ = ["str_to_bytes", "bytes_to_str", "from_utf8", "default_encoding", "safe_str", "safe_repr"] is_py3k = sys.version_info >= (3, 0) if is_py3k: def str_to_bytes(s): if isinstance(s, str): return s.encode() return s def bytes_to_str(s): if isinstance(s, bytes): return s.decode() return s def from_utf8(s, *args, **kwargs): return s else: def str_to_bytes(s): # noqa if isinstance(s, unicode): return s.encode() return s def bytes_to_str(s): # noqa return s def from_utf8(s, *args, **kwargs): # noqa return s.encode("utf-8", *args, **kwargs) if sys.platform.startswith("java"): def default_encoding(): return "utf-8" else: def default_encoding(): # noqa return sys.getfilesystemencoding() def safe_str(s, errors="replace"): s = bytes_to_str(s) if not isinstance(s, basestring): return safe_repr(s, errors) return _safe_str(s, errors) def _safe_str(s, errors="replace"): if is_py3k: return s encoding = default_encoding() try: if isinstance(s, unicode): return s.encode(encoding, errors) return unicode(s, encoding, errors) except Exception, exc: return "<Unrepresentable %r: %r %r>" % ( type(s), exc, "\n".join(traceback.format_stack())) def safe_repr(o, errors="replace"): try: return repr(o) except Exception: return _safe_str(o, errors)
""" celery.utils.encoding ===================== Utilties to encode text, and to safely emit text from running applications without crashing with the infamous :exc:`UnicodeDecodeError` exception. """ from __future__ import absolute_import import sys import traceback __all__ = ["str_to_bytes", "bytes_to_str", "from_utf8", "default_encoding", "safe_str", "safe_repr"] is_py3k = sys.version_info >= (3, 0) if sys.version_info >= (3, 0): def str_to_bytes(s): if isinstance(s, str): return s.encode() return s def bytes_to_str(s): if isinstance(s, bytes): return s.decode() return s def from_utf8(s, *args, **kwargs): return s else: def str_to_bytes(s): # noqa return s def bytes_to_str(s): # noqa return s def from_utf8(s, *args, **kwargs): # noqa return s.encode("utf-8", *args, **kwargs) if sys.platform.startswith("java"): def default_encoding(): return "utf-8" else: def default_encoding(): # noqa return sys.getfilesystemencoding() def safe_str(s, errors="replace"): s = bytes_to_str(s) if not isinstance(s, basestring): return safe_repr(s, errors) return _safe_str(s, errors) def _safe_str(s, errors="replace"): if is_py3k: return s encoding = default_encoding() try: if isinstance(s, unicode): return s.encode(encoding, errors) return unicode(s, encoding, errors) except Exception, exc: return "<Unrepresentable %r: %r %r>" % ( type(s), exc, "\n".join(traceback.format_stack())) def safe_repr(o, errors="replace"): try: return repr(o) except Exception: return _safe_str(o, errors)
bsd-3-clause
Python
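A quick round-trip through the helpers defined above, under Python 3 semantics; this is a usage sketch, not part of the record.

from celery.utils.encoding import str_to_bytes, bytes_to_str

s = str_to_bytes('café')          # b'caf\xc3\xa9' under Python 3
assert bytes_to_str(s) == 'café'  # decoded back to the original text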
a0ff8cc15df5cd9668e11eba3b5e7406b33dcfc5
fix RemovedInDjango19Warning on django.utils.importlib
roverdotcom/celery-haystack,iXioN/celery-haystack
celery_haystack/utils.py
celery_haystack/utils.py
from django.core.exceptions import ImproperlyConfigured
try:
    from importlib import import_module
except ImportError:
    from django.utils.importlib import import_module
from django.db import connection

from haystack.utils import get_identifier

from .conf import settings


def get_update_task(task_path=None):
    import_path = task_path or settings.CELERY_HAYSTACK_DEFAULT_TASK
    module, attr = import_path.rsplit('.', 1)
    try:
        mod = import_module(module)
    except ImportError as e:
        raise ImproperlyConfigured('Error importing module %s: "%s"' %
                                   (module, e))
    try:
        Task = getattr(mod, attr)
    except AttributeError:
        raise ImproperlyConfigured('Module "%s" does not define a "%s" '
                                   'class.' % (module, attr))
    return Task()


def enqueue_task(action, instance):
    """
    Common utility for enqueing a task for the given action and
    model instance.
    """
    identifier = get_identifier(instance)
    kwargs = {}
    if settings.CELERY_HAYSTACK_QUEUE:
        kwargs['queue'] = settings.CELERY_HAYSTACK_QUEUE
    if settings.CELERY_HAYSTACK_COUNTDOWN:
        kwargs['countdown'] = settings.CELERY_HAYSTACK_COUNTDOWN
    task = get_update_task()
    if hasattr(connection, 'on_commit'):
        connection.on_commit(
            lambda: task.apply_async((action, identifier), {}, **kwargs)
        )
    else:
        task.apply_async((action, identifier), {}, **kwargs)
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from django.db import connection

from haystack.utils import get_identifier

from .conf import settings


def get_update_task(task_path=None):
    import_path = task_path or settings.CELERY_HAYSTACK_DEFAULT_TASK
    module, attr = import_path.rsplit('.', 1)
    try:
        mod = import_module(module)
    except ImportError as e:
        raise ImproperlyConfigured('Error importing module %s: "%s"' %
                                   (module, e))
    try:
        Task = getattr(mod, attr)
    except AttributeError:
        raise ImproperlyConfigured('Module "%s" does not define a "%s" '
                                   'class.' % (module, attr))
    return Task()


def enqueue_task(action, instance):
    """
    Common utility for enqueing a task for the given action and
    model instance.
    """
    identifier = get_identifier(instance)
    kwargs = {}
    if settings.CELERY_HAYSTACK_QUEUE:
        kwargs['queue'] = settings.CELERY_HAYSTACK_QUEUE
    if settings.CELERY_HAYSTACK_COUNTDOWN:
        kwargs['countdown'] = settings.CELERY_HAYSTACK_COUNTDOWN
    task = get_update_task()
    if hasattr(connection, 'on_commit'):
        connection.on_commit(
            lambda: task.apply_async((action, identifier), {}, **kwargs)
        )
    else:
        task.apply_async((action, identifier), {}, **kwargs)
bsd-3-clause
Python
7d89c9c3229ebd7d8b56edf211e7020c3fad29a0
add support for msgpack
SlimToolbox/SlimApi
utils/encoders.py
utils/encoders.py
# Copyright (C) 2015 SlimRoms Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

SUPPORTED_ENCODERS = {}

import json

def json_encode(obj, pretty=False):
    kwargs = {}
    if pretty:
        kwargs['indent'] = 4
        kwargs['separators'] = (',', ': ')
    return json.dumps(obj, **kwargs).replace("</", "<\\/")

SUPPORTED_ENCODERS.update({
    'json': {
        'headers': (("Content-Type", "application/json; charset=UTF-8"),),
        'encoder': json_encode
    }
})

try:
    import xmltodict
except ImportError:
    pass
else:
    def xml_encode(obj, pretty=False):
        if len(obj) == 1:
            obj = {'root': obj}
        return xmltodict.unparse(obj, pretty=pretty)

    SUPPORTED_ENCODERS.update({
        'xml': {
            'headers': (("Content-Type", "application/xml; charset=UTF-8"),),
            'encoder': xml_encode
        }
    })

try:
    import yaml
except ImportError:
    pass
else:
    def yaml_encode(obj, pretty=False):
        yaml.safe_dump(obj, default_flow_style=(not pretty))

    SUPPORTED_ENCODERS.update({
        'yaml': {
            'headers': (("Content-Type", "text/yaml; charset=UTF-8"),),
            'encoder': yaml_encode
        }
    })

try:
    try:
        import msgpack
    except ImportError:
        import umsgpack as msgpack
except ImportError:
    pass
else:
    def msgpack_encode(obj, pretty=False):
        return msgpack.dumps(obj)

    SUPPORTED_ENCODERS.update({
        'msgpack': {
            'headers': (("Content-Type", "application/msgpack; charset=UTF-8"),),
            'encoder': msgpack_encode
        }
    })
# Copyright (C) 2015 SlimRoms Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

SUPPORTED_ENCODERS = {}

import json

def json_encode(obj, pretty=False):
    kwargs = {}
    if pretty:
        kwargs['indent'] = 4
        kwargs['separators'] = (',', ': ')
    return json.dumps(obj, **kwargs).replace("</", "<\\/")

SUPPORTED_ENCODERS.update({
    'json': {
        'headers': (("Content-Type", "application/json; charset=UTF-8"),),
        'encoder': json_encode
    }
})

try:
    import xmltodict
except ImportError:
    pass
else:
    def xml_encode(obj, pretty=False):
        if len(obj) == 1:
            obj = {'root': obj}
        return xmltodict.unparse(obj, pretty=pretty)

    SUPPORTED_ENCODERS.update({
        'xml': {
            'headers': (("Content-Type", "application/xml; charset=UTF-8"),),
            'encoder': xml_encode
        }
    })

try:
    import yaml
except ImportError:
    pass
else:
    def yaml_encode(obj, pretty=False):
        yaml.safe_dump(obj, default_flow_style=(not pretty))

    SUPPORTED_ENCODERS.update({
        'yaml': {
            'headers': (("Content-Type", "text/yaml; charset=UTF-8"),),
            'encoder': yaml_encode
        }
    })
apache-2.0
Python
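A usage sketch for the SUPPORTED_ENCODERS registry built above; the encode dispatch helper is hypothetical, not part of the module.

# hypothetical dispatch helper over the SUPPORTED_ENCODERS registry
def encode(obj, fmt='json', pretty=False):
    entry = SUPPORTED_ENCODERS.get(fmt)
    if entry is None:
        raise ValueError('unsupported format: %s' % fmt)
    return entry['headers'], entry['encoder'](obj, pretty=pretty)

headers, body = encode({'status': 'ok'}, fmt='json', pretty=True)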
745d3fae6b6055c731a47c13ef77e1faf1a4b7e5
upgrade elasticsearch mining backends
chrisdamba/mining,mining/mining,jgabriellima/mining,AndrzejR/mining,seagoat/mining,avelino/mining
mining/db/backends/melasticsearch.py
mining/db/backends/melasticsearch.py
# -*- coding: utf-8 -*-
import json

import requests
from elasticsearch import Elasticsearch as ES

from mining.utils.listc import listc_dict


class Elasticsearch(object):
    def conn(self):
        """Open connection on Elasticsearch DataBase"""
        conn = ES([
            {"host": self.conf.get('host'),
             "port": self.conf.get('port')}
        ])
        return conn

    def save(self, house, data, content_type='dict'):
        """Save meta dada on Elasticsearch"""
        requests.delete("http://{}:{}/{}".format(
            self.conf.get('host'), self.conf.get('port'), house))
        for obj in data.get('data'):
            self.conn().index(index=house, doc_type='data'.format(house),
                              body=obj)
        self.conn().index(index=house, doc_type='columns',
                          body={"columns": data.get('columns')})
        return self.conn()

    def get(self, house, content_type="dict", callback={}):
        """Get meta data on Elasticsearch"""
        count = self.conn().count(index=house, doc_type="data").get('count')
        doc_data = self.conn().search(index=house, doc_type='data',
                                      body=self.filter(), size=count)
        data = {}
        """
        data['data'] = [obj.get("_source")
                        for obj in doc_data.get('hits').get('hits')]
        """
        data['data'] = listc_dict(doc_data.get('hits').get('hits'), "_source")
        doc_columns = self.conn().search(index=house, doc_type='columns',
                                         body=self.filter())
        data.update(doc_columns.get('hits').get('hits')[0].get('_source'))
        data['count'] = count
        return data

    def filter(self):
        """Generate dict to applay filter on Elasticsearch"""
        filter = {
            "query": {
                "bool": {
                    "should": [
                        {"match": {"country": "Brazil"}},
                        {"match": {"full_name": "Daniel Austin"}}
                    ]
                }
            }
        }
        filter = {"query": {"match_all": {}}}
        return filter
# -*- coding: utf-8 -*-
import json

from elasticsearch import Elasticsearch as ES


class Elasticsearch(object):
    def conn(self):
        """Open connection on Elasticsearch DataBase"""
        conn = ES([
            {"host": self.conf.get('host'),
             "port": self.conf.get('port'),
             "url_prefix": self.conf.get('db')}
        ])
        return conn

    def save(self, house, data, content_type=None):
        """Save meta dada on Elasticsearch"""
        if content_type == "application/json":
            data = json.dumps(data)
        return self.conn().index(index=house, doc_type='json', id=1, body=data)

    def get(self, house, content_type="application/json", callback={}):
        """Get meta data on Elasticsearch"""
        data = self.conn().get(index=house, doc_type='json', id=1) or callback
        if content_type == "application/json":
            return json.loads(data['_source'])
        return data['_source']
mit
Python
bc1e350dd19d91932bbfff73f863129ac94273c9
bump version to 2.0.1
alunduil/torment,kumoru/torment,doublerr/torment,swarren83/torment,devx/torment
torment/information.py
torment/information.py
# Copyright 2015 Alex Brandt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

AUTHOR = 'Alex Brandt'
AUTHOR_EMAIL = '[email protected]'
COPYRIGHT = '2015'
DESCRIPTION = 'A Study in Fixture Based Testing Frameworking'
LICENSE = 'Apache-2.0'
NAME = 'torment'
URL = 'https://github.com/kumoru/torment'
VERSION = '2.0.1'
# Copyright 2015 Alex Brandt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

AUTHOR = 'Alex Brandt'
AUTHOR_EMAIL = '[email protected]'
COPYRIGHT = '2015'
DESCRIPTION = 'A Study in Fixture Based Testing Frameworking'
LICENSE = 'Apache-2.0'
NAME = 'torment'
URL = 'https://github.com/kumoru/torment'
VERSION = '2.0.0'
apache-2.0
Python
90ad8e104c339b923d9291916647391572fbced1
Bump version number
nabla-c0d3/nassl
nassl/__init__.py
nassl/__init__.py
# -*- coding: utf-8 -*-
__author__ = 'Alban Diquet'
__version__ = '0.16.0'
# -*- coding: utf-8 -*-
__author__ = 'Alban Diquet'
__version__ = '0.15.1'
agpl-3.0
Python
e51786c46ad4eb7310b1eaa0153253116f2c01bc
Update test bids
openprocurement/openprocurement.tender.esco,Scandie/openprocurement.tender.esco
openprocurement/tender/esco/tests/base.py
openprocurement/tender/esco/tests/base.py
# -*- coding: utf-8 -*-
import os
from copy import deepcopy

from openprocurement.tender.openeu.tests.base import (
    BaseTenderWebTest,
    test_features_tender_data as base_eu_test_features_data,
    test_tender_data as base_eu_test_data,
    test_lots as base_eu_lots,
    test_bids as base_eu_bids,
)

test_tender_data = deepcopy(base_eu_test_data)
test_tender_data['procurementMethodType'] = "esco.EU"
test_tender_data['NBUdiscountRate'] = 0.22
test_tender_data['minValue'] = test_tender_data['value']
del test_tender_data['value']

test_features_tender_data = deepcopy(base_eu_test_features_data)
test_features_tender_data['procurementMethodType'] = "esco.EU"
test_features_tender_data['NBUdiscountRate'] = 0.22
test_features_tender_data['minValue'] = test_features_tender_data['value']
del test_features_tender_data['value']

test_lots = deepcopy(base_eu_lots)
test_lots[0]['minValue'] = test_lots[0]['value']
del test_lots[0]['value']

test_bids = deepcopy(base_eu_bids)
for bid in test_bids:
    bid['value'] = {'yearlyPayments': 0.9,
                    'annualCostsReduction': 751.5,
                    'contractDuration': 10}


class BaseESCOWebTest(BaseTenderWebTest):
    relative_to = os.path.dirname(__file__)
    initial_data = None
    initial_status = None
    initial_bids = None
    initial_lots = None
    initial_auth = ('Basic', ('broker', ''))
    docservice = False


class BaseESCOContentWebTest(BaseESCOWebTest):
    """ ESCO Content Test """
    initialize_initial_data = True

    def setUp(self):
        super(BaseESCOContentWebTest, self).setUp()
        if self.initial_data and self.initialize_initial_data:
            self.create_tender()


class BaseESCOEUContentWebTest(BaseESCOContentWebTest):
    """ ESCO EU Content Test """
    initial_data = test_tender_data
# -*- coding: utf-8 -*-
import os
from copy import deepcopy

from openprocurement.tender.openeu.tests.base import (
    BaseTenderWebTest,
    test_features_tender_data as base_eu_test_features_data,
    test_tender_data as base_eu_test_data,
    test_lots as base_eu_lots,
    test_bids as base_eu_bids,
)

test_tender_data = deepcopy(base_eu_test_data)
test_tender_data['procurementMethodType'] = "esco.EU"
test_tender_data['NBUdiscountRate'] = 0.22
test_tender_data['minValue'] = test_tender_data['value']
del test_tender_data['value']

test_features_tender_data = deepcopy(base_eu_test_features_data)
test_features_tender_data['procurementMethodType'] = "esco.EU"
test_features_tender_data['NBUdiscountRate'] = 0.22
test_features_tender_data['minValue'] = test_features_tender_data['value']
del test_features_tender_data['value']

test_lots = deepcopy(base_eu_lots)
test_lots[0]['minValue'] = test_lots[0]['value']
del test_lots[0]['value']

test_bids = deepcopy(base_eu_bids)
test_bids[0]['value'] = {'yearlyPayments': 0.9,
                         'annualCostsReduction': 751.5,
                         'contractDuration': 10}


class BaseESCOWebTest(BaseTenderWebTest):
    relative_to = os.path.dirname(__file__)
    initial_data = None
    initial_status = None
    initial_bids = None
    initial_lots = None
    initial_auth = ('Basic', ('broker', ''))
    docservice = False


class BaseESCOContentWebTest(BaseESCOWebTest):
    """ ESCO Content Test """
    initialize_initial_data = True

    def setUp(self):
        super(BaseESCOContentWebTest, self).setUp()
        if self.initial_data and self.initialize_initial_data:
            self.create_tender()


class BaseESCOEUContentWebTest(BaseESCOContentWebTest):
    """ ESCO EU Content Test """
    initial_data = test_tender_data
apache-2.0
Python
7fabbbb6562f068690b7971c6ea1299172400d73
fix `make run_importer_jobs`
StartupsPoleEmploi/labonneboite
labonneboite/importer/conf/development.py
labonneboite/importer/conf/development.py
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
DISTINCT_DEPARTEMENTS_HAVING_OFFICES = 15

# --- job 5/8 : compute_scores
MINIMUM_OFFICES_REQUIRED_TO_TRAIN_MODEL = 0
RMSE_MAX = 20000
MAXIMUM_COMPUTE_SCORE_JOB_FAILURES = 94  # 96 departements == 2 successes + 94 failures

# --- job 6/8 : validate_scores
SCORE_REDUCING_MINIMUM_THRESHOLD = 0
DEPARTEMENTS_TO_BE_SANITY_CHECKED = ['14', '69']
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
DISTINCT_DEPARTEMENTS_HAVING_OFFICES = 15

# --- job 5/8 : compute_scores
MINIMUM_OFFICES_REQUIRED_TO_TRAIN_MODEL = 0
RMSE_MAX = 5000
MAXIMUM_COMPUTE_SCORE_JOB_FAILURES = 94  # 96 departements == 2 successes + 94 failures

# --- job 6/8 : validate_scores
SCORE_REDUCING_MINIMUM_THRESHOLD = 0
DEPARTEMENTS_TO_BE_SANITY_CHECKED = ['14', '69']
agpl-3.0
Python
3cefa75b8e9012d828453a764c0b169ab169fae6
fix google login names; associate with any user with same name
dougalsutherland/chips-with-friends
chip_friends/security.py
chip_friends/security.py
from __future__ import unicode_literals
import random
import string

from flask import render_template
from flask_security import Security, PeeweeUserDatastore
from flask_social import Social
from flask_social.datastore import PeeweeConnectionDatastore
from flask_social.utils import get_connection_values_from_oauth_response
from flask_social.views import connect_handler, login_user, login_failed

from .app import app, db
from .models import Role, User, UserRoles, Connection

user_datastore = PeeweeUserDatastore(db, User, Role, UserRoles)
app.security = Security(app, user_datastore)
app.social = Social(app, PeeweeConnectionDatastore(db, Connection))


@login_failed.connect_via(app)
def on_login_failed(sender, provider, oauth_response):
    connection_values = get_connection_values_from_oauth_response(
        provider, oauth_response)

    name = connection_values['full_name']
    if isinstance(name, dict):
        try:
            name = '{} {}'.format(name['givenName'], name['familyName'])
        except (ValueError, KeyError):
            pass

    password = ''.join(random.choice(string.ascii_letters) for _ in range(20))
    user, new = User.get_or_create(
        name=name, defaults={'email': '', 'password': password})
    # don't bother using the datastore, just use the model

    connection_values['user_id'] = user.id
    connect_handler(connection_values, provider)
    login_user(user)
    db.commit()
    return render_template('index.html')
import random
import string

from flask import render_template
from flask_security import Security, PeeweeUserDatastore
from flask_social import Social
from flask_social.datastore import PeeweeConnectionDatastore
from flask_social.utils import get_connection_values_from_oauth_response
from flask_social.views import connect_handler, login_user, login_failed

from .app import app, db
from .models import Role, User, UserRoles, Connection

user_datastore = PeeweeUserDatastore(db, User, Role, UserRoles)
app.security = Security(app, user_datastore)
app.social = Social(app, PeeweeConnectionDatastore(db, Connection))


@login_failed.connect_via(app)
def on_login_failed(sender, provider, oauth_response):
    connection_values = get_connection_values_from_oauth_response(
        provider, oauth_response)

    ds = app.security.datastore
    password = ''.join(random.choice(string.ascii_letters) for _ in range(20))
    user = ds.create_user(
        email='', password=password, name=connection_values['full_name'])
    ds.commit()

    connection_values['user_id'] = user.id
    connect_handler(connection_values, provider)
    login_user(user)
    db.commit()
    return render_template('index.html')
mit
Python
5836b48bbfa87ba706e6ddcb267dc375678695a8
use str
syscoin/syscoin
test/functional/feature_asset_burn.py
test/functional/feature_asset_burn.py
#!/usr/bin/env python3
# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error


class AssetBurnTest(SyscoinTestFramework):
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 2
        self.rpc_timeout = 240
        self.extra_args = [['-assetindex=1'], ['-assetindex=1']]

    def run_test(self):
        self.nodes[0].generate(200)
        self.sync_blocks()
        self.basic_burn_syscoin()
        self.basic_audittxroot1()

    def basic_burn_syscoin(self):
        self.basic_asset()
        self.nodes[0].generate(1)
        newaddress = self.nodes[0].getnewaddress()
        self.nodes[0].assetsend(self.asset, newaddress, '0.5')
        self.nodes[0].generate(1)
        out = self.nodes[0].listunspent(query_options={'assetGuid': self.asset})
        assert_equal(len(out), 1)
        # try to burn more than we own
        assert_raises_rpc_error(-20, 'Failed to read from asset DB',
                                self.nodes[0].assetallocationburn(
                                    self.asset, newaddress, '0.6',
                                    '0x931d387731bbbc988b312206c74f77d004d6b84b'))
        self.nodes[0].assetallocationburn(
            self.asset, newaddress, '0.5',
            '0x931d387731bbbc988b312206c74f77d004d6b84b')
        self.nodes[0].generate(1)
        out = self.nodes[0].listunspent(query_options={'assetGuid': self.asset})
        assert_equal(len(out), 0)

    def basic_asset(self):
        self.asset = self.nodes[0].assetnew(
            '1', 'TST', 'asset description',
            '0x9f90b5093f35aeac5fbaeb591f9c9de8e2844a46',
            8, '1000', '10000', 31, {})['asset_guid']


if __name__ == '__main__':
    AssetBurnTest().main()
#!/usr/bin/env python3 # Copyright (c) 2019-2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from test_framework.test_framework import SyscoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error class AssetBurnTest(SyscoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.rpc_timeout = 240 self.extra_args = [['-assetindex=1'],['-assetindex=1']] def run_test(self): self.nodes[0].generate(200) self.sync_blocks() self.basic_burn_syscoin() self.basic_audittxroot1() def basic_burn_syscoin(self): self.basic_asset() self.nodes[0].generate(1) newaddress = self.nodes[0].getnewaddress() self.nodes[0].assetsend(self.asset, newaddress, 0.5) self.nodes[0].generate(1) out = self.nodes[0].listunspent(query_options={'assetGuid': self.asset}) assert_equal(len(out), 1) # try to burn more than we own assert_raises_rpc_error(-20, 'Failed to read from asset DB', self.nodes[0].assetallocationburn(self.asset, newaddress, 0.6, '0x931d387731bbbc988b312206c74f77d004d6b84b')) self.nodes[0].assetallocationburn(self.asset, newaddress, 0.5, '0x931d387731bbbc988b312206c74f77d004d6b84b') self.nodes[0].generate(1) out = self.nodes[0].listunspent(query_options={'assetGuid': self.asset}) assert_equal(len(out), 0) def basic_asset(self): self.asset = self.nodes[0].assetnew('1', 'TST', 'asset description', '0x9f90b5093f35aeac5fbaeb591f9c9de8e2844a46', 8, '1000', '10000', 31, {})['asset_guid'] if __name__ == '__main__': AssetBurnTest().main()
mit
Python
9d2766a7b6aae9e3ad3c94925bdde100a70f6150
fix debug_view function
codercarl/psd-tools,vgatto/psd-tools,EvgenKo423/psd-tools,ssh-odoo/psd-tools,mitni455/psd-tools,kmike/psd-tools,vgatto/psd-tools,codercarl/psd-tools,codercarl/psd-tools,psd-tools/psd-tools,kmike/psd-tools,ssh-odoo/psd-tools,vovkasm/psd-tools,vovkasm/psd-tools,mitni455/psd-tools,EvgenKo423/psd-tools
src/psd_tools/debug.py
src/psd_tools/debug.py
# -*- coding: utf-8 -*- """ Assorted debug utilities """ from __future__ import absolute_import, print_function import sys from collections import namedtuple try: from IPython.lib.pretty import pprint _PRETTY_ENABLED = True except ImportError: from pprint import pprint _PRETTY_ENABLED = False def debug_view(fp, txt="", max_back=20): """ Print file contents around current position for file pointer ``fp`` """ max_back = min(max_back, fp.tell()) fp.seek(-max_back, 1) pre = fp.read(max_back) post = fp.read(100) fp.seek(-100, 1) print(txt, repr(pre), "--->.<---", repr(post)) def pretty_namedtuple(typename, field_names, verbose=False): """ Return a namedtuple class that knows how to pretty-print itself using IPython.lib.pretty library; if IPython is not installed then this function is the same as collections.namedtuple (with one exception: 'rename' argument is unsupported). """ cls = namedtuple(typename, field_names, verbose) if _PRETTY_ENABLED: PrettyMixin = _get_pretty_mixin(typename) cls = type(str(typename), (PrettyMixin, cls), {}) # For pickling to work, the __module__ variable needs to be set to the frame # where the named tuple is created. Bypass this step in enviroments where # sys._getframe is not defined (Jython for example) or sys._getframe is not # defined for arguments greater than 0 (IronPython). try: cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__') except (AttributeError, ValueError): pass return cls def _get_pretty_mixin(typename): """ Return a mixin class for multiline pretty-printing of namedtuple objects. """ class _PrettyNamedtupleMixin(object): def _repr_pretty_(self, p, cycle): if cycle: return "{typename}(...)".format(name=typename) with p.group(1, '{name}('.format(name=typename), ')'): p.breakable() for idx, field in enumerate(self._fields): if idx: p.text(',') p.breakable() p.text('{field}='.format(field=field)) p.pretty(getattr(self, field)) return _PrettyNamedtupleMixin
# -*- coding: utf-8 -*- """ Assorted debug utilities """ from __future__ import absolute_import import sys from collections import namedtuple try: from IPython.lib.pretty import pprint _PRETTY_ENABLED = True except ImportError: from pprint import pprint _PRETTY_ENABLED = False def debug_view(fp, txt="", max_back=20): """ Print file contents around current position for file pointer ``fp`` """ max_back = min(max_back, fp.tell()) fp.seek(-max_back, 1) pre = fp.read(max_back) post = fp.read(100) fp.seek(-100, 1) print(txt, repr(pre), "--->.<---", repr(post)) def pretty_namedtuple(typename, field_names, verbose=False): """ Return a namedtuple class that knows how to pretty-print itself using IPython.lib.pretty library; if IPython is not installed then this function is the same as collections.namedtuple (with one exception: 'rename' argument is unsupported). """ cls = namedtuple(typename, field_names, verbose) if _PRETTY_ENABLED: PrettyMixin = _get_pretty_mixin(typename) cls = type(str(typename), (PrettyMixin, cls), {}) # For pickling to work, the __module__ variable needs to be set to the frame # where the named tuple is created. Bypass this step in enviroments where # sys._getframe is not defined (Jython for example) or sys._getframe is not # defined for arguments greater than 0 (IronPython). try: cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__') except (AttributeError, ValueError): pass return cls def _get_pretty_mixin(typename): """ Return a mixin class for multiline pretty-printing of namedtuple objects. """ class _PrettyNamedtupleMixin(object): def _repr_pretty_(self, p, cycle): if cycle: return "{typename}(...)".format(name=typename) with p.group(1, '{name}('.format(name=typename), ')'): p.breakable() for idx, field in enumerate(self._fields): if idx: p.text(',') p.breakable() p.text('{field}='.format(field=field)) p.pretty(getattr(self, field)) return _PrettyNamedtupleMixin
mit
Python
5e991fd00d980884f9210cfd5f25d5e7d91aabfc
Fix race condition in #144
ocelot-inc/tarantool,rtsisyk/tarantool,guard163/tarantool,ocelot-inc/tarantool,KlonD90/tarantool,Sannis/tarantool,ocelot-inc/tarantool,Sannis/tarantool,condor-the-bird/tarantool,vasilenkomike/tarantool,dkorolev/tarantool,dkorolev/tarantool,guard163/tarantool,Sannis/tarantool,Sannis/tarantool,vasilenkomike/tarantool,nvoron23/tarantool,mejedi/tarantool,ocelot-inc/tarantool,KlonD90/tarantool,guard163/tarantool,nvoron23/tarantool,condor-the-bird/tarantool,nvoron23/tarantool,mejedi/tarantool,Sannis/tarantool,guard163/tarantool,nvoron23/tarantool,dkorolev/tarantool,nvoron23/tarantool,dkorolev/tarantool,vasilenkomike/tarantool,rtsisyk/tarantool,mejedi/tarantool,condor-the-bird/tarantool,KlonD90/tarantool,rtsisyk/tarantool,vasilenkomike/tarantool,condor-the-bird/tarantool,KlonD90/tarantool,guard163/tarantool,KlonD90/tarantool,mejedi/tarantool,dkorolev/tarantool,condor-the-bird/tarantool,nvoron23/tarantool,rtsisyk/tarantool,vasilenkomike/tarantool
test/replication/init_storage.test.py
test/replication/init_storage.test.py
import os import glob from lib.tarantool_server import TarantoolServer # master server master = server master.admin('space = box.schema.create_space(\'test\', {id = 42})') master.admin('space:create_index(\'primary\', \'hash\', {parts = { 0, \'num\' } })') master.admin('for k = 1, 9 do space:insert(k, k*k) end') for k in glob.glob(os.path.join(master.vardir, '*.xlog')): os.unlink(k) print '-------------------------------------------------------------' print 'replica test 1 (must be failed)' print '-------------------------------------------------------------' replica = TarantoolServer() replica.deploy("replication/cfg/replica.cfg", replica.find_exe(self.args.builddir), os.path.join(self.args.vardir, "replica"), need_init=False) for i in range(1, 10): replica.admin('box.select(42, 0, %d)' % i) replica.stop() replica.cleanup(True) master.admin('box.snapshot()') master.restart() master.admin('for k = 10, 19 do box.insert(42, k, k*k*k) end') lsn = master.get_param('lsn') print '-------------------------------------------------------------' print 'replica test 2 (must be ok)' print '-------------------------------------------------------------' replica = TarantoolServer() replica.deploy("replication/cfg/replica.cfg", replica.find_exe(self.args.builddir), os.path.join(self.args.vardir, "replica"), need_init=False) replica.admin('space = box.space.test'); replica.wait_lsn(lsn) for i in range(1, 20): replica.admin('space:select(0, %d)' % i) replica.stop() replica.cleanup(True) server.stop() server.deploy(self.suite_ini["config"])
import os import glob from lib.tarantool_server import TarantoolServer # master server master = server master.admin('space = box.schema.create_space(\'test\', {id = 42})') master.admin('space:create_index(\'primary\', \'hash\', {parts = { 0, \'num\' } })') master.admin('for k = 1, 9 do space:insert(k, k*k) end') for k in glob.glob(os.path.join(master.vardir, '*.xlog')): os.unlink(k) print '-------------------------------------------------------------' print 'replica test 1 (must be failed)' print '-------------------------------------------------------------' replica = TarantoolServer() replica.deploy("replication/cfg/replica.cfg", replica.find_exe(self.args.builddir), os.path.join(self.args.vardir, "replica"), need_init=False) for i in range(1, 10): replica.admin('box.select(42, 0, %d)' % i) replica.stop() replica.cleanup(True) master.admin('box.snapshot()') master.restart() master.admin('for k = 10, 19 do box.insert(42, k, k*k*k) end') print '-------------------------------------------------------------' print 'replica test 2 (must be ok)' print '-------------------------------------------------------------' replica = TarantoolServer() replica.deploy("replication/cfg/replica.cfg", replica.find_exe(self.args.builddir), os.path.join(self.args.vardir, "replica"), need_init=False) replica.admin('space = box.space.test'); for i in range(1, 20): replica.admin('space:select(0, %d)' % i) replica.stop() replica.cleanup(True) server.stop() server.deploy(self.suite_ini["config"])
bsd-2-clause
Python
0ea32a2b51438b55130082e54f30fc9c97bd9d85
Fix compatibility with oslo.db 12.1.0
openstack/cloudkitty,openstack/cloudkitty
cloudkitty/db/__init__.py
cloudkitty/db/__init__.py
# -*- coding: utf-8 -*- # Copyright 2014 Objectif Libre # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # from oslo_config import cfg from oslo_db.sqlalchemy import session _FACADE = None def _create_facade_lazily(): global _FACADE if _FACADE is None: # FIXME(priteau): Remove autocommit=True (and ideally use of # LegacyEngineFacade) asap since it's not compatible with SQLAlchemy # 2.0. _FACADE = session.EngineFacade.from_config(cfg.CONF, sqlite_fk=True, autocommit=True) return _FACADE def get_engine(): facade = _create_facade_lazily() return facade.get_engine() def get_session(**kwargs): facade = _create_facade_lazily() return facade.get_session(**kwargs)
# -*- coding: utf-8 -*- # Copyright 2014 Objectif Libre # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # from oslo_config import cfg from oslo_db.sqlalchemy import session _FACADE = None def _create_facade_lazily(): global _FACADE if _FACADE is None: _FACADE = session.EngineFacade.from_config(cfg.CONF, sqlite_fk=True) return _FACADE def get_engine(): facade = _create_facade_lazily() return facade.get_engine() def get_session(**kwargs): facade = _create_facade_lazily() return facade.get_session(**kwargs)
apache-2.0
Python
cfe7de10ef9c6c1d8d5be71993e5f96ace58953d
Update Ansible release version to 2.6.0dev0.
thaim/ansible,thaim/ansible
lib/ansible/release.py
lib/ansible/release.py
# (c) 2012-2014, Michael DeHaan <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type __version__ = '2.6.0dev0' __author__ = 'Ansible, Inc.' __codename__ = 'Heartbreaker'
# (c) 2012-2014, Michael DeHaan <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type __version__ = '2.6.0a1' __author__ = 'Ansible, Inc.' __codename__ = 'Heartbreaker'
mit
Python
359f337d7cfd0dac2eec8ecce643af10588e3e6a
Fix i18n __radd__ bug
limodou/uliweb,wwfifi/uliweb,wwfifi/uliweb,wwfifi/uliweb,wwfifi/uliweb,limodou/uliweb,limodou/uliweb,limodou/uliweb
uliweb/i18n/lazystr.py
uliweb/i18n/lazystr.py
def lazy(func): def f(message): return LazyString(func, message) return f class LazyString(object): """ >>> from uliweb.i18n import gettext_lazy as _ >>> x = _('Hello') >>> print repr(x) """ def __init__(self, func, message): self._func = func self.msg = message self._format = [] def __unicode__(self): if not self.msg: return '' value = self.getvalue() if isinstance(value, unicode): return value else: return unicode(self.getvalue(), 'utf-8') def __str__(self): if not self.msg: return '' value = self.getvalue() if isinstance(value, unicode): return value.encode('utf-8') else: return str(value) def format(self, *args, **kwargs): self._format.append((args, kwargs)) return self def getvalue(self): v = self._func(self.msg) for args, kwargs in self._format: v = v.format(*args, **kwargs) return v def __repr__(self): return "%s_lazy(%r)" % (self._func.__name__, self.msg) def __add__(self, obj): return self.getvalue() + obj def __radd__(self, obj): return obj + self.getvalue() def encode(self, encoding): return self.getvalue().encode(encoding) def split(self, *args, **kwargs): return self.getvalue().split(*args, **kwargs) # def __getattr__(self, name): # return getattr(self.getvalue(), name)
def lazy(func): def f(message): return LazyString(func, message) return f class LazyString(object): """ >>> from uliweb.i18n import gettext_lazy as _ >>> x = _('Hello') >>> print repr(x) """ def __init__(self, func, message): self._func = func self.msg = message self._format = [] def __unicode__(self): if not self.msg: return '' value = self.getvalue() if isinstance(value, unicode): return value else: return unicode(self.getvalue(), 'utf-8') def __str__(self): if not self.msg: return '' value = self.getvalue() if isinstance(value, unicode): return value.encode('utf-8') else: return str(value) def format(self, *args, **kwargs): self._format.append((args, kwargs)) return self def getvalue(self): v = self._func(self.msg) for args, kwargs in self._format: v = v.format(*args, **kwargs) return v def __repr__(self): return "%s_lazy(%r)" % (self._func.__name__, self.msg) def __add__(self, obj): return self.getvalue() + obj def __radd__(self, obj): return self.getvalue() + obj def encode(self, encoding): return self.getvalue().encode(encoding) def split(self, *args, **kwargs): return self.getvalue().split(*args, **kwargs) # def __getattr__(self, name): # return getattr(self.getvalue(), name)
bsd-2-clause
Python
ac985005f925c0d37ae337ada0bf88b50becaee6
change scheduler
paulgessinger/coalics,paulgessinger/coalics,paulgessinger/coalics
coalics/schedule.py
coalics/schedule.py
import os.path import sys sys.path.append(os.path.join(os.path.dirname(__file__), "..")) import logging from coalics import tasks, q, redis, app from datetime import datetime from datetime import datetime, timedelta import time # stream_handler = logging.StreamHandler() # stream_handler.setLevel(logging.INFO) # app.logger.addHandler(stream_handler) logger = logging.getLogger("Scheduler") fh = logging.FileHandler("/app/log/scheduler.log") fh.setLevel(logging.INFO) logger.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s") fh.setFormatter(formatter) logger.addHandler(fh) prev_job = None td = timedelta(seconds=app.config["SOURCE_UPDATE_FREQUENCY"]) logger.info("Scheduler launching") while True: try: logger.info("Begin schedule run") q.enqueue(tasks.update_sources, timeout=td.seconds*0.9) logger.info("Scheduler: ran without error") except Exception as e: logger.error("Scheduler: caught error {}".format(str(e))) finally: logger.info("Scheduler: Sleeping for {}s".format(td.seconds)) time.sleep(td.seconds)
import os.path import sys sys.path.append(os.path.join(os.path.dirname(__file__), "..")) import logging from coalics import tasks, q, redis, app from datetime import datetime from datetime import datetime, timedelta import time # stream_handler = logging.StreamHandler() # stream_handler.setLevel(logging.INFO) # app.logger.addHandler(stream_handler) logger = logging.getLogger("Scheduler") fh = logging.FileHandler("/app/log/scheduler.log") fh.setLevel(logging.INFO) logger.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s") fh.setFormatter(formatter) logger.addHandler(fh) prev_job = None td = timedelta(seconds=app.config["SOURCE_UPDATE_FREQUENCY"]) logger.info("Scheduler launching") while True: try: logger.info("Begin schedule run") if prev_job: print(prev_job.result) if prev_job == None or prev_job.result != None: prev_job = q.enqueue(tasks.update_sources, timeout=td.seconds*0.9) logger.info("Scheduler: ran without error") except Exception as e: logger.error("Scheduler: caught error {}".format(str(e))) finally: logger.info("Scheduler: Sleeping for {}s".format(td.seconds)) time.sleep(td.seconds)
mit
Python
9b354f4dc00e3aef4cfceae71be60b1dc60a1927
Add test for ticket #1559.
jorisvandenbossche/numpy,skymanaditya1/numpy,tacaswell/numpy,kirillzhuravlev/numpy,dch312/numpy,CMartelLML/numpy,argriffing/numpy,rhythmsosad/numpy,mindw/numpy,jschueller/numpy,jankoslavic/numpy,dwillmer/numpy,trankmichael/numpy,stuarteberg/numpy,naritta/numpy,felipebetancur/numpy,dch312/numpy,joferkington/numpy,larsmans/numpy,stuarteberg/numpy,bmorris3/numpy,rherault-insa/numpy,GaZ3ll3/numpy,trankmichael/numpy,BabeNovelty/numpy,rgommers/numpy,abalkin/numpy,SunghanKim/numpy,bringingheavendown/numpy,NextThought/pypy-numpy,mhvk/numpy,joferkington/numpy,dwf/numpy,WarrenWeckesser/numpy,kirillzhuravlev/numpy,Eric89GXL/numpy,ChanderG/numpy,nguyentu1602/numpy,anntzer/numpy,dimasad/numpy,hainm/numpy,shoyer/numpy,moreati/numpy,rhythmsosad/numpy,njase/numpy,dwf/numpy,empeeu/numpy,nbeaver/numpy,pyparallel/numpy,rgommers/numpy,pbrod/numpy,charris/numpy,dimasad/numpy,MaPePeR/numpy,jorisvandenbossche/numpy,jschueller/numpy,jankoslavic/numpy,dwillmer/numpy,ddasilva/numpy,mindw/numpy,mingwpy/numpy,Anwesh43/numpy,rgommers/numpy,abalkin/numpy,Dapid/numpy,ssanderson/numpy,kirillzhuravlev/numpy,MaPePeR/numpy,SunghanKim/numpy,seberg/numpy,jakirkham/numpy,mortada/numpy,jonathanunderwood/numpy,Anwesh43/numpy,mathdd/numpy,grlee77/numpy,cowlicks/numpy,felipebetancur/numpy,Srisai85/numpy,anntzer/numpy,naritta/numpy,pbrod/numpy,MaPePeR/numpy,naritta/numpy,BabeNovelty/numpy,chatcannon/numpy,githubmlai/numpy,Linkid/numpy,ajdawson/numpy,endolith/numpy,astrofrog/numpy,yiakwy/numpy,trankmichael/numpy,maniteja123/numpy,rmcgibbo/numpy,simongibbons/numpy,argriffing/numpy,madphysicist/numpy,NextThought/pypy-numpy,jonathanunderwood/numpy,KaelChen/numpy,gfyoung/numpy,joferkington/numpy,mwiebe/numpy,KaelChen/numpy,empeeu/numpy,GrimDerp/numpy,numpy/numpy,mathdd/numpy,sonnyhu/numpy,ESSS/numpy,seberg/numpy,AustereCuriosity/numpy,astrofrog/numpy,dwf/numpy,astrofrog/numpy,dwillmer/numpy,ddasilva/numpy,bmorris3/numpy,AustereCuriosity/numpy,MSeifert04/numpy,Linkid/numpy,tynn/numpy,drasmuss/numpy,BMJHayward/numpy,drasmuss/numpy,andsor/numpy,skwbc/numpy,Yusa95/numpy,rhythmsosad/numpy,stuarteberg/numpy,yiakwy/numpy,jorisvandenbossche/numpy,andsor/numpy,madphysicist/numpy,cjermain/numpy,ahaldane/numpy,MSeifert04/numpy,abalkin/numpy,rajathkumarmp/numpy,kiwifb/numpy,gfyoung/numpy,SunghanKim/numpy,dimasad/numpy,sigma-random/numpy,embray/numpy,chiffa/numpy,embray/numpy,rgommers/numpy,chatcannon/numpy,shoyer/numpy,larsmans/numpy,pdebuyl/numpy,ekalosak/numpy,pbrod/numpy,mindw/numpy,sigma-random/numpy,rudimeier/numpy,tacaswell/numpy,WillieMaddox/numpy,BMJHayward/numpy,mhvk/numpy,grlee77/numpy,pizzathief/numpy,ahaldane/numpy,rherault-insa/numpy,gfyoung/numpy,BabeNovelty/numpy,ChristopherHogan/numpy,WillieMaddox/numpy,musically-ut/numpy,ahaldane/numpy,tdsmith/numpy,sonnyhu/numpy,MichaelAquilina/numpy,sinhrks/numpy,Yusa95/numpy,KaelChen/numpy,simongibbons/numpy,numpy/numpy,stefanv/numpy,Srisai85/numpy,mingwpy/numpy,ajdawson/numpy,ogrisel/numpy,dato-code/numpy,mattip/numpy,empeeu/numpy,WillieMaddox/numpy,musically-ut/numpy,numpy/numpy,kirillzhuravlev/numpy,chiffa/numpy,mindw/numpy,bringingheavendown/numpy,ogrisel/numpy,charris/numpy,sonnyhu/numpy,Srisai85/numpy,bertrand-l/numpy,groutr/numpy,GaZ3ll3/numpy,brandon-rhodes/numpy,WarrenWeckesser/numpy,AustereCuriosity/numpy,pyparallel/numpy,dato-code/numpy,stefanv/numpy,jankoslavic/numpy,ekalosak/numpy,mingwpy/numpy,skymanaditya1/numpy,bmorris3/numpy,GrimDerp/numpy,ahaldane/numpy,utke1/numpy,anntzer/numpy,Yusa95/numpy,ahaldane/numpy
numpy/ma/tests/test_regression.py
numpy/ma/tests/test_regression.py
from numpy.testing import * import numpy as np rlevel = 1 class TestRegression(TestCase): def test_masked_array_create(self,level=rlevel): """Ticket #17""" x = np.ma.masked_array([0,1,2,3,0,4,5,6],mask=[0,0,0,1,1,1,0,0]) assert_array_equal(np.ma.nonzero(x),[[1,2,6,7]]) def test_masked_array(self,level=rlevel): """Ticket #61""" x = np.ma.array(1,mask=[1]) def test_mem_masked_where(self,level=rlevel): """Ticket #62""" from numpy.ma import masked_where, MaskType a = np.zeros((1,1)) b = np.zeros(a.shape, MaskType) c = masked_where(b,a) a-c def test_masked_array_multiply(self,level=rlevel): """Ticket #254""" a = np.ma.zeros((4,1)) a[2,0] = np.ma.masked b = np.zeros((4,2)) a*b b*a def test_masked_array_repeat(self, level=rlevel): """Ticket #271""" np.ma.array([1],mask=False).repeat(10) def test_masked_array_repr_unicode(self): """Ticket #1256""" repr(np.ma.array(u"Unicode")) def test_atleast_2d(self): """Ticket #1559""" a = np.ma.masked_array([0.0, 1.2, 3.5], mask=[False, True, False]) b = np.atleast_2d(a) assert_(a.mask.ndim == 1) assert_(b.mask.ndim == 2) if __name__ == "__main__": run_module_suite()
from numpy.testing import * import numpy as np rlevel = 1 class TestRegression(TestCase): def test_masked_array_create(self,level=rlevel): """Ticket #17""" x = np.ma.masked_array([0,1,2,3,0,4,5,6],mask=[0,0,0,1,1,1,0,0]) assert_array_equal(np.ma.nonzero(x),[[1,2,6,7]]) def test_masked_array(self,level=rlevel): """Ticket #61""" x = np.ma.array(1,mask=[1]) def test_mem_masked_where(self,level=rlevel): """Ticket #62""" from numpy.ma import masked_where, MaskType a = np.zeros((1,1)) b = np.zeros(a.shape, MaskType) c = masked_where(b,a) a-c def test_masked_array_multiply(self,level=rlevel): """Ticket #254""" a = np.ma.zeros((4,1)) a[2,0] = np.ma.masked b = np.zeros((4,2)) a*b b*a def test_masked_array_repeat(self, level=rlevel): """Ticket #271""" np.ma.array([1],mask=False).repeat(10) def test_masked_array_repr_unicode(self): """Ticket #1256""" repr(np.ma.array(u"Unicode"))
bsd-3-clause
Python
813c478f06c175e36dc8334fd37195e403a42166
update test_symbol_accuracy
vincentfpgarcia/tradingtool
test_symbol_accuracy.py
test_symbol_accuracy.py
from dataset import create_testing_data_for_symbol, get_symbol_list from keras.models import load_model import sys INITIAL_CAPITAL = 10000.0 PERCENT_OF_CAPITAL_PER_TRANSACTION = 10.0 TRANSACTION_FEE = 0 def compare(x, y): if x[3] < y[3]: return 1 return -1 def main(): model = load_model(sys.argv[1]) symbols = get_symbol_list() gains = [] for sym in symbols: print "----" X, Y = create_testing_data_for_symbol(sym) money = INITIAL_CAPITAL true_pos = 0 false_pos = 0 for i in range(len(X)): current = X[i] current_value = current[0][-1] prediction = model.predict(X[i:i+1]) if prediction[0][0] > current_value * 1.01: investment = 100.0 money -= investment + TRANSACTION_FEE * 2.0 revenue = Y[i:i+1][0][0] / current_value * investment gain = revenue - investment money += revenue if gain > 0.0: true_pos += 1 else: false_pos += 1 print "" print "symbol:", sym total_gain = money - INITIAL_CAPITAL percent_gain = ((money / INITIAL_CAPITAL) - 1.0) * 100.0 print "gain:", total_gain, "(", percent_gain, ")" accuracy = 0 if false_pos+true_pos == 0 else float(true_pos)/float(false_pos+true_pos) print "true pos:", true_pos, "false pos:", false_pos, "accuracy:", accuracy gains.append([sym, true_pos, false_pos, accuracy, total_gain, percent_gain]) gains.sort(compare) for item in gains: print item if __name__ == "__main__": # import dataset # X, y = dataset.create_testing_data_for_symbol('CBI') # print X main()
from dataset import create_testing_data_for_symbol, get_symbol_list from keras.models import load_model import sys INITIAL_CAPITAL = 10000.0 PERCENT_OF_CAPITAL_PER_TRANSACTION = 10.0 TRANSACTION_FEE = 0 def compare(x, y): if x[1] < y[1]: return 1 return -1 def main(): model = load_model(sys.argv[1]) symbols = get_symbol_list() gains = [] for sym in symbols: X, Y = create_testing_data_for_symbol(sym) print "----" money = INITIAL_CAPITAL for i in range(len(X)): current = X[i] current_value = current[0][-1] prediction = model.predict(X[i:i+1]) if prediction[0][0] > current_value * 1.02: investment = 100.0 money -= investment + TRANSACTION_FEE * 2.0 revenue = Y[i:i+1][0][0] / current_value * investment gain = revenue - investment money += revenue print "" print "symbol:", sym total_gain = money - INITIAL_CAPITAL percent_gain = ((money / INITIAL_CAPITAL) - 1.0) * 100.0 print "gain:", total_gain, "(", percent_gain, ")" gains.append([sym, total_gain, percent_gain]) gains.sort(compare) for item in gains: print item if __name__ == "__main__": main()
mit
Python
f61a4766ad3006bb2001df33d06feeb15352aa5a
Change Box user list request from raw API call to Box SDK make_request method
jcleblanc/box-examples,jcleblanc/box-examples,jcleblanc/box-examples,jcleblanc/box-examples,jcleblanc/box-examples,jcleblanc/box-examples,jcleblanc/box-examples
okta-integration/python/server.py
okta-integration/python/server.py
from flask import Flask, redirect, g, url_for from flask_oidc import OpenIDConnect from okta import UsersClient from boxsdk import Client from boxsdk import JWTAuth import requests import config import json app = Flask(__name__) app.config.update({ 'SECRET_KEY': config.okta_client_secret, 'OIDC_CLIENT_SECRETS': './client_secrets.json', 'OIDC_DEBUG': True, 'OIDC_ID_TOKEN_COOKIE_SECURE': False, 'OIDC_SCOPES': ["openid", "profile"], 'OIDC_CALLBACK_ROUTE': config.okta_callback_route }) oidc = OpenIDConnect(app) okta_client = UsersClient(config.okta_org_url, config.okta_auth_token) # Fetch Okta user record if logged in @app.before_request def before_request(): if oidc.user_loggedin: g.user = okta_client.get_user(oidc.user_getfield('sub')) else: g.user = None # Main application route @app.route('/') def start(): return redirect(url_for(".box_auth")) # Box user verification @app.route("/box_auth") @oidc.require_login def box_auth(): uid = g.user.id # Instantiate Box Client instance auth = JWTAuth.from_settings_file('../config.json') box_client = Client(auth) # Validate is user exists url = f'https://api.box.com/2.0/users?external_app_user_id={uid}' response = box_client.make_request('GET', url) user_info = response.json() # If user not found, create user, otherwise fetch user token if (user_info['total_count'] == 0): user_name = f'{g.user.profile.firstName} {g.user.profile.lastName}' space = 1073741824 # Create app user user = box_client.create_user(user_name, None, space_amount=space, external_app_user_id=uid) print('user {name} created') else: # Create user client based on discovered user user = user_info['entries'][0] user_to_impersonate = box_client.user(user_id=user['id']) user_client = box_client.as_user(user_to_impersonate) # Get current user current_user = box_client.user().get() print(current_user.id) # Get all items in a folder items = user_client.folder(folder_id='0').get_items() for item in items: print('{0} {1} is named "{2}"'.format(item.type.capitalize(), item.id, item.name)) return 'Test complete' # User logout @app.route("/logout") def logout(): oidc.logout()
from flask import Flask, redirect, g, url_for from flask_oidc import OpenIDConnect from okta import UsersClient from boxsdk import Client from boxsdk import JWTAuth import requests import config import json app = Flask(__name__) app.config.update({ 'SECRET_KEY': config.okta_client_secret, 'OIDC_CLIENT_SECRETS': './client_secrets.json', 'OIDC_DEBUG': True, 'OIDC_ID_TOKEN_COOKIE_SECURE': False, 'OIDC_SCOPES': ["openid", "profile"], 'OIDC_CALLBACK_ROUTE': config.okta_callback_route }) oidc = OpenIDConnect(app) okta_client = UsersClient(config.okta_org_url, config.okta_auth_token) # Fetch Okta user record if logged in @app.before_request def before_request(): if oidc.user_loggedin: g.user = okta_client.get_user(oidc.user_getfield('sub')) else: g.user = None # Main application route @app.route('/') def start(): return redirect(url_for(".box_auth")) # Box user verification @app.route("/box_auth") @oidc.require_login def box_auth(): uid = g.user.id auth = JWTAuth.from_settings_file('../config.json') access_token = auth.authenticate_instance() box_client = Client(auth) # Validate is user exists url = f'https://api.box.com/2.0/users?external_app_user_id={uid}' headers = {'Authorization': 'Bearer ' + access_token} response = requests.get(url, headers=headers) user_info = response.json() # If user not found, create user, otherwise fetch user token if (user_info['total_count'] == 0): user_name = f'{g.user.profile.firstName} {g.user.profile.lastName}' space = 1073741824 # Create app user user = box_client.create_user(user_name, None, space_amount=space, external_app_user_id=uid) print('user {name} created') else: # Create user client based on discovered user user = user_info['entries'][0] user_to_impersonate = box_client.user(user_id=user['id']) user_client = box_client.as_user(user_to_impersonate) # Get current user current_user = box_client.user().get() print(current_user.id) # Get all items in a folder items = user_client.folder(folder_id='0').get_items() for item in items: print('{0} {1} is named "{2}"'.format(item.type.capitalize(), item.id, item.name)) return 'Test complete' # User logout @app.route("/logout") def logout(): oidc.logout()
mit
Python
72941398fd2e78cbf5d994b4bf8683c4bdefaab9
Comment out semipar notebook in travis runner until pip build is updated.
grmToolbox/grmpy
utils/travis_runner.py
utils/travis_runner.py
#!/usr/bin/env python """This script manages all tasks for the TRAVIS build server.""" import os import subprocess if __name__ == "__main__": os.chdir("promotion/grmpy_tutorial_notebook") cmd = [ "jupyter", "nbconvert", "--execute", "grmpy_tutorial_notebook.ipynb", "--ExecutePreprocessor.timeout=-1", ] subprocess.check_call(cmd) os.chdir("../..") # if __name__ == "__main__": # os.chdir("promotion/grmpy_tutorial_notebook") # cmd = [ # "jupyter", # "nbconvert", # "--execute", # "tutorial_semipar_notebook.ipynb", # "--ExecutePreprocessor.timeout=-1", # ] # subprocess.check_call(cmd)
#!/usr/bin/env python """This script manages all tasks for the TRAVIS build server.""" import os import subprocess if __name__ == "__main__": os.chdir("promotion/grmpy_tutorial_notebook") cmd = [ "jupyter", "nbconvert", "--execute", "grmpy_tutorial_notebook.ipynb", "--ExecutePreprocessor.timeout=-1", ] subprocess.check_call(cmd) os.chdir("../..") if __name__ == "__main__": os.chdir("promotion/grmpy_tutorial_notebook") cmd = [ "jupyter", "nbconvert", "--execute", "tutorial_semipar_notebook.ipynb", "--ExecutePreprocessor.timeout=-1", ] subprocess.check_call(cmd)
mit
Python
17dfc3faa45584200c8f67686b86b541a2ce01fe
Test for informal word
wiki-ai/revscoring,eranroz/revscoring,he7d3r/revscoring,aetilley/revscoring,ToAruShiroiNeko/revscoring
revscoring/languages/tests/test_hebrew.py
revscoring/languages/tests/test_hebrew.py
from nose.tools import eq_ from .. import language, hebrew def test_language(): is_misspelled = hebrew.solve(language.is_misspelled) assert is_misspelled("חטול") assert not is_misspelled("חתול") is_badword = hebrew.solve(language.is_badword) assert is_badword("שרמוטה") assert not is_badword("שימרותה") is_informal_word = hebrew.solve(language.is_informal_word) assert is_informal_word("בגללך") assert not is_informal_word("בגלל")
from nose.tools import eq_ from .. import language, hebrew def test_language(): is_misspelled = hebrew.solve(language.is_misspelled) assert is_misspelled("חטול") assert not is_misspelled("חתול") is_badword = hebrew.solve(language.is_badword) assert is_badword("שרמוטה") assert not is_badword("שימרותה")
mit
Python
eb71d45097e509273518b83113489911bf985e4a
clean up
cellcraft/cellcraft
mcpipy/test/builders/test_protein.py
mcpipy/test/builders/test_protein.py
import pandas as pd from cellcraft.builders.protein import define_items_color_texture_protein, store_location_biological_prot_data def test_define_items_color_texture_protein(): dict_chains = {"a": 1, "b": 2} d_appearance = define_items_color_texture_protein(dict_chains) assert len(d_appearance) == 2 assert d_appearance[1]['color'] != d_appearance[2]['color']
import pandas as pd from cellcraft.builders.protein import define_items_color_texture_protein, store_location_biological_prot_data def test_define_items_color_texture_protein(): dict_chains = {"a": 1, "b": 2} d_appearance = define_items_color_texture_protein(dict_chains) assert len(d_appearance) == 2 assert d_appearance[1]['color'] != d_appearance[2]['color'] def test_store_location_biological_prot_data(): complex_coordinates = pd.Series([0.03, 0.45, 0.23]) name = '1jsu' data_dict = store_location_biological_prot_data(complex_coordinates, name)
mit
Python
be929d518ff320ed8e16f57da55f0855800f7408
Use multi_reduce instead of reduce in enum file loading
Tactique/game_engine,Tactique/game_engine
src/engine/file_loader.py
src/engine/file_loader.py
import os import json from lib import contract, functional data_dir = os.path.join(os.environ['PORTER'], 'data') @contract.accepts(str) @contract.returns(list) def read_and_parse_json(data_type): sub_dir = os.path.join(data_dir, data_type) def full_path(file_name): return os.path.join(sub_dir, file_name) def only_json(file_name): return file_name.endswith('.json') def load_json(json_file_name): with open(json_file_name) as json_file: return json.load(json_file) return map(load_json, filter(only_json, map(full_path, os.listdir(sub_dir)))) @contract.accepts(str) @contract.returns(dict) def load_enum(struct_name): def create_enum_map(enum_map, enumeration, enum_type): enum_map[str(enum_type)] = enumeration return enum_map return functional.multi_reduce( create_enum_map, enumerate(read_and_parse_json(struct_name)[0]), {}) @contract.accepts(str) @contract.returns(dict) def load_struct(struct_name): def create_struct_map(struct_map, struct_): struct_map[str(struct_['name'])] = struct_ return struct_map return reduce(create_struct_map, read_and_parse_json(struct_name), {})
import os import json from lib import contract data_dir = os.path.join(os.environ['PORTER'], 'data') @contract.accepts(str) @contract.returns(list) def read_and_parse_json(data_type): sub_dir = os.path.join(data_dir, data_type) def full_path(file_name): return os.path.join(sub_dir, file_name) def only_json(file_name): return file_name.endswith('.json') def load_json(json_file_name): with open(json_file_name) as json_file: return json.load(json_file) return map(load_json, filter(only_json, map(full_path, os.listdir(sub_dir)))) @contract.accepts(str) @contract.returns(dict) def load_enum(struct_name): def create_enum_map(enum_map, args): enumeration, enum_type = args enum_map[str(enum_type)] = enumeration return enum_map return reduce(create_enum_map, enumerate(read_and_parse_json(struct_name)[0]), {}) @contract.accepts(str) @contract.returns(dict) def load_struct(struct_name): def create_struct_map(struct_map, struct_): struct_map[str(struct_['name'])] = struct_ return struct_map return reduce(create_struct_map, read_and_parse_json(struct_name), {})
mit
Python
1aa44c23e138fadd0b8fc604e5b9dac384901ce3
sort dashboards dropdown
vimeo/graph-explorer,vimeo/graph-explorer,vimeo/graph-explorer,dbirchak/graph-explorer,dbirchak/graph-explorer,dbirchak/graph-explorer,vimeo/graph-explorer,dbirchak/graph-explorer
dashboards.py
dashboards.py
def list_dashboards(): import os wd = os.getcwd() os.chdir('templates/dashboards') dashboards = [] for f in os.listdir("."): if not f.endswith(".tpl"): continue dashboards.append(f[:-4]) os.chdir(wd) return sorted(dashboards)
def list_dashboards(): import os wd = os.getcwd() os.chdir('templates/dashboards') dashboards = [] for f in os.listdir("."): if not f.endswith(".tpl"): continue dashboards.append(f[:-4]) os.chdir(wd) return dashboards
apache-2.0
Python
1df66cc442e93d85fd8a8bbab2815574387a8952
Remove print
villalonreina/dipy,beni55/dipy,FrancoisRheaultUS/dipy,sinkpoint/dipy,JohnGriffiths/dipy,mdesco/dipy,Messaoud-Boudjada/dipy,oesteban/dipy,FrancoisRheaultUS/dipy,matthieudumont/dipy,samuelstjean/dipy,rfdougherty/dipy,oesteban/dipy,demianw/dipy,nilgoyyou/dipy,Messaoud-Boudjada/dipy,StongeEtienne/dipy,samuelstjean/dipy,beni55/dipy,sinkpoint/dipy,nilgoyyou/dipy,JohnGriffiths/dipy,jyeatman/dipy,jyeatman/dipy,demianw/dipy,StongeEtienne/dipy,villalonreina/dipy,samuelstjean/dipy,rfdougherty/dipy,matthieudumont/dipy,mdesco/dipy
doc/examples/brain_extraction_dwi.py
doc/examples/brain_extraction_dwi.py
""" ================================================= Brain segmentation with dipy.segment.mask. ================================================= We show how to extract brain information and mask from a b0 image using dipy's segment.mask module. First import the necessary modules: """ import os.path import numpy as np import nibabel as nib """ Download and read the data for this tutorial. The scil_b0 dataset contains different data from different companies and models. For this example, the data comes from a 3 tesla GE MRI. """ from dipy.data import fetch_scil_b0, read_scil_b0 fetch_scil_b0() img = read_scil_b0() data = np.squeeze(img.get_data()) """ img contains a nibabel Nifti1Image object. Data is the actual brain data as a numpy ndarray. Segment the brain using dipy's mask module. `medostu` returns the segmented brain data an a binary mask of the brain. It is possible to fine tune the `medotsu`'s parameters (median_radius and num_pass) if extraction yields incorrect results but the default parameters work well on most volumes. For this example, default parameters (4, 4) will be used. """ from dipy.segment.mask import medotsu b0_mask, mask = medotsu(data.copy(), 4, 4) """ Saving the segmentation results is very easy using nibabel. We need the b0_mask, and the binary mask volumes. The affine matrix which transform the image's coordinates to the world coordinates is also needed. Here, we choose to save both images in float32. """ mask_img = nib.Nifti1Image(mask.astype(np.float32), img.get_affine()) b0_img = nib.Nifti1Image(b0_mask.astype(np.float32), img.get_affine()) fname = './ge_3t' nib.save(mask_img, fname+'_binary_mask.nii.gz') nib.save(b0_img, fname+'_mask.nii.gz') """ Quick view of the results middle slice using matplotlib. """ import matplotlib.pyplot as plt slice = data.shape[2]/2 plt.figure('Brain segmentation') plt.subplot(1,2,1) plt.imshow(data[:,:,slice]) plt.subplot(1,2,2) plt.imshow(b0_mask[:,:,slice]) plt.show() """ `medostu` can also automaticaly crop the outputs to remove the largest possible number of backgroud voxels. This makes outputted data significantly smaller. `medostu`'s auto cropping is activated by setting the autocrop parameter to True. """ b0_mask_crop, mask_crop = medotsu(data.copy(), 4, 4, autocrop=True) """ Saving cropped data using nibabel as demonstrated previously. """ mask_img_crop = nib.Nifti1Image(mask_crop.astype(np.float32), img.get_affine()) b0_img_crop = nib.Nifti1Image(b0_mask_crop.astype(np.float32), img.get_affine()) nib.save(mask_img_crop, fname+'_binary_mask_crop.nii.gz') nib.save(b0_img_crop, fname+'_mask_crop.nii.gz')
""" ================================================= Brain segmentation with dipy.segment.mask. ================================================= We show how to extract brain information and mask from a b0 image using dipy's segment.mask module. First import the necessary modules: """ import os.path import numpy as np import nibabel as nib """ Download and read the data for this tutorial. The scil_b0 dataset contains different data from different companies and models. For this example, the data comes from a 3 tesla GE MRI. """ from dipy.data import fetch_scil_b0, read_scil_b0 fetch_scil_b0() img = read_scil_b0() data = np.squeeze(img.get_data()) """ img contains a nibabel Nifti1Image object. Data is the actual brain data as a numpy ndarray. Segment the brain using dipy's mask module. `medostu` returns the segmented brain data an a binary mask of the brain. It is possible to fine tune the `medotsu`'s parameters (median_radius and num_pass) if extraction yields incorrect results but the default parameters work well on most volumes. For this example, default parameters (4, 4) will be used. """ print('Segmenting brain data from GE 3T b0 volume...') from dipy.segment.mask import medotsu b0_mask, mask = medotsu(data.copy(), 4, 4) """ Saving the segmentation results is very easy using nibabel. We need the b0_mask, and the binary mask volumes. The affine matrix which transform the image's coordinates to the world coordinates is also needed. Here, we choose to save both images in float32. """ mask_img = nib.Nifti1Image(mask.astype(np.float32), img.get_affine()) b0_img = nib.Nifti1Image(b0_mask.astype(np.float32), img.get_affine()) fname = './ge_3t' nib.save(mask_img, fname+'_binary_mask.nii.gz') nib.save(b0_img, fname+'_mask.nii.gz') """ Quick view of the results middle slice using matplotlib. """ import matplotlib.pyplot as plt slice = data.shape[2]/2 plt.figure('Brain segmentation') plt.subplot(1,2,1) plt.imshow(data[:,:,slice]) plt.subplot(1,2,2) plt.imshow(b0_mask[:,:,slice]) plt.show() """ `medostu` can also automaticaly crop the outputs to remove the largest possible number of backgroud voxels. This makes outputted data significantly smaller. `medostu`'s auto cropping is activated by setting the autocrop parameter to True. """ b0_mask_crop, mask_crop = medotsu(data.copy(), 4, 4, autocrop=True) """ Saving cropped data using nibabel as demonstrated previously. """ mask_img_crop = nib.Nifti1Image(mask_crop.astype(np.float32), img.get_affine()) b0_img_crop = nib.Nifti1Image(b0_mask_crop.astype(np.float32), img.get_affine()) nib.save(mask_img_crop, fname+'_binary_mask_crop.nii.gz') nib.save(b0_img_crop, fname+'_mask_crop.nii.gz')
bsd-3-clause
Python
51b716cc00efd0d0c93ffc11f4cd7242446bad88
Remove unused pyrax import
nvbn/coviolations_web,nvbn/coviolations_web
nodes/management/commands/create_images.py
nodes/management/commands/create_images.py
from gevent import monkey monkey.patch_all() import gevent import os from django.core.management import BaseCommand from django.conf import settings from ...utils import connect_to_node, logger class Command(BaseCommand): help = 'create nodes images' def handle(self, *args, **kwargs): self._root = os.path.join(settings.PROJECT_ROOT, 'nodes', 'images') self._create_image('raw') tasks = [ gevent.spawn(self._create_image, image, image_name='raw') for image in os.listdir(self._root) if image != 'raw' ] gevent.joinall(tasks) def _create_image(self, name, **kwargs): """Create image""" image_root = os.path.join(self._root, name) with connect_to_node(**kwargs) as node: node.put(image_root, '/root/{name}/'.format(name=name)) out = node.execute(''' cd /root/{name}/ bash bootstrap.sh '''.format(name=name)) logger.info(out.stdout) logger.info(out.stderr) node.save_image(name)
from gevent import monkey monkey.patch_all() import gevent import os from django.core.management import BaseCommand from django.conf import settings from ...utils import connect_to_node, logger, pyrax class Command(BaseCommand): help = 'create nodes images' def handle(self, *args, **kwargs): self._root = os.path.join(settings.PROJECT_ROOT, 'nodes', 'images') self._create_image('raw') tasks = [ gevent.spawn(self._create_image, image, image_name='raw') for image in os.listdir(self._root) if image != 'raw' ] gevent.joinall(tasks) def _create_image(self, name, **kwargs): """Create image""" image_root = os.path.join(self._root, name) with connect_to_node(**kwargs) as node: node.put(image_root, '/root/{name}/'.format(name=name)) out = node.execute(''' cd /root/{name}/ bash bootstrap.sh '''.format(name=name)) logger.info(out.stdout) logger.info(out.stderr) node.save_image(name)
mit
Python
9ca88c5cd7f52c6f064a1d5edb003471f6223a74
Change label on click
rituven/winston
Winston.py
Winston.py
import sys from PyQt4.QtGui import * #from PyQt4.QtWidgets import * from PyQt4.QtCore import * from core.Messenger import * from core.Events import * from alexa import AlexaService class QTApp(QWidget): def __init__(self): super(QWidget, self).__init__() self.title = 'Winston' self.setWindowTitle(self.title) self.setGeometry(100,100,800,400) self.btn = QPushButton('', self) self.alexaService = AlexaService() self.messenger = getMessenger() self.initUI() def initUI(self): self.label = QLabel(self) self.label.setText("Hi, I am Winston. How can I help you?") self.label.move(50,40) self.btn.setCheckable(True) self.btn.setIcon(QIcon('media/Alexa_passive.jpg')) self.btn.setIconSize(QSize(150,150)) self.btn.setObjectName("Alexa") self.btn.move(100,70) self.btn.pressed.connect(self.on_press) self.btn.released.connect(self.on_release) self.btn.clicked.connect(self.on_click) self.bool = False self.show() @pyqtSlot() def on_click(self): sending_button = self.sender() # TODO if not self.bool: self.label.setText('listening ...') self.bool = True else: self.label.setText("Hi, I am Winston. How can I help you?") self.bool = False data = {'App': str(sending_button.objectName())} self.messenger.postEvent(Events.UI_BTN_CLICKED, data) @pyqtSlot() def on_press(self): sending_button = self.sender() data = {'App': str(sending_button.objectName())} self.btn.setIcon(QIcon('media/Alexa_active.jpg')) self.btn.setCheckable(False); self.messenger.postEvent(Events.UI_BTN_PRESSED, data) @pyqtSlot() def on_release(self): sending_button = self.sender() data = {'App': str(sending_button.objectName())} self.btn.setIcon(QIcon('media/Alexa_passive.jpg')) self.btn.setCheckable(True); self.messenger.postEvent(Events.UI_BTN_RELEASED, data) if __name__ == '__main__': app = QApplication(sys.argv) ex = QTApp() app.exec_() delMessenger() sys.exit()
import sys from PyQt4.QtGui import * #from PyQt4.QtWidgets import * from PyQt4.QtCore import * from core.Messenger import * from core.Events import * from alexa import AlexaService class QTApp(QWidget): def __init__(self): super(QWidget, self).__init__() self.title = 'Winston' self.setWindowTitle(self.title) self.setGeometry(100,100,800,400) self.btn = QPushButton('', self) self.alexaService = AlexaService() self.messenger = getMessenger() self.initUI() def initUI(self): b = QLabel(self) b.setText("Hi, I am Winston. How can I help you?") b.move(50,40) self.btn.setCheckable(True) self.btn.setIcon(QIcon('media/Alexa_passive.jpg')) self.btn.setIconSize(QSize(150,150)) self.btn.setObjectName("Alexa") self.btn.move(100,70) self.btn.pressed.connect(self.on_press) self.btn.released.connect(self.on_release) self.btn.clicked.connect(self.on_click) self.show() @pyqtSlot() def on_click(self): sending_button = self.sender() data = {'App': str(sending_button.objectName())} self.messenger.postEvent(Events.UI_BTN_CLICKED, data) @pyqtSlot() def on_press(self): sending_button = self.sender() data = {'App': str(sending_button.objectName())} self.btn.setIcon(QIcon('media/Alexa_active.jpg')) self.btn.setCheckable(False); self.messenger.postEvent(Events.UI_BTN_PRESSED, data) @pyqtSlot() def on_release(self): sending_button = self.sender() data = {'App': str(sending_button.objectName())} self.btn.setIcon(QIcon('media/Alexa_passive.jpg')) self.btn.setCheckable(True); self.messenger.postEvent(Events.UI_BTN_RELEASED, data) if __name__ == '__main__': app = QApplication(sys.argv) ex = QTApp() app.exec_() delMessenger() sys.exit()
apache-2.0
Python
2f31a1f0745214c2b06dadc1258926f7440d429f
Set datetime output format to ISO8601
olinlibrary/ABE,olinlibrary/ABE,olinlibrary/ABE
abe/app.py
abe/app.py
#!/usr/bin/env python3 """Main flask app""" from flask import Flask, render_template, jsonify from flask_restful import Api from flask_cors import CORS from flask_sslify import SSLify # redirect to https from flask.json import JSONEncoder from datetime import datetime import os import logging FORMAT = "%(levelname)s:ABE: _||_ %(message)s" logging.basicConfig(level=logging.DEBUG, format=FORMAT) from .resource_models.event_resources import EventApi from .resource_models.label_resources import LabelApi from .resource_models.ics_resources import ICSApi app = Flask(__name__) CORS(app) SSLify(app) api = Api(app) class CustomJSONEncoder(JSONEncoder): def default(self, obj): if isinstance(obj, datetime): return obj.isoformat() else: return JSONEncoder.default(self, obj) app.json_encoder = CustomJSONEncoder # add return representations @api.representation('application/json') def output_json(data, code, headers=None): resp = jsonify(data) resp.status_code = code resp.headers.extend(headers or {}) return resp # Route resources api.add_resource(EventApi, '/events/', methods=['GET', 'POST'], endpoint='event') api.add_resource(EventApi, '/events/<string:event_id>', methods=['GET', 'PUT', 'PATCH', 'DELETE'], endpoint='event_id') # TODO: add route for string/gphycat links api.add_resource(EventApi, '/events/<string:event_id>/<string:rec_id>', methods=['GET', 'PUT', 'PATCH', 'DELETE'], endpoint='rec_id') # TODO: add route for string/gphycat links api.add_resource(LabelApi, '/labels/', methods=['GET', 'POST'], endpoint='label') api.add_resource(LabelApi, '/labels/<string:label_name>', methods=['GET', 'PUT', 'PATCH', 'DELETE'], endpoint='label_name') api.add_resource(ICSApi, '/ics/', methods=['GET', 'POST'], endpoint='ics') api.add_resource(ICSApi, '/ics/<string:ics_name>', methods=['GET', 'PUT', 'PATCH', 'DELETE'], endpoint='ics_name') @app.route('/') def splash(): return render_template('splash.html') @app.route('/add_event') def add_event(): return render_template('add_event.html') @app.route('/add_label') def add_label(): return render_template('add_label.html') if __name__ == '__main__': app.debug = os.getenv('FLASK_DEBUG') != 'False' # updates the page as the code is saved HOST = '0.0.0.0' if 'PORT' in os.environ else '127.0.0.1' PORT = int(os.environ.get('PORT', 3000)) app.run(host='0.0.0.0', port=PORT)
#!/usr/bin/env python3 """Main flask app""" from flask import Flask, render_template, jsonify from flask_restful import Api from flask_cors import CORS from flask_sslify import SSLify # redirect to https import os import logging FORMAT = "%(levelname)s:ABE: _||_ %(message)s" logging.basicConfig(level=logging.DEBUG, format=FORMAT) from .resource_models.event_resources import EventApi from .resource_models.label_resources import LabelApi from .resource_models.ics_resources import ICSApi app = Flask(__name__) CORS(app) SSLify(app) api = Api(app) # add return representations @api.representation('application/json') def output_json(data, code, headers=None): resp = jsonify(data) resp.status_code = code resp.headers.extend(headers or {}) return resp # Route resources api.add_resource(EventApi, '/events/', methods=['GET', 'POST'], endpoint='event') api.add_resource(EventApi, '/events/<string:event_id>', methods=['GET', 'PUT', 'PATCH', 'DELETE'], endpoint='event_id') # TODO: add route for string/gphycat links api.add_resource(EventApi, '/events/<string:event_id>/<string:rec_id>', methods=['GET', 'PUT', 'PATCH', 'DELETE'], endpoint='rec_id') # TODO: add route for string/gphycat links api.add_resource(LabelApi, '/labels/', methods=['GET', 'POST'], endpoint='label') api.add_resource(LabelApi, '/labels/<string:label_name>', methods=['GET', 'PUT', 'PATCH', 'DELETE'], endpoint='label_name') api.add_resource(ICSApi, '/ics/', methods=['GET', 'POST'], endpoint='ics') api.add_resource(ICSApi, '/ics/<string:ics_name>', methods=['GET', 'PUT', 'PATCH', 'DELETE'], endpoint='ics_name') @app.route('/') def splash(): return render_template('splash.html') @app.route('/add_event') def add_event(): return render_template('add_event.html') @app.route('/add_label') def add_label(): return render_template('add_label.html') if __name__ == '__main__': app.debug = os.getenv('FLASK_DEBUG') != 'False' # updates the page as the code is saved HOST = '0.0.0.0' if 'PORT' in os.environ else '127.0.0.1' PORT = int(os.environ.get('PORT', 3000)) app.run(host='0.0.0.0', port=PORT)
agpl-3.0
Python
ba1494afb962fb8fba84e306cfb4c26a83602b6d
update license
chuckhousley/DrinkMachine,chuckhousley/DrinkMachine,chuckhousley/DrinkMachine
drink.py
drink.py
# -*- coding: utf-8 -*-
import os

from server import app, db
import server.model

if __name__ == "__main__":
    db.create_all()
    app.run(debug=True)  # host='10.10.56.190')
# -*- coding: utf-8 -*-
"""
Copyright (C) 2014 Chuck Housley

This work is free. You can redistribute it and/or modify it under the
terms of the Do What The Fuck You Want To Public License, Version 2,
as published by Sam Hocevar. See the COPYING file for more details.
"""
import os

from server import app, db
import server.model

if __name__ == "__main__":
    db.create_all()
    app.run(debug=True)  # host='10.10.56.190')
mit
Python
1b726978e1604269c8c4d2728a6f7ce774e5d16d
Fix edit control assessment modal
kr41/ggrc-core,kr41/ggrc-core,vladan-m/ggrc-core,vladan-m/ggrc-core,vladan-m/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,andrei-karalionak/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,hasanalom/ggrc-core,hyperNURb/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,vladan-m/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,vladan-m/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,hyperNURb/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core
src/ggrc/models/control_assessment.py
src/ggrc/models/control_assessment.py
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]

from ggrc import db
from .mixins import (
    deferred, BusinessObject, Timeboxed, CustomAttributable, TestPlanned
)
from .object_document import Documentable
from .object_owner import Ownable
from .object_person import Personable
from .relationship import Relatable
from .track_object_state import HasObjectState, track_state_for_class
from ggrc.models.reflection import PublishOnly


class ControlAssessment(HasObjectState, TestPlanned, CustomAttributable,
                        Documentable, Personable, Timeboxed, Ownable,
                        Relatable, BusinessObject, db.Model):
  __tablename__ = 'control_assessments'

  design = deferred(db.Column(db.String), 'ControlAssessment')
  operationally = deferred(db.Column(db.String), 'ControlAssessment')

  control_id = db.Column(db.Integer, db.ForeignKey('controls.id'))
  control = db.relationship('Control', foreign_keys=[control_id])

  audit = {}  # we add this for the sake of client side error checking

  # REST properties
  _publish_attrs = [
      'design',
      'operationally',
      'control',
      PublishOnly('audit')
  ]

track_state_for_class(ControlAssessment)
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]

from ggrc import db
from .mixins import (
    deferred, BusinessObject, Timeboxed, CustomAttributable, TestPlanned
)
from .object_document import Documentable
from .object_owner import Ownable
from .object_person import Personable
from .relationship import Relatable
from .track_object_state import HasObjectState, track_state_for_class
from ggrc.models.reflection import PublishOnly


class ControlAssessment(HasObjectState, TestPlanned, CustomAttributable,
                        Documentable, Personable, Timeboxed, Ownable,
                        Relatable, BusinessObject, db.Model):
  __tablename__ = 'control_assessments'

  design = deferred(db.Column(db.String), 'ControlAssessment')
  operationally = deferred(db.Column(db.String), 'ControlAssessment')

  control_id = db.Column(db.Integer, db.ForeignKey('controls.id'))
  control = db.relationship('Control', foreign_keys=[control_id])

  # REST properties
  _publish_attrs = [
      'design',
      'operationally',
      'control'
  ]

track_state_for_class(ControlAssessment)
apache-2.0
Python
e7d9a67611b2dc443c1f2bc23506323837d79bda
fix test_mcp
radarsat1/siconos,fperignon/siconos,siconos/siconos-deb,radarsat1/siconos,fperignon/siconos,bremond/siconos,bremond/siconos,siconos/siconos-deb,siconos/siconos-deb,radarsat1/siconos,siconos/siconos,radarsat1/siconos,siconos/siconos-deb,fperignon/siconos,fperignon/siconos,siconos/siconos,bremond/siconos,radarsat1/siconos,bremond/siconos,siconos/siconos,siconos/siconos-deb,bremond/siconos,siconos/siconos-deb,siconos/siconos,fperignon/siconos
numerics/swig/tests/test_mcp.py
numerics/swig/tests/test_mcp.py
# Copyright (C) 2005, 2012 by INRIA
#!/usr/bin/env python

import numpy as np
import siconos.numerics as N


def mcp_function(z):
    M = np.array([[2., 1.],
                  [1., 2.]])

    q = np.array([-5., -6.])
    return np.dot(M,z) + q


def mcp_Nablafunction(z):
    M = np.array([[2., 1.],
                  [1., 2.]])
    return M

# solution
zsol = np.array([4./3., 7./3.])
wsol = np.array([0. , 0.])

# problem
#mcp=N.MCP(1,1,mcp_function,mcp_Nablafunction)

ztol = 1e-8


def test_new():
    mcp = N.MCP(1, 1, mcp_function, mcp_Nablafunction)


def test_mcp_FB():
    mcp = N.MCP(1,1,mcp_function,mcp_Nablafunction)
    z = np.array([0., 0.])
    w = np.array([0., 0.])

    SO = N.SolverOptions(mcp,N.SICONOS_MCP_FB)
    N.mcp_driver_init(mcp, SO)
    info = N.mcp_FischerBurmeister(mcp, z, w, SO)
    N.mcp_driver_reset(mcp, SO)
    print("z = ", z)
    print("w = ", w)
    assert (np.linalg.norm(z-zsol) <= ztol)
    assert not info
# Copyright (C) 2005, 2012 by INRIA
#!/usr/bin/env python

import numpy as np
import siconos.numerics as N


def mcp_function (z) :
    M = np.array([[2., 1.],
                  [1., 2.]])

    q = np.array([-5., -6.])
    return dot(M,z) + q


def mcp_Nablafunction (z) :
    M = np.array([[2., 1.],
                  [1., 2.]])
    return M

# solution
zsol = np.array([4./3., 7./3.])
wsol = np.array([0. , 0.])

# problem
#mcp=N.MCP(1,1,mcp_function,mcp_Nablafunction)

ztol = 1e-8


def test_new():
    mcp = N.MCP(1,1,mcp_function,mcp_Nablafunction)


def test_mcp_FB():
    mcp = N.MCP(1,1,mcp_function,mcp_Nablafunction)
    z = np.array([0., 0.])
    w = np.array([0., 0.])

    SO = N.SolverOptions(mcp,N.SICONOS_MCP_FB)
    N.mcp_driver_init(mcp, SO)
    info = N.mcp_FischerBurmeister(mcp, z, w, SO)
    N.mcp_driver_reset(mcp, SO)
    #print("z = ", z)
    #print("w = ", w)
    assert (np.linalg.norm(z-zsol) <= ztol)
    assert not info
apache-2.0
Python
2fbdd9903fc9bf6e1fe797e92c0157abd67850ce
add robust tests for exec_command()
dimasad/numpy,rmcgibbo/numpy,endolith/numpy,ekalosak/numpy,pdebuyl/numpy,felipebetancur/numpy,njase/numpy,kiwifb/numpy,dch312/numpy,sigma-random/numpy,NextThought/pypy-numpy,Anwesh43/numpy,Srisai85/numpy,Eric89GXL/numpy,numpy/numpy,bringingheavendown/numpy,BMJHayward/numpy,mhvk/numpy,charris/numpy,tacaswell/numpy,pbrod/numpy,pbrod/numpy,WillieMaddox/numpy,dimasad/numpy,chatcannon/numpy,rudimeier/numpy,mattip/numpy,Anwesh43/numpy,Srisai85/numpy,stuarteberg/numpy,BMJHayward/numpy,cowlicks/numpy,stuarteberg/numpy,hainm/numpy,gmcastil/numpy,KaelChen/numpy,mortada/numpy,hainm/numpy,madphysicist/numpy,brandon-rhodes/numpy,pbrod/numpy,grlee77/numpy,KaelChen/numpy,Yusa95/numpy,has2k1/numpy,endolith/numpy,ekalosak/numpy,pdebuyl/numpy,mhvk/numpy,jorisvandenbossche/numpy,dwillmer/numpy,rajathkumarmp/numpy,dato-code/numpy,Eric89GXL/numpy,bringingheavendown/numpy,ssanderson/numpy,abalkin/numpy,SunghanKim/numpy,MichaelAquilina/numpy,skymanaditya1/numpy,jonathanunderwood/numpy,drasmuss/numpy,WarrenWeckesser/numpy,musically-ut/numpy,ViralLeadership/numpy,mwiebe/numpy,moreati/numpy,bertrand-l/numpy,sonnyhu/numpy,chiffa/numpy,CMartelLML/numpy,pyparallel/numpy,ESSS/numpy,sinhrks/numpy,dice/numpy,mingwpy/numpy,ahaldane/numpy,grlee77/numpy,pizzathief/numpy,Dapid/numpy,empeeu/numpy,ChristopherHogan/numpy,ChanderG/numpy,GrimDerp/numpy,nbeaver/numpy,b-carter/numpy,solarjoe/numpy,AustereCuriosity/numpy,gfyoung/numpy,utke1/numpy,groutr/numpy,seberg/numpy,rgommers/numpy,anntzer/numpy,rmcgibbo/numpy,shoyer/numpy,jakirkham/numpy,MSeifert04/numpy,embray/numpy,ogrisel/numpy,madphysicist/numpy,simongibbons/numpy,mathdd/numpy,argriffing/numpy,rherault-insa/numpy,maniteja123/numpy,skwbc/numpy,kirillzhuravlev/numpy,tynn/numpy,jankoslavic/numpy,joferkington/numpy,ddasilva/numpy,jschueller/numpy,nguyentu1602/numpy,naritta/numpy,trankmichael/numpy,GaZ3ll3/numpy,Linkid/numpy,cjermain/numpy,yiakwy/numpy,rhythmsosad/numpy,bmorris3/numpy,SiccarPoint/numpy,BabeNovelty/numpy,andsor/numpy,tdsmith/numpy,behzadnouri/numpy,githubmlai/numpy,felipebetancur/numpy,MaPePeR/numpy,immerrr/numpy,dch312/numpy,ContinuumIO/numpy,leifdenby/numpy,WillieMaddox/numpy,njase/numpy,mindw/numpy,immerrr/numpy,dato-code/numpy,joferkington/numpy,ContinuumIO/numpy,immerrr/numpy,WillieMaddox/numpy,rgommers/numpy,mingwpy/numpy,embray/numpy,dwillmer/numpy,mattip/numpy,ahaldane/numpy,SiccarPoint/numpy,gfyoung/numpy,jakirkham/numpy,BMJHayward/numpy,embray/numpy,ssanderson/numpy,nbeaver/numpy,Srisai85/numpy,musically-ut/numpy,ajdawson/numpy,bertrand-l/numpy,kirillzhuravlev/numpy,shoyer/numpy,ogrisel/numpy,ChristopherHogan/numpy,utke1/numpy,behzadnouri/numpy,tdsmith/numpy,numpy/numpy,Linkid/numpy,WillieMaddox/numpy,rherault-insa/numpy,skymanaditya1/numpy,Anwesh43/numpy,SunghanKim/numpy,rudimeier/numpy,yiakwy/numpy,ekalosak/numpy,sonnyhu/numpy,endolith/numpy,joferkington/numpy,mindw/numpy,GaZ3ll3/numpy,ogrisel/numpy,GaZ3ll3/numpy,tynn/numpy,joferkington/numpy,MaPePeR/numpy,moreati/numpy,dch312/numpy,GrimDerp/numpy,skwbc/numpy,kiwifb/numpy,ajdawson/numpy,grlee77/numpy,njase/numpy,mhvk/numpy,jorisvandenbossche/numpy,jschueller/numpy,chatcannon/numpy,hainm/numpy,simongibbons/numpy,immerrr/numpy,brandon-rhodes/numpy,yiakwy/numpy,argriffing/numpy,shoyer/numpy,gfyoung/numpy,cjermain/numpy,CMartelLML/numpy,CMartelLML/numpy,rajathkumarmp/numpy,simongibbons/numpy,stuarteberg/numpy,bmorris3/numpy,njase/numpy,WarrenWeckesser/numpy,empeeu/numpy,kirillzhuravlev/numpy,solarjoe/numpy,jorisvandenbossche/numpy,BMJHayward/numpy,ajdawson/numpy,rhythmsosad/numpy,jschueller/numpy,sigma-random/numpy,rhythmsosad/numpy,charris/numpy,ESSS/numpy,BabeNovelty/numpy,leifdenby/numpy,felipebetancur/numpy,dwillmer/numpy,cjermain/numpy,naritta/numpy,ESSS/numpy,skwbc/numpy,BabeNovelty/numpy,behzadnouri/numpy,shoyer/numpy,Linkid/numpy,skymanaditya1/numpy,ahaldane/numpy,rhythmsosad/numpy,yiakwy/numpy,dch312/numpy,jakirkham/numpy,AustereCuriosity/numpy,trankmichael/numpy,GaZ3ll3/numpy,pizzathief/numpy,mattip/numpy,seberg/numpy,pizzathief/numpy,Linkid/numpy,Yusa95/numpy,madphysicist/numpy,naritta/numpy,simongibbons/numpy,mortada/numpy,rmcgibbo/numpy,NextThought/pypy-numpy,cjermain/numpy,tdsmith/numpy,SiccarPoint/numpy,jonathanunderwood/numpy,larsmans/numpy,trankmichael/numpy,argriffing/numpy,Yusa95/numpy,musically-ut/numpy,felipebetancur/numpy,bmorris3/numpy,yiakwy/numpy,andsor/numpy,GaZ3ll3/numpy,mortada/numpy,ahaldane/numpy,nguyentu1602/numpy,dimasad/numpy,Dapid/numpy,nguyentu1602/numpy,AustereCuriosity/numpy,Dapid/numpy,mattip/numpy,b-carter/numpy,dimasad/numpy,cowlicks/numpy,jschueller/numpy,pdebuyl/numpy,SiccarPoint/numpy,ChristopherHogan/numpy,musically-ut/numpy,utke1/numpy,sinhrks/numpy,seberg/numpy,mingwpy/numpy,ogrisel/numpy,tdsmith/numpy,nguyentu1602/numpy,grlee77/numpy,rherault-insa/numpy,moreati/numpy,groutr/numpy,tdsmith/numpy,charris/numpy,KaelChen/numpy,ogrisel/numpy,trankmichael/numpy,rudimeier/numpy,felipebetancur/numpy,Linkid/numpy,anntzer/numpy,bertrand-l/numpy,ContinuumIO/numpy,cjermain/numpy,ContinuumIO/numpy,Dapid/numpy,leifdenby/numpy,gmcastil/numpy,simongibbons/numpy,MSeifert04/numpy,sonnyhu/numpy,githubmlai/numpy,embray/numpy,githubmlai/numpy,cowlicks/numpy,rmcgibbo/numpy,behzadnouri/numpy,rajathkumarmp/numpy,jschueller/numpy,chiffa/numpy,WarrenWeckesser/numpy,Anwesh43/numpy,seberg/numpy,MichaelAquilina/numpy,naritta/numpy,rhythmsosad/numpy,dwillmer/numpy,chiffa/numpy,mingwpy/numpy,has2k1/numpy
numpy/distutils/tests/test_exec_command.py
numpy/distutils/tests/test_exec_command.py
import os
import sys
import StringIO

from numpy.distutils import exec_command


class redirect_stdout(object):
    """Context manager to redirect stdout for exec_command test."""
    def __init__(self, stdout=None):
        self._stdout = stdout or sys.stdout

    def __enter__(self):
        self.old_stdout = sys.stdout
        sys.stdout = self._stdout

    def __exit__(self, exc_type, exc_value, traceback):
        self._stdout.flush()
        sys.stdout = self.old_stdout


class redirect_stderr(object):
    """Context manager to redirect stderr for exec_command test."""
    def __init__(self, stderr=None):
        self._stderr = stderr or sys.stderr

    def __enter__(self):
        self.old_stderr = sys.stderr
        sys.stderr = self._stderr

    def __exit__(self, exc_type, exc_value, traceback):
        self._stderr.flush()
        sys.stderr = self.old_stderr


class emulate_nonposix(object):
    """Context manager to emulate os.name != 'posix' """
    def __init__(self, osname='non-posix'):
        self._new_name = osname

    def __enter__(self):
        self._old_name = os.name
        os.name = self._new_name

    def __exit__(self, exc_type, exc_value, traceback):
        os.name = self._old_name


def test_exec_command_stdout():
    # Regression test for gh-2999 and gh-2915.
    # There are several packages (nose, scipy.weave.inline, Sage inline
    # Fortran) that replace stdout, in which case it doesn't have a fileno
    # method. This is tested here, with a do-nothing command that fails if the
    # presence of fileno() is assumed in exec_command.

    # Test posix version:
    with redirect_stdout(StringIO.StringIO()):
        exec_command.exec_command("cd '.'")

    # Test non-posix version:
    with emulate_nonposix():
        with redirect_stdout(StringIO.StringIO()):
            exec_command.exec_command("cd '.'")

def test_exec_command_stderr():
    # Test posix version:
    with redirect_stderr(StringIO.StringIO()):
        exec_command.exec_command("cd '.'")

    # Test non-posix version:
    # Note: this test reveals a failure
    #with emulate_nonposix():
    #    with redirect_stderr(StringIO.StringIO()):
    #        exec_command.exec_command("cd '.'")
import sys
import StringIO

from numpy.distutils import exec_command


class redirect_stdout(object):
    """Context manager to redirect stdout for exec_command test."""
    def __init__(self, stdout=None):
        self._stdout = stdout or sys.stdout

    def __enter__(self):
        self.old_stdout = sys.stdout
        sys.stdout = self._stdout

    def __exit__(self, exc_type, exc_value, traceback):
        self._stdout.flush()
        sys.stdout = self.old_stdout


def test_exec_command():
    # Regression test for gh-2999 and gh-2915.
    # There are several packages (nose, scipy.weave.inline, Sage inline
    # Fortran) that replace stdout, in which case it doesn't have a fileno
    # method. This is tested here, with a do-nothing command that fails if the
    # presence of fileno() is assumed in exec_command.
    with redirect_stdout(StringIO.StringIO()):
        exec_command.exec_command("cd '.'")
bsd-3-clause
Python
7138cd2fb7a5dc8a5044f15b19d3d53a1486dec3
order by companies by name, helps when viewing adding companies to jobs entry form
berkerpeksag/pythondotorg,SujaySKumar/pythondotorg,lebronhkh/pythondotorg,fe11x/pythondotorg,berkerpeksag/pythondotorg,malemburg/pythondotorg,ahua/pythondotorg,Mariatta/pythondotorg,ahua/pythondotorg,Mariatta/pythondotorg,demvher/pythondotorg,malemburg/pythondotorg,fe11x/pythondotorg,fe11x/pythondotorg,proevo/pythondotorg,demvher/pythondotorg,lsk112233/Clone-test-repo,willingc/pythondotorg,malemburg/pythondotorg,Mariatta/pythondotorg,fe11x/pythondotorg,proevo/pythondotorg,lepture/pythondotorg,ahua/pythondotorg,lepture/pythondotorg,lepture/pythondotorg,SujaySKumar/pythondotorg,SujaySKumar/pythondotorg,lebronhkh/pythondotorg,lsk112233/Clone-test-repo,manhhomienbienthuy/pythondotorg,demvher/pythondotorg,berkerpeksag/pythondotorg,willingc/pythondotorg,willingc/pythondotorg,lsk112233/Clone-test-repo,python/pythondotorg,fe11x/pythondotorg,python/pythondotorg,manhhomienbienthuy/pythondotorg,ahua/pythondotorg,lebronhkh/pythondotorg,demvher/pythondotorg,lepture/pythondotorg,lsk112233/Clone-test-repo,lsk112233/Clone-test-repo,python/pythondotorg,proevo/pythondotorg,manhhomienbienthuy/pythondotorg,lepture/pythondotorg,berkerpeksag/pythondotorg,berkerpeksag/pythondotorg,SujaySKumar/pythondotorg,malemburg/pythondotorg,SujaySKumar/pythondotorg,lebronhkh/pythondotorg,proevo/pythondotorg,willingc/pythondotorg,lebronhkh/pythondotorg,demvher/pythondotorg,manhhomienbienthuy/pythondotorg,Mariatta/pythondotorg,python/pythondotorg,ahua/pythondotorg
companies/models.py
companies/models.py
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _

from markupfield.fields import MarkupField

from cms.models import NameSlugModel

DEFAULT_MARKUP_TYPE = getattr(settings, 'DEFAULT_MARKUP_TYPE', 'restructuredtext')


class Company(NameSlugModel):
    about = MarkupField(blank=True, default_markup_type=DEFAULT_MARKUP_TYPE)
    contact = models.CharField(null=True, blank=True, max_length=100)
    email = models.EmailField(null=True, blank=True)
    url = models.URLField('URL', null=True, blank=True)
    logo = models.ImageField(upload_to='companies/logos/', blank=True, null=True)

    class Meta:
        verbose_name = _('company')
        verbose_name_plural = _('companies')
        ordering = ('name', )
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _

from markupfield.fields import MarkupField

from cms.models import NameSlugModel

DEFAULT_MARKUP_TYPE = getattr(settings, 'DEFAULT_MARKUP_TYPE', 'restructuredtext')


class Company(NameSlugModel):
    about = MarkupField(blank=True, default_markup_type=DEFAULT_MARKUP_TYPE)
    contact = models.CharField(null=True, blank=True, max_length=100)
    email = models.EmailField(null=True, blank=True)
    url = models.URLField('URL', null=True, blank=True)
    logo = models.ImageField(upload_to='companies/logos/', blank=True, null=True)

    class Meta:
        verbose_name = _('company')
        verbose_name_plural = _('companies')
apache-2.0
Python
10e7388eec8d16f5a69e5d4f3b9e6cf56a1c956e
Remove explicit byte string from migration 0003 (#298)
jazzband/silk,mtford90/silk,django-silk/silk,mtford90/silk,jazzband/silk,django-silk/silk,mtford90/silk,django-silk/silk,django-silk/silk,jazzband/silk,jazzband/silk,mtford90/silk
silk/migrations/0003_request_prof_file.py
silk/migrations/0003_request_prof_file.py
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-08 18:23
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('silk', '0002_auto_update_uuid4_id_field'),
    ]

    operations = [
        migrations.AddField(
            model_name='request',
            name='prof_file',
            field=models.FileField(null=True, upload_to=''),
        ),
    ]
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-08 18:23
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('silk', '0002_auto_update_uuid4_id_field'),
    ]

    operations = [
        migrations.AddField(
            model_name='request',
            name='prof_file',
            field=models.FileField(null=True, upload_to=b''),
        ),
    ]
mit
Python
30452b9fe815a2b68826b739625d1c06886fb17e
Remove redundant isinstance() check
vmalloc/pact
pact/group.py
pact/group.py
import itertools

from .base import PactBase


class PactGroup(PactBase):

    def __init__(self, pacts=None, lazy=True):
        if pacts is None:
            pacts = []
        self._pacts = list(pacts)
        self._finished_pacts = []
        self._is_lazy = lazy
        super(PactGroup, self).__init__()

    def __iadd__(self, other):
        self.add(other)
        return self

    def __iter__(self):
        return itertools.chain(self._pacts, self._finished_pacts)

    def add(self, pact, absorb=False):
        if absorb and isinstance(pact, PactGroup):
            raise NotImplementedError('Absorbing groups is not supported')  # pragma: no cover
        self._pacts.append(pact)
        if absorb:
            # pylint: disable=protected-access
            while pact._then:
                # then might throw, so we attempt it first
                self.then(pact._then[0])
                pact._then.pop(0)

    def _is_finished(self):
        has_finished = True
        indexes_to_remove = []
        for index, pact in enumerate(self._pacts):
            if pact.poll():
                indexes_to_remove.append(index)
            else:
                has_finished = False
                if self._is_lazy:
                    break
        for index in reversed(indexes_to_remove):
            self._finished_pacts.append(self._pacts.pop(index))
        return has_finished

    def __repr__(self):
        return repr(list(self._pacts))
import itertools

from .base import PactBase


class PactGroup(PactBase):

    def __init__(self, pacts=None, lazy=True):
        if pacts is None:
            pacts = []
        self._pacts = list(pacts)
        self._finished_pacts = []
        self._is_lazy = lazy
        super(PactGroup, self).__init__()

    def __iadd__(self, other):
        self.add(other)
        return self

    def __iter__(self):
        return itertools.chain(self._pacts, self._finished_pacts)

    def add(self, pact, absorb=False):
        if absorb and isinstance(pact, PactGroup):
            if isinstance(pact, PactGroup):
                raise NotImplementedError('Absorbing groups is not supported')  # pragma: no cover
        self._pacts.append(pact)
        if absorb:
            # pylint: disable=protected-access
            while pact._then:
                # then might throw, so we attempt it first
                self.then(pact._then[0])
                pact._then.pop(0)

    def _is_finished(self):
        has_finished = True
        indexes_to_remove = []
        for index, pact in enumerate(self._pacts):
            if pact.poll():
                indexes_to_remove.append(index)
            else:
                has_finished = False
                if self._is_lazy:
                    break
        for index in reversed(indexes_to_remove):
            self._finished_pacts.append(self._pacts.pop(index))
        return has_finished

    def __repr__(self):
        return repr(list(self._pacts))
bsd-3-clause
Python
7dc9085bf0665efc3083b64c0b34cb7c8c92ae31
update now drops duplicates
DepthDeluxe/dot11sniffer,DepthDeluxe/dot11sniffer,DepthDeluxe/dot11sniffer,DepthDeluxe/dot11sniffer,DepthDeluxe/dot11sniffer
dblib/dbUpdate.py
dblib/dbUpdate.py
import pymongo
import multiprocessing
import multiprocessing.connection
import time

SIZE = 128
NUM_NODES = 3

def recv_data(sock,dataQueue,cQueue):
    connect = sock.accept()
    cQueue.put("listen")

    data = connect.recv()
    dataQueue.put(data)
    connect.close()

    print("received data")
    exit(0)

def db_send(database,queue):
    collection = database.times

    t = int(time.time())
    doc = int(t/600)

    for i in range(queue.qsize()):
        data = queue.get()
        data = data.split(',')
        for j in range(0,len(data)-3,4):
            new_posts = {}
            new_posts.update({'data':{"mac":data[j+3],'node':int(data[0]),'time':int(data[j+1]),'sigstr':int(data[j+2])}})
            collection.update({'_id':doc},{"$addToSet":new_posts},upsert=True)
##            dic = {'node':temp[0],'time':temp[1],'sigstr':temp[2],'mac':temp[3]}
##            new_posts.append(dic)

##    posts.insert_many(new_posts)
    print("sent")
    exit(0)

def server(host,port):
    client = pymongo.MongoClient()
    db = client.cheddar

    sock = multiprocessing.connection.Listener((host,port))
    dq = multiprocessing.Queue()
    cq = multiprocessing.Queue()
    cq.put("listen")

    while True:
        try:
            task = cq.get(True,1)
        except:
            task = "none"

        if task == "listen":
            print("spawning listening thread")
            p = multiprocessing.Process(target=recv_data, args=(sock,dq,cq))
            p.start()

##        if (dq.qsize() == 100):
        if dq.qsize() != 0:
            print("spawning sending thread")
            p = multiprocessing.Process(target=db_send,args=(db,dq))
            p.start()
##        pass

server('',10000)
import pymongo
import multiprocessing
import multiprocessing.connection
import time

SIZE = 128
NUM_NODES = 3

def recv_data(sock,dataQueue,cQueue):
    connect = sock.accept()
    cQueue.put("listen")

    data = connect.recv()
    dataQueue.put(data)
    connect.close()

    print("received data")
    exit(0)

def db_send(database,queue):
    collection = database.times

    t = int(time.time())
    doc = int(t/600)

    for i in range(queue.qsize()):
        data = queue.get()
        data = data.split(',')
        for j in range(0,len(data)-3,4):
            new_posts = {}
            new_posts.update({'data':{"mac":data[j+3],'node':int(data[0]),'time':int(data[j+1]),'sigstr':int(data[j+2])}})
            collection.update({'_id':doc},{"$push":new_posts},upsert=True)
##            dic = {'node':temp[0],'time':temp[1],'sigstr':temp[2],'mac':temp[3]}
##            new_posts.append(dic)

##    posts.insert_many(new_posts)
    print("sent")
    exit(0)

def server(host,port):
    client = pymongo.MongoClient()
    db = client.cheddar

    sock = multiprocessing.connection.Listener((host,port))
    dq = multiprocessing.Queue()
    cq = multiprocessing.Queue()
    cq.put("listen")

    while True:
        try:
            task = cq.get(True,1)
        except:
            task = "none"

        if task == "listen":
            print("spawning listening thread")
            p = multiprocessing.Process(target=recv_data, args=(sock,dq,cq))
            p.start()

##        if (dq.qsize() == 100):
        if dq.qsize() != 0:
            print("spawning sending thread")
            p = multiprocessing.Process(target=db_send,args=(db,dq))
            p.start()
##        pass

server('',10000)
mit
Python
e76d6ad7a4670bfa47ba506343aff2e5f118f976
fix rsync options for use in shared scenarios
uwescience/myria,jamesmarva/myria,jamesmarva/myria,uwescience/myria,bsalimi/myria,bsalimi/myria,uwescience/myria,bsalimi/myria,jamesmarva/myria
myriadeploy/update_myria_jar_only.py
myriadeploy/update_myria_jar_only.py
#!/usr/bin/env python

import myriadeploy

import subprocess
import sys


def host_port_list(workers):
    return [str(worker[0]) + ':' + str(worker[1]) for worker in workers]


def get_host_port_path(node, default_path):
    if len(node) == 2:
        (hostname, port) = node
        if default_path is None:
            raise Exception("Path not specified for node %s" % str(node))
        else:
            path = default_path
    else:
        (hostname, port, path) = node
    return (hostname, port, path)


def copy_distribution(config):
    "Copy the distribution (jar and libs and conf) to compute nodes."
    nodes = config['nodes']
    description = config['description']
    default_path = config['path']
    username = config['username']
    for node in nodes:
        (hostname, _, path) = get_host_port_path(node, default_path)
        if hostname != 'localhost':
            remote_path = "%s@%s:%s/%s-files" % (username, hostname, path, description)
        else:
            remote_path = "%s/%s-files" % (path, description)
        to_copy = ["libs", "conf"]
        args = ["rsync", "--del", "-rlDLvz"] + to_copy + [remote_path]
        if subprocess.call(args):
            raise Exception("Error copying distribution to %s" % (hostname,))


def main(argv):
    # Usage
    if len(argv) != 2:
        print >> sys.stderr, "Usage: %s <deployment.cfg>" % (argv[0])
        print >> sys.stderr, "       deployment.cfg: a configuration file modeled after deployment.cfg.sample"
        sys.exit(1)

    config = myriadeploy.read_config_file(argv[1])

    # Step 1: Copy over libs, "conf", myria
    copy_distribution(config)

if __name__ == "__main__":
    main(sys.argv)
#!/usr/bin/env python

import myriadeploy

import subprocess
import sys


def host_port_list(workers):
    return [str(worker[0]) + ':' + str(worker[1]) for worker in workers]


def get_host_port_path(node, default_path):
    if len(node) == 2:
        (hostname, port) = node
        if default_path is None:
            raise Exception("Path not specified for node %s" % str(node))
        else:
            path = default_path
    else:
        (hostname, port, path) = node
    return (hostname, port, path)


def copy_distribution(config):
    "Copy the distribution (jar and libs and conf) to compute nodes."
    nodes = config['nodes']
    description = config['description']
    default_path = config['path']
    username = config['username']
    for node in nodes:
        (hostname, _, path) = get_host_port_path(node, default_path)
        if hostname != 'localhost':
            remote_path = "%s@%s:%s/%s-files" % (username, hostname, path, description)
        else:
            remote_path = "%s/%s-files" % (path, description)
        to_copy = ["libs", "conf"]
        args = ["rsync", "--del", "-aLvz"] + to_copy + [remote_path]
        if subprocess.call(args):
            raise Exception("Error copying distribution to %s" % (hostname,))


def main(argv):
    # Usage
    if len(argv) != 2:
        print >> sys.stderr, "Usage: %s <deployment.cfg>" % (argv[0])
        print >> sys.stderr, "       deployment.cfg: a configuration file modeled after deployment.cfg.sample"
        sys.exit(1)

    config = myriadeploy.read_config_file(argv[1])

    # Step 1: Copy over libs, "conf", myria
    copy_distribution(config)

if __name__ == "__main__":
    main(sys.argv)
bsd-3-clause
Python
afbef65bd28f0058edf39579125e2ccb35a72aee
Update test_multivariate.py to Python 3.4
Chippers255/nb_twitter
nb_twitter/test/test_multivariate.py
nb_twitter/test/test_multivariate.py
# -*- coding: utf-8 -*-

# test_multivariate.py
# nb_twitter/nb_twitter/bayes
#
# Created by Thomas Nelson <[email protected]>
#            Preston Engstrom <[email protected]>
# Created..........................2015-06-29
# Modified.........................2015-06-30
#
# This script was developed for use as part of the nb_twitter package

from nb_twitter.bayes import multivariate

train_class = ['c', 'j']
train_docs = [['c', 'chinese beijing chinese'],
              ['c', 'chinese chinese shanghai'],
              ['c', 'chinese macao'],
              ['j', 'tokyo japan chinese']]
test_docs = 'chinese chinese chinese tokyo japan'

classifier = multivariate.Multivariate(train_class, train_docs)
classifier.train()
results = classifier.run(test_docs)

print("C\t\t=", classifier.C)
print("D\t\t=", classifier.D)
print("N\t\t=", classifier.N)
print("V\t\t=", classifier.V)
print("Nc\t\t=", classifier.Nc)
print("Prior\t=", classifier.prior)
print("Prob\t=", classifier.prob)
print
print(results)
# -*- coding: utf-8 -*-

# test_multivariate.py
# nb_twitter/nb_twitter/bayes
#
# Created by Thomas Nelson <[email protected]>
#            Preston Engstrom <[email protected]>
# Created..........................2015-06-29
# Modified.........................2015-06-29
#
# This script was developed for use as part of the nb_twitter package

from nb_twitter.bayes import multivariate

train_class = ['c', 'j']
train_docs = [['c', 'chinese beijing chinese'],
              ['c', 'chinese chinese shanghai'],
              ['c', 'chinese macao'],
              ['j', 'tokyo japan chinese']]
test_docs = 'chinese chinese chinese tokyo japan'

classifier = multivariate.Multivariate(train_class, train_docs)
classifier.train()
results = classifier.run(test_docs)

print "C\t\t=", classifier.C
print "D\t\t=", classifier.D
print "N\t\t=", classifier.N
print "V\t\t=", classifier.V
print "Nc\t\t=", classifier.Nc
print "Prior\t=", classifier.prior
print "Prob\t=", classifier.prob
print
print(results)
mit
Python
1b668fa59624bc1f73f5fceebecbbadfc0038156
support arrow DictionaryType
maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex
packages/vaex-arrow/vaex_arrow/dataset.py
packages/vaex-arrow/vaex_arrow/dataset.py
__author__ = 'maartenbreddels'
import logging

import pyarrow as pa
import pyarrow.parquet as pq

import vaex.dataset
import vaex.file.other
from .convert import column_from_arrow_array

logger = logging.getLogger("vaex_arrow")


class DatasetArrow(vaex.dataset.DatasetLocal):
    """Implements storage using arrow"""

    def __init__(self, filename=None, table=None, write=False):
        super(DatasetArrow, self).__init__(name=filename, path=filename, column_names=[])
        self._write = write
        if table is None:
            self._load()
        else:
            self._load_table(table)

    def _load(self):
        source = pa.memory_map(self.path)
        reader = pa.ipc.open_stream(source)
        table = pa.Table.from_batches([b for b in reader])
        self._load_table(table)

    def _load_table(self, table):
        self._length_unfiltered = self._length_original = table.num_rows
        self._index_end = self._length_original = table.num_rows
        for col in table.columns:
            name = col.name
            # TODO: keep the arrow columns, and support and test chunks
            arrow_array = col.data.chunks[0]
            if isinstance(arrow_array.type, pa.DictionaryType):
                column = column_from_arrow_array(arrow_array.indices)
                labels = column_from_arrow_array(arrow_array.dictionary).tolist()
                self._categories[name] = dict(labels=labels, N=len(labels))
            else:
                column = column_from_arrow_array(arrow_array)
            self.columns[name] = column
            self.column_names.append(name)
            self._save_assign_expression(name, vaex.expression.Expression(self, name))

    @classmethod
    def can_open(cls, path, *args, **kwargs):
        return path.rpartition('.')[2] == 'arrow'

    @classmethod
    def get_options(cls, path):
        return []

    @classmethod
    def option_to_args(cls, option):
        return []


class DatasetParquet(DatasetArrow):
    def _load(self):
        # might not be optimal, but it works, we can always see if we can
        # do mmapping later on
        table = pq.read_table(self.path)
        self._load_table(table)


vaex.file.other.dataset_type_map["arrow"] = DatasetArrow
vaex.file.other.dataset_type_map["parquet"] = DatasetParquet
__author__ = 'maartenbreddels'
import logging

import pyarrow as pa
import pyarrow.parquet as pq

import vaex.dataset
import vaex.file.other
from .convert import column_from_arrow_array

logger = logging.getLogger("vaex_arrow")


class DatasetArrow(vaex.dataset.DatasetLocal):
    """Implements storage using arrow"""

    def __init__(self, filename=None, table=None, write=False):
        super(DatasetArrow, self).__init__(name=filename, path=filename, column_names=[])
        self._write = write
        if table is None:
            self._load()
        else:
            self._load_table(table)

    def _load(self):
        source = pa.memory_map(self.path)
        reader = pa.ipc.open_stream(source)
        table = pa.Table.from_batches([b for b in reader])
        self._load_table(table)

    def _load_table(self, table):
        self._length_unfiltered = self._length_original = table.num_rows
        self._index_end = self._length_original = table.num_rows
        for col in table.columns:
            name = col.name
            # TODO: keep the arrow columns, and support and test chunks
            arrow_array = col.data.chunks[0]
            column = column_from_arrow_array(arrow_array)
            self.columns[name] = column
            self.column_names.append(name)
            self._save_assign_expression(name, vaex.expression.Expression(self, name))

    @classmethod
    def can_open(cls, path, *args, **kwargs):
        return path.rpartition('.')[2] == 'arrow'

    @classmethod
    def get_options(cls, path):
        return []

    @classmethod
    def option_to_args(cls, option):
        return []


class DatasetParquet(DatasetArrow):
    def _load(self):
        # might not be optimal, but it works, we can always see if we can
        # do mmapping later on
        table = pq.read_table(self.path)
        self._load_table(table)


vaex.file.other.dataset_type_map["arrow"] = DatasetArrow
vaex.file.other.dataset_type_map["parquet"] = DatasetParquet
mit
Python
52239a9b6cd017127d52c29ac0e2a0d3818e7d9e
Add new lab_members fieldset_website to fieldsets for cms_lab_members
mfcovington/djangocms-lab-members,mfcovington/djangocms-lab-members
cms_lab_members/admin.py
cms_lab_members/admin.py
from django.contrib import admin

from cms.admin.placeholderadmin import PlaceholderAdminMixin

from lab_members.models import Scientist
from lab_members.admin import ScientistAdmin


class CMSScientistAdmin(PlaceholderAdminMixin, ScientistAdmin):

    fieldsets = [
        ScientistAdmin.fieldset_basic,
        ScientistAdmin.fieldset_website,
        ScientistAdmin.fieldset_advanced,
    ]


admin.site.unregister(Scientist)
admin.site.register(Scientist, CMSScientistAdmin)
from django.contrib import admin

from cms.admin.placeholderadmin import PlaceholderAdminMixin

from lab_members.models import Scientist
from lab_members.admin import ScientistAdmin


class CMSScientistAdmin(PlaceholderAdminMixin, ScientistAdmin):

    fieldsets = [
        ScientistAdmin.fieldset_basic,
        ScientistAdmin.fieldset_advanced,
    ]


admin.site.unregister(Scientist)
admin.site.register(Scientist, CMSScientistAdmin)
bsd-3-clause
Python
dda3ebfcb9fff7f7304ee72c087dca9f8556fe6c
Update yadisk.py
haitaka/DroiTaka
cogs/utils/api/yadisk.py
cogs/utils/api/yadisk.py
import json
import requests

DEVICE_ID = '141f72b7-fd02-11e5-981a-00155d860f42'
DEVICE_NAME = 'DroiTaka'
CLIENT_ID = 'b12710fc26ee46ba82e34b97f08f2305'
CLIENT_SECRET = '4ff2284115644e04acc77c54526364d2'

class YaDisk(object):

    def __init__(self, token):
        self.session = requests.session()
        self.session.headers.update({'Authentication': 'OAuth ' + str(token),})

    def get_key_url():
        format_url = "https://oauth.yandex.ru/authorize?response_type=code&client_id={}&device_id={}&device_name={}&force_confirm=yes"
        return format_url.format(CLIENT_ID, DEVICE_ID, DEVICE_NAME)

    def get_token(key):
        res = requests.post('https://oauth.yandex.ru/token', data = {
            'grant_type': 'authorization_code',
            'code': key,
            'client_id': CLIENT_ID,
            'client_secret': CLIENT_SECRET,
            'device_id': DEVICE_ID,
            'device_name': DEVICE_NAME,
        })
        print(res.text)
        return res.json()['access_token']

    def _get(self, url, *args, **kwargs):
        return self.session.get(url, *args, **kwargs)

    def _post(self, url, data, *args, **kwargs):
        return self.session.post(url, {'data': json.dumps(data), }, *args, **kwargs)

    def list_files(self, dir_path):
        file_list = []
        res = self._get("https://cloud-api.yandex.net:443/v1/disk/resources",
                        params={"path": dir_path,})
        for file in res.json()['_embedded']['items']:
            if file['type'] == 'file':
                file_list.append(file['name'])
        return file_list

    def direct_link(self, file_path):
        response = self.session._get("https://cloud-api.yandex.net:443/v1/disk/resources/download",
                                     params={"path": file_path,})
        return response.json()['href']
import json
import requests

DEVICE_ID = '141f72b7-fd02-11e5-981a-00155d860f42'
DEVICE_NAME = 'DroiTaka'
CLIENT_ID = 'b12710fc26ee46ba82e34b97f08f2305'
CLIENT_SECRET = '4ff2284115644e04acc77c54526364d2'

class YaDisk(object):

    def __init__(self, token):
        self.session = requests.session()
        self.session.headers.update({'Authentication': 'OAuth ' + str(token),})

    def get_key_url():
        format_url = "https://oauth.yandex.ru/authorize?response_type=code&client_id={}&device_id={}&device_name={}&force_confirm=yes"
        return format_url.format(CLIENT_ID, DEVICE_ID, DEVICE_NAME)

    def get_token(key):
        res = requests.post('http://oauth.yandex.ru/token', data = {
            'grant_type': 'authorization_code',
            'code': key,
            'client_id': CLIENT_ID,
            'client_secret': CLIENT_SECRET,
            'device_id': DEVICE_ID,
            'device_name': DEVICE_NAME,
        })
        print(res.text)
        return res.json()['access_token']

    def _get(self, url, *args, **kwargs):
        return self.session.get(url, *args, **kwargs)

    def _post(self, url, data, *args, **kwargs):
        return self.session.post(url, {'data': json.dumps(data), }, *args, **kwargs)

    def list_files(self, dir_path):
        file_list = []
        res = self._get("https://cloud-api.yandex.net:443/v1/disk/resources",
                        params={"path": dir_path,})
        for file in res.json()['_embedded']['items']:
            if file['type'] == 'file':
                file_list.append(file['name'])
        return file_list

    def direct_link(self, file_path):
        response = self.session._get("https://cloud-api.yandex.net:443/v1/disk/resources/download",
                                     params={"path": file_path,})
        return response.json()['href']
mit
Python
14043a783e2ebd6c4a27a38f08ca75e6e31dd5d8
Add show admin panel
Cinemair/cinemair-server,Cinemair/cinemair-server
cinemair/shows/admin.py
cinemair/shows/admin.py
from django.contrib import admin

from . import models


class ShowsInline(admin.TabularInline):
    model = models.Show
    extra = 0


@admin.register(models.Show)
class Show(admin.ModelAdmin):
    fieldsets = (
        (None, {"fields": ("cinema", "movie", "datetime")}),
    )

    list_display = ("id", "cinema", "movie", "datetime")
    #list_editable = (,)
    list_filter = ("cinema",)
    search_fields = ("id", "cinema__name", "movie__title", "datetime")
    date_hierarchy = "datetime"
    ordering = ("cinema", "datetime")
from django.contrib import admin

from . import models


class ShowsInline(admin.TabularInline):
    model = models.Show
    extra = 0
mit
Python
9e1b3893a676f0fff7d601245fd06ec5df7fb61f
bump version
kszlim/osu-replay-parser
circleparse/__init__.py
circleparse/__init__.py
from circleparse.replay import parse_replay_file, parse_replay

__version__ = "6.1.0"
from circleparse.replay import parse_replay_file, parse_replay

__version__ = "6.0.0"
mit
Python
3026d78dc6e2a0f6f391819370f2369df94e77eb
Move Data Portal / Other to bottom of contact select
NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm
ckanext/nhm/settings.py
ckanext/nhm/settings.py
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-nhm
# Created by the Natural History Museum in London, UK

from collections import OrderedDict

# the order here matters as the default option should always be first in the dict so that it is
# automatically selected in combo boxes that use this list as a source for options
COLLECTION_CONTACTS = OrderedDict([
    ('Algae, Fungi & Plants', '[email protected]'),
    ('Economic & Environmental Earth Sciences', '[email protected]'),
    ('Fossil Invertebrates & Plants', '[email protected]'),
    ('Fossil Vertebrates & Anthropology', '[email protected]'),
    ('Insects', '[email protected]'),
    ('Invertebrates', '[email protected]'),
    ('Library & Archives', '[email protected]'),
    ('Mineral & Planetary Sciences', '[email protected]'),
    ('Vertebrates', '[email protected]'),
    ('Data Portal / Other', '[email protected]'),
])
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-nhm
# Created by the Natural History Museum in London, UK

from collections import OrderedDict

# the order here matters as the default option should always be first in the dict so that it is
# automatically selected in combo boxes that use this list as a source for options
COLLECTION_CONTACTS = OrderedDict([
    ('Data Portal / Other', '[email protected]'),
    ('Algae, Fungi & Plants', '[email protected]'),
    ('Economic & Environmental Earth Sciences', '[email protected]'),
    ('Fossil Invertebrates & Plants', '[email protected]'),
    ('Fossil Vertebrates & Anthropology', '[email protected]'),
    ('Insects', '[email protected]'),
    ('Invertebrates', '[email protected]'),
    ('Library & Archives', '[email protected]'),
    ('Mineral & Planetary Sciences', '[email protected]'),
    ('Vertebrates', '[email protected]'),
])
mit
Python
2105143c63292ec225258b3ca129156d858cf972
Use OrderParameterDistribution objects in wetting.
adamrall/coex
coex/wetting.py
coex/wetting.py
"""Find the wetting properties of a direct or expanded ensemble grand canonical simulation. """ import numpy as np def get_cos_theta(s, d): """Calculate the cosine of the contact angle. Args: s: A float (or numpy array): the spreading coefficient. d: A float (or numpy array): the drying coefficient. Returns: The cosine of the contact angle as a float or numpy array. """ return -(s - d) / (s + d) def get_drying_coefficient(distribution): """Calculate the drying coefficient. Args: distribution: An OrderParameterDistribution from a direct (GC) drying simulation. Returns: The dimensionless drying coefficient (beta*d*A). See also: get_spreading_coefficient() """ potential = -distribution.log_probabilities valley = np.amin(potential) split = int(0.5 * len(potential)) plateau = np.mean(potential[:split]) return valley - plateau def get_expanded_ensemble_coefficients(valley, plateau, index, reference): """Calculate the change in spreading/drying coefficient for a pair of simulations. Args: valley: An OrderParameterDistribution from the valley region. plateau: An OrderParameterDistribution from the plateau region. index: The reference subensemble number. reference: The reference spreading/drying coefficient. Returns: A numpy array with the spreading/drying coefficient of each subensemble. """ return reference - (valley - valley[index]) + (plateau - plateau[index]) def get_spreading_coefficient(distribution): """Calculate the spreading coefficient. Args: distribution: An OrderParameterDistribution from a direct (GC) spreading simulation. Returns: The dimensionless spreading coefficient (beta*s*A). See Also: get_drying_coefficient() """ potential = -distribution.log_probabilities valley = np.amin(potential) split = int(0.5 * len(potential)) plateau = np.mean(potential[split:]) return valley - plateau def get_tension(s, d): """Calculate the interfacial tension. Args: s: A float (or numpy array): the spreading coefficient. d: A float (or numpy array): the drying coefficient. Returns: The interfacial tension in the appropriate units. """ return -0.5 * (s + d)
"""Find the wetting properties of a direct or expanded ensemble grand canonical simulation. """ import numpy as np def get_cos_theta(s, d): """Calculate the cosine of the contact angle. Args: s: A float (or numpy array): the spreading coefficient. d: A float (or numpy array): the drying coefficient. Returns: The cosine of the contact angle as a float or numpy array. """ return -(s - d) / (s + d) def get_drying_coefficient(lnpi): """Calculate the drying coefficient. Args: lnpi: The logarithm of the probability distribution. Returns: The dimensionless drying coefficient (beta*d*A). See also: get_spreading_coefficient() """ potential = -lnpi valley = np.amin(potential) split = int(0.5 * len(potential)) plateau = np.mean(potential[:split]) return valley - plateau def get_expanded_ensemble_coefficients(valley, plateau, index, reference): """Calculate the change in spreading/drying coefficient for a pair of simulations. Args: valley: The logarithm of the probability distribution of the valley region. plateau: The logarithm of the probability distribution of the plateau region. index: The reference subensemble number. reference: The reference spreading/drying coefficient. Returns: A numpy array with the spreading/drying coefficient of each subensemble. """ return reference - (valley - valley[index]) + (plateau - plateau[index]) def get_spreading_coefficient(lnpi): """Calculate the spreading coefficient. Args: potential: The logarithm of the probability distribution. Returns: The dimensionless spreading coefficient (beta*s*A). See Also: get_drying_coefficient() """ potential = -lnpi valley = np.amin(potential) split = int(0.5 * len(potential)) plateau = np.mean(potential[split:]) return valley - plateau def get_tension(s, d): """Calculate the interfacial tension. Args: s: A float (or numpy array): the spreading coefficient. d: A float (or numpy array): the drying coefficient. Returns: The interfacial tension in the appropriate units. """ return -0.5 * (s + d)
bsd-2-clause
Python
a962e631b0fc997a6a5569244463c3f96da8b671
add extra fwhm2sigma test
nipy/nipy-labs,arokem/nipy,nipy/nipy-labs,arokem/nipy,alexis-roche/nireg,nipy/nireg,arokem/nipy,arokem/nipy,alexis-roche/nipy,bthirion/nipy,alexis-roche/nipy,alexis-roche/nipy,alexis-roche/niseg,alexis-roche/register,bthirion/nipy,alexis-roche/nipy,nipy/nireg,alexis-roche/nireg,bthirion/nipy,bthirion/nipy,alexis-roche/niseg,alexis-roche/register,alexis-roche/register
lib/neuroimaging/fmri/tests/test_utils.py
lib/neuroimaging/fmri/tests/test_utils.py
import unittest

import numpy as N
import scipy

from neuroimaging.fmri.utils import CutPoly, WaveFunction, sigma2fwhm, fwhm2sigma

class utilTest(unittest.TestCase):

    def test_CutPoly(self):
        f = CutPoly(2.0)
        t = N.arange(0, 10.0, 0.1)
        y = f(t)
        scipy.testing.assert_almost_equal(y, [x*x for x in t])

        f = CutPoly(2.0, (5, 7))
        y = f(t)
        scipy.testing.assert_almost_equal(y, [x*x*(x >= 5 and x < 7) for x in t])

        f = CutPoly(2.0, (None, 7))
        y = f(t)
        scipy.testing.assert_almost_equal(y, [x*x*(x < 7) for x in t])

        f = CutPoly(2.0, (5, None))
        y = f(t)
        scipy.testing.assert_almost_equal(y, [x*x*(x >= 5) for x in t])

    def test_WaveFunction(self):
        start = 5.0
        duration = 2.0
        height = 3.0
        f = WaveFunction(5, 2, 3)
        t = N.arange(0, 10.0, 0.1)
        y = f(t)
        scipy.testing.assert_almost_equal(y, [height*(x >= start and x < start + duration) for x in t])

    def test_sigma_fwhm(self):
        """
        ensure that fwhm2sigma and sigma2fwhm are inverses of each other
        """
        fwhm = N.arange(1.0, 5.0, 0.1)
        sigma = N.arange(1.0, 5.0, 0.1)
        scipy.testing.assert_almost_equal(sigma2fwhm(fwhm2sigma(fwhm)), fwhm)
        scipy.testing.assert_almost_equal(fwhm2sigma(sigma2fwhm(sigma)), sigma)

if __name__ == '__main__':
    unittest.main()
import unittest

import numpy as N
import scipy

from neuroimaging.fmri.utils import CutPoly, WaveFunction, sigma2fwhm, fwhm2sigma

class utilTest(unittest.TestCase):

    def test_CutPoly(self):
        f = CutPoly(2.0)
        t = N.arange(0, 10.0, 0.1)
        y = f(t)
        scipy.testing.assert_almost_equal(y, [x*x for x in t])

        f = CutPoly(2.0, (5, 7))
        y = f(t)
        scipy.testing.assert_almost_equal(y, [x*x*(x >= 5 and x < 7) for x in t])

        f = CutPoly(2.0, (None, 7))
        y = f(t)
        scipy.testing.assert_almost_equal(y, [x*x*(x < 7) for x in t])

        f = CutPoly(2.0, (5, None))
        y = f(t)
        scipy.testing.assert_almost_equal(y, [x*x*(x >= 5) for x in t])

    def test_WaveFunction(self):
        start = 5.0
        duration = 2.0
        height = 3.0
        f = WaveFunction(5, 2, 3)
        t = N.arange(0, 10.0, 0.1)
        y = f(t)
        scipy.testing.assert_almost_equal(y, [height*(x >= start and x < start + duration) for x in t])

    def test_sigma_fwhm(self):
        """
        ensure that fwhm2sigma and sigma2fwhm are inverses of each other
        """
        fwhm = N.arange(1.0, 5.0, 0.1)
        scipy.testing.assert_almost_equal(sigma2fwhm(fwhm2sigma(fwhm)), fwhm)

if __name__ == '__main__':
    unittest.main()
bsd-3-clause
Python
108763ace5f250922387aacffab4a668155cfe67
deploy script changes
rootio/rootio_web,rootio/rootio_web,rootio/rootio_web,rootio/rootio_web
deploy/fabfile.py
deploy/fabfile.py
# -*- coding: utf-8 -*-
# http://docs.fabfile.org/en/1.5/tutorial.html

from __future__ import with_statement
from fabric.api import *
from contextlib import contextmanager as _contextmanager

@_contextmanager
def virtualenv():
    with prefix(env.virtualenv_activate):
        yield

env.hosts = ['176.58.125.166']
env.user = 'rootio'
env.project_root = '/home/rootio/public_python/rootio_web'
env.virtualenv_activate = 'source venv/bin/activate'
env.forward_agent = True

def git_update():
    stash_str = run("git stash")
    run("git pull origin master")
    if stash_str.strip() != 'No local changes to save':
        run("git stash pop")

def restart_apache():
    sudo("/etc/init.d/apache2 graceful")

def restart_cache():
    sudo("/etc/init.d/memcached restart", pty=False)

def touch_wsgi():
    # Touching the deploy.wsgi file will cause apache's mod_wsgi to
    # reload all python modules having to restart apache.
    with cd(env.project_root):
        run("touch deploy/rootio_web.wsgi")

def update(full=False):
    with cd(env.project_root):
        git_update()
        with virtualenv():
            run("pip install -r requirements.txt")
            run("python manage.py migrate up")
        #todo: static files
        touch_wsgi()
        #restart_cache()
        #restart_apache()

def deploy():
    update()

def initdb():
    local("python manage.py initdb")

def reset():
    """
    Reset local debug env.
    """
    local("rm -rf /tmp/instance")
    local("mkdir /tmp/instance")

def runserver():
    """
    Run local server, for debugging only.

    Need to move up one directory, from deploy to see manage.py
    """
    with lcd('..'):
        reset()
        initdb()
        with virtualenv():
            local("python manage.py run")
# -*- coding: utf-8 -*-
# http://docs.fabfile.org/en/1.5/tutorial.html

from __future__ import with_statement
from fabric.api import *
from contextlib import contextmanager as _contextmanager

@_contextmanager
def virtualenv():
    with prefix(env.virtualenv_activate):
        yield

env.hosts = ['176.58.125.166']
env.user = 'rootio'
env.project_root = '/home/rootio/public_python/rootio_web'
env.virtualenv_activate = 'source .venv/bin/activate'
env.forward_agent = True

def git_update():
    stash_str = run("git stash")
    run("git pull origin master")
    if stash_str.strip() != 'No local changes to save':
        run("git stash pop")

def restart_apache():
    sudo("/etc/init.d/apache2 graceful")

def restart_cache():
    sudo("/etc/init.d/memcached restart", pty=False)

def touch_wsgi():
    # Touching the deploy.wsgi file will cause apache's mod_wsgi to
    # reload all python modules having to restart apache.
    with cd(env.project_root):
        run("touch deploy/wsgi_handler.py")

def update(full=False):
    with cd(env.project_root):
        git_update()
        with virtualenv():
            run("pip install -r requirements.txt")
        #todo: alembic update
        #todo: static files
        touch_wsgi()
        restart_cache()
        #restart_apache()

def deploy():
    update()

def initdb():
    local("python manage.py initdb")

def reset():
    """
    Reset local debug env.
    """
    local("rm -rf /tmp/instance")
    local("mkdir /tmp/instance")

def runserver():
    """
    Run local server, for debugging only.

    Need to move up one directory, from deploy to see manage.py
    """
    with lcd('..'):
        reset()
        initdb()
        with virtualenv():
            local("python manage.py run")
agpl-3.0
Python
34fa7433ea6f04089a420e0392605147669801d1
Revert "added more crappy codes"
kp89/do-git
dummy.py
dummy.py
import os

def foo():
    """
    This is crappy function.
    should be removed using git checkout
    """
    return None

def main():
    pass

if __name__ == '__main__':
    main()
import os

def foo():
    """
    This is crappy function.
    should be removed using git checkout
    """
    if True == True:
        return True
    else:
        return False

def main():
    pass

if __name__ == '__main__':
    main()
apache-2.0
Python
178bde1703bbb044f8af8c70a57517af4490a3c0
Fix duplicate cookie issue and header parsing
sirex/databot,sirex/databot
databot/handlers/download.py
databot/handlers/download.py
import time
import requests
import bs4
import cgi

from databot.recursive import call


class DownloadErrror(Exception):
    pass


def dump_response(response):
    return {
        'headers': dict(response.headers),
        'cookies': response.cookies.get_dict(),
        'status_code': response.status_code,
        'encoding': response.encoding,
        'content': response.content,
    }


def download(url, delay=None, update=None, **kwargs):
    update = update or {}

    def func(row):
        if delay is not None:
            time.sleep(delay)
        kw = call(kwargs, row)
        _url = url(row)
        response = requests.get(_url, **kw)
        if response.status_code == 200:
            value = dump_response(response)
            for k, fn in update.items():
                value[k] = fn(row)
            yield _url, value
        else:
            raise DownloadErrror('Error while downloading %s, returned status code was %s, response content:\n\n%s' % (
                _url, response.status_code, response.content,
            ))

    return func


def get_content(data):
    content_type_header = data.get('headers', {}).get('Content-Type', '')
    content_type, params = cgi.parse_header(content_type_header)
    if content_type == 'text/html':
        soup = bs4.BeautifulSoup(data['content'], 'lxml')
        return data['content'].decode(soup.original_encoding)
    else:
        return data['content']
import time
import requests
import bs4

from databot.recursive import call


class DownloadErrror(Exception):
    pass


def dump_response(response):
    return {
        'headers': dict(response.headers),
        'cookies': dict(response.cookies),
        'status_code': response.status_code,
        'encoding': response.encoding,
        'content': response.content,
    }


def download(url, delay=None, update=None, **kwargs):
    update = update or {}

    def func(row):
        if delay is not None:
            time.sleep(delay)
        kw = call(kwargs, row)
        _url = url(row)
        response = requests.get(_url, **kw)
        if response.status_code == 200:
            value = dump_response(response)
            for k, fn in update.items():
                value[k] = fn(row)
            yield _url, value
        else:
            raise DownloadErrror('Error while downloading %s, returned status code was %s, response content:\n\n%s' % (
                _url, response.status_code, response.content,
            ))

    return func


def get_content(data):
    content_type = data.get('headers', {}).get('Content-Type')
    if content_type == 'text/html':
        soup = bs4.BeautifulSoup(data['content'], 'lxml')
        return data['content'].decode(soup.original_encoding)
    else:
        return data['content']
agpl-3.0
Python
32446090486db452342ec76606d28a05f6736e81
Update tracking.py
joshwalawender/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS,panoptes/POCS,panoptes/POCS,joshwalawender/POCS,joshwalawender/POCS,panoptes/POCS,AstroHuntsman/POCS,panoptes/POCS,AstroHuntsman/POCS
panoptes/state/states/default/tracking.py
panoptes/state/states/default/tracking.py
import time


def on_enter(event_data):
    """ The unit is tracking the target. Proceed to observations. """
    pan = event_data.model
    pan.say("Checking our tracking")

    next_state = 'parking'

    try:
        pan.say("I'm adjusting the tracking rate")
        #pan.observatory.update_tracking()
        next_state = 'observe'
        pan.say("Done with tracking adjustment, going to observe")

        # Trying to prevent stall
        time.sleep(2)
    except Exception as e:
        pan.logger.warning("Tracking problem: {}".format(e))
        pan.say("Yikes! A problem while updating our tracking.")

    pan.goto(next_state)
import time def on_enter(event_data): """ The unit is tracking the target. Proceed to observations. """ pan = event_data.model pan.say("Checking our tracking") next_state = 'parking' try: pan.say("I'm adjusting the tracking rate") pan.observatory.update_tracking() next_state = 'observe' pan.say("Done with tracking adjustment, going to observe") # Trying to prevent stall time.sleep(2) except Exception as e: pan.logger.warning("Tracking problem: {}".format(e)) pan.say("Yikes! A problem while updating our tracking.") pan.goto(next_state)
mit
Python
cbae828ee9eb91a2373a415f1a1521fb5dee3100
Add method to generate list of abscissa dicts
jrsmith3/datac,jrsmith3/datac
datac/main.py
datac/main.py
# -*- coding: utf-8 -*- import copy def init_abscissa(params, abscissae, abscissa_name): """ List of dicts to initialize object w/ calc method This method generates a list of dicts; each dict is sufficient to initialize an object featuring a calculator method of interest. This list can be thought of as the abscissae of a set of data. Each dict will contain data which remains constant for each calculation, but it nonetheless required to initialize the object. Each dict will also contain a datum which is the abscissa for the calculation and is also required to initialize the object. :param dict params: Static parameters required to initialize the object featuring the ordinate calculator method. :param list abscissae: Independent variable also required to initialize object featuring the ordinate calculator method. :param str abscissa_name: Dictionary key for the abscissa name. """ dict_list = [] for abscissa in abscissae: param_dict = copy.copy(params) param_dict[abscissa_name] = abscissa param_dict["abscissa_name"] = abscissa_name dict_list.append(param_dict) return dict_list
# -*- coding: utf-8 -*- import copy
mit
Python
cd5053ac36e13b57e95eeb1241032c97b48a4a85
Drop try/catch that causes uncaught errors in the Observer to be silently ignored
wathsalav/xos,wathsalav/xos,wathsalav/xos,wathsalav/xos
planetstack/openstack_observer/backend.py
planetstack/openstack_observer/backend.py
import threading import time from observer.event_loop import PlanetStackObserver from observer.event_manager import EventListener from util.logger import Logger, logging logger = Logger(level=logging.INFO) class Backend: def run(self): # start the openstack observer observer = PlanetStackObserver() observer_thread = threading.Thread(target=observer.run) observer_thread.start() # start event listene event_manager = EventListener(wake_up=observer.wake_up) event_manager_thread = threading.Thread(target=event_manager.run) event_manager_thread.start()
import threading import time from observer.event_loop import PlanetStackObserver from observer.event_manager import EventListener from util.logger import Logger, logging logger = Logger(level=logging.INFO) class Backend: def run(self): try: # start the openstack observer observer = PlanetStackObserver() observer_thread = threading.Thread(target=observer.run) observer_thread.start() # start event listene event_manager = EventListener(wake_up=observer.wake_up) event_manager_thread = threading.Thread(target=event_manager.run) event_manager_thread.start() except: logger.log_exc("Exception in child thread")
apache-2.0
Python
b725ef74f8e6f0887737e13783062b987fb3dd77
bump to 7.0.3 final
eReuse/device-inventory,eReuse/workbench,eReuse/device-inventory,eReuse/workbench
device_inventory/__init__.py
device_inventory/__init__.py
VERSION = (7, 0, 3, 'final', 0) def get_version(): "Returns a PEP 386-compliant version number from VERSION." assert len(VERSION) == 5 assert VERSION[3] in ('alpha', 'beta', 'rc', 'final') # Now build the two parts of the version number: # main = X.Y[.Z] # sub = .devN - for pre-alpha releases # | {a|b|c}N - for alpha, beta and rc releases parts = 2 if VERSION[2] == 0 else 3 main = '.'.join(str(x) for x in VERSION[:parts]) sub = '' if VERSION[3] != 'final': mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'} sub = mapping[VERSION[3]] + str(VERSION[4]) return str(main + sub)
VERSION = (7, 0, 3, 'beta', 6) def get_version(): "Returns a PEP 386-compliant version number from VERSION." assert len(VERSION) == 5 assert VERSION[3] in ('alpha', 'beta', 'rc', 'final') # Now build the two parts of the version number: # main = X.Y[.Z] # sub = .devN - for pre-alpha releases # | {a|b|c}N - for alpha, beta and rc releases parts = 2 if VERSION[2] == 0 else 3 main = '.'.join(str(x) for x in VERSION[:parts]) sub = '' if VERSION[3] != 'final': mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'} sub = mapping[VERSION[3]] + str(VERSION[4]) return str(main + sub)
agpl-3.0
Python
584c2f69df66bd08ace0652da7337e8e71a72099
Use bool for zero_mask. Requires pytorch 1.7+
mrcslws/nupic.research,numenta/nupic.research,subutai/nupic.research,numenta/nupic.research,mrcslws/nupic.research,subutai/nupic.research
projects/transformers/models/sparse_embedding.py
projects/transformers/models/sparse_embedding.py
# ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2021, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- import numpy as np import torch from nupic.torch.modules.sparse_weights import SparseWeightsBase __all__ = ["SparseEmbeddings"] class SparseEmbeddings(SparseWeightsBase): """ This wraps a torch.nn.Embedding module to sparsify the weights where the sparsity is applied per embedding. The embedding of an arbitrary index j will have the desired sparsity specified through the init. Note: A torch.nn.Embedding is already sparse in one sense. Specifically, it's input is expected to be sparse (i.e. an integer specifying the index of the embedding). In contrast, this introduces sparsity in the weights of the embedding layer, which effectively yields sparse output embeddings. :param module: A torch.nn.Embedding module :param sparsity: Sparsity to apply to the weights; each output embedding will have this level of sparsity. """ def __init__(self, module, sparsity=None): assert len(module.weight.shape) == 2, "Should resemble a nn.Embedding" super(SparseEmbeddings, self).__init__( module, sparsity=sparsity ) # For each unit, decide which weights are going to be zero num_embeddings = self.module.num_embeddings embedding_dim = self.module.embedding_dim num_nz = int(round((1 - self.sparsity) * embedding_dim)) zero_mask = torch.ones(num_embeddings, embedding_dim, dtype=torch.bool, device=module.weight.device) for embedding_j in range(num_embeddings): on_indices = np.random.choice(embedding_dim, num_nz, replace=False) zero_mask[embedding_j, on_indices] = False self.register_buffer("zero_mask", zero_mask) self.rezero_weights() def rezero_weights(self): self.module.weight.data[self.zero_mask] = 0
# ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2021, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- import numpy as np import torch from nupic.torch.modules.sparse_weights import SparseWeightsBase __all__ = ["SparseEmbeddings"] class SparseEmbeddings(SparseWeightsBase): """ This wraps a torch.nn.Embedding module to sparsify the weights where the sparsity is applied per embedding. The embedding of an arbitrary index j will have the desired sparsity specified through the init. Note: A torch.nn.Embedding is already sparse in one sense. Specifically, it's input is expected to be sparse (i.e. an integer specifying the index of the embedding). In contrast, this introduces sparsity in the weights of the embedding layer, which effectively yields sparse output embeddings. :param module: A torch.nn.Embedding module :param sparsity: Sparsity to apply to the weights; each output embedding will have this level of sparsity. """ def __init__(self, module, sparsity=None): assert len(module.weight.shape) == 2, "Should resemble a nn.Embedding" super(SparseEmbeddings, self).__init__( module, sparsity=sparsity ) # For each unit, decide which weights are going to be zero num_embeddings = self.module.num_embeddings embedding_dim = self.module.embedding_dim num_nz = int(round((1 - self.sparsity) * embedding_dim)) zero_mask = torch.ones(num_embeddings, embedding_dim, dtype=torch.bool) for embedding_j in range(num_embeddings): on_indices = np.random.choice(embedding_dim, num_nz, replace=False) zero_mask[embedding_j, on_indices] = False # Use float16 because pytorch distributed nccl doesn't support bools self.register_buffer("zero_mask", zero_mask.half()) self.rezero_weights() def rezero_weights(self): self.module.weight.data[self.zero_mask.bool()] = 0
agpl-3.0
Python
37fa40a9b5260f8090adaa8c15d3767c0867574f
Create a list of messages that contain system time.
PointOneNav/fusion-engine-client,PointOneNav/fusion-engine-client,PointOneNav/fusion-engine-client
python/fusion_engine_client/messages/__init__.py
python/fusion_engine_client/messages/__init__.py
from .core import * from . import ros message_type_to_class = { # Navigation solution messages. PoseMessage.MESSAGE_TYPE: PoseMessage, PoseAuxMessage.MESSAGE_TYPE: PoseAuxMessage, GNSSInfoMessage.MESSAGE_TYPE: GNSSInfoMessage, GNSSSatelliteMessage.MESSAGE_TYPE: GNSSSatelliteMessage, # Sensor measurement messages. IMUMeasurement.MESSAGE_TYPE: IMUMeasurement, # ROS messages. ros.PoseMessage.MESSAGE_TYPE: ros.PoseMessage, ros.GPSFixMessage.MESSAGE_TYPE: ros.GPSFixMessage, ros.IMUMessage.MESSAGE_TYPE: ros.IMUMessage, # Command and control messages. CommandResponseMessage.MESSAGE_TYPE: CommandResponseMessage, MessageRequest.MESSAGE_TYPE: MessageRequest, ResetRequest.MESSAGE_TYPE: ResetRequest, VersionInfoMessage.MESSAGE_TYPE: VersionInfoMessage, EventNotificationMessage.MESSAGE_TYPE: EventNotificationMessage, } messages_with_system_time = [t for t, c in message_type_to_class.items() if hasattr(c(), 'system_time_ns')]
from .core import * from . import ros message_type_to_class = { # Navigation solution messages. PoseMessage.MESSAGE_TYPE: PoseMessage, PoseAuxMessage.MESSAGE_TYPE: PoseAuxMessage, GNSSInfoMessage.MESSAGE_TYPE: GNSSInfoMessage, GNSSSatelliteMessage.MESSAGE_TYPE: GNSSSatelliteMessage, # Sensor measurement messages. IMUMeasurement.MESSAGE_TYPE: IMUMeasurement, # ROS messages. ros.PoseMessage.MESSAGE_TYPE: ros.PoseMessage, ros.GPSFixMessage.MESSAGE_TYPE: ros.GPSFixMessage, ros.IMUMessage.MESSAGE_TYPE: ros.IMUMessage, # Command and control messages. CommandResponseMessage.MESSAGE_TYPE: CommandResponseMessage, MessageRequest.MESSAGE_TYPE: MessageRequest, ResetRequest.MESSAGE_TYPE: ResetRequest, VersionInfoMessage.MESSAGE_TYPE: VersionInfoMessage, EventNotificationMessage.MESSAGE_TYPE: EventNotificationMessage, }
mit
Python
d99dfa94a42d70900e31c36023602bea3e5efdfb
Bump forgotten version to 3.2
nMustaki/debinterface,nMustaki/debinterface
debinterface/__init__.py
debinterface/__init__.py
# -*- coding: utf-8 -*- """Imports for easier use""" from .adapter import NetworkAdapter from .adapterValidation import NetworkAdapterValidation from .dnsmasqRange import (DnsmasqRange, DEFAULT_CONFIG as DNSMASQ_DEFAULT_CONFIG) from .hostapd import Hostapd from .interfaces import Interfaces from .interfacesReader import InterfacesReader from .interfacesWriter import InterfacesWriter __version__ = '3.2.0' __all__ = [ 'NetworkAdapter', 'NetworkAdapterValidation', 'DnsmasqRange', 'DNSMASQ_DEFAULT_CONFIG', 'Hostapd', 'Interfaces', 'InterfacesReader', 'InterfacesWriter' ]
# -*- coding: utf-8 -*- """Imports for easier use""" from .adapter import NetworkAdapter from .adapterValidation import NetworkAdapterValidation from .dnsmasqRange import (DnsmasqRange, DEFAULT_CONFIG as DNSMASQ_DEFAULT_CONFIG) from .hostapd import Hostapd from .interfaces import Interfaces from .interfacesReader import InterfacesReader from .interfacesWriter import InterfacesWriter __version__ = '3.1.0' __all__ = [ 'NetworkAdapter', 'NetworkAdapterValidation', 'DnsmasqRange', 'DNSMASQ_DEFAULT_CONFIG', 'Hostapd', 'Interfaces', 'InterfacesReader', 'InterfacesWriter' ]
bsd-3-clause
Python
e9e6d5a6c42ff1522010f003fbed2cd324eab48e
Update cluster config
saketkc/rna-seq-snakemake,saketkc/rna-seq-snakemake,saketkc/rna-seq-snakemake
configs/config_cluster.py
configs/config_cluster.py
CDNA = '/home/cmb-panasas2/skchoudh/genomes/hg19/kallisto/hg19' GENOMES_DIR='/home/cmb-panasas2/skchoudh/genomes' OUT_DIR = '/home/cmb-panasas2/skchoudh/HuR_results/human/rna_seq_star_hg38_annotated' SRC_DIR = '/home/cmb-panasas2/skchoudh/github_projects/clip_seq_pipeline/scripts' RAWDATA_DIR ='/home/cmb-06/as/skchoudh/data/HuR_Mouse_Human_liver/rna-seq/Penalva_L_08182016' SAMPLES=['HepG2_CTRL1_S31_L004', 'HepG2_CTRL_2_S33_L004', 'HepG2_CTRL_7_S35_L004', 'HepG2_HuR_KD_1_S32_L004', 'HepG2_HuR_KD_2_S34_L004', 'HepG2_HuR_KD_7_S36_L004'] GENOME_BUILD = 'hg38' GENOME_FASTA = GENOMES_DIR + '/' + GENOME_BUILD + '/fasta/'+ GENOME_BUILD+ '.fa' STAR_INDEX = GENOMES_DIR + '/' + GENOME_BUILD + '/star_annotated' GTF = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.v25.annotation.gtf' GENE_NAMES = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + GENOME_BUILD+'_gene_names_stripped.tsv' GENE_LENGTHS = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.v25.coding_lengths.tsv' #+ GENOME_BUILD+'_gene_lengths.tsv' GENE_NAME_MAP = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + GENOME_BUILD + '_gene_names_stripped.tsv'
CDNA = '/home/cmb-panasas2/skchoudh/genomes/hg19/kallisto/hg19' GENOMES_DIR='/home/cmb-panasas2/skchoudh/genomes' OUT_DIR = '/home/cmb-panasas2/skchoudh/HuR_results/analysis/rna_seq_star_hg38_annotated' RAWDATA_DIR ='/home/cmb-06/as/skchoudh/data/HuR_Mouse_Human_liver/rna-seq/Penalva_L_08182016' SAMPLES=['HepG2_CTRL1_S31_L004', 'HepG2_CTRL_2_S33_L004', 'HepG2_CTRL_7_S35_L004', 'HepG2_HuR_KD_1_S32_L004', 'HepG2_HuR_KD_2_S34_L004', 'HepG2_HuR_KD_7_S36_L004'] GENOME_BUILD = 'hg38' GENOME_FASTA = GENOMES_DIR + '/' + GENOME_BUILD + '/fasta/'+ GENOME_BUILD+ '.fa' STAR_INDEX = GENOMES_DIR + '/' + GENOME_BUILD + '/star_annotated' GTF = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.v25.annotation.gtf'
bsd-2-clause
Python
79eb9241ac8ce36b14512287bc473a426db50cf1
Use elif to make it faster.
Notulp/Pluton,Notulp/Pluton
Example/Pluton/Plugins/Example/Example.py
Example/Pluton/Plugins/Example/Example.py
import clr import sys clr.AddReferenceByPartialName("UnityEngine") clr.AddReferenceByPartialName("Pluton") import UnityEngine import Pluton from Pluton import InvItem from System import * from UnityEngine import * class Example: def On_PlayerConnected(self, player): for p in Server.ActivePlayers: if(p.Name != player.Name): p.Message(String.Format("{0} has joined the server!", player.Name)) def On_PlayerDisconnected(self, player): for p in Server.ActivePlayers: if(p.Name != player.Name): p.Message(String.Format("{0} has left the server!", player.Name)) def On_Command(self, cmd): try: if(cmd.cmd == "kit"): if(Server.LoadOuts.ContainsKey(cmd.quotedArgs[0])): loadout = Server.LoadOuts[cmd.quotedArgs[0]] loadout.ToInv(cmd.User.Inventory) elif(cmd.cmd == "apple"): cmd.User.Message("An apple a day keeps the doctor away!") item = InvItem("Apple") item.Instantiate(Vector3(cmd.User.X + 3, cmd.User.Y + 3, cmd.User.Z + 3)) elif(cmd.cmd == "help"): cmd.User.Message("Usable command: /whereami, /kit starter") except: Debug.Log(String.Format("Something went wrong while executing: /{0} args", cmd.cmd, String.Join(" ", cmd.args)))
import clr import sys clr.AddReferenceByPartialName("UnityEngine") clr.AddReferenceByPartialName("Pluton") import UnityEngine import Pluton from Pluton import InvItem from System import * from UnityEngine import * class Example: def On_PlayerConnected(self, player): for p in Server.ActivePlayers: if(p.Name != player.Name): p.Message(String.Format("{0} has joined the server!", player.Name)) def On_PlayerDisconnected(self, player): for p in Server.ActivePlayers: if(p.Name != player.Name): p.Message(String.Format("{0} has left the server!", player.Name)) def On_Command(self, cmd): try: if(cmd.cmd == "kit"): if(Server.LoadOuts.ContainsKey(cmd.quotedArgs[0])): loadout = Server.LoadOuts[cmd.quotedArgs[0]] loadout.ToInv(cmd.User.Inventory) if(cmd.cmd == "apple"): cmd.User.Message("An apple a day keeps the doctor away!") item = InvItem("Apple") item.Instantiate(Vector3(cmd.User.X + 3, cmd.User.Y + 3, cmd.User.Z + 3)) if(cmd.cmd == "help"): cmd.User.Message("Usable command: /whereami, /kit starter") except: Debug.Log(String.Format("Something went wrong while executing: /{0} args", cmd.cmd, String.Join(" ", cmd.args)))
mit
Python
9af1cbe0676ca71edecfa6d44c66690a5a583b01
Rewrite for clarity
louisswarren/hieretikz
constructive_hierarchy.py
constructive_hierarchy.py
'''Reason about a directed graph in which the (non-)existence of some edges must be inferred by the disconnectedness of certain vertices. Collect (truthy) evidence for boolean function return values.''' def transitive_closure_dict(known_vertices, edges): '''Find the transitive closure of a dict mapping vertices to their paths.''' found_vertices = {b: known_vertices[a] + ((a, b),) for a, b in edges if a in known_vertices} if all(v in known_vertices for v in found_vertices): return known_vertices found_vertices.update(known_vertices) return transitive_closure_dict(found_vertices, edges) def transitive_closure(vertex, edges): closure = transitive_closure_dict({vertex: ()}, edges) # Use a (truthy) loop instead of an empty path closure[vertex] = (vertex, vertex) return closure def downward_closure(vertex, edges): '''Find the downward closure of a vertex.''' return transitive_closure(vertex, edges) def upward_closure(vertex, edges): '''Find the upward closure of a vertex.''' return transitive_closure(vertex, {(b, a) for a, b in edges}) def is_connected(a, b, edges): '''Check if there is a path from a to b.''' return downward_closure(a, edges).get(b, False) def is_separated(a, b, edges, disconnections): '''Check that a and b will remain not connected even if edges are added to the graph, as long as the vertex pairs listed in disconnections remain disconnected.''' for p, p_path in upward_closure(a, edges).items(): for q, q_path in downward_closure(b, edges).items(): if (p, q) in disconnections: # Should reverse p_path return p_path, q_path return False def find_possible_connections(vertices, edges, disconnections): '''Find which edges can be added to create new connections, without connecting any pairs in disconnections.''' return {(a, b) for a in vertices for b in vertices if not is_connected(a, b, edges) if not is_separated(a, b, edges, disconnections)} def is_redundant_edge(edge, edges): '''Give alternate path if one exists.''' return is_connected(*edge, edges - {edge}) def spanning_tree(edges): for edge in edges: if is_redundant_edge(edge, edges): return spanning_tree(edges - {edge}) return edges def rank_possible_edge(edge, vertices, edges, disconnections): evaluator = lambda x, y: len(find_possible_connections(vertices, x, y)) exists_rank = evaluator(edges | {edge}, disconnections) not_exists_rank = evaluator(edges, disconnections | {edge}) return abs(exists_rank) + abs(not_exists_rank)
'''Reason about a directed graph in which the (non-)existence of some edges must be inferred by the disconnectedness of certain vertices. Collect (truthy) evidence for boolean function return values.''' def transitive_closure_dict(vertices, edges): '''Find the transitive closure of a dict mapping vertices to their paths.''' neighbours = {b: vertices[a] + ((a, b),) for a, b in edges if a in vertices} if set(neighbours).issubset(set(vertices)): return vertices return transitive_closure_dict(dict(neighbours, **vertices), edges) def transitive_closure(vertex, edges): closure = transitive_closure_dict({vertex: ()}, edges) # Use a (truthy) loop instead of an empty path closure[vertex] = (vertex, vertex) return closure def downward_closure(vertex, edges): '''Find the downward closure of a vertex.''' return transitive_closure(vertex, edges) def upward_closure(vertex, edges): '''Find the upward closure of a vertex.''' return transitive_closure(vertex, {(b, a) for a, b in edges}) def is_connected(a, b, edges): '''Check if there is a path from a to b.''' return downward_closure(a, edges).get(b, False) def is_separated(a, b, edges, disconnections): '''Check that a and b will remain not connected even if edges are added to the graph, as long as the vertex pairs listed in disconnections remain disconnected.''' for p, p_path in upward_closure(a, edges).items(): for q, q_path in downward_closure(b, edges).items(): if (p, q) in disconnections: # Should reverse p_path return p_path, q_path return False def find_possible_connections(vertices, edges, disconnections): '''Find which edges can be added to create new connections, without connecting any pairs in disconnections.''' return {(a, b) for a in vertices for b in vertices if not is_connected(a, b, edges) if not is_separated(a, b, edges, disconnections)} def is_redundant_edge(edge, edges): '''Give alternate path if one exists.''' return is_connected(*edge, edges - {edge}) def spanning_tree(edges): for edge in edges: if is_redundant_edge(edge, edges): return spanning_tree(edges - {edge}) return edges def rank_possible_edge(edge, vertices, edges, disconnections): evaluator = lambda x, y: len(find_possible_connections(vertices, x, y)) exists_rank = evaluator(edges | {edge}, disconnections) not_exists_rank = evaluator(edges, disconnections | {edge}) return abs(exists_rank) + abs(not_exists_rank)
mit
Python
7760d75bb5ca38d2c96924e0ea1d65485cdc5c6f
Update version 0.12.2 -> 0.12.3
dwavesystems/dimod,dwavesystems/dimod
dimod/__init__.py
dimod/__init__.py
# Copyright 2018 D-Wave Systems Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # version is used by serialization below so we need it before everything __version__ = '0.12.3' from dimod.constrained import * import dimod.constrained from dimod.core import * import dimod.core from dimod.cyutilities import * from dimod.reference import * import dimod.reference from dimod.roof_duality import fix_variables from dimod.binary import * import dimod.binary from dimod.discrete import * import dimod.testing from dimod.converters import * import dimod.decorators import dimod.generators from dimod.exceptions import * import dimod.exceptions from dimod.higherorder import make_quadratic, make_quadratic_cqm, reduce_binary_polynomial, poly_energy, poly_energies, BinaryPolynomial import dimod.higherorder from dimod.package_info import __version__, __author__, __authoremail__, __description__ from dimod.quadratic import * import dimod.quadratic from dimod.traversal import * from dimod.sampleset import * from dimod.serialization.format import set_printoptions import dimod.lp from dimod.utilities import * import dimod.utilities from dimod.vartypes import * # flags for some global features REAL_INTERACTIONS = False
# Copyright 2018 D-Wave Systems Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # version is used by serialization below so we need it before everything __version__ = '0.12.2' from dimod.constrained import * import dimod.constrained from dimod.core import * import dimod.core from dimod.cyutilities import * from dimod.reference import * import dimod.reference from dimod.roof_duality import fix_variables from dimod.binary import * import dimod.binary from dimod.discrete import * import dimod.testing from dimod.converters import * import dimod.decorators import dimod.generators from dimod.exceptions import * import dimod.exceptions from dimod.higherorder import make_quadratic, make_quadratic_cqm, reduce_binary_polynomial, poly_energy, poly_energies, BinaryPolynomial import dimod.higherorder from dimod.package_info import __version__, __author__, __authoremail__, __description__ from dimod.quadratic import * import dimod.quadratic from dimod.traversal import * from dimod.sampleset import * from dimod.serialization.format import set_printoptions import dimod.lp from dimod.utilities import * import dimod.utilities from dimod.vartypes import * # flags for some global features REAL_INTERACTIONS = False
apache-2.0
Python
00b7cf15877dc17d07d591c893671decb6b869e2
Enable touch events for smoothness tests.
littlstar/chromium.src,littlstar/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,markYoungH/chromium.src,Chilledheart/chromium,Fireblend/chromium-crosswalk,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,littlstar/chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,axinging/chromium-crosswalk,Just-D/chromium-1,dushu1203/chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,chuan9/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,dednal/chromium.src,jaruba/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,axinging/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,M4sse/chromium.src,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,dushu1203/chromium.src,dednal/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,ltilve/chromium,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,ltilve/chromium,littlstar/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,axinging/chromium-crosswalk,dednal/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,markYoungH/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,Just-D/chromium-1,ltilve/chromium,M4sse/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,markYoungH/chromium.src,Just-D/chromium-1,dednal/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,Chilledheart/chromium,ondra-novak/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,dushu1203/chromium.src,Just-D/chromium-1,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,dednal/chromium.src,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,littlstar/chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,jaruba/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,fujunwei/chromium-crosswalk,ltilve/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,ondra-novak/chromium.src,ltilve/chromium,chuan9/chromium-crosswalk,dushu1203/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk
tools/perf/measurements/smoothness.py
tools/perf/measurements/smoothness.py
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from metrics import power from measurements import smoothness_controller from telemetry.page import page_measurement class Smoothness(page_measurement.PageMeasurement): def __init__(self): super(Smoothness, self).__init__('RunSmoothness') self._power_metric = None self._smoothness_controller = None def CustomizeBrowserOptions(self, options): options.AppendExtraBrowserArgs('--enable-gpu-benchmarking') options.AppendExtraBrowserArgs('--touch-events=enabled') power.PowerMetric.CustomizeBrowserOptions(options) def WillRunActions(self, page, tab): self._power_metric = power.PowerMetric() self._power_metric.Start(page, tab) self._smoothness_controller = smoothness_controller.SmoothnessController() self._smoothness_controller.Start(page, tab) def DidRunActions(self, page, tab): self._power_metric.Stop(page, tab) self._smoothness_controller.Stop(tab) def MeasurePage(self, page, tab, results): self._power_metric.AddResults(tab, results) self._smoothness_controller.AddResults(tab, results) def CleanUpAfterPage(self, _, tab): self._smoothness_controller.CleanUp(tab)
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from metrics import power from measurements import smoothness_controller from telemetry.page import page_measurement class Smoothness(page_measurement.PageMeasurement): def __init__(self): super(Smoothness, self).__init__('RunSmoothness') self._power_metric = None self._smoothness_controller = None def CustomizeBrowserOptions(self, options): options.AppendExtraBrowserArgs('--enable-gpu-benchmarking') power.PowerMetric.CustomizeBrowserOptions(options) def WillRunActions(self, page, tab): self._power_metric = power.PowerMetric() self._power_metric.Start(page, tab) self._smoothness_controller = smoothness_controller.SmoothnessController() self._smoothness_controller.Start(page, tab) def DidRunActions(self, page, tab): self._power_metric.Stop(page, tab) self._smoothness_controller.Stop(tab) def MeasurePage(self, page, tab, results): self._power_metric.AddResults(tab, results) self._smoothness_controller.AddResults(tab, results) def CleanUpAfterPage(self, _, tab): self._smoothness_controller.CleanUp(tab)
bsd-3-clause
Python
cf2004cec6e84cbec213f9e70dd8245327af541d
Update api.py
AlecAivazis/nautilus,aaivazis/nautilus,aaivazis/nautilus,AlecAivazis/nautilus,AlecAivazis/nautilus
example/services/api.py
example/services/api.py
# external imports from nautilus import APIGateway from graphene import Schema, ObjectType, String, Mutation, Boolean from nautilus.api import ServiceObjectType from nautilus.api.fields import Connection from nautilus.network import dispatchAction from nautilus.conventions import getCRUDAction # local imports from .recipes import service as RecipeService from .ingredients import service as IngredientService # create the schema based on the query object schema = Schema(name='Product Schema') ## define the schema that encapsulates the cloud class Recipe(ServiceObjectType): class Meta: service = RecipeService # connections are resolved/joined using the appropriate connection service # you can avoid circular/undefined references using strings - nautilus will look # for the corresponding ServiceObjectType ingredients = Connection('Ingredient', description = 'The ingredients in this recipe.') class Ingredient(ServiceObjectType): class Meta: service = IngredientService recipes = Connection(Recipe, description = 'The recipes with this ingredient') # add the query to the schema schema.query = Query class AddRecipeMutation(Mutation): class Input: name = String() success = Boolean(description="Wether or not the dispatch was successful") @classmethod def mutate(cls, instance, args, info): """ perform the mutation """ # send the new production action into the queue dispatchAction({ 'type': getCRUDAction('create', 'recipe'), 'payload': args }) class Mutation(ObjectType): """ the list of mutations that the api supports """ addRecipe = Field(AddRecipeMutation) sceham.mutation = Mutation # create a nautilus service with just the schema service = APIGateway(schema=schema)
# external imports from nautilus import APIGateway from graphene import Schema, ObjectType, String, Mutation, Boolean from nautilus.api import ServiceObjectType from nautilus.api.fields import Connection from nautilus.network import dispatchAction from nautilus.conventions import getCRUDAction # local imports from .recipes import service as RecipeService from .ingredients import service as IngredientService # create the schema based on the query object schema = Schema(name='Product Schema') ## define the schema that encapsulates the cloud class Recipe(ServiceObjectType): class Meta: service = RecipeService # connections are resolved/joined using the appropriate connection service # you can avoid circular/undefined references using strings - nautilus will look # for the corresponding ServiceObjectType ingredients = Connection('Ingredient', description = 'The ingredients in this recipe.') class Ingredient(ServiceObjectType): class Meta: service = IngredientService recipes = Connection(Recipe, description = 'The recipes with this ingredient') # add the query to the schema schema.query = Query # third party imports class AddRecipeMutation(Mutation): class Input: name = String() success = Boolean(description="Wether or not the dispatch was successful") @classmethod def mutate(cls, instance, args, info): """ perform the mutation """ # send the new production action into the queue dispatchAction({ 'type': getCRUDAction('create', 'recipe'), 'payload': args }) class Mutation(ObjectType): """ the list of mutations that the api supports """ addRecipe = Field(AddRecipeMutation) sceham.mutation = Mutation # create a nautilus service with just the schema service = APIGateway(schema=schema)
mit
Python
c4b83c9554ca0f501ac42c63a53394ff8b90c2af
bump version to 20190807
AOSC-Dev/acbs,AOSC-Dev/acbs,AOSC-Dev/acbs,AOSC-Dev/acbs
acbs/__init__.py
acbs/__init__.py
__version__ = '20190807'
__version__ = '20181007'
lgpl-2.1
Python
2b20e803733db09ad4643be00b2af11ecea1eeb8
Increase version to 0.11.0 (#394)
FabioRosado/opsdroid,jacobtomlinson/opsdroid,opsdroid/opsdroid
opsdroid/const.py
opsdroid/const.py
"""Constants used by OpsDroid.""" import os __version__ = "0.11.0" DEFAULT_GIT_URL = "https://github.com/opsdroid/" MODULES_DIRECTORY = "opsdroid-modules" DEFAULT_ROOT_PATH = os.path.expanduser("~/.opsdroid") DEFAULT_LOG_FILENAME = os.path.join(DEFAULT_ROOT_PATH, 'output.log') DEFAULT_MODULES_PATH = os.path.join(DEFAULT_ROOT_PATH, "modules") DEFAULT_MODULE_DEPS_PATH = os.path.join(DEFAULT_ROOT_PATH, "site-packages") DEFAULT_CONFIG_PATH = os.path.join(DEFAULT_ROOT_PATH, "configuration.yaml") DEFAULT_MODULE_BRANCH = "master" EXAMPLE_CONFIG_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "configuration/example_configuration.yaml") REGEX_MAX_SCORE = 0.6 RASANLU_DEFAULT_URL = "http://localhost:5000" RASANLU_DEFAULT_PROJECT = "opsdroid"
"""Constants used by OpsDroid.""" import os __version__ = "0.10.0" DEFAULT_GIT_URL = "https://github.com/opsdroid/" MODULES_DIRECTORY = "opsdroid-modules" DEFAULT_ROOT_PATH = os.path.expanduser("~/.opsdroid") DEFAULT_LOG_FILENAME = os.path.join(DEFAULT_ROOT_PATH, 'output.log') DEFAULT_MODULES_PATH = os.path.join(DEFAULT_ROOT_PATH, "modules") DEFAULT_MODULE_DEPS_PATH = os.path.join(DEFAULT_ROOT_PATH, "site-packages") DEFAULT_CONFIG_PATH = os.path.join(DEFAULT_ROOT_PATH, "configuration.yaml") DEFAULT_MODULE_BRANCH = "master" EXAMPLE_CONFIG_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "configuration/example_configuration.yaml") REGEX_MAX_SCORE = 0.6 RASANLU_DEFAULT_URL = "http://localhost:5000" RASANLU_DEFAULT_PROJECT = "opsdroid"
apache-2.0
Python
8f4f1e8cc45daa8cf49f050200ce17a48f008e5a
Fix process entity migration
jberci/resolwe,jberci/resolwe,genialis/resolwe,genialis/resolwe
resolwe/flow/migrations/0023_process_entity_2.py
resolwe/flow/migrations/0023_process_entity_2.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.14 on 2018-10-01 03:15 from __future__ import unicode_literals from django.db import migrations def migrate_flow_collection(apps, schema_editor): """Migrate 'flow_collection' field to 'entity_type'.""" Process = apps.get_model('flow', 'Process') DescriptorSchema = apps.get_model('flow', 'DescriptorSchema') for process in Process.objects.all(): process.entity_type = process.flow_collection process.entity_descriptor_schema = process.flow_collection if (process.entity_descriptor_schema is not None and not DescriptorSchema.objects.filter(slug=process.entity_descriptor_schema).exists()): raise LookupError( "Descriptow schema '{}' referenced in 'entity_descriptor_schema' not " "found.".format(process.entity_descriptor_schema) ) process.save() class Migration(migrations.Migration): dependencies = [ ('flow', '0022_process_entity_1'), ] operations = [ migrations.RunPython(migrate_flow_collection) ]
# -*- coding: utf-8 -*- # Generated by Django 1.11.14 on 2018-10-01 03:15 from __future__ import unicode_literals from django.db import migrations def migrate_flow_collection(apps, schema_editor): """Migrate 'flow_collection' field to 'entity_type'.""" Process = apps.get_model('flow', 'Process') DescriptorSchema = apps.get_model('flow', 'DescriptorSchema') for process in Process.objects.all(): process.entity_type = process.flow_collection process.entity_descriptor_schema = process.flow_collection if not DescriptorSchema.objects.filter(slug=process.entity_descriptor_schema).exists(): raise LookupError( "Descriptow schema '{}' referenced in 'entity_descriptor_schema' not " "found.".format(process.entity_descriptor_schema) ) process.save() class Migration(migrations.Migration): dependencies = [ ('flow', '0022_process_entity_1'), ] operations = [ migrations.RunPython(migrate_flow_collection) ]
apache-2.0
Python
21b453946bfa35c7730d5ab15e62b48d299170ed
Update password loading test
betatim/osf-cli,betatim/osf-cli
osfclient/tests/test_listing.py
osfclient/tests/test_listing.py
"""Test `osf ls` command""" from unittest import mock from unittest.mock import patch, MagicMock, PropertyMock, mock_open from osfclient import OSF from osfclient.cli import list_ from osfclient.tests.mocks import MockProject @patch('osfclient.cli.OSF') def test_anonymous_doesnt_use_password(MockOSF): args = MagicMock() username = PropertyMock(return_value=None) type(args).username = username list_(args) MockOSF.assert_called_once_with(username=None, password=None) @patch('osfclient.cli.OSF') def test_username_password(MockOSF): args = MagicMock() username = PropertyMock(return_value='[email protected]') type(args).username = username def simple_getenv(key): if key == 'OSF_PASSWORD': return 'secret' with patch('osfclient.cli.os.getenv', side_effect=simple_getenv) as mock_getenv: list_(args) MockOSF.assert_called_once_with(username='[email protected]', password='secret') mock_getenv.assert_called_with('OSF_PASSWORD') @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_get_project(OSF_project): args = MagicMock() username = PropertyMock(return_value=None) type(args).username = username project = PropertyMock(return_value='1234') type(args).project = project output = PropertyMock(return_value=None) type(args).output = output list_(args) OSF_project.assert_called_once_with('1234') # check that the project and the files have been printed for store in OSF_project.return_value.storages: assert store._name_mock.called for f in store.files: assert f._path_mock.called
"""Test `osf ls` command""" from unittest import mock from unittest.mock import patch, MagicMock, PropertyMock, mock_open from osfclient import OSF from osfclient.cli import list_ from osfclient.tests.mocks import MockProject @patch('osfclient.cli.OSF') def test_anonymous_doesnt_use_password(MockOSF): args = MagicMock() username = PropertyMock(return_value=None) type(args).username = username list_(args) MockOSF.assert_called_once_with(username=None, password=None) @patch('osfclient.cli.OSF') def test_username_password(MockOSF): args = MagicMock() username = PropertyMock(return_value='[email protected]') type(args).username = username mock_open_func = mock_open(read_data="secret") with patch('osfclient.cli.open', mock_open_func, create=True): list_(args) MockOSF.assert_called_once_with(username='[email protected]', password='secret') assert mock_open_func.called @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_get_project(OSF_project): args = MagicMock() username = PropertyMock(return_value=None) type(args).username = username project = PropertyMock(return_value='1234') type(args).project = project output = PropertyMock(return_value=None) type(args).output = output list_(args) OSF_project.assert_called_once_with('1234') # check that the project and the files have been printed for store in OSF_project.return_value.storages: assert store._name_mock.called for f in store.files: assert f._path_mock.called
bsd-3-clause
Python
e9060c166987a18aa9faf3b790b80135b319ecca
Update example.py
bwipp/postscriptbarcode,bwipp/postscriptbarcode,bwipp/postscriptbarcode,bwipp/postscriptbarcode,bwipp/postscriptbarcode,bwipp/postscriptbarcode,bwipp/postscriptbarcode,bwipp/postscriptbarcode
libs/python/example.py
libs/python/example.py
#!/usr/bin/env python import postscriptbarcode c=postscriptbarcode.BWIPP("../../build/monolithic_package/barcode.ps") c.get_version()
#!/usr/bin/env python import postscriptbarcode c=postscriptbarcode.BWIPP("../barcode.ps") c.get_version()
mit
Python
068a94a455448b3fc2ee552616658d9f980104ea
Add comment.
efiring/numpy-work,teoliphant/numpy-refactor,efiring/numpy-work,illume/numpy3k,jasonmccampbell/numpy-refactor-sprint,Ademan/NumPy-GSoC,efiring/numpy-work,Ademan/NumPy-GSoC,jasonmccampbell/numpy-refactor-sprint,jasonmccampbell/numpy-refactor-sprint,illume/numpy3k,teoliphant/numpy-refactor,teoliphant/numpy-refactor,chadnetzer/numpy-gaurdro,teoliphant/numpy-refactor,illume/numpy3k,chadnetzer/numpy-gaurdro,jasonmccampbell/numpy-refactor-sprint,efiring/numpy-work,Ademan/NumPy-GSoC,chadnetzer/numpy-gaurdro,teoliphant/numpy-refactor,chadnetzer/numpy-gaurdro,Ademan/NumPy-GSoC,illume/numpy3k
numpy/distutils/command/bdist_rpm.py
numpy/distutils/command/bdist_rpm.py
import os import sys from distutils.command.bdist_rpm import bdist_rpm as old_bdist_rpm class bdist_rpm(old_bdist_rpm): def _make_spec_file(self): spec_file = old_bdist_rpm._make_spec_file(self) # Replace hardcoded setup.py script name # with the real setup script name. setup_py = os.path.basename(sys.argv[0]) if setup_py == 'setup.py': return spec_file new_spec_file = [] for line in spec_file: line = line.replace('setup.py',setup_py) new_spec_file.append(line) return new_spec_file
import os import sys from distutils.command.bdist_rpm import bdist_rpm as old_bdist_rpm class bdist_rpm(old_bdist_rpm): def _make_spec_file(self): spec_file = old_bdist_rpm._make_spec_file(self) setup_py = os.path.basename(sys.argv[0]) if setup_py == 'setup.py': return spec_file new_spec_file = [] for line in spec_file: line = line.replace('setup.py',setup_py) new_spec_file.append(line) return new_spec_file
bsd-3-clause
Python
6af3eacec303abfe6f260581687a38d89f7b7474
Fix wavelength issue for QE65000
ap--/python-oceanoptics
oceanoptics/spectrometers/QE65xxx.py
oceanoptics/spectrometers/QE65xxx.py
# tested # ---------------------------------------------------------- from oceanoptics.base import OceanOpticsBase as _OOBase from oceanoptics.base import OceanOpticsTEC as _OOTEC import struct #---------------------------------------------------------- class _QE65xxx(_OOBase, _OOTEC): def _set_integration_time(self, time_us): """ send command 0x02 """ # XXX: The QE65000 requires the time set in Milliseconds! # This overides the provided function of OOBase time_ms = int(time_us/1000) self._usb_send(struct.pack('<BI', 0x02, time_ms)) def _query_status(self): """ 0xFE query status """ # XXX: The QE65000 also returns the time in Milliseconds! # This overides the provided function of OOBase # and pretends to return us ret = self._usb_query(struct.pack('<B', 0xFE)) data = struct.unpack('<HLBBBBBBBBBB', ret[:]) ret = { 'pixels' : data[0], 'integration_time' : data[1] * 1000, # ms to us 'lamp_enable' : data[2], 'trigger_mode' : data[3], 'acquisition_status' : data[4], 'packets_in_spectrum' : data[5], 'power_down' : data[6], 'packets_in_endpoint' : data[7], 'usb_speed' : data[10] } return ret #-------- # tested #-------- class QE65000(_QE65xxx): def __init__(self): super(QE65000, self).__init__('QE65000') # The QE65000 needs a -10 offset for calculating the wavelengths # due to some legacy issues... self._wl = sum( self._wl_factors[i] * np.arange(-10, self._pixels - 10, dtype=np.float64)**i for i in range(4) ) self.initialize_TEC() #---------- # untested #---------- class QE65pro(_QE65xxx): def __init__(self): super(QE65pro, self).__init__('QE65pro') self.initialize_TEC()
# tested # ---------------------------------------------------------- from oceanoptics.base import OceanOpticsBase as _OOBase from oceanoptics.base import OceanOpticsTEC as _OOTEC import struct #---------------------------------------------------------- class _QE65xxx(_OOBase, _OOTEC): def _set_integration_time(self, time_us): """ send command 0x02 """ # XXX: The QE65000 requires the time set in Milliseconds! # This overides the provided function of OOBase time_ms = int(time_us/1000) self._usb_send(struct.pack('<BI', 0x02, time_ms)) def _query_status(self): """ 0xFE query status """ # XXX: The QE65000 also returns the time in Milliseconds! # This overides the provided function of OOBase # and pretends to return us ret = self._usb_query(struct.pack('<B', 0xFE)) data = struct.unpack('<HLBBBBBBBBBB', ret[:]) ret = { 'pixels' : data[0], 'integration_time' : data[1] * 1000, # ms to us 'lamp_enable' : data[2], 'trigger_mode' : data[3], 'acquisition_status' : data[4], 'packets_in_spectrum' : data[5], 'power_down' : data[6], 'packets_in_endpoint' : data[7], 'usb_speed' : data[10] } return ret #-------- # tested #-------- class QE65000(_QE65xxx): def __init__(self): super(QE65000, self).__init__('QE65000') self.initialize_TEC() #---------- # untested #---------- class QE65pro(_QE65xxx): def __init__(self): super(QE65pro, self).__init__('QE65pro') self.initialize_TEC()
mit
Python
56ac633029c9d7ef40415e1881d2cb3c18c83d7b
Bump to version 0.17.1
reubano/ckanny,reubano/ckanny
ckanny/__init__.py
ckanny/__init__.py
# -*- coding: utf-8 -*- # vim: sw=4:ts=4:expandtab """ ckanny ~~~~~~ Miscellaneous CKAN utility scripts Examples: literal blocks:: python example_google.py Attributes: module_level_variable1 (int): Module level variables may be documented in """ from __future__ import ( absolute_import, division, print_function, with_statement, unicode_literals) from manager import Manager from . import datastorer, filestorer, package, hdx __version__ = '0.17.1' __title__ = 'ckanny' __author__ = 'Reuben Cummings' __description__ = 'Miscellaneous CKAN utility scripts' __email__ = '[email protected]' __license__ = 'MIT' __copyright__ = 'Copyright 2015 Reuben Cummings' manager = Manager() manager.merge(datastorer.manager, namespace='ds') manager.merge(filestorer.manager, namespace='fs') manager.merge(hdx.manager, namespace='hdx') manager.merge(package.manager, namespace='pk') @manager.command def ver(): """Show ckanny version""" print('v%s' % __version__) if __name__ == '__main__': manager.main()
# -*- coding: utf-8 -*- # vim: sw=4:ts=4:expandtab """ ckanny ~~~~~~ Miscellaneous CKAN utility scripts Examples: literal blocks:: python example_google.py Attributes: module_level_variable1 (int): Module level variables may be documented in """ from __future__ import ( absolute_import, division, print_function, with_statement, unicode_literals) from manager import Manager from . import datastorer, filestorer, package, hdx __version__ = '0.17.0' __title__ = 'ckanny' __author__ = 'Reuben Cummings' __description__ = 'Miscellaneous CKAN utility scripts' __email__ = '[email protected]' __license__ = 'MIT' __copyright__ = 'Copyright 2015 Reuben Cummings' manager = Manager() manager.merge(datastorer.manager, namespace='ds') manager.merge(filestorer.manager, namespace='fs') manager.merge(hdx.manager, namespace='hdx') manager.merge(package.manager, namespace='pk') @manager.command def ver(): """Show ckanny version""" print('v%s' % __version__) if __name__ == '__main__': manager.main()
mit
Python
20ffbab08c244ec788e8a6114ccdbf38e39d97b6
Fix unclassifiable problem
WangWenjun559/Weiss,WangWenjun559/Weiss,WangWenjun559/Weiss,WangWenjun559/Weiss,WangWenjun559/Weiss,WangWenjun559/Weiss,WangWenjun559/Weiss
classifier/demo.py
classifier/demo.py
""" This is a demo about how to use LibLINEAR to do the prediction ============================================================== Usage: python demo.py Author: Wenjun Wang Date: June 18, 2015 """ import pickle import datetime from liblinearutil import * from feature import convert_query # Read training file #y, x = svm_read_problem(path_to_training_file) # Train and save model #m = train(y, x, '-c 1 -s 1 -B 1 -e 0.01 -v 5 -q') #save_model(name_of_model_file,m) # Load the trained model, which is in the same directory as this script date = str(datetime.date.today()) m = load_model('model_'+date) if m == None: date = str(datetime.date.fromordinal(datetime.date.today().toordinal()-1)) m = load_model('model_'+date) # Load feature file, which is also in the same directory infile = open('features') feature_list = pickle.load(infile) # Class labels y = [1,2,3,4,5] # Example query query = 'Alan Black' # Convert query x = convert_query(query, feature_list, 'test') # Do the prediction p_label, p_val = predict(y, x, m, '-b 0') # Cannot classify it to any class if p_val[0][int(p_label[0])-1] == 0: p_label[0] = -1 print p_label #predict class/label print p_val #svm value for each class/label
""" This is a demo about how to use LibLINEAR to do the prediction ============================================================== Usage: python demo.py Author: Wenjun Wang Date: June 18, 2015 """ import pickle import datetime from liblinearutil import * from feature import convert_query # Read training file #y, x = svm_read_problem(path_to_training_file) # Train and save model #m = train(y, x, '-c 1 -s 1 -B 1 -e 0.01 -v 5 -q') #save_model(name_of_model_file,m) # Load the trained model, which is in the same directory as this script date = str(datetime.date.today()) m = load_model('model_'+date) if m == None: date = str(datetime.date.fromordinal(datetime.date.today().toordinal()-1)) m = load_model('model_'+date) # Load feature file, which is also in the same directory infile = open('features') feature_list = pickle.load(infile) # Class labels y = [1,2,3,4,5] # Example query query = 'next comment' # Convert query x = convert_query(query, feature_list, 'test') # Do the prediction p_label, p_val = predict(y, x, m, '-b 0') print p_label #predict class/label print p_val #svm value for each class/label
apache-2.0
Python
c3951f942633438e91e43b523a814bf1a3528295
Add impl to analyzer.
xanxys/shogi_recognizer,xanxys/shogi_recognizer
analyze.py
analyze.py
#!/bin/python from __future__ import print_function, division import cv import cv2 import argparse import preprocess if __name__ == '__main__': parser = argparse.ArgumentParser( description="""Analyze shogi board state in a photo""", formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument( 'photo', metavar='PHOTO', nargs=1, type=str, help='Photo image path') parser.add_argument( '--output-visualization', nargs='?', metavar='VISUALIZATION_PATH', type=str, default=None, const=True, help='Output path of pretty visualization image') args = parser.parse_args() img = cv2.imread(args.photo[0]) # TODO: Refactoring required args.derive_emptiness = False args.derive_types_up = False args.derive_validness = False detected = preprocess.detect_board("", img, visualize=False, derive=args) print("Detected?: %s" % detected)
#!/bin/python
from __future__ import print_function, division
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Analyze shogi board state in a photo""",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    args = parser.parse_args()
mit
Python
b5b40dc232b04a2cfa75438bb5143ffdb103a57c
split a method
alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl
AlphaTwirl/EventReader/ProgressReporter.py
AlphaTwirl/EventReader/ProgressReporter.py
# Tai Sakuma <[email protected]>
import multiprocessing
import time

from ProgressReport import ProgressReport

##____________________________________________________________________________||
class ProgressReporter(object):
    def __init__(self, queue, pernevents = 1000):
        self.queue = queue
        self.pernevents = pernevents
        self.interval = 0.02 # [second]
        self._readTime()

    def report(self, event, component):
        if not self.needToReport(event, component): return
        self._report(event, component)

    def _report(self, event, component):
        done = event.iEvent + 1
        report = ProgressReport(name = component.name, done = done, total = event.nEvents)
        self.queue.put(report)
        self._readTime()

    def needToReport(self, event, component):
        iEvent = event.iEvent + 1 # add 1 because event.iEvent starts from 0
        if self._time() - self.lastTime > self.interval: return True
        if iEvent % self.pernevents == 0: return True
        if iEvent == event.nEvents: return True
        return False

    def _time(self): return time.time()

    def _readTime(self): self.lastTime = self._time()

##____________________________________________________________________________||
# Tai Sakuma <[email protected]>
import multiprocessing
import time

from ProgressReport import ProgressReport

##____________________________________________________________________________||
class ProgressReporter(object):
    def __init__(self, queue, pernevents = 1000):
        self.queue = queue
        self.pernevents = pernevents
        self.interval = 0.02 # [second]
        self._readTime()

    def report(self, event, component):
        if not self.needToReport(event, component): return
        done = event.iEvent + 1
        report = ProgressReport(name = component.name, done = done, total = event.nEvents)
        self.queue.put(report)
        self._readTime()

    def needToReport(self, event, component):
        iEvent = event.iEvent + 1 # add 1 because event.iEvent starts from 0
        if self._time() - self.lastTime > self.interval: return True
        if iEvent % self.pernevents == 0: return True
        if iEvent == event.nEvents: return True
        return False

    def _time(self): return time.time()

    def _readTime(self): self.lastTime = self._time()

##____________________________________________________________________________||
bsd-3-clause
Python
ec013d194e2b26155949bf89a5cd03ef4a013cc5
Add import unicode on csv_importer
marcwebbie/passpie,eiginn/passpie,scorphus/passpie,scorphus/passpie,marcwebbie/passpie,eiginn/passpie
passpie/importers/csv_importer.py
passpie/importers/csv_importer.py
import csv

from passpie.importers import BaseImporter
from passpie._compat import is_python2, unicode

def unicode_csv_reader(utf8_data, dialect=csv.excel, **kwargs):
    csv_reader = csv.reader(utf8_data, dialect=dialect, **kwargs)
    for row in csv_reader:
        if is_python2():
            yield [unicode(cell, 'utf-8') for cell in row]
        else:
            yield [str(cell) for cell in row]

class CSVImporter(BaseImporter):

    def match(self, filepath):
        """Dont match this importer"""
        return False

    def handle(self, filepath, cols):
        credentials = []
        with open(filepath) as csv_file:
            reader = unicode_csv_reader(csv_file)
            try:
                next(reader)
            except StopIteration:
                raise ValueError('empty csv file: %s' % filepath)
            for row in reader:
                credential = {
                    'name': row[cols['name']],
                    'login': row[cols.get('login', '')],
                    'password': row[cols['password']],
                    'comment': row[cols.get('comment', '')],
                }
                credentials.append(credential)
        return credentials
import csv

from passpie.importers import BaseImporter
from passpie._compat import is_python2

def unicode_csv_reader(utf8_data, dialect=csv.excel, **kwargs):
    csv_reader = csv.reader(utf8_data, dialect=dialect, **kwargs)
    for row in csv_reader:
        if is_python2():
            yield [unicode(cell, 'utf-8') for cell in row]
        else:
            yield [str(cell) for cell in row]

class CSVImporter(BaseImporter):

    def match(self, filepath):
        """Dont match this importer"""
        return False

    def handle(self, filepath, cols):
        credentials = []
        with open(filepath) as csv_file:
            reader = unicode_csv_reader(csv_file)
            try:
                next(reader)
            except StopIteration:
                raise ValueError('empty csv file: %s' % filepath)
            for row in reader:
                credential = {
                    'name': row[cols['name']],
                    'login': row[cols.get('login', '')],
                    'password': row[cols['password']],
                    'comment': row[cols.get('comment', '')],
                }
                credentials.append(credential)
        return credentials
mit
Python
8e536e4911ab18a5ac6e2e018fa041425a57a14b
Update serializers.py
Bugheist/website,Bugheist/website,Bugheist/website,Bugheist/website
website/serializers.py
website/serializers.py
from website.models import Issue, User , UserProfile,Points, Domain
from rest_framework import routers, serializers, viewsets, filters
import django_filters

class UserSerializer(serializers.ModelSerializer):
    class Meta:
        model = User
        fields = ('id','username')

class IssueSerializer(serializers.ModelSerializer):
    user = UserSerializer(read_only=True)
    class Meta:
        model = Issue
        fields = '__all__'

class IssueViewSet(viewsets.ModelViewSet):
    queryset = Issue.objects.all()
    serializer_class = IssueSerializer
    filter_backends = (filters.SearchFilter,)
    search_fields = ('url', 'description', 'user__id')
    http_method_names = ['get', 'post', 'head']

class UserIssueViewSet(viewsets.ModelViewSet):
    queryset = Issue.objects.all()
    serializer_class = IssueSerializer
    filter_backends = (filters.SearchFilter,)
    search_fields = ('user__username', 'user__id')
    http_method_names = ['get', 'post', 'head']

class UserProfileSerializer(serializers.ModelSerializer):
    user = UserSerializer(read_only=True)
    class Meta:
        model = UserProfile
        fields = '__all__'

class UserProfileViewSet(viewsets.ModelViewSet):
    serializer_class = UserProfileSerializer
    queryset = UserProfile.objects.all()
    filter_backends = (filters.SearchFilter,)
    search_fields = ('id', 'user__id','user__username')
    http_method_names = ['get', 'post', 'head']

class DomainSerializer(serializers.ModelSerializer):
    class Meta:
        model = Domain
        fields = '__all__'

class DomainViewSet(viewsets.ModelViewSet):
    serializer_class = DomainSerializer
    queryset = Domain.objects.all()
    filter_backends = (filters.SearchFilter,)
    search_fields = ('url', 'name')
    http_method_names = ['get', 'post', 'head']

router = routers.DefaultRouter()
router.register(r'issues', IssueViewSet, basename="issues")
router.register(r'userissues', UserIssueViewSet, basename="userissues")
router.register(r'profile', UserProfileViewSet, basename="profile")
router.register(r'domain', DomainViewSet, basename="domain")
from website.models import Issue, User , UserProfile,Points, Domain
from rest_framework import routers, serializers, viewsets, filters
import django_filters

class UserSerializer(serializers.ModelSerializer):
    class Meta:
        model = User
        fields = ('id','username')

class IssueSerializer(serializers.ModelSerializer):
    user = UserSerializer(read_only=True)
    class Meta:
        model = Issue
        fields = '__all__'

class IssueViewSet(viewsets.ModelViewSet):
    queryset = Issue.objects.all()
    serializer_class = IssueSerializer
    filter_backends = (filters.SearchFilter,)
    search_fields = ('url', 'description', 'user__id')

class UserIssueViewSet(viewsets.ModelViewSet):
    queryset = Issue.objects.all()
    serializer_class = IssueSerializer
    filter_backends = (filters.SearchFilter,)
    search_fields = ('user__username', 'user__id')

class UserProfileSerializer(serializers.ModelSerializer):
    user = UserSerializer(read_only=True)
    class Meta:
        model = UserProfile
        fields = '__all__'

class UserProfileViewSet(viewsets.ModelViewSet):
    serializer_class = UserProfileSerializer
    queryset = UserProfile.objects.all()
    filter_backends = (filters.SearchFilter,)
    search_fields = ('id', 'user__id','user__username')

class DomainSerializer(serializers.ModelSerializer):
    class Meta:
        model = Domain
        fields = '__all__'

class DomainViewSet(viewsets.ModelViewSet):
    serializer_class = DomainSerializer
    queryset = Domain.objects.all()
    filter_backends = (filters.SearchFilter,)
    search_fields = ('url', 'name')

router = routers.DefaultRouter()
router.register(r'issues', IssueViewSet, basename="issues")
router.register(r'userissues', UserIssueViewSet, basename="userissues")
router.register(r'profile', UserProfileViewSet, basename="profile")
router.register(r'domain', DomainViewSet, basename="domain")
agpl-3.0
Python
6f0c05ee4743528550dd083d9290b5be0074ff0e
Add commands args to runner and improve docs in it
zillolo/vsut-python
runner.py
runner.py
import argparse
import sys

from vsut.unit import CSVFormatter, TableFormatter, Unit

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Runs unit tests.")
    parser.add_argument('units', metavar='Unit', type=str, nargs='+')
    parser.add_argument(
        '--format',
        help="Default: table; Decides whether to use table or csv for output.",
        required=False)
    args = vars(parser.parse_args())

    for unit in args["units"]:
        try:
            # Get the name of the module.
            modName = unit.split(".")[0:-1]
            modName = ".".join(modName)

            # Get the name of the class.
            className = unit.split(".")[-1]

            # Import the module.
            module = __import__(modName, fromlist=[className])

            # Create unit and run it.
            unit = getattr(module, className)()
            unit.run()

            # Format the results and output them.
            if args["format"] == "csv":
                formatter = CSVFormatter(unit)
            else:
                formatter = TableFormatter(unit)
            print(formatter)
        except (ImportError, AttributeError) as e:
            print("[Error] Could not import unit: {0}".format(unit))
import sys

from vsut.unit import CSVFormatter, TableFormatter

if __name__ == "__main__":
    for i in range(1, len(sys.argv)):
        try:
            modName = sys.argv[i].split(".")[0:-1]
            modName = ".".join(modName)
            className = sys.argv[i].split(".")[-1]
            module = __import__(modName, fromlist=[className])
            className = getattr(module, className)
            unit = className()
            unit.run()
            formatter = TableFormatter(unit)
            print(formatter.format())
        except ImportError as e:
            #TODO: Handle this import error.
            print(e)
mit
Python
553cd68fb5d54be6ecbf3ca93c6d6c6be75afdb5
Add EveLinkCache to evelink.appengine
ayust/evelink,Morloth1274/EVE-Online-POCO-manager,zigdon/evelink,FashtimeDotCom/evelink,bastianh/evelink
evelink/appengine/__init__.py
evelink/appengine/__init__.py
from evelink.appengine.api import AppEngineAPI
from evelink.appengine.api import AppEngineCache
from evelink.appengine.api import AppEngineDatastoreCache
from evelink.appengine.api import EveLinkCache
from evelink.appengine import account
from evelink.appengine import char
from evelink.appengine import corp
from evelink.appengine import eve
from evelink.appengine import map
from evelink.appengine import server

__all__ = [
    "AppEngineAPI",
    "AppEngineCache",
    "AppEngineDatastoreCache",
    "EveLinkCache",
    "account",
    "char",
    "corp",
    "eve",
    "map",
    "server",
]
from evelink.appengine.api import AppEngineAPI
from evelink.appengine.api import AppEngineCache
from evelink.appengine.api import AppEngineDatastoreCache
from evelink.appengine import account
from evelink.appengine import char
from evelink.appengine import corp
from evelink.appengine import eve
from evelink.appengine import map
from evelink.appengine import server

__all__ = [
    "AppEngineAPI",
    "AppEngineCache",
    "AppEngineDatastoreCache",
    "account",
    "char",
    "corp",
    "eve",
    "map",
    "server",
]
mit
Python
68c4f723f5eea2802209862d323825f33a445154
Fix url id to pk.
rg3915/wttd2,rg3915/wttd2,rg3915/wttd2,rg3915/wttd2
eventex/subscriptions/urls.py
eventex/subscriptions/urls.py
from django.urls import path

import eventex.subscriptions.views as s

app_name = 'subscriptions'

urlpatterns = [
    path('', s.new, name='new'),
    path('<int:pk>/', s.detail, name='detail'),
    path('json/donut/', s.paid_list_json, name='paid_list_json'),
    path('json/column/', s.paid_column_json, name='paid_column_json'),
    path('graphic/', s.graphic, name='graphic'),
]
from django.urls import path

import eventex.subscriptions.views as s

app_name = 'subscriptions'

urlpatterns = [
    path('', s.new, name='new'),
    path('<int:id>/', s.detail, name='detail'),
    path('json/donut/', s.paid_list_json, name='paid_list_json'),
    path('json/column/', s.paid_column_json, name='paid_column_json'),
    path('graphic/', s.graphic, name='graphic'),
]
mit
Python
ca625e22cb397905f859c826c6507b3977665a51
Fix import
farizrahman4u/keras-contrib,keras-team/keras-contrib,stygstra/keras-contrib,keras-team/keras-contrib,keras-team/keras-contrib
examples/cifar10_ror.py
examples/cifar10_ror.py
'''
Trains a Residual-of-Residual Network (WRN-40-2) model on the CIFAR-10 Dataset.

Gets a 94.53% accuracy score after 150 epochs.
'''
import numpy as np
import sklearn.metrics as metrics

import keras.callbacks as callbacks
import keras.utils.np_utils as kutils
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.optimizers import Adam

from keras_contrib.applications import ResidualOfResidual

batch_size = 64
nb_epoch = 150
img_rows, img_cols = 32, 32

(trainX, trainY), (testX, testY) = cifar10.load_data()

trainX = trainX.astype('float32')
testX = testX.astype('float32')

trainX /= 255
testX /= 255

tempY = testY

trainY = kutils.to_categorical(trainY)
testY = kutils.to_categorical(testY)

generator = ImageDataGenerator(rotation_range=15,
                               width_shift_range=5. / 32,
                               height_shift_range=5. / 32)

generator.fit(trainX, seed=0)

model = ResidualOfResidual(depth=40, width=2, dropout_rate=0.0, weights='None')

optimizer = Adam(lr=1e-3)
model.compile(loss="categorical_crossentropy", optimizer=optimizer, metrics=["acc"])
print("Finished compiling")

model.fit_generator(generator.flow(trainX, trainY, batch_size=batch_size),
                    samples_per_epoch=len(trainX), nb_epoch=nb_epoch,
                    callbacks=[callbacks.ModelCheckpoint("weights/RoR-WRN-40-2-Weights.h5",
                                                         monitor="val_acc",
                                                         save_best_only=True,
                                                         save_weights_only=True)],
                    validation_data=(testX, testY),
                    nb_val_samples=testX.shape[0], verbose=2)

scores = model.evaluate(testX, testY, batch_size)
print("Test loss : ", scores[0])
print("Test accuracy : ", scores[1])
'''
Trains a Residual-of-Residual Network (WRN-40-2) model on the CIFAR-10 Dataset.

Gets a 94.53% accuracy score after 150 epochs.
'''
import numpy as np
import sklearn.metrics as metrics

import keras.callbacks as callbacks
import keras.utils.np_utils as kutils
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.optimizers import Adam

from keras_contrib.applications.ror import ResidualOfResidual

batch_size = 64
nb_epoch = 150
img_rows, img_cols = 32, 32

(trainX, trainY), (testX, testY) = cifar10.load_data()

trainX = trainX.astype('float32')
testX = testX.astype('float32')

trainX /= 255
testX /= 255

tempY = testY

trainY = kutils.to_categorical(trainY)
testY = kutils.to_categorical(testY)

generator = ImageDataGenerator(rotation_range=15,
                               width_shift_range=5. / 32,
                               height_shift_range=5. / 32)

generator.fit(trainX, seed=0)

model = ResidualOfResidual(depth=40, width=2, dropout_rate=0.0, weights='None')

optimizer = Adam(lr=1e-3)
model.compile(loss="categorical_crossentropy", optimizer=optimizer, metrics=["acc"])
print("Finished compiling")

model.fit_generator(generator.flow(trainX, trainY, batch_size=batch_size),
                    samples_per_epoch=len(trainX), nb_epoch=nb_epoch,
                    callbacks=[callbacks.ModelCheckpoint("weights/RoR-WRN-40-2-Weights.h5",
                                                         monitor="val_acc",
                                                         save_best_only=True,
                                                         save_weights_only=True)],
                    validation_data=(testX, testY),
                    nb_val_samples=testX.shape[0], verbose=2)

scores = model.evaluate(testX, testY, batch_size)
print("Test loss : ", scores[0])
print("Test accuracy : ", scores[1])
mit
Python
d458fb855df77dfb553ee3e95a8201f58aba169e
Increment version number
clippercard/clippercard-python,anthonywu/clippercard,clippercard/clippercard-python,anthonywu/clippercard
clippercard/__init__.py
clippercard/__init__.py
""" Copyright (c) 2012-2017 (https://github.com/clippercard/clippercard-python) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import clippercard.client as client Session = client.ClipperCardWebSession __version__ = '0.4.1'
""" Copyright (c) 2012-2017 (https://github.com/clippercard/clippercard-python) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import clippercard.client as client Session = client.ClipperCardWebSession __version__ = '0.4.0'
mit
Python
6e663d4010f9a79d2816a212e504773a1745a8e6
Fix project name!
LeastAuthority/txkube
src/txkube/__init__.py
src/txkube/__init__.py
# Copyright Least Authority Enterprises.
# See LICENSE for details.

"""
A Kubernetes client.
"""

__all__ = [
    "version",
    "IKubernetesClient",
    "network_client",
    "memory_client",
]

from incremental import Version

from ._metadata import version_tuple as _version_tuple
version = Version("txkube", *_version_tuple)

from ._interface import IKubernetes, IKubernetesClient
from ._network import network_kubernetes
from ._memory import memory_kubernetes
# Copyright Least Authority Enterprises.
# See LICENSE for details.

"""
A Kubernetes client.
"""

__all__ = [
    "version",
    "IKubernetesClient",
    "network_client",
    "memory_client",
]

from incremental import Version

from ._metadata import version_tuple as _version_tuple
version = Version("pykube", *_version_tuple)

from ._interface import IKubernetes, IKubernetesClient
from ._network import network_kubernetes
from ._memory import memory_kubernetes
mit
Python
15faef8beb415211a04fd6dca976158343d8f77f
add abc to guid, fixed issues
grepme/cmput410-project,grepme/cmput410-project,grepme/cmput410-project,grepme/cmput410-project,grepme/cmput410-project
user_profile/models.py
user_profile/models.py
from django.db import models
from django.contrib.auth.models import User
import uuid

# Create your models here.

# using the guid model
from framework.models import GUIDModel

class Profile(GUIDModel):
    author = models.ForeignKey(User)
    display_name = models.CharField(max_length=55)

    def as_dict(self):
        return {
            "id": self.guid,
            # TODO implement host
            "host": "",
            "displayname" : self.display_name,
            "url": self.host + "/author/" + self.guid
        }
from django.db import models
from django.contrib.auth.models import User
import uuid

# Create your models here.

# using the guid model
from framework.models import GUIDModel

class Profile(GUIDModel):
    author = models.ForeignKey(User)
    display_name = models.CharField(max_length=55)

    # guid
    guid = models.CharField(max_length=55, default=None)

    def as_dict(self):
        return {
            "id": self.guid,
            # TODO implement host
            "host": "",
            "displayname" : self.display_name,
            "url": self.host + "/author/" + self.guid
        }
apache-2.0
Python
127ad982617c2376c9378d1ef7e50b716a077428
Replace imp with __import__
shineyear/catawampus,ankurjimmy/catawampus,shineyear/catawampus,pombredanne/catawampus,ankurjimmy/catawampus,ultilix/catawampus,pombredanne/catawampus,ultilix/catawampus
dm_root.py
dm_root.py
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#pylint: disable-msg=W0404
#
"""The Device Model root, allowing specific platforms to populate it."""

__author__ = '[email protected] (Denton Gentry)'

import google3
import dm.catawampus
import dm.management_server
import tr.core
import traceroute


def _RecursiveImport(name):
  return __import__(name, fromlist=[''])


class DeviceModelRoot(tr.core.Exporter):
  """A class to hold the device models."""

  def __init__(self, loop, platform):
    tr.core.Exporter.__init__(self)
    if platform:
      self.device = _RecursiveImport('platform.%s.device' % platform)
      (params, objects) = self.device.PlatformInit(name=platform,
                                                   device_model_root=self)
    else:
      (params, objects) = (list(), list())
    self.TraceRoute = traceroute.TraceRoute(loop)
    objects.append('TraceRoute')
    self.X_CATAWAMPUS_ORG_CATAWAMPUS = dm.catawampus.CatawampusDm()
    objects.append('X_CATAWAMPUS-ORG_CATAWAMPUS')
    self.Export(params=params, objects=objects)

  def get_platform_config(self):
    """Return the platform_config.py object for this platform."""
    return self.device.PlatformConfig()

  def add_management_server(self, mgmt):
    # tr-181 Device.ManagementServer
    try:
      ms181 = self.GetExport('Device')
      ms181.ManagementServer = dm.management_server.ManagementServer181(mgmt)
    except (AttributeError, KeyError):
      pass  # no tr-181 for this platform
    # tr-98 InternetGatewayDevice.ManagementServer
    try:
      ms98 = self.GetExport('InternetGatewayDevice')
      ms98.ManagementServer = dm.management_server.ManagementServer98(mgmt)
    except (AttributeError, KeyError):
      pass  # no tr-98 for this platform
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# TR-069 has mandatory attribute names that don't comply with policy
#pylint: disable-msg=C6409
#pylint: disable-msg=W0404
#
"""The Device Model root, allowing specific platforms to populate it."""

__author__ = '[email protected] (Denton Gentry)'

import imp
import sys
import google3
import dm.catawampus
import dm.management_server
import tr.core
import traceroute


def _RecursiveImport(name):
  split = name.split('.')
  last = split.pop()
  if split:
    path = _RecursiveImport('.'.join(split)).__path__
  else:
    path = sys.path
  fileobj, path, description = imp.find_module(last, path)
  return imp.load_module(name, fileobj, path, description)


class DeviceModelRoot(tr.core.Exporter):
  """A class to hold the device models."""

  def __init__(self, loop, platform):
    tr.core.Exporter.__init__(self)
    if platform:
      self.device = _RecursiveImport('platform.%s.device' % platform)
      (params, objects) = self.device.PlatformInit(name=platform,
                                                   device_model_root=self)
    else:
      (params, objects) = (list(), list())
    self.TraceRoute = traceroute.TraceRoute(loop)
    objects.append('TraceRoute')
    self.X_CATAWAMPUS_ORG_CATAWAMPUS = dm.catawampus.CatawampusDm()
    objects.append('X_CATAWAMPUS-ORG_CATAWAMPUS')
    self.Export(params=params, objects=objects)

  def get_platform_config(self):
    """Return the platform_config.py object for this platform."""
    return self.device.PlatformConfig()

  def add_management_server(self, mgmt):
    # tr-181 Device.ManagementServer
    try:
      ms181 = self.GetExport('Device')
      ms181.ManagementServer = dm.management_server.ManagementServer181(mgmt)
    except (AttributeError, KeyError):
      pass  # no tr-181 for this platform
    # tr-98 InternetGatewayDevice.ManagementServer
    try:
      ms98 = self.GetExport('InternetGatewayDevice')
      ms98.ManagementServer = dm.management_server.ManagementServer98(mgmt)
    except (AttributeError, KeyError):
      pass  # no tr-98 for this platform
apache-2.0
Python
e784227ae5da242d474bc02209289e1dabd2d3a2
Test Spectral Reconstruction on Sin Wave
googleinterns/audio_synthesis
utils/spectral_test.py
utils/spectral_test.py
# Lint as: python3
"""Tests for spectral."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
import numpy as np
import os
import spectral

class SpectralTest(tf.test.TestCase):

    def test_waveform_to_spectogram_shape(self):
        waveform = np.random.normal(size=(2**14,))
        spectogram = spectral.waveform_2_spectogram(waveform, frame_length=512, frame_step=128)
        self.assertEqual((128, 256, 2), spectogram.shape)

    def test_waveform_to_magnitude_shape(self):
        waveform = np.random.normal(size=(2**14,))
        magnitude = spectral.waveform_2_magnitude(waveform, frame_length=512, frame_step=128)
        self.assertEqual((128, 256), magnitude.shape)

    def test_waveform_to_spectogram_return(self):
        waveform = np.sin(np.linspace(0, 4 * np.pi, 2**14))
        spectogram = spectral.waveform_2_spectogram(waveform, frame_length=512, frame_step=128)
        waveform_hat = spectral.spectogram_2_waveform(spectogram, frame_length=512, frame_step=128)
        # Account for extra samples from reverse transform
        waveform_hat = waveform[0:len(waveform)]
        self.assertAllClose(waveform, waveform_hat)

    def test_waveform_to_magnitude_return(self):
        waveform = np.sin(np.linspace(0, 4 * np.pi, 2**14))
        spectogram = spectral.waveform_2_magnitude(waveform, frame_length=512, frame_step=128)
        waveform_hat = spectral.magnitude_2_waveform(spectogram, frame_length=512, frame_step=128)
        # Account for extra samples from reverse transform
        waveform_hat = waveform[0:len(waveform)]
        self.assertAllClose(waveform, waveform_hat)

if __name__ == '__main__':
    os.environ["CUDA_VISIBLE_DEVICES"] = ''
    tf.test.main()
# Lint as: python3
"""Tests for spectral."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
import numpy as np
import os
import spectral

class SpectralTest(tf.test.TestCase):

    def test_waveform_to_spectogram_shape(self):
        waveform = np.random.normal(size=(2**14,))
        spectogram = spectral.waveform_2_spectogram(waveform, frame_length=512, frame_step=128)
        self.assertEqual((128, 256, 2), spectogram.shape)

    def test_waveform_to_magnitude_shape(self):
        waveform = np.random.normal(size=(2**14,))
        magnitude = spectral.waveform_2_magnitude(waveform, frame_length=512, frame_step=128)
        self.assertEqual((128, 256), magnitude.shape)

    def test_waveform_to_spectogram_return(self):
        waveform = np.random.normal(size=(2**14,))
        spectogram = spectral.waveform_2_spectogram(waveform, frame_length=512, frame_step=128)
        waveform_hat = spectral.spectogram_2_waveform(spectogram, frame_length=512, frame_step=128)
        # Account for extra samples from reverse transform
        waveform_hat = waveform[0:len(waveform)]
        self.assertAllClose(waveform, waveform_hat)

    def test_waveform_to_magnitude_return(self):
        waveform = np.random.normal(size=(2**14,))
        spectogram = spectral.waveform_2_magnitude(waveform, frame_length=512, frame_step=128)
        waveform_hat = spectral.magnitude_2_waveform(spectogram, frame_length=512, frame_step=128)
        # Account for extra samples from reverse transform
        waveform_hat = waveform[0:len(waveform)]
        self.assertAllClose(waveform, waveform_hat)

if __name__ == '__main__':
    os.environ["CUDA_VISIBLE_DEVICES"] = ''
    tf.test.main()
apache-2.0
Python
05939b0b797780ac1d265c8415f72f1ca44be53d
Modify return tag search data with tag_name
NA5G/coco-server-was,NA5G/coco-server-was,NA5G/coco-server-was
coco/dashboard/views.py
coco/dashboard/views.py
# -*- coding: utf-8 -*-
from django.shortcuts import render
from django.contrib.auth.decorators import login_required

from posts.models import Post, Tag

@login_required
def index(request):
    context = {'posts': Post.objects.all()}
    return render(request, 'dashboard/index.html', context)

@login_required
def tagged_posts(request, tag_name=""):
    context = {
        'tag': tag_name,
        'posts': Post.objects.filter(tags__name=tag_name)
    }
    return render(request, 'dashboard/search_result.html', context)
# -*- coding: utf-8 -*-
from django.shortcuts import render
from django.contrib.auth.decorators import login_required

from posts.models import Post, Tag

@login_required
def index(request):
    context = {'posts': Post.objects.all()}
    return render(request, 'dashboard/index.html', context)

@login_required
def tagged_posts(request, tag_name=""):
    context = {'posts': Post.objects.filter(tags__name=tag_name)}
    return render(request, 'dashboard/search_result.html', context)
mit
Python
d0de5476580b466d7b13cfc7668c267e62cb15f0
create 32 bit integer var, not 64 (to allow test with NETCDF4_CLASSIC)
Unidata/netcdf4-python,Unidata/netcdf4-python,Unidata/netcdf4-python
examples/mpi_example.py
examples/mpi_example.py
# to run: mpirun -np 4 python mpi_example.py
from mpi4py import MPI
import numpy as np
from netCDF4 import Dataset

rank = MPI.COMM_WORLD.rank  # The process ID (integer 0-3 for 4-process run)
nc = Dataset('parallel_test.nc', 'w', parallel=True, comm=MPI.COMM_WORLD,
             info=MPI.Info(),format='NETCDF4_CLASSIC')
# below should work also - MPI_COMM_WORLD and MPI_INFO_NULL will be used.
#nc = Dataset('parallel_test.nc', 'w', parallel=True)
d = nc.createDimension('dim',4)
v = nc.createVariable('var', np.int32, 'dim')
v[rank] = rank
# switch to collective mode, rewrite the data.
v.set_collective(True)
v[rank] = rank
nc.close()
# reopen the file read-only, check the data
nc = Dataset('parallel_test.nc', parallel=True, comm=MPI.COMM_WORLD,
             info=MPI.Info())
assert rank==nc['var'][rank]
nc.close()
# reopen the file in append mode, modify the data on the last rank.
nc = Dataset('parallel_test.nc', 'a',parallel=True, comm=MPI.COMM_WORLD,
             info=MPI.Info())
if rank == 3:
    v[rank] = 2*rank
nc.close()
# reopen the file read-only again, check the data.
# leave out the comm and info kwargs to check that the defaults
# (MPI_COMM_WORLD and MPI_INFO_NULL) work.
nc = Dataset('parallel_test.nc', parallel=True)
if rank == 3:
    assert 2*rank==nc['var'][rank]
else:
    assert rank==nc['var'][rank]
nc.close()
# to run: mpirun -np 4 python mpi_example.py
from mpi4py import MPI
import numpy as np
from netCDF4 import Dataset

rank = MPI.COMM_WORLD.rank  # The process ID (integer 0-3 for 4-process run)
nc = Dataset('parallel_test.nc', 'w', parallel=True, comm=MPI.COMM_WORLD,
             info=MPI.Info(),format='NETCDF4_CLASSIC')
# below should work also - MPI_COMM_WORLD and MPI_INFO_NULL will be used.
#nc = Dataset('parallel_test.nc', 'w', parallel=True)
d = nc.createDimension('dim',4)
v = nc.createVariable('var', np.int, 'dim')
v[rank] = rank
# switch to collective mode, rewrite the data.
v.set_collective(True)
v[rank] = rank
nc.close()
# reopen the file read-only, check the data
nc = Dataset('parallel_test.nc', parallel=True, comm=MPI.COMM_WORLD,
             info=MPI.Info())
assert rank==nc['var'][rank]
nc.close()
# reopen the file in append mode, modify the data on the last rank.
nc = Dataset('parallel_test.nc', 'a',parallel=True, comm=MPI.COMM_WORLD,
             info=MPI.Info())
if rank == 3:
    v[rank] = 2*rank
nc.close()
# reopen the file read-only again, check the data.
# leave out the comm and info kwargs to check that the defaults
# (MPI_COMM_WORLD and MPI_INFO_NULL) work.
nc = Dataset('parallel_test.nc', parallel=True)
if rank == 3:
    assert 2*rank==nc['var'][rank]
else:
    assert rank==nc['var'][rank]
nc.close()
mit
Python
a53fae5b42e9b33774650e017967b865552870e9
tag v0.7.4
cihai/cihaidata-unihan
unihan_tabular/__about__.py
unihan_tabular/__about__.py
__title__ = 'unihan-tabular'
__package_name__ = 'unihan_tabular'
__description__ = 'Export UNIHAN to Python, Data Package, CSV, JSON and YAML'
__version__ = '0.7.4'
__author__ = 'Tony Narlock'
__email__ = '[email protected]'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013-2017 Tony Narlock'
__title__ = 'unihan-tabular'
__package_name__ = 'unihan_tabular'
__description__ = 'Export UNIHAN to Python, Data Package, CSV, JSON and YAML'
__version__ = '0.7.3'
__author__ = 'Tony Narlock'
__email__ = '[email protected]'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013-2017 Tony Narlock'
mit
Python
4420892ad3e8c1797753e7893772e53785efb570
add logfile handling
sassoftware/mirrorball,sassoftware/mirrorball
updatebot/cmdline/simple.py
updatebot/cmdline/simple.py
#
# Copyright (c) 2008 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#

import os

from updatebot import log
from updatebot import config

def validatePlatform(platform, configDir):
    validPlatforms = os.listdir(configDir)
    if platform not in validPlatforms:
        print ('Invalid platform %s... Please select from the following '
               'available platforms %s' % (platform, ', '.join(validPlatforms)))
        return False
    return True

def usage(argv):
    print 'usage: %s <platform name> [logfile]' % argv[0]
    return 1

def main(argv, workerFunc, configDir='/etc/mirrorball', enableLogging=True):
    if len(argv) < 2 or len(argv) > 3:
        return usage(argv)

    logFile = None
    if len(argv) == 3:
        logFile = argv[2]

    if enableLogging:
        log.addRootLogger(logFile=logFile)

    platform = argv[1]
    if not validatePlatform(platform, configDir):
        return 1

    cfg = config.UpdateBotConfig()
    cfg.read(os.path.join(configDir, platform, 'updatebotrc'))

    rc = workerFunc(cfg)
    return rc
#
# Copyright (c) 2008 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#

import os

from updatebot import log
from updatebot import config

def validatePlatform(platform, configDir):
    validPlatforms = os.listdir(configDir)
    if platform not in validPlatforms:
        print ('Invalid platform %s... Please select from the following '
               'available platforms %s' % (platform, ', '.join(validPlatforms)))
        return False
    return True

def usage(argv):
    print 'usage: %s <platform name>' % argv[0]
    return 1

def main(argv, workerFunc, configDir='/etc/mirrorball', enableLogging=True):
    if enableLogging:
        log.addRootLogger()

    if len(argv) != 2:
        return usage(argv)

    platform = argv[1]
    if not validatePlatform(platform, configDir):
        return 1

    cfg = config.UpdateBotConfig()
    cfg.read(os.path.join(configDir, platform, 'updatebotrc'))

    rc = workerFunc(cfg)
    return rc
apache-2.0
Python
98cb673b358671211a0aa7fed0725dbb732200d0
Fix edge cases due to artworkUrl100 being missing
fallenshell/coverpy
coverpy/coverpy.py
coverpy/coverpy.py
import os
import requests
from . import exceptions

class Result:
    """ Parse an API result into an object format. """
    def __init__(self, item):
        """ Call the list parser. """
        self.parse(item)

    def parse(self, item):
        """ Parse the given list into self variables. """
        try:
            self.artworkThumb = item['artworkUrl100']
        except KeyError as e:
            # A vital parameter is missing, and magic on our end can't get us out of this error case situation.
            # Therefore, we try to save the user from issues (mostly KeyErrors), and stop them from using the public API.
            # Just return a NoResultsException, because the data is corrupt on the API's end,
            # and the library can't use the results.
            # This gets many edge cases in which the API had issues dealing with.
            raise exceptions.NoResultsException

        # The above should prevent most keyerrors, this one is just guarding. However, if something fails here,
        # I can't do much because the API sometimes skips on things and this is _not vital._
        self.artist = item['artistName']
        self.album = item['collectionName']
        self.url = item['url']

        # Take some measures to detect whether it is a song or album
        if 'kind' in item:
            self.type = item['kind'].lower()
        elif 'wrapperType' in item:
            if item['wrapperType'].lower() == 'track':
                self.type = 'song'
            elif item['wrapperType'].lower() == 'collection':
                self.type = 'album'
        elif 'collectionType' in item:
            self.type = 'album'
        else:
            # Assuming edge case of the API
            self.type = 'unknown'

        if self.type == 'song':
            self.name = item['trackName']
        elif self.type == 'album':
            self.name = item['collectionName']
        else:
            self.name = 'unknown'

    def artwork(self, size = 625):
        """ Return the artwork to the thumb URL contained. """
        # Replace size because API doesn't hand links to full res. It only gives 60x60 and 100x100.
        # However, I found a way to circumvent it.
        return self.artworkThumb.replace('100x100bb', "%sx%s" % (size, size))

class CoverPy:
    def __init__(self):
        """ Initialize CoverPy. Set a base_url. """
        self.base_url = "https://itunes.apple.com/search/"

    def _get(self, payload, override = False, entities = False):
        """ Get a payload using the base_url. General purpose GET interface """
        if override:
            data = requests.get("%s%s" % (self.base_url, override))
        else:
            payload['entity'] = "musicArtist,musicTrack,album,mix,song"
            payload['media'] = 'music'
            data = requests.get(self.base_url, params = payload)
        if data.status_code != 200:
            raise requests.HTTPError
        else:
            return data

    def _search(self, term, limit = 1):
        """ Expose a friendlier internal API for executing searches """
        payload = {
            'term': term,
            'limit': limit
        }
        req = self._get(payload)
        return req

    def get_cover(self, term, limit = 1, debug = False):
        """ Get an album cover, return a Result object """
        search = self._search(term, limit)
        parsed = search.json()
        if parsed['resultCount'] == 0:
            raise exceptions.NoResultsException
        result = parsed['results'][0]
        result['url'] = search.url
        return Result(result)
import os
import requests
from . import exceptions

class Result:
    """ Parse an API result into an object format. """
    def __init__(self, item):
        """ Call the list parser. """
        self.parse(item)

    def parse(self, item):
        """ Parse the given list into self variables. """
        self.artworkThumb = item['artworkUrl100']
        self.artist = item['artistName']
        self.album = item['collectionName']
        self.url = item['url']

        # Take some measures to detect whether it is a song or album
        if 'kind' in item:
            self.type = item['kind'].lower()
        elif 'wrapperType' in item:
            if item['wrapperType'].lower() == 'track':
                self.type = 'song'
            elif item['wrapperType'].lower() == 'collection':
                self.type = 'album'
        elif 'collectionType' in item:
            self.type = 'album'
        else:
            # Assuming edge case of the API
            self.type = 'unknown'

        if self.type == 'song':
            self.name = item['trackName']
        elif self.type == 'album':
            self.name = item['collectionName']
        else:
            self.name = 'unknown'

    def artwork(self, size = 625):
        """ Return the artwork to the thumb URL contained. """
        # Replace size because API doesn't hand links to full res. It only gives 60x60 and 100x100.
        # However, I found a way to circumvent it.
        return self.artworkThumb.replace('100x100bb', "%sx%s" % (size, size))

class CoverPy:
    def __init__(self):
        """ Initialize CoverPy. Set a base_url. """
        self.base_url = "https://itunes.apple.com/search/"

    def _get(self, payload, override = False, entities = False):
        """ Get a payload using the base_url. General purpose GET interface """
        if override:
            data = requests.get("%s%s" % (self.base_url, override))
        else:
            payload['entity'] = "musicArtist,musicTrack,album,mix,song"
            payload['media'] = 'music'
            data = requests.get(self.base_url, params = payload)
        if data.status_code != 200:
            raise requests.HTTPError
        else:
            return data

    def _search(self, term, limit = 1):
        """ Expose a friendlier internal API for executing searches """
        payload = {
            'term': term,
            'limit': limit
        }
        req = self._get(payload)
        return req

    def get_cover(self, term, limit = 1, debug = False):
        """ Get an album cover, return a Result object """
        search = self._search(term, limit)
        parsed = search.json()
        if parsed['resultCount'] == 0:
            raise exceptions.NoResultsException
        result = parsed['results'][0]
        result['url'] = search.url
        return Result(result)
mit
Python