Dataset columns (type, min–max):
  max_stars_repo_path  string  (length 3–269)
  max_stars_repo_name  string  (length 4–119)
  max_stars_count      int64   (0–191k)
  id                   string  (length 1–7)
  content              string  (length 6–1.05M)
  score                float64 (0.23–5.13)
  int_score            int64   (0–5)
6.爬取豆瓣排行榜电影数据(含GUI界面版)/main.py
shengqiangzhang/examples-of-web-crawlers
12,023
2800
<reponame>shengqiangzhang/examples-of-web-crawlers
# -*- coding:utf-8 -*-

from uiObject import uiObject

# main entry point
if __name__ == '__main__':
    ui = uiObject()
    ui.ui_process()
1.453125
1
photos/models.py
eude313/vault
0
2801
from django.db import models
from cloudinary.models import CloudinaryField

# Create your models here.
class Category(models.Model):
    name = models.CharField(
        max_length=200,
        null=False,
        blank=False
    )

    def __str__(self):
        return self.name


class Photo(models.Model):
    category = models.ForeignKey(
        Category,
        on_delete=models.SET_NULL,
        null=True,
        blank=True
    )
    image = CloudinaryField('image', default='')
    description = models.TextField()

    def __str__(self):
        return self.description
2.375
2
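A minimal usage sketch for the models above, e.g. from `python manage.py shell`; the category name and description are placeholder values, not part of the original project.

from photos.models import Category, Photo

# create a category and a photo attached to it (image keeps its default value)
landscapes = Category.objects.create(name="Landscapes")
photo = Photo.objects.create(category=landscapes, description="Sunset over the harbour")

print(photo)  # __str__ returns the description
print(Photo.objects.filter(category=landscapes).count())  # 1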
server/djangoapp/restapis.py
christiansencq/ibm_capstone
0
2802
<reponame>christiansencq/ibm_capstone
import requests
import json
# import related models here
from .models import CarDealer, DealerReview
from requests.auth import HTTPBasicAuth
import logging

logger = logging.getLogger(__name__)


# Create a `get_request` to make HTTP GET requests
# e.g., response = requests.get(url, params=params, headers={'Content-Type': 'application/json'},
#                               auth=HTTPBasicAuth('apikey', api_key))
def get_request(url, api_key, **kwargs):
    print("GET from {}".format(url))
    print(kwargs)
    try:
        if api_key is not None:
            response = requests.get(url, headers={'Content-Type': 'application/json'},
                                    params=kwargs, auth=HTTPBasicAuth('apikey', api_key))
        else:
            response = requests.get(url, headers={'Content-Type': 'application/json'},
                                    params=kwargs)
    except requests.exceptions.RequestException:
        # Catch network errors explicitly and return early; the original bare `except`
        # fell through to `response.status_code` with `response` possibly unbound.
        print("Network Error")
        return None, None
    status_code = response.status_code
    print("With status code {}".format(status_code))
    json_data = json.loads(response.text)
    return json_data, status_code


# Create a `post_request` to make HTTP POST requests
# e.g., response = requests.post(url, params=kwargs, json=payload)
def post_request(url, json_payload, **kwargs):
    print("Post to url: {} ".format(url))
    print(kwargs)
    print(json_payload)
    response = requests.post(url, headers={'Content-Type': 'application/json'},
                             params=kwargs, json=json_payload)
    status_code = response.status_code
    print("With status code {}".format(status_code))
    json_data = json.loads(response.text)
    return json_data, status_code


# Create a get_dealers_from_cf method to get dealers from a cloud function
def get_dealers_from_cf(url, **kwargs):
    info = []
    result = "ok"
    # - Call get_request() with specified arguments
    logger.info("Get Dealers from CF Called!")
    json_result, status_code = get_request(url, None)
    if status_code == 200 and json_result:
        dealers = json_result['rows']
        logger.info(len(dealers))
        for dealer in dealers:
            dlr_data = dealer['doc']
            # print('ADDRESS', dlr_data["address"])
            if dlr_data.get('address'):
                # Create a CarDealer object with values in `doc` object
                dealer_obj = CarDealer(address=dlr_data.get("address"), city=dlr_data.get("city"),
                                       full_name=dlr_data.get("full_name"), id=dlr_data.get("id"),
                                       lat=dlr_data.get("lat"), long=dlr_data.get("long"),
                                       short_name=dlr_data.get("short_name"), state=dlr_data.get("state"),
                                       st=dlr_data.get("st"), zip=dlr_data.get("zip"))
                info.append(dealer_obj)
    elif json_result:
        result = json_result["message"]
    else:
        result = "Unknown error"
    return info, result


def get_dealer_by_id(url, dealerId):
    # Call get_request with a URL parameter
    info = None
    result = "ok"
    json_result, status_code = get_request(url, None, dealerId=dealerId)
    if status_code == 200 and json_result:
        # Get the row list in JSON as dealers
        dealers = json_result["rows"]
        for dealer in dealers:
            # Create a CarDealer object with values in `doc` object
            info = CarDealer(address=dealer.get("address"), city=dealer.get("city"),
                             full_name=dealer.get("full_name"), id=dealer.get("id"),
                             lat=dealer.get("lat"), long=dealer.get("long"),
                             short_name=dealer.get("short_name"), st=dealer.get("st"),
                             state=dealer.get("state"), zip=dealer.get("zip"))
    elif json_result:
        result = json_result["message"]
    else:
        result = "Unknown error"
    return info, result


def get_dealers_by_state(url, state):
    info = []
    result = "ok"
    # Call get_request with a URL parameter
    json_result, status_code = get_request(url, None, state=state)
    if status_code == 200 and json_result:
        # Get the row list in JSON as dealers
        dealers = json_result["rows"]
        # For each dealer object
        for dealer in dealers:
            # Create a CarDealer object with values in `doc` object
            dealer_obj = CarDealer(address=dealer.get("address"), city=dealer.get("city"),
                                   full_name=dealer.get("full_name"), id=dealer.get("id"),
                                   lat=dealer.get("lat"), long=dealer.get("long"),
                                   short_name=dealer.get("short_name"), state=dealer.get("state"),
                                   st=dealer.get("st"), zip=dealer.get("zip"))
            info.append(dealer_obj)
    elif json_result:
        result = json_result["message"]
    else:
        result = "Unknown error"
    return info, result


def get_dealer_reviews_from_cf(url, dealerId):
    info = []
    result = "ok"
    # Call get_request with a URL parameter
    json_result, status_code = get_request(url, None, dealerId=dealerId)
    if status_code == 200 and json_result:
        # Get the row list in JSON as reviews
        reviews = json_result["body"]["data"]
        # For each review object
        for review in reviews:
            if dealerId == review.get("dealership"):
                # Create a DealerReview object with values in object
                # sentiment = analyze_review_sentiments(review["review"])
                review_obj = DealerReview(
                    id=review.get("id"), name=review.get("name"),
                    review=review.get("review"), purchase=review.get("purchase"),
                    car_make=review.get("car_make", None), car_model=review.get("car_model", None),
                    car_year=review.get("car_year", None), purchase_date=review.get("purchase_date", None))
                info.append(review_obj)
    elif json_result:
        result = json_result["message"]
    else:
        result = "Unknown error"
    return info, result


# Create an `analyze_review_sentiments` method to call Watson NLU and analyze text
# def analyze_review_sentiments(text):
# - Call get_request() with specified arguments
# - Get the returned sentiment label such as Positive or Negative
2.640625
3
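The trailing comments in the sample above describe an `analyze_review_sentiments` helper that is left unimplemented. Below is a hedged sketch using the ibm-watson SDK; the function signature, version date, and the way the service URL and API key are passed in are assumptions for illustration, not the author's actual configuration.

from ibm_watson import NaturalLanguageUnderstandingV1
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
from ibm_watson.natural_language_understanding_v1 import Features, SentimentOptions

def analyze_review_sentiments(text, nlu_url, nlu_api_key):
    # hypothetical helper: authenticate against Watson NLU and return the document
    # sentiment label ("positive", "negative" or "neutral") for the given text
    nlu = NaturalLanguageUnderstandingV1(version="2021-08-01",
                                         authenticator=IAMAuthenticator(nlu_api_key))
    nlu.set_service_url(nlu_url)
    result = nlu.analyze(text=text,
                         features=Features(sentiment=SentimentOptions())).get_result()
    return result["sentiment"]["document"]["label"]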
examples/python/upload.py
oslokommune/okdata-data-uploader
0
2803
import logging
from configparser import ConfigParser

from sdk.data_uploader import DataUploader

logging.basicConfig(level=logging.INFO)
log = logging.getLogger()

config = ConfigParser()
config.read("config.ini")

#####
# Datasets to be added to metadata API
datasetData = {
    "title": "Test",
    "description": "Test data",
    "keywords": ["test"],
    "accessRights": "non-public",
    "objective": "Formålsbeskrivelse",
    "contactPoint": {
        "name": "Tim",
        "email": "<EMAIL>",
        "phone": "12345678",
    },
    "publisher": "Tim",
}
datasetVersionData = {"version": "6", "schema": {}, "transformation": {}}
datasetVersionEditionData = {
    "edition": "2019-05-28T15:37:00+02:00",
    "description": "Data for one hour",
    "startTime": "2018-12-21T08:00:00+01:00",
    "endTime": "2018-12-21T09:00:00+01:00",
}

######
# The dataset* variables are optional; if these are set in config.ini this script will
# not run the relevant DataUploader function
datasetId = config.get("dataUploader", "datasetId", fallback=None)
datasetVersion = config.get("dataUploader", "datasetVersion", fallback=None)
datasetVersionEdition = config.get(
    "dataUploader", "datasetVersionEdition", fallback=None
)

upload = DataUploader(config)
try:
    log.info("Uploading a file to S3")
    upload.login()
    if datasetId is None:
        upload.createDataset(datasetData)
    if datasetVersion is None:
        upload.createVersion(datasetVersionData)
    if datasetVersionEdition is None:
        upload.createEdition(datasetVersionEditionData)

    log.info(f"Dataset: {upload.datasetId}")
    log.info(f"Version: {upload.datasetVersion}")
    log.info(f"Edition: {upload.datasetVersionEdition}")

    if upload.upload("README.md"):
        log.info("Done... go brew some coffee")
    else:
        log.error("Could not upload file....")
except Exception as e:
    log.exception(f">> Something went horribly wrong:\n{e}")

# To upload with curl: cmd = upload.curl("tmp3.zip")
# Max upload size for now is 5GB
2.28125
2
doc/examples.py
Enerccio/mahjong
254
2804
<gh_stars>100-1000
from mahjong.hand_calculating.hand import HandCalculator
from mahjong.meld import Meld
from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules
from mahjong.shanten import Shanten
from mahjong.tile import TilesConverter

calculator = HandCalculator()

# useful helper
def print_hand_result(hand_result):
    print(hand_result.han, hand_result.fu)
    print(hand_result.cost['main'])
    print(hand_result.yaku)
    for fu_item in hand_result.fu_details:
        print(fu_item)
    print('')


####################################################################
# Tanyao hand by ron                                               #
####################################################################

# we had to use all 14 tiles in that array
tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444')
win_tile = TilesConverter.string_to_136_array(sou='4')[0]

result = calculator.estimate_hand_value(tiles, win_tile)
print_hand_result(result)


####################################################################
# Tanyao hand by tsumo                                             #
####################################################################

result = calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True))
print_hand_result(result)


####################################################################
# Add open set to hand                                             #
####################################################################

melds = [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))]

result = calculator.estimate_hand_value(tiles, win_tile, melds=melds,
                                        config=HandConfig(options=OptionalRules(has_open_tanyao=True)))
print_hand_result(result)


####################################################################
# Shanten calculation                                               #
####################################################################

shanten = Shanten()
tiles = TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443')
result = shanten.calculate_shanten(tiles)
print(result)


####################################################################
# Kazoe as a sanbaiman                                              #
####################################################################

tiles = TilesConverter.string_to_136_array(man='22244466677788')
win_tile = TilesConverter.string_to_136_array(man='7')[0]
melds = [Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False)]

dora_indicators = [
    TilesConverter.string_to_136_array(man='1')[0],
    TilesConverter.string_to_136_array(man='1')[0],
    TilesConverter.string_to_136_array(man='1')[0],
    TilesConverter.string_to_136_array(man='1')[0],
]

config = HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN))

result = calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators, config)
print_hand_result(result)


####################################################################
# Change the cost of yaku                                           #
####################################################################

config = HandConfig(is_renhou=True)
# renhou as a yakuman - old style
config.yaku.renhou.han_closed = 13

tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444')
win_tile = TilesConverter.string_to_136_array(sou='4')[0]

result = calculator.estimate_hand_value(tiles, win_tile, config=config)
print_hand_result(result)
2.21875
2
src/infi/mount_utils/solaris/mounter.py
Infinidat/mount-utils
0
2805
from ..base.mounter import MounterMixin, execute_mount


class SolarisMounterMixin(MounterMixin):
    def _get_fstab_path(self):
        return "/etc/fstab"

    def _get_entry_format(self, entry):
        return entry.get_format_solaris()

    def mount_entry(self, entry):
        args = ["-F", entry.get_typename(), entry.get_fsname(), entry.get_dirname()]
        args.extend(self._format_options(entry))
        execute_mount(args)
2.15625
2
sdk/python/pulumi_oci/database/get_external_non_container_database.py
EladGabay/pulumi-oci
5
2806
<reponame>EladGabay/pulumi-oci # coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities from . import outputs __all__ = [ 'GetExternalNonContainerDatabaseResult', 'AwaitableGetExternalNonContainerDatabaseResult', 'get_external_non_container_database', ] @pulumi.output_type class GetExternalNonContainerDatabaseResult: """ A collection of values returned by getExternalNonContainerDatabase. """ def __init__(__self__, character_set=None, compartment_id=None, database_configuration=None, database_edition=None, database_management_config=None, database_version=None, db_id=None, db_packs=None, db_unique_name=None, defined_tags=None, display_name=None, external_non_container_database_id=None, freeform_tags=None, id=None, lifecycle_details=None, ncharacter_set=None, operations_insights_config=None, state=None, time_created=None, time_zone=None): if character_set and not isinstance(character_set, str): raise TypeError("Expected argument 'character_set' to be a str") pulumi.set(__self__, "character_set", character_set) if compartment_id and not isinstance(compartment_id, str): raise TypeError("Expected argument 'compartment_id' to be a str") pulumi.set(__self__, "compartment_id", compartment_id) if database_configuration and not isinstance(database_configuration, str): raise TypeError("Expected argument 'database_configuration' to be a str") pulumi.set(__self__, "database_configuration", database_configuration) if database_edition and not isinstance(database_edition, str): raise TypeError("Expected argument 'database_edition' to be a str") pulumi.set(__self__, "database_edition", database_edition) if database_management_config and not isinstance(database_management_config, dict): raise TypeError("Expected argument 'database_management_config' to be a dict") pulumi.set(__self__, "database_management_config", database_management_config) if database_version and not isinstance(database_version, str): raise TypeError("Expected argument 'database_version' to be a str") pulumi.set(__self__, "database_version", database_version) if db_id and not isinstance(db_id, str): raise TypeError("Expected argument 'db_id' to be a str") pulumi.set(__self__, "db_id", db_id) if db_packs and not isinstance(db_packs, str): raise TypeError("Expected argument 'db_packs' to be a str") pulumi.set(__self__, "db_packs", db_packs) if db_unique_name and not isinstance(db_unique_name, str): raise TypeError("Expected argument 'db_unique_name' to be a str") pulumi.set(__self__, "db_unique_name", db_unique_name) if defined_tags and not isinstance(defined_tags, dict): raise TypeError("Expected argument 'defined_tags' to be a dict") pulumi.set(__self__, "defined_tags", defined_tags) if display_name and not isinstance(display_name, str): raise TypeError("Expected argument 'display_name' to be a str") pulumi.set(__self__, "display_name", display_name) if external_non_container_database_id and not isinstance(external_non_container_database_id, str): raise TypeError("Expected argument 'external_non_container_database_id' to be a str") pulumi.set(__self__, "external_non_container_database_id", external_non_container_database_id) if freeform_tags and not isinstance(freeform_tags, dict): raise TypeError("Expected argument 'freeform_tags' to be a dict") pulumi.set(__self__, 
"freeform_tags", freeform_tags) if id and not isinstance(id, str): raise TypeError("Expected argument 'id' to be a str") pulumi.set(__self__, "id", id) if lifecycle_details and not isinstance(lifecycle_details, str): raise TypeError("Expected argument 'lifecycle_details' to be a str") pulumi.set(__self__, "lifecycle_details", lifecycle_details) if ncharacter_set and not isinstance(ncharacter_set, str): raise TypeError("Expected argument 'ncharacter_set' to be a str") pulumi.set(__self__, "ncharacter_set", ncharacter_set) if operations_insights_config and not isinstance(operations_insights_config, dict): raise TypeError("Expected argument 'operations_insights_config' to be a dict") pulumi.set(__self__, "operations_insights_config", operations_insights_config) if state and not isinstance(state, str): raise TypeError("Expected argument 'state' to be a str") pulumi.set(__self__, "state", state) if time_created and not isinstance(time_created, str): raise TypeError("Expected argument 'time_created' to be a str") pulumi.set(__self__, "time_created", time_created) if time_zone and not isinstance(time_zone, str): raise TypeError("Expected argument 'time_zone' to be a str") pulumi.set(__self__, "time_zone", time_zone) @property @pulumi.getter(name="characterSet") def character_set(self) -> str: """ The character set of the external database. """ return pulumi.get(self, "character_set") @property @pulumi.getter(name="compartmentId") def compartment_id(self) -> str: """ The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment. """ return pulumi.get(self, "compartment_id") @property @pulumi.getter(name="databaseConfiguration") def database_configuration(self) -> str: """ The Oracle Database configuration """ return pulumi.get(self, "database_configuration") @property @pulumi.getter(name="databaseEdition") def database_edition(self) -> str: """ The Oracle Database edition. """ return pulumi.get(self, "database_edition") @property @pulumi.getter(name="databaseManagementConfig") def database_management_config(self) -> 'outputs.GetExternalNonContainerDatabaseDatabaseManagementConfigResult': """ The configuration of the Database Management service. """ return pulumi.get(self, "database_management_config") @property @pulumi.getter(name="databaseVersion") def database_version(self) -> str: """ The Oracle Database version. """ return pulumi.get(self, "database_version") @property @pulumi.getter(name="dbId") def db_id(self) -> str: """ The Oracle Database ID, which identifies an Oracle Database located outside of Oracle Cloud. """ return pulumi.get(self, "db_id") @property @pulumi.getter(name="dbPacks") def db_packs(self) -> str: """ The database packs licensed for the external Oracle Database. """ return pulumi.get(self, "db_packs") @property @pulumi.getter(name="dbUniqueName") def db_unique_name(self) -> str: """ The `DB_UNIQUE_NAME` of the external database. """ return pulumi.get(self, "db_unique_name") @property @pulumi.getter(name="definedTags") def defined_tags(self) -> Mapping[str, Any]: """ Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). """ return pulumi.get(self, "defined_tags") @property @pulumi.getter(name="displayName") def display_name(self) -> str: """ The user-friendly name for the external database. The name does not have to be unique. 
""" return pulumi.get(self, "display_name") @property @pulumi.getter(name="externalNonContainerDatabaseId") def external_non_container_database_id(self) -> str: return pulumi.get(self, "external_non_container_database_id") @property @pulumi.getter(name="freeformTags") def freeform_tags(self) -> Mapping[str, Any]: """ Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}` """ return pulumi.get(self, "freeform_tags") @property @pulumi.getter def id(self) -> str: """ The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure external database resource. """ return pulumi.get(self, "id") @property @pulumi.getter(name="lifecycleDetails") def lifecycle_details(self) -> str: """ Additional information about the current lifecycle state. """ return pulumi.get(self, "lifecycle_details") @property @pulumi.getter(name="ncharacterSet") def ncharacter_set(self) -> str: """ The national character of the external database. """ return pulumi.get(self, "ncharacter_set") @property @pulumi.getter(name="operationsInsightsConfig") def operations_insights_config(self) -> 'outputs.GetExternalNonContainerDatabaseOperationsInsightsConfigResult': """ The configuration of Operations Insights for the external database """ return pulumi.get(self, "operations_insights_config") @property @pulumi.getter def state(self) -> str: """ The current state of the Oracle Cloud Infrastructure external database resource. """ return pulumi.get(self, "state") @property @pulumi.getter(name="timeCreated") def time_created(self) -> str: """ The date and time the database was created. """ return pulumi.get(self, "time_created") @property @pulumi.getter(name="timeZone") def time_zone(self) -> str: """ The time zone of the external database. It is a time zone offset (a character type in the format '[+|-]TZH:TZM') or a time zone region name, depending on how the time zone value was specified when the database was created / last altered. """ return pulumi.get(self, "time_zone") class AwaitableGetExternalNonContainerDatabaseResult(GetExternalNonContainerDatabaseResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetExternalNonContainerDatabaseResult( character_set=self.character_set, compartment_id=self.compartment_id, database_configuration=self.database_configuration, database_edition=self.database_edition, database_management_config=self.database_management_config, database_version=self.database_version, db_id=self.db_id, db_packs=self.db_packs, db_unique_name=self.db_unique_name, defined_tags=self.defined_tags, display_name=self.display_name, external_non_container_database_id=self.external_non_container_database_id, freeform_tags=self.freeform_tags, id=self.id, lifecycle_details=self.lifecycle_details, ncharacter_set=self.ncharacter_set, operations_insights_config=self.operations_insights_config, state=self.state, time_created=self.time_created, time_zone=self.time_zone) def get_external_non_container_database(external_non_container_database_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExternalNonContainerDatabaseResult: """ This data source provides details about a specific External Non Container Database resource in Oracle Cloud Infrastructure Database service. 
Gets information about a specific external non-container database. ## Example Usage ```python import pulumi import pulumi_oci as oci test_external_non_container_database = oci.database.get_external_non_container_database(external_non_container_database_id=oci_database_external_non_container_database["test_external_non_container_database"]["id"]) ``` :param str external_non_container_database_id: The external non-container database [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm). """ __args__ = dict() __args__['externalNonContainerDatabaseId'] = external_non_container_database_id if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:database/getExternalNonContainerDatabase:getExternalNonContainerDatabase', __args__, opts=opts, typ=GetExternalNonContainerDatabaseResult).value return AwaitableGetExternalNonContainerDatabaseResult( character_set=__ret__.character_set, compartment_id=__ret__.compartment_id, database_configuration=__ret__.database_configuration, database_edition=__ret__.database_edition, database_management_config=__ret__.database_management_config, database_version=__ret__.database_version, db_id=__ret__.db_id, db_packs=__ret__.db_packs, db_unique_name=__ret__.db_unique_name, defined_tags=__ret__.defined_tags, display_name=__ret__.display_name, external_non_container_database_id=__ret__.external_non_container_database_id, freeform_tags=__ret__.freeform_tags, id=__ret__.id, lifecycle_details=__ret__.lifecycle_details, ncharacter_set=__ret__.ncharacter_set, operations_insights_config=__ret__.operations_insights_config, state=__ret__.state, time_created=__ret__.time_created, time_zone=__ret__.time_zone)
1.492188
1
setup.py
xmedius/xmedius-mailrelayserver
0
2807
from setuptools import setup
from setuptools.command.install import install


class PostInstallCommand(install):
    user_options = install.user_options + [
        ('noservice', None, None),
    ]

    def initialize_options(self):
        install.initialize_options(self)
        self.noservice = None

    def finalize_options(self):
        install.finalize_options(self)

    def run(self):
        install.run(self)
        if not self.noservice:
            from xmediusmailrelayserver import console
            console.install_service(['--startup', 'auto', 'install'])


setup(
    name='xmediusmailrelayserver',
    version='1.0.0',
    description='The Python module to be used to relay mail to different servers depending on patterns',
    long_description='See https://github.com/xmedius/xmedius-mailrelayserver for more information',
    url='https://github.com/xmedius/xmedius-mailrelayserver/',
    author='<NAME>',
    license='MIT',
    classifiers=[
        'Programming Language :: Python :: 3.6',
        'Environment :: Win32 (MS Windows)',
        'Operating System :: Microsoft :: Windows'
    ],
    cmdclass={
        'install': PostInstallCommand
    },
    packages=['xmediusmailrelayserver'],
    package_data={'xmediusmailrelayserver': ['config.yml']},
    install_requires=['pyyaml', 'aiosmtpd'],
    dependency_links=[]
)
1.914063
2
143.py
tsbxmw/leetcode
0
2808
# 143. Reorder List
# Given a singly linked list L: L0→L1→…→Ln-1→Ln,
# reorder it to: L0→Ln→L1→Ln-1→L2→Ln-2→…
# You may not simply change the values inside the nodes; you must actually rearrange the nodes.

# Example 1:
# Given the list 1->2->3->4, reorder it to 1->4->2->3.

# Example 2:
# Given the list 1->2->3->4->5, reorder it to 1->5->2->4->3.

# Definition for singly-linked list.
# class ListNode:
#     def __init__(self, x):
#         self.val = x
#         self.next = None

## Overall this is a series of swaps; use recursion to reach the last node first
## 1 -> 2 -> 3 -> 4 -> 5
##      |              |
## temp = 1.next == 2
## 1.next = 4.next == 5
## 4.next = None
## 1.next.next == 5.next = 2
## now = 2
## last = 3.next


class Solution:
    def reorderList(self, head: ListNode) -> None:
        """
        Do not return anything, modify head in-place instead.
        """
        if not head:
            return
        self.pre = head
        self.flag = True

        def test(node):
            if not node.next:
                # if node.next is None, no more swapping is needed
                return
            test(node.next)
            if not self.flag:
                return
            if not self.pre.next:
                self.flag = False
                return
            if self.pre == node:
                self.flag = False
                return
            temp = self.pre.next
            self.pre.next = node.next
            self.pre.next.next = temp
            self.pre = temp
            node.next = None

        test(self.pre)
3.734375
4
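The sample above reorders the list with a recursive inner function. For comparison, a common iterative alternative (not the author's approach) finds the middle, reverses the second half, and interleaves the two halves in O(1) extra space:

def reorder_list(head):
    if not head or not head.next:
        return
    # find the end of the first half
    slow, fast = head, head.next
    while fast and fast.next:
        slow, fast = slow.next, fast.next.next
    # reverse the second half
    prev, cur = None, slow.next
    slow.next = None
    while cur:
        cur.next, prev, cur = prev, cur, cur.next
    # interleave the first half with the reversed second half
    first, second = head, prev
    while second:
        first.next, second.next, first, second = second, first.next, first.next, second.next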
CraftProtocol/NBT/NBTTagList.py
Toranktto/CraftProtocol
21
2809
#!/usr/bin/env python

from CraftProtocol.NBT.NBTBase import NBTBase
from CraftProtocol.NBT.NBTProvider import NBTProvider
from CraftProtocol.StreamIO import StreamIO


class NBTTagList(NBTBase):
    TYPE_ID = 0x09

    def __init__(self, tag_type, values=None):
        NBTBase.__init__(self)
        if values is None:
            values = []
        self._tag_type = tag_type
        self._values = list(values)

    def get(self):
        return self._values

    def get_tag_type(self):
        return self._tag_type

    def __getitem__(self, i):
        return self._values.__getitem__(i)

    def __setitem__(self, i, o):
        assert isinstance(o, self._tag_type), "value must be " + self._tag_type.__name__
        self._values.__setitem__(i, o)

    def __delitem__(self, i):
        self._values.__delitem__(i)

    def __iter__(self):
        return self._values.__iter__()

    def __contains__(self, o):
        return self._values.__contains__(o)

    def __len__(self):
        return self._values.__len__()

    def append(self, x):
        assert isinstance(x, self._tag_type), "arg must be " + self._tag_type.__name__
        self._values.append(x)

    def remove(self, x):
        assert isinstance(x, self._tag_type), "arg must be " + self._tag_type.__name__
        self._values.remove(x)

    @staticmethod
    def write(stream, tag):
        StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID)
        StreamIO.write_int(stream, len(tag))
        for i in tag:
            tag.get_tag_type().write(stream, i)

    @staticmethod
    def read(stream):
        tag_type_id = StreamIO.read_ubyte(stream)
        tag_type = NBTProvider.get_tag_class(tag_type_id)
        values = []
        length = StreamIO.read_int(stream)
        for i in xrange(length):
            values.append(tag_type.read(stream))
        return NBTTagList(tag_type, values)
2.125
2
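A hedged round-trip sketch for the class above. It assumes a sibling NBTTagInt tag class with a value-taking constructor exists in the same package, and note the library itself appears to target Python 2 (it uses xrange), so run it accordingly.

from io import BytesIO
from CraftProtocol.NBT.NBTTagInt import NBTTagInt   # assumed sibling tag class
from CraftProtocol.NBT.NBTTagList import NBTTagList

tag = NBTTagList(NBTTagInt, [NBTTagInt(1), NBTTagInt(2), NBTTagInt(3)])
buf = BytesIO()
NBTTagList.write(buf, tag)   # writes element type id, length, then each element
buf.seek(0)
restored = NBTTagList.read(buf)
print(len(restored))  # 3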
examples/0b02b172-ad67-449b-b4a2-ff645b28c508.py
lapaniku/GAS
37
2810
<reponame>lapaniku/GAS<gh_stars>10-100 # This program was generated by "Generative Art Synthesizer" # Generation date: 2021-11-28 09:21:40 UTC # GAS change date: 2021-11-28 09:20:21 UTC # GAS md5 hash: ad55481e87ca5a7e9a8e92cd336d1cad # Python version: 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)] # For more information visit: https://github.com/volotat/GAS #import python libraries import os #OS version: default import numpy as np #Numpy version: 1.19.5 from PIL import Image #PIL version: 8.1.2 #set initial params SIZE = 768 GRID_CHANNELS = 16 def test_values(arr): if np.isnan(arr).any(): raise Exception('Array has None elements!') if np.amin(arr) < -1 or np.amax(arr) > 1: raise Exception('Values went to far! [ %.2f : %.2f ]'%(np.amin(arr), np.amax(arr)) ) return arr #define grid transformation methods def transit(x, t_indx, s_indx, alphas): res = x.copy() res[:,:,t_indx] = np.sum(x[:,:,s_indx] * alphas, axis = -1) return test_values(res.clip(-1,1)) def sin(x, t_indx, s_indx, scale = 1, shift = 0): res = x.copy() res[:,:,t_indx] = np.sin(x[:,:,s_indx] * 0.5 * np.pi * scale + shift) return test_values(res) def magnitude(x, t_indx, s_indx, ord = 2): res = x.copy() res[:,:,t_indx] = np.linalg.norm(x[:,:,s_indx], axis = -1, ord = ord) / np.sqrt(len(s_indx)) return test_values(res) def shift(x, t_indx, s_indx, shift): res = x.copy() if shift > 0: res[:,:,t_indx] = (-np.abs(((x[:,:,s_indx] + 1) / 2) ** (1 + shift) - 1) ** (1 / (1 + shift)) + 1) * 2 - 1 if shift < 0: res[:,:,t_indx] = np.abs((1 - (x[:,:,s_indx] + 1) / 2) ** (1 - shift) - 1) ** (1 / (1 - shift)) * 2 - 1 return test_values(res) def inverse(x, t_indx, s_indx): res = x.copy() res[:,:,t_indx] = -x[:,:,s_indx] return test_values(res) def smooth_max(x, t_indx, s1_indx, s2_indx, p = 10): res = x.copy() res[:,:,t_indx] = np.log((np.exp(x[:,:,s1_indx] * p) + np.exp(x[:,:,s2_indx] * p)) ** (1/p)) / 1.07 return test_values(res) def smooth_min(x, t_indx, s1_indx, s2_indx, p = 10): res = x.copy() res[:,:,t_indx] = -np.log((np.exp(-x[:,:,s1_indx] * p) + np.exp(-x[:,:,s2_indx] * p)) ** (1/p)) / 1.07 return test_values(res) def prod(x, t_indx, s_indx): res = x.copy() res[:,:,t_indx] = np.prod(x[:,:,s_indx], -1) return test_values(res) def power(x, t_indx, s_indx, p = 1): res = x.copy() res[:,:,t_indx] = np.sign(x[:,:,s_indx]) * np.abs(x[:,:,s_indx]) ** p return test_values(res) #set initial grid grid = np.zeros((SIZE, SIZE, GRID_CHANNELS)) x = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((1, SIZE)).repeat(SIZE, 0) y = ((np.arange(SIZE)/(SIZE-1) - 0.5) * 2).reshape((SIZE, 1)).repeat(SIZE, 1) grid[:,:,0] = (x * 0.9386329219527516 + y * -0.45147169454413794) / 2 grid[:,:,1] = (x * 0.8090860808441245 + y * 0.2914526739617249) / 2 grid[:,:,2] = (x * 0.9804797761207309 + y * -0.5063344373124843) / 2 grid[:,:,3] = (x * -0.8484277738516293 + y * -0.5155435342135386) / 2 grid[:,:,4] = (x * -0.6644350461377522 + y * 0.1739322518414499) / 2 grid[:,:,5] = (x * -0.5986715486203882 + y * 0.9515468928881716) / 2 grid[:,:,6] = (x * 0.2265055481768512 + y * 0.4365452266748293) / 2 grid[:,:,7] = (x * 0.5049774961793401 + y * 0.05113255120007798) / 2 grid[:,:,8] = (x * -0.3391983246964396 + y * -0.5135707069423852) / 2 grid[:,:,9] = (x * -0.4075423366723827 + y * 0.5388833863473126) / 2 grid[:,:,10] = (x * -0.4262457935185371 + y * -0.6817079327248272) / 2 grid[:,:,11] = (x * 0.8435706697714382 + y * 0.7746597063144072) / 2 grid[:,:,12] = (x * -0.5303146721156469 + y * -0.41048419195488317) / 2 grid[:,:,13] = (x * 
-0.5864100240508576 + y * -0.9425245660964123) / 2 grid[:,:,14] = (x * -0.7665883618456049 + y * -0.3867357840809138) / 2 grid[:,:,15] = (x * 0.49037959172682255 + y * -0.7671554143072785) / 2 #apply transformations to the grid grid = transit(grid, 4, [7, 6, 12, 8, 9, 0, 1], [0.05863158300898051, 0.3467981515651057, 0.262107802795733, 0.038001653167336905, 0.2112967596903696, 0.002128256606899112, 0.08103579316557531]) grid = shift(grid, 3, 3, 2.4622222565241207) grid = sin(grid, 10, 0, 0.5112825397666086, 37.95950546335726) grid = sin(grid, 12, 13, 3.6938747278005737, 76.37702042567852) grid = magnitude(grid, 15, [5, 3, 8, 0, 15], 2) grid = prod(grid, 2, [3, 11, 1]) grid = smooth_min(grid, 3, 2, 7) grid = smooth_max(grid, 8, 10, 6) grid = prod(grid, 3, [2, 6, 10, 7, 4]) grid = smooth_min(grid, 7, 12, 0) grid = transit(grid, 2, [1, 2], [0.9078557995211777, 0.09214420047882232]) grid = smooth_max(grid, 1, 0, 1) grid = sin(grid, 9, 4, 3.0281102269529683, 11.185401112275173) grid = sin(grid, 10, 4, 1.2844464834351186, -45.836492724169695) grid = sin(grid, 1, 2, -1.5301674594368837, -60.29431568717391) grid = transit(grid, 2, [13, 11, 5], [0.421270391024163, 0.5054038923567993, 0.07332571661903758]) grid = transit(grid, 11, [1, 15, 5, 0, 6, 12, 2, 7, 4], [0.03047869593495055, 0.024092687676923453, 0.02665655056773558, 0.17667886361751853, 0.15211061797378253, 0.016462544099609754, 0.0072484377164178625, 0.4477791048998878, 0.11849249751317383]) grid = transit(grid, 10, [5, 11, 15, 8, 2, 13, 12, 3, 6], [0.1020239434902293, 0.05405846145210329, 0.11525379082942891, 0.11556721863292163, 0.12372657123165616, 0.1356897031789931, 0.20047556686480725, 0.09921434949484752, 0.05399039482501285]) grid = transit(grid, 9, [5], [1.0]) grid = transit(grid, 15, [12, 0, 1, 11], [0.01847979792505241, 0.33442336387003857, 0.15192425697494277, 0.4951725812299663]) grid = sin(grid, 4, 8, 3.386521226555936, 60.95572898751007) grid = shift(grid, 14, 2, 2.55681173849493) grid = sin(grid, 10, 14, 0.8649185298731181, 3.1973516320924773) grid = sin(grid, 9, 7, -2.4657577404884132, 72.95418196004374) grid = transit(grid, 12, [7, 4, 10, 5], [0.5076634403621766, 0.003404332378773421, 0.04142944289977586, 0.4475027843592742]) grid = inverse(grid, 4, 5) grid = transit(grid, 1, [4, 14, 0, 13], [0.2785496566747933, 0.004915230889640017, 0.30146401859790545, 0.4150710938376613]) grid = sin(grid, 3, 11, -6.496603906160505, -73.75617586359363) grid = transit(grid, 6, [6, 14], [0.7201753385758813, 0.2798246614241187]) grid = prod(grid, 4, [10, 0, 2, 4, 8, 5, 6, 7]) grid = transit(grid, 8, [3], [1.0]) grid = inverse(grid, 8, 5) grid = smooth_max(grid, 10, 5, 13) grid = sin(grid, 9, 10, -1.8565532127479274, -54.75186223635349) grid = transit(grid, 10, [14], [1.0]) grid = transit(grid, 15, [11, 4, 10], [0.6926745567135898, 0.1831142410590532, 0.12421120222735695]) grid = magnitude(grid, 7, [6, 12, 7, 13, 8], 2) grid = transit(grid, 8, [3, 15, 9, 6, 11], [0.036102265915692405, 0.1224495166624379, 0.2384660328868578, 0.3357862916746864, 0.2671958928603256]) grid = smooth_min(grid, 1, 1, 11) grid = transit(grid, 5, [11, 4, 2, 1, 13, 12, 0, 8], [0.08486049729383285, 0.15069099224942706, 0.024923245737924458, 0.07191051851248272, 0.25942601829807205, 0.16834508849259286, 0.14540219911263502, 0.094441440303033]) grid = transit(grid, 11, [12], [1.0]) grid = power(grid, 3, 5, 0.10200689258338674) grid = transit(grid, 2, [10, 11, 4, 15, 0, 6], [0.24973877983541862, 0.3378766591098989, 0.15974656746239488, 0.027776085211312595, 
0.02330072841260748, 0.20156117996836745]) grid = smooth_min(grid, 0, 5, 1) grid = magnitude(grid, 0, [5, 0], 2) grid = transit(grid, 6, [15, 8], [0.5303803951305812, 0.4696196048694189]) grid = inverse(grid, 0, 0) grid = magnitude(grid, 13, [8], 2) grid = transit(grid, 13, [15, 5, 9, 4, 6, 12], [0.18067242214638962, 0.12939497982917472, 0.08164480089591167, 0.24583958083442445, 0.2244518823086713, 0.13799633398542827]) grid = transit(grid, 11, [0], [1.0]) grid = magnitude(grid, 0, [4, 13], 2) grid = transit(grid, 8, [5, 4, 15, 6, 14, 0, 3, 11], [0.13835365002720226, 0.008781149737259792, 0.24627334258742545, 0.04870190081124998, 0.049950480577274, 0.15123046752435387, 0.31255198044446264, 0.04415702829077187]) grid = transit(grid, 1, [3], [1.0]) grid = magnitude(grid, 14, [4], 2) grid = sin(grid, 1, 5, 8.18216846853571, -6.729427492311089) grid = magnitude(grid, 11, [8, 2], 2) grid = transit(grid, 7, [12, 11, 13, 4], [0.1713900685471786, 0.14082681623065177, 0.19859698568682838, 0.4891861295353413]) grid = transit(grid, 13, [12, 15, 9, 2, 0, 1, 5], [0.18796556626817826, 0.19260744772691155, 0.11226112831146452, 0.08161640805634696, 0.08706050582840198, 0.2243337708440404, 0.11415517296465624]) grid = sin(grid, 11, 13, -6.909579361872105, 70.84834564082374) grid = transit(grid, 2, [11, 7, 13], [0.3629247592109436, 0.10073172896374764, 0.5363435118253088]) grid = sin(grid, 1, 5, 0.6814927249849106, 30.75954926767548) grid = inverse(grid, 8, 7) grid = prod(grid, 10, [5, 2]) grid = transit(grid, 15, [0, 3], [0.29345909580747953, 0.7065409041925205]) grid = sin(grid, 12, 4, -1.6398586072056767, 84.51374680259704) grid = sin(grid, 1, 1, -0.183401440709518, -88.40242580975152) grid = transit(grid, 12, [3, 13, 2, 9, 0], [0.24803411847529433, 0.2425397323068922, 0.0904752958055755, 0.11683555248582808, 0.30211530092641004]) grid = sin(grid, 5, 2, -2.2972705471452146, -12.522748365129786) grid = smooth_min(grid, 12, 9, 11) grid = sin(grid, 4, 15, -1.9527829039221054, 20.537776250912316) grid = transit(grid, 7, [11, 9, 2], [0.5001532946669459, 0.42070604285213226, 0.07914066248092186]) grid = inverse(grid, 5, 12) grid = sin(grid, 10, 2, 0.9155140652310594, -34.1653400637653) grid = transit(grid, 8, [14], [1.0]) grid = transit(grid, 4, [1, 12, 15, 13, 3], [0.32356965941479515, 0.022696478437764827, 0.2132573540073865, 0.11957266769813353, 0.3209038404419199]) grid = transit(grid, 6, [1, 7, 0, 2, 9, 4, 8], [0.06904450551777742, 0.12680650314665426, 0.1756104206123629, 0.013987480750913602, 0.1337935702206657, 0.39097327478734406, 0.08978424496428203]) grid = smooth_min(grid, 9, 9, 10) grid = shift(grid, 8, 1, -0.2952350240798842) grid = sin(grid, 11, 6, 1.576100090732909, -21.508000199215132) grid = shift(grid, 11, 5, 1.0526879494498724) grid = transit(grid, 1, [14], [1.0]) grid = transit(grid, 8, [9, 10, 2, 15, 13], [0.3265190472987195, 0.21568397721657098, 0.06226802479442838, 0.0028158122366541832, 0.39271313845362693]) grid = magnitude(grid, 11, [13, 10, 12, 2, 11, 14], 2) grid = transit(grid, 12, [8, 11, 3], [0.2717231795161624, 0.38648847983305307, 0.3417883406507845]) grid = transit(grid, 15, [7, 3], [0.9172074355564371, 0.08279256444356292]) grid = transit(grid, 13, [1, 2, 7, 5, 8, 9, 15], [0.085742434722219, 0.4119764535375412, 0.08377067725345017, 0.13045782410775286, 0.02917564277599849, 0.12489006625007311, 0.13398690135296518]) grid = transit(grid, 2, [2, 0, 11, 10, 5, 4, 15, 13], [0.1869735689344564, 0.06343641920215143, 0.038951322931441136, 0.04613309733662021, 0.19750663742298355, 
0.16072124228620793, 0.15869932715876592, 0.14757838472737334]) grid = transit(grid, 2, [1, 7], [0.18247956114317448, 0.8175204388568255]) grid = transit(grid, 8, [11, 15, 0], [0.08195235243098883, 0.6796005904358621, 0.23844705713314918]) grid = power(grid, 14, 0, 0.10854801586669052) grid = shift(grid, 8, 9, 2.766857264282361) grid = transit(grid, 3, [6, 14, 0, 3, 15, 4, 2, 11, 13], [0.03597236183123865, 0.04938629068404894, 0.08457069101219464, 0.014801187461296406, 0.3649334871683411, 0.28062233683539095, 0.08637063851194285, 0.06076815802338077, 0.022574848472165728]) grid = transit(grid, 4, [11, 4, 15, 10, 8, 5, 2, 3], [0.23701292672659616, 0.08316792464084911, 0.017867439461611043, 0.36417402420248035, 0.02841485585755143, 0.19916101840344472, 0.03422984110049058, 0.03597196960697647]) grid = magnitude(grid, 13, [11, 7], 2) grid = sin(grid, 4, 8, 4.28026157040775, -75.14180284322572) grid = prod(grid, 3, [14, 15]) grid = inverse(grid, 5, 5) grid = transit(grid, 4, [8, 4, 15, 9, 10], [0.10267794314653868, 0.019022820046952493, 0.061606568183823145, 0.4832751235896067, 0.33341754503307897]) grid = transit(grid, 13, [10, 8, 9, 12, 2], [0.031587088727564654, 0.024264739611302585, 0.0306940545567164, 0.19611241111174804, 0.7173417059926683]) grid = transit(grid, 0, [7, 1, 11, 0, 15], [0.036901331671075975, 0.5054281720479712, 0.13288430351514774, 0.10820806749406277, 0.21657812527174225]) grid = transit(grid, 3, [7, 3, 12, 9], [0.13643904772292245, 0.38438336340747, 0.15936221296996333, 0.31981537589964426]) grid = sin(grid, 10, 3, -2.5681840787633137, -30.256455817944243) grid = sin(grid, 8, 2, 3.501615294498545, -75.50049353340206) grid = prod(grid, 9, [1, 4, 0, 6]) grid = transit(grid, 8, [9, 3], [0.30088974760959275, 0.6991102523904072]) grid = transit(grid, 8, [2, 11, 15, 4, 1, 0, 14], [0.29712982335534416, 0.2526657169525107, 0.08415696601637544, 0.18541009701166816, 0.011062110917544764, 0.017334502896306194, 0.1522407828502505]) grid = prod(grid, 2, [8, 7, 11, 10, 15, 0, 5]) grid = transit(grid, 11, [7, 2, 3, 9, 5], [0.24039798004748805, 0.2886075990223525, 0.18742374307846998, 0.11615833154358073, 0.16741234630810867]) grid = prod(grid, 0, [0, 1, 2, 14]) grid = prod(grid, 9, [10, 11, 8, 15, 0, 12, 3]) grid = transit(grid, 13, [5, 15, 10], [0.13237609957996088, 0.22944646977966682, 0.6381774306403722]) grid = transit(grid, 6, [15], [1.0]) grid = sin(grid, 15, 0, -0.033265790773207085, 51.94880270063618) grid = smooth_min(grid, 13, 10, 15) grid = transit(grid, 1, [12, 8, 10, 4, 2], [0.43102537693091664, 0.25433300797798253, 0.21618454566402304, 0.046743011673522995, 0.05171405775355483]) grid = sin(grid, 10, 10, 0.9558311639914843, -47.618914508652054) grid = shift(grid, 9, 8, -1.1449289879251126) grid = transit(grid, 7, [4, 10, 1, 13, 5, 0, 7, 8, 9, 12, 6, 11, 14], [0.10006330804326793, 0.03891760159161208, 0.005474465860804227, 0.12962618248625338, 0.03090992138168193, 0.016043163973997736, 0.13259375374543056, 0.09920705802758992, 0.1415090600653345, 0.09597789664069131, 0.06106766497801195, 0.14032187015082653, 0.008288053054498123]) grid = prod(grid, 15, [12, 15]) grid = prod(grid, 8, [11, 7, 4, 12]) grid = transit(grid, 7, [15, 6, 2, 7], [0.45073658968521574, 0.16060948991238613, 0.12949271785123345, 0.2591612025511646]) grid = transit(grid, 10, [11, 4, 2, 8, 14], [0.3705316303566195, 0.1755951969700656, 0.043989590834687294, 0.22866693087969006, 0.1812166509589377]) grid = sin(grid, 4, 2, -3.329894296119046, -76.41676919069447) grid = smooth_min(grid, 11, 8, 12) grid = 
transit(grid, 1, [1, 14, 8], [0.38986786543390084, 0.40057743619803005, 0.20955469836806906]) grid = transit(grid, 9, [5], [1.0]) grid = shift(grid, 9, 13, -5.367438086043798) grid = magnitude(grid, 13, [2, 0], 2) grid = transit(grid, 13, [6, 2, 3, 15, 5, 7], [0.06492287400539203, 0.21223490901058306, 0.36311130408652753, 0.09994467226348329, 0.12833432959710458, 0.1314519110369097]) grid = transit(grid, 8, [6, 2], [0.6857167761482571, 0.31428322385174284]) grid = shift(grid, 6, 15, 4.115946851379848) grid = transit(grid, 15, [13, 3], [0.5897775709748927, 0.41022242902510725]) grid = sin(grid, 12, 14, 1.097917736937588, 58.87772371184383) grid = transit(grid, 11, [9, 11], [0.37033495928182997, 0.6296650407181701]) grid = smooth_min(grid, 4, 1, 8) grid = sin(grid, 4, 4, 3.47544933993972, -37.11795195118333) grid = sin(grid, 11, 7, -0.3409112713023047, 75.93313567333723) grid = transit(grid, 11, [5, 10, 7], [0.22694849313985146, 0.5162695719847235, 0.25678193487542517]) grid = sin(grid, 9, 9, -4.261918262131112, 18.680580924548693) grid = smooth_max(grid, 2, 2, 11) grid = sin(grid, 13, 13, 7.718114740496995, 55.242200715207815) grid = sin(grid, 12, 10, -3.1151555334821888, 17.571856948335267) grid = prod(grid, 6, [2, 4, 13]) grid = transit(grid, 5, [1, 9, 3, 10, 4], [0.24075568684771534, 0.02527375632067568, 0.4828116495090197, 0.09546712897709621, 0.15569177834549294]) grid = sin(grid, 6, 3, -0.1377650382373763, -96.34412250071645) grid = sin(grid, 7, 3, 1.6405444007982959, -37.09230830685477) grid = transit(grid, 9, [8], [1.0]) grid = sin(grid, 5, 10, -1.5052434957207308, 24.900059771988836) grid = sin(grid, 8, 10, 2.5947698108630664, -90.74050288622541) grid = sin(grid, 9, 8, -0.8743741598911887, 15.92872484723533) grid = transit(grid, 4, [3, 13, 9, 8, 5, 2, 12], [0.05731677054419865, 0.08527765171582982, 0.33929504571762287, 0.1932983536368378, 0.0036374435750729187, 0.12289545051895708, 0.19827928429148084]) grid = transit(grid, 8, [13, 9, 5, 7, 14], [0.05801706264076675, 0.341923243761946, 0.0494872820880747, 0.29583940098242745, 0.2547330105267852]) grid = inverse(grid, 11, 5) grid = magnitude(grid, 14, [4, 6, 1, 0], 2) grid = transit(grid, 13, [11, 0], [0.6569516962992897, 0.3430483037007103]) grid = sin(grid, 14, 5, 0.053526366336325744, 4.147364704932215) grid = transit(grid, 4, [3], [1.0]) grid = sin(grid, 3, 12, -4.078686662791614, 24.459526349523884) grid = inverse(grid, 15, 10) grid = shift(grid, 6, 1, -1.115193397983063) grid = smooth_max(grid, 13, 3, 8) grid = transit(grid, 13, [13, 0, 5, 14], [0.09662806703796267, 0.1621478194912538, 0.21548762580464817, 0.5257364876661353]) grid = inverse(grid, 1, 0) grid = smooth_max(grid, 1, 15, 12) grid = prod(grid, 11, [3]) grid = smooth_max(grid, 8, 11, 15) grid = sin(grid, 12, 6, -3.621533174445339, 24.02414911462421) grid = sin(grid, 1, 11, 0.5071121900678415, 10.950101187785563) grid = shift(grid, 13, 3, 5.677279514103952) grid = transit(grid, 3, [15, 11, 2, 8, 0], [0.28772794692354614, 0.1935939805514465, 0.06024872230823076, 0.13457223936247906, 0.32385711085429764]) grid = transit(grid, 1, [7, 2, 6, 1, 4, 0], [0.2070905138265326, 0.06562120796792839, 0.17355051228662716, 0.05514926535269553, 0.0829726599151083, 0.41561584065110807]) grid = transit(grid, 2, [0, 4, 2], [0.010597803396528332, 0.7371576932264431, 0.25224450337702853]) grid = sin(grid, 11, 8, 4.303514875116891, -67.11152580467314) grid = prod(grid, 5, [3, 9, 2]) grid = sin(grid, 5, 1, 2.0751861425380627, 63.37681521624819) grid = smooth_min(grid, 11, 10, 9) grid = 
sin(grid, 13, 2, 4.295107938126156, 57.378601701270014) grid = sin(grid, 10, 2, -0.010214061334835559, 20.43114218394348) grid = transit(grid, 8, [1], [1.0]) grid = sin(grid, 4, 9, 0.2366252211469413, -40.63773874328931) grid = sin(grid, 9, 15, -2.507870105026106, -89.43842740853354) grid = transit(grid, 0, [12, 6, 4, 9, 1, 0, 14], [0.36336761526831185, 0.17372789204937897, 0.08036453739500136, 0.09747098994785518, 0.040818441056887325, 0.16796111771248814, 0.07628940657007711]) grid = transit(grid, 3, [11, 1, 12, 9, 0, 8, 15, 2, 10, 14], [0.20381942291270427, 0.07753380798970702, 0.11445683149439734, 0.08475226158626031, 0.1416941580568898, 0.020968563089492034, 0.0847896752697893, 0.0921589665387646, 0.008240731277180186, 0.17158558178481512]) grid = transit(grid, 5, [11, 10], [0.9817011300708863, 0.018298869929113594]) grid = sin(grid, 14, 8, -0.4693746108213766, -98.17810769380118) grid = sin(grid, 12, 10, 3.6427863324838423, 99.297524709649) grid = sin(grid, 5, 14, -1.45141083652418, -99.85812912291547) grid = transit(grid, 0, [4, 3, 8], [0.23275058190778222, 0.49901982570530873, 0.2682295923869092]) grid = magnitude(grid, 8, [10, 9, 12, 4, 7, 15], 2) grid = sin(grid, 12, 7, 1.439019575760617, 13.126437741104823) grid = transit(grid, 10, [15, 8, 13, 2], [0.32464063956303774, 0.20922781529873477, 0.16179927966914437, 0.30433226546908315]) grid = magnitude(grid, 6, [14, 5, 13, 11, 2, 9], 2) grid = sin(grid, 9, 5, -5.606152225672729, -35.928477282758536) grid = transit(grid, 0, [7, 11, 15, 8, 12, 0, 4, 14, 3, 5], [0.11084510086381213, 0.003439701966452383, 0.10819642722960272, 0.15371289739415475, 0.25812192912399506, 0.005727171643985687, 0.14633649245899077, 0.033890406689391105, 0.05550396325806974, 0.1242259093715456]) grid = smooth_max(grid, 10, 15, 10) grid = transit(grid, 11, [9, 0, 11, 7, 3, 8], [0.03500911832175082, 0.03265868671024263, 0.3248025339288217, 0.4234363710484886, 0.13338109758306646, 0.050712192407629864]) grid = transit(grid, 7, [14, 2, 13, 1, 11, 3, 8, 7], [0.207462236904601, 0.11516125867317799, 0.12240760599022518, 0.05066197369764289, 0.13869178538077429, 0.09948828746526778, 0.16686217850764798, 0.09926467338066268]) grid = transit(grid, 6, [6, 13, 7], [0.16813621041531998, 0.42150135317124293, 0.410362436413437]) grid = inverse(grid, 6, 6) grid = sin(grid, 7, 15, -4.9164570678736865, 86.15931416043557) grid = sin(grid, 1, 7, 1.6265187305620117, -97.13150019385894) grid = transit(grid, 11, [0, 9], [0.1290607634325389, 0.8709392365674611]) grid = transit(grid, 14, [14, 13, 15], [0.530662002197574, 0.1082014600047566, 0.36113653779766947]) grid = transit(grid, 14, [10, 14, 4, 9, 13, 6], [0.3199750359220948, 0.07376266150860299, 0.03622483092076182, 0.09070212266434277, 0.4030414045204916, 0.07629394446370606]) grid = magnitude(grid, 13, [7, 4, 15], 2) grid = transit(grid, 13, [6, 15, 11, 9, 12], [0.21908823570589997, 0.1636179110868493, 0.03797238284324163, 0.29532957711092916, 0.2839918932530799]) grid = sin(grid, 4, 3, 2.634465399239887, 62.07538440217337) grid = sin(grid, 7, 2, 3.41043792019894, 65.36615977552518) grid = transit(grid, 0, [14, 3, 11, 10, 7], [0.5203714128788618, 0.068511863728177, 0.10141059844877331, 0.2728285912351676, 0.036877533709020166]) grid = transit(grid, 7, [11], [1.0]) grid = transit(grid, 5, [9, 13, 3, 14], [0.28064413535886806, 0.5181512474389621, 0.1504742947642479, 0.050730322437922]) grid = prod(grid, 1, [12, 13]) grid = sin(grid, 6, 14, -1.927951619591129, -65.3028706482776) grid = prod(grid, 14, [13]) grid = sin(grid, 1, 12, 
-0.5111321725063378, 18.261359970959475) grid = power(grid, 6, 5, 0.9223892145169746) grid = transit(grid, 2, [9, 11, 10], [0.2662646690994658, 0.2460545507972383, 0.4876807801032959]) grid = transit(grid, 2, [7], [1.0]) grid = sin(grid, 10, 9, 6.219381309190064, -71.03631884776823) grid = sin(grid, 9, 6, 1.6821417847846682, -64.12547446801875) grid = sin(grid, 13, 3, -0.15800274281797377, 90.63950889076133) grid = sin(grid, 14, 14, -1.842523240371888, 74.23947694195837) grid = inverse(grid, 7, 8) grid = smooth_max(grid, 10, 3, 15) grid = magnitude(grid, 9, [15, 7], 2) grid = transit(grid, 4, [4, 12, 14, 15, 7, 1], [0.20378471182464508, 0.038241020379710625, 0.16903312106740406, 0.3387613981701764, 0.11303295854369695, 0.13714679001436697]) grid = transit(grid, 4, [14, 11, 12, 13, 4, 7], [0.23221079251346607, 0.3307147367708056, 0.26199556841553734, 0.018127231672754242, 0.13788777275073352, 0.01906389787670339]) grid = sin(grid, 4, 7, 3.7705302330112063, 56.91558505626969) grid = sin(grid, 3, 9, 1.4275963527158242, -76.78247379244436) grid = sin(grid, 2, 5, -5.225820110717917, 57.71107021356826) grid = transit(grid, 2, [12], [1.0]) grid = prod(grid, 14, [11, 10]) grid = transit(grid, 2, [0, 15, 10], [0.005204838856346087, 0.5116602651328436, 0.48313489601081044]) grid = transit(grid, 10, [10], [1.0]) grid = transit(grid, 1, [8, 10, 15, 14, 9], [0.33493798319460544, 0.14040206011900094, 0.3010385316537353, 0.07412413198773361, 0.14949729304492473]) grid = magnitude(grid, 10, [11, 0, 5], 2) grid = magnitude(grid, 9, [15, 3, 11, 0, 14], 2) grid = sin(grid, 4, 5, -1.8457292172108153, -53.43885199947502) grid = sin(grid, 10, 0, 7.741409383532979, -12.082110529508299) grid = prod(grid, 11, [9]) grid = sin(grid, 4, 3, 0.10154488887533689, 12.479110491961137) grid = magnitude(grid, 1, [7], 2) grid = smooth_min(grid, 7, 4, 13) grid = magnitude(grid, 5, [7], 2) grid = transit(grid, 6, [9, 11, 2, 13], [0.381505247910628, 0.12073241493361198, 0.3454992433435407, 0.15226309381221942]) grid = magnitude(grid, 10, [7, 15, 5], 2) grid = magnitude(grid, 9, [12, 14, 4], 2) grid = shift(grid, 3, 9, 3.0393348894939773) grid = shift(grid, 2, 4, 2.1961962516242517) grid = prod(grid, 15, [3, 5, 0, 1]) grid = sin(grid, 6, 11, -0.7697482296056479, 23.55348445076298) grid = sin(grid, 7, 7, 0.5492744322205282, 35.873568370773654) grid = transit(grid, 7, [13], [1.0]) grid = sin(grid, 3, 12, 6.470760426148978, -53.62090724330151) grid = sin(grid, 10, 10, 0.7827958631857042, -90.82177259964699) grid = transit(grid, 6, [8, 6, 5, 7, 4, 2], [0.39579476392315127, 0.3200094081197146, 0.06439062651950353, 0.03284446726347166, 0.04732779189481446, 0.13963294227934445]) grid = smooth_min(grid, 0, 13, 15) grid = smooth_max(grid, 5, 8, 4) grid = transit(grid, 10, [1], [1.0]) grid = transit(grid, 15, [15], [1.0]) grid = prod(grid, 13, [6, 3, 7]) grid = sin(grid, 0, 3, -3.561651028660104, 11.539889679902203) grid = power(grid, 10, 5, 0.12539493928522222) grid = power(grid, 0, 12, 2.5526439221510495) grid = sin(grid, 4, 10, -3.680544885171134, 30.633332441673872) grid = transit(grid, 11, [12, 6, 9], [0.1597221050818672, 0.523275926379751, 0.31700196853838186]) grid = sin(grid, 14, 7, 5.409920766787869, -58.09956716630187) grid = sin(grid, 2, 15, -2.5319898824657017, -45.01904701883333) grid = shift(grid, 5, 5, 3.1584260780059252) grid = transit(grid, 10, [9, 8], [0.7777441717493406, 0.22225582825065934]) grid = transit(grid, 3, [9], [1.0]) grid = transit(grid, 11, [2], [1.0]) #create color space def shift_colors(x, shift): res = 
x.copy() for i in range(x.shape[-1]): if shift[i] > 0: res[:,:,i] = (-np.abs(((x[:,:,i] + 1) / 2) ** (1 + shift[i]) - 1) ** (1 / (1 + shift[i])) + 1) * 2 - 1 if shift[i] < 0: res[:,:,i] = np.abs((1 - (x [:,:,i]+ 1) / 2) ** (1 - shift[i]) - 1) ** (1 / (1 - shift[i])) * 2 - 1 return test_values(res) res = np.zeros((SIZE, SIZE, 3)) res += shift_colors(grid[:,:,0:1].repeat(3, -1), [1.9355805467383669, 1.4677093499726706, 1.2451388311186942]) res = res / 1 res = ((res + 1) / 2 * 255).clip(0,255) #save results im = Image.fromarray(np.uint8(res)) im.save(os.path.basename(__file__) + '.png') #save layers img = np.zeros((SIZE * 4, SIZE * 4)) for j in range(GRID_CHANNELS): x = j % 4 y = j // 4 img[x*SIZE:(x + 1)*SIZE, y*SIZE:(y+1)*SIZE] = grid[:,:,j] img = (img + 1) * 127.5 im = Image.fromarray(np.uint8(img)) im.save(os.path.basename(__file__) + '_layers.png')
2.328125
2
onmt/bin/build_vocab.py
comydream/OpenNMT-py
1
2811
#!/usr/bin/env python
"""Get vocabulary countings from transformed corpora samples."""
from onmt.utils.logging import init_logger
from onmt.utils.misc import set_random_seed, check_path
from onmt.utils.parse import ArgumentParser
from onmt.opts import dynamic_prepare_opts
from onmt.inputters.corpus import build_vocab
from onmt.transforms import make_transforms, get_transforms_cls


def build_vocab_main(opts):
    """Apply transforms to samples of specified data and build vocab from it.

    Transforms that need vocab will be disabled in this.
    Built vocab is saved in plain text format as follows and can be passed as
    `-src_vocab` (and `-tgt_vocab`) when training:
    ```
    <tok_0>\t<count_0>
    <tok_1>\t<count_1>
    ```
    """
    ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True)
    assert opts.n_sample == -1 or opts.n_sample > 1, \
        f"Illegal argument n_sample={opts.n_sample}."

    logger = init_logger()
    set_random_seed(opts.seed, False)
    transforms_cls = get_transforms_cls(opts._all_transform)
    fields = None

    transforms = make_transforms(opts, transforms_cls, fields)

    logger.info(f"Counter vocab from {opts.n_sample} samples.")
    src_counter, tgt_counter, src_feats_counter = build_vocab(
        opts, transforms, n_sample=opts.n_sample)

    logger.info(f"Counters src:{len(src_counter)}")
    logger.info(f"Counters tgt:{len(tgt_counter)}")
    for feat_name, feat_counter in src_feats_counter.items():
        logger.info(f"Counters {feat_name}:{len(feat_counter)}")

    def save_counter(counter, save_path):
        check_path(save_path, exist_ok=opts.overwrite, log=logger.warning)
        with open(save_path, "w", encoding="utf8") as fo:
            for tok, count in counter.most_common():
                fo.write(tok + "\t" + str(count) + "\n")

    if opts.share_vocab:
        src_counter += tgt_counter
        tgt_counter = src_counter
        logger.info(f"Counters after share:{len(src_counter)}")
        save_counter(src_counter, opts.src_vocab)
    else:
        save_counter(src_counter, opts.src_vocab)
        save_counter(tgt_counter, opts.tgt_vocab)

    for k, v in src_feats_counter.items():
        save_counter(v, opts.src_feats_vocab[k])


def _get_parser():
    parser = ArgumentParser(description='build_vocab.py')
    dynamic_prepare_opts(parser, build_vocab_only=True)
    return parser


def main():
    parser = _get_parser()
    opts, unknown = parser.parse_known_args()
    build_vocab_main(opts)


if __name__ == '__main__':
    main()
2.625
3
schools3/ml/experiments/feat_pruning_experiment.py
dssg/mlpolicylab_fall20_schools3_public
0
2812
<filename>schools3/ml/experiments/feat_pruning_experiment.py
import numpy as np
import pandas as pd
from schools3.ml.experiments.models_experiment import ModelsExperiment
from schools3.data.base.cohort import Cohort
from schools3.config import main_config
from schools3.config import global_config
from schools3.data.datasets.dataset import Dataset
from schools3.ml.experiments.feat_importances_experiment import FeatureImportancesExperiment
from schools3.ml.experiments.single_dataset_experiment import SingleDatasetExperiment
from schools3.ml.models.tfkeras_model import TFKerasModel
from schools3.ml.models.sklearn_model import SklearnModel
import schools3.config.ml.experiments.feat_pruning_experiment_config as config
from schools3.config.data.datasets import dataset_config


# an experiment that trains models with subsets of the features according to their permutation importance rank
# like SingleDatasetExperiment, this works on a specific grade
class FeaturePruningExperiment(ModelsExperiment):
    def __init__(
        self,
        name='ignore',
        features_list=main_config.features,
        labels=main_config.labels,
        models=main_config.models,
        metrics=main_config.metrics,
        use_cache=main_config.use_cache
    ):
        super(FeaturePruningExperiment, self).__init__(
            name, features_list, labels, models, metrics, use_cache=use_cache
        )

    def perform(
        self, grade=main_config.single_grade, train_years=main_config.train_years,
        test_years=main_config.test_years, compute_train_metrics=False, **kwargs
    ):
        train_cohort = Cohort(grade, train_years)
        df = pd.DataFrame()
        for model in self.models:
            if not (isinstance(model, SklearnModel) or isinstance(model, TFKerasModel)):
                continue

            train_data = Dataset(train_cohort, self.features_list, model.get_feature_processor(), self.labels)
            model.train(train_data)

            feats_exp = FeatureImportancesExperiment('ignore', self.features_list, self.labels, [model], self.metrics)
            feature_names, _, sorted_idxs = feats_exp.get_feature_importances(model, train_data)
            feats = np.flip(feature_names[sorted_idxs])

            for i in config.num_feats:
                dataset_config.feat_whitelist.clear()
                for feat in feats[:i]:
                    dataset_config.feat_whitelist.append(feat)

                exp = SingleDatasetExperiment('ignore', self.features_list, self.labels, [model], self.metrics)
                cur_df = exp.perform(grade, train_years, test_years, compute_train_metrics=compute_train_metrics, **kwargs)
                cur_df['num_feats'] = i

                df = pd.concat([df, cur_df], ignore_index=True)

        return df
2.5625
3
network/dataset/image_loading.py
imsb-uke/podometric_u_net
0
2813
import os

import numpy as np
from skimage.io import imread


def get_file_count(paths, image_format='.tif'):
    total_count = 0
    for path in paths:
        try:
            path_list = [_ for _ in os.listdir(path) if _.endswith(image_format)]
            total_count += len(path_list)
        except OSError:
            print("Directory does not exist. Returned file count for this path will be 0")
    return total_count


# Function to load image
def load_image(img_path):
    img = imread(img_path)
    if img.shape[2] == 4:
        img = img[:, :, :-1]
    # img = np.roll(img, shift=1, axis=2)  # CHECK IMAGE FORMAT
    return img


# Function to load mask
def load_mask(mask_path):
    mask = imread(mask_path)
    return mask


def load_mask_from_img(cfg, img_path, img_name, suffixes):
    a_mask = imread(os.path.join(img_path, img_name + suffixes[0]))
    msk = np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes) * cfg.NUMBER_MSK_CHANNELS))
    i = 0
    for suffix in suffixes:
        msk_channel = imread(os.path.join(img_path, img_name + suffix))
        if len(msk_channel.shape) == 2:
            msk_channel = np.expand_dims(msk_channel, axis=-1)
        if len(msk_channel.shape) != 3:
            raise ValueError("Mask must be 3-dim here. Does your mask have 1 or more than 3 dimensions? "
                             "Check the masks.")
        msk[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = msk_channel
        i += cfg.NUMBER_MSK_CHANNELS
    # print(msk, msk.shape)
    return msk


def load_weights(cfg, img_path, img_name, weight_suffixes):
    a_weights = np.load(os.path.join(img_path, img_name + weight_suffixes[0]))
    weights = np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS))
    i = 0
    for suffix in weight_suffixes:
        weights_channel = np.load(os.path.join(img_path, img_name + suffix))
        if len(weights_channel.shape) == 2:
            weights_channel = np.expand_dims(weights_channel, axis=-1)
        if len(weights_channel.shape) != 3:
            raise ValueError("Weights must be 3-dim here. Do your weights have 1 or more than 3 dimensions? "
                             "Check the weights.")
        weights[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = weights_channel
        i += cfg.NUMBER_MSK_CHANNELS
    return weights
2.828125
3
series/simple/numeric_series.py
kefir/snakee
0
2814
from typing import Optional, Callable

try:  # Assume we're a sub-module in a package.
    from series import series_classes as sc
    from utils import numeric as nm
except ImportError:  # Apparently no higher-level package has been imported, fall back to a local import.
    from .. import series_classes as sc
    from ...utils import numeric as nm

Native = sc.AnySeries

DEFAULT_NUMERIC = True
WINDOW_DEFAULT = (-1, 0, 1)
WINDOW_WO_CENTER = (-2, -1, 0, 1, 2)
WINDOW_NEIGHBORS = (-1, 0)


class NumericSeries(sc.AnySeries):
    def __init__(
            self,
            values=[],
            validate=False,
            name=None,
    ):
        super().__init__(
            values=values,
            validate=validate,
            name=name,
        )

    @staticmethod
    def get_distance_func():
        return nm.diff

    def get_errors(self):
        yield from super().get_errors()
        if not self.has_valid_items():
            yield 'Values of {} must be numeric'.format(self.get_class_name())

    def has_valid_items(self):
        for v in self.get_values():
            if not isinstance(v, (int, float)):
                return False
        return True

    def is_numeric(self, check=False):
        if check:
            return self.has_valid_items()
        else:
            return DEFAULT_NUMERIC

    def get_sum(self):
        return sum(
            self.filter_values_defined().get_values(),
        )

    def get_mean(self):
        values_defined = self.filter_values_defined().get_values()
        if values_defined:
            return sum(values_defined) / len(values_defined)

    def norm(self, rate=None, default=None):
        if rate is None:
            rate = self.get_mean()
        return self.map_values(lambda v: v / rate if rate else default)

    def divide(self, series, default=None, extend=False):
        return self.map_optionally_extend_zip_values(
            lambda x, y: x / y if y else default,
            extend, series,
        )

    def subtract(self, series, default=None, extend=False):
        return self.map_optionally_extend_zip_values(
            lambda x, y: x - y if x is not None and y is not None else default,
            extend, series,
        )

    def derivative(self, extend=False, default=0):
        if extend:
            return self.preface(None).subtract(
                self,
                extend=True,
                default=default,
            ).crop(0, 1)
        else:
            return self.slice(0, -1).subtract(
                self.shift(-1)
            )

    def get_sliding_window(self, window=WINDOW_DEFAULT, extend=True, default=None, as_series=True):
        if extend:
            n_min = 0
            n_max = self.get_count()
        else:
            n_min = - min(window)
            n_max = self.get_count() - max(window)
        for center in range(n_min, n_max):
            sliding_window = [center + n for n in window]
            if as_series:
                yield self.value_series().items_no(sliding_window, extend=extend, default=default)
            else:
                yield self.value_series().get_items_no(sliding_window, extend=extend, default=default)

    def apply_window_func(
            self,
            function: Callable,
            window=WINDOW_DEFAULT,
            extend=True,
            default=None,
            as_series=False,
            inplace: bool = False,
    ) -> Optional[Native]:
        values = map(function, self.get_sliding_window(window, extend=extend, default=default, as_series=as_series))
        return self.set_values(values, inplace=inplace)

    def mark_local_extremums(self, local_min=True, local_max=True):
        return self.apply_window_func(
            lambda a: nm.is_local_extremum(*a, local_min=local_min, local_max=local_max),
            window=WINDOW_DEFAULT,
            extend=True,
            default=False,
        )

    def mark_local_max(self):
        return self.mark_local_extremums(local_min=False, local_max=True)

    def mark_local_min(self):
        return self.mark_local_extremums(local_min=True, local_max=False)

    def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False):
        smoothed_series = self.smooth(window=window)
        deviation = self.subtract(smoothed_series)
        if rel:
            deviation = deviation.divide(smoothed_series, default=0)
        return deviation

    # @deprecated
    def smooth_simple_linear(self, window_len=3, exclude_center=False):
        center = int((window_len - 1) / 2)
        count = self.get_count()
        result = self.new()
        for n in self.get_range_numbers():
            is_edge = n < center or n >= count - center
            if is_edge:
                result.append(self.get_item_no(n), inplace=True)
            else:
                sub_series = self.slice(n - center, n + center + 1)
                if exclude_center:
                    sub_series = sub_series.drop_item_no(center)
                result.append(sub_series.get_mean(), inplace=True)
        return result

    def smooth(self, how='linear', *args, **kwargs):
        method_name = 'smooth_{}'.format(how)
        smooth_method = self.__getattribute__(method_name)
        return smooth_method(*args, **kwargs)

    def smooth_multiple(self, list_kwargs=[]):
        series = self
        for kwargs in list_kwargs:
            series = series.smooth(**kwargs)
        return series

    def smooth_linear(self, window=WINDOW_DEFAULT):
        return self.apply_window_func(
            lambda s: s.get_mean(),
            window=window, extend=True, default=None,
            as_series=True,
        )

    def smooth_spikes(self, threshold, window=WINDOW_WO_CENTER, local_min=False, local_max=True, whitelist=None):
        spikes = self.mark_spikes(threshold, local_min=local_min, local_max=local_max)
        if whitelist:
            spikes = spikes.map_zip_values(
                lambda a, b: a and not b,
                whitelist,
            )
        return self.map_zip_values(
            lambda v, t, s: s if t else v,
            spikes,
            self.smooth(window=window),
        )

    def mark_spikes(self, threshold, window=WINDOW_NEIGHBORS, local_min=False, local_max=True):
        deviation = self.deviation_from_neighbors(window=window, rel=True)
        if local_min or local_max:
            deviation = deviation.map_zip_values(
                lambda x, m: x if m else None,
                self.mark_local_extremums(local_min=local_min, local_max=local_max),
            )
        spikes = deviation.map_values(
            lambda x: abs(x or 0) > threshold,
        )
        return spikes

    def plot(self, fmt='-'):
        nm.plot(self.get_range_numbers(), self.get_values(), fmt=fmt)
2.375
2
app/internal/module/video/database.py
kuropengin/SHINtube-video-api
0
2815
import glob
import pathlib

from .filemanager import filemanager_class


class database_class(filemanager_class):
    def __init__(self):
        filemanager_class.__init__(self)

    async def update_info(self, year, cid, vid, title, explanation):
        # Load the existing JSON
        json_file = "/".join([self.video_dir, str(year), cid, vid, "info.json"])
        _dict = await self.read_json(json_file)
        if not _dict:
            return False
        # Update the JSON
        _dict["title"] = title
        _dict["explanation"] = explanation
        # Write the JSON
        if self.write_json(json_file, _dict):
            return True
        return False

    async def encode_result(self, folderpath, resolution, result=True):
        # Load the existing JSON
        json_file = "/".join([folderpath, "info.json"])
        _dict = await self.read_json(json_file)
        if not _dict:
            return False
        if result:
            # Add the resolution
            _dict["resolution"].append(f"{resolution}p")
            _dict["encode_tasks"].remove(f"{resolution}p")
        else:
            _dict["encode_error"].append(f"{resolution}p")
            _dict["encode_tasks"].remove(f"{resolution}p")
        # Write the JSON
        self.write_json(json_file, _dict)
        # Write to the playlist
        playlist = "/".join([folderpath, "playlist.m3u8"])
        await self.write_playlist(playlist, resolution)

    async def encode_task(self, folderpath, resolution):
        # Load the existing JSON
        json_file = "/".join([folderpath, "info.json"])
        _dict = await self.read_json(json_file)
        if not _dict:
            return False
        if f"{resolution}p" in _dict["resolution"]:
            return True
        # Add the resolution
        _dict["encode_tasks"].append(f"{resolution}p")
        # Write the JSON
        if self.write_json(json_file, _dict):
            return True
        return False

    async def encode_error(self, folderpath, message):
        # Load the existing JSON
        json_file = "/".join([folderpath, "info.json"])
        _dict = await self.read_json(json_file)
        if not _dict:
            return False
        # Add the error message
        _dict["encode_error"].append(f"{message}")
        # Write the JSON
        if self.write_json(json_file, _dict):
            return True
        return False

    async def get_all_info(self):
        json_files_path = await self.async_wrap(glob.glob)(
            f"./{self.video_dir}/**/info.json",
            recursive=True)
        result = []
        for json_file in json_files_path:
            temp = await self.read_json(json_file)
            directory = "/".join(json_file.split("/")[:-1])
            temp["video_directory"] = directory
            try:
                temp["video_file_name"] = glob.glob(
                    f"{directory}/1.*")[0].split("/")[-1]
            except IndexError:
                temp["video_file_name"] = None
            result.append(temp)
        return result

    async def get_encode_tasks(self):
        video_info = await self.get_all_info()
        result = []
        for info in video_info:
            if len(info["encode_tasks"]) > 0:
                result.append(info)
        return result

    async def list_video_id(self, year, cid):
        _video_dir = "/".join([self.video_dir, str(year), cid])
        temp = await self.async_wrap(glob.glob)(f"{_video_dir}/*")
        return [video_id.split("/")[-1] for video_id in temp]

    async def list_link(self, year, cid):
        _video_dir = "/".join([self.video_dir, str(year), cid])
        temp = await self.async_wrap(glob.glob)(f"{_video_dir}/*")
        result = {}
        for link_path in temp:
            json_file = link_path + "/info.json"
            _dict = await self.read_json(json_file)
            if not _dict:
                pass
            else:
                result[link_path.split("/")[-1]] = _dict
        return result


database = database_class()
2.71875
3
python/OpenGeoTile.py
scoofy/open-geotiling
0
2816
<gh_stars>0 from openlocationcode import openlocationcode as olc from enum import Enum import math, re class TileSize(Enum): ''' An area of 20° x 20°. The side length of this tile varies with its location on the globe, but can be up to approximately 2200km. Tile addresses will be 2 characters long.''' GLOBAL = (2, 20.0) ''' An area of 1° x 1°. The side length of this tile varies with its location on the globe, but can be up to approximately 110km. Tile addresses will be 4 characters long.''' REGION = (4, 1.0) ''' An area of 0.05° x 0.05°. The side length of this tile varies with its location on the globe, but can be up to approximately 5.5km. Tile addresses will be 6 characters long.''' DISTRICT = (6, 0.05) ''' An area of 0.0025° x 0.0025°. The side length of this tile varies with its location on the globe, but can be up to approximately 275m. Tile addresses will be 8 characters long.''' NEIGHBORHOOD = (8, 0.0025) ''' An area of 0.000125° x 0.000125°. The side length of this tile varies with its location on the globe, but can be up to approximately 14m. Tile addresses will be 10 characters long.''' PINPOINT = (10, 0.000125) def __init__(self, code_length, coordinate_increment): self.code_length = code_length self.coordinate_increment = coordinate_increment def getCodeLength(self): '''get 0th value''' return self.code_length def getCoordinateIncrement(self): '''get 1th value''' return self.coordinate_increment # Copy from OpenLocationCode.java # A separator used to break the code into two parts to aid memorability. SEPARATOR = '+' # Copy from OpenLocationCode.java # The character used to pad codes. PADDING_CHARACTER = '0' PADDING_2 = "00" PADDING_4 = "0000" PADDING_6 = "000000" CODE_ALPHABET = olc.CODE_ALPHABET_ BASE_20_SET = {x+y for x in CODE_ALPHABET for y in CODE_ALPHABET} BASE_20_BORDER_SET = {x for x in BASE_20_SET if x[0] in ['2', 'X'] or x[1] in ['2', 'X']} NORTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] == 'X'} EAST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] == 'X'} SOUTH_DIGITS = {x for x in BASE_20_BORDER_SET if x[0] == '2'} WEST_DIGITS = {x for x in BASE_20_BORDER_SET if x[1] == '2'} memoized_digit_dict = { "N1": NORTH_DIGITS, "E1": EAST_DIGITS, "S1": SOUTH_DIGITS, "W1": WEST_DIGITS, } def is_padded(plus_code): return plus_code.find(PADDING_CHARACTER) != -1 def is_tile_address(plus_code): return plus_code.find(SEPARATOR) == -1 def return_code_of_tile_size(too_precise_plus_code, desired_tile_size): code = too_precise_plus_code if not is_tile_address(code): code = code.replace(SEPARATOR, '') if is_padded(code): if code.find(PADDING_CHARACTER) < desired_tile_size.getCodeLength(): raise Exception("OLC padding larger than allowed by desired_tile_size") code_address = code[:desired_tile_size.getCodeLength()] full_length = TileSize.PINPOINT.getCodeLength() code = code_address + ("0" * (full_length - len(code_address))) if desired_tile_size == TileSize.PINPOINT: code = code[:-2] + SEPARATOR + code[-2:] else: code = code[:-2] + SEPARATOR return code def return_set_of_subaddresses(set_of_addresses): for address in set_of_addresses: if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible size ''' return None return {address+base for address in set_of_addresses for base in BASE_20_SET} class OpenGeoTile(): ''' /** * A wrapper around an {@code OpenLocationCode} object, focusing on the area identified by a prefix * of the given OpenLocationCode. 
* * Using this wrapper class allows to determine whether two locations are in the same or adjacent * "tiles", to determine all neighboring tiles of a given one, to calculate a distance in tiles etc. * * Open Location Code is a technology developed by Google and licensed under the Apache License 2.0. * For more information, see https://github.com/google/open-location-code * * @author <NAME> * @version 0.1.0 */ Ported by scoofy on 08.31.21 ''' def __init__(self, code=None, tile_size=None, lat=None, long=None, ): if not (code or (code and tile_size) or (lat and long)): raise Exception("Invalid OpenGeoTile constructor arguments") if lat and long: self.constructTileFromLatLong(lat, long, tile_size) elif code and tile_size: self.constructTileFromCodeAndSize(code, tile_size) elif code: if is_tile_address(code): self.constructTileFromTileAddress(code) else: self.constructTileFromCode(code) self.tile_address = self.code.replace(SEPARATOR, "")[0: self.tile_size.getCodeLength()] def constructTileFromCode(self, plus_code): '''/** * Creates a new OpenGeoTile from an existing * {@link com.google.openlocationcode.OpenLocationCode}. * @param olc OpenLocationCode for the current location. This can be a padded code, in which * case the resulting OpenGeoTile will have a larger TileSize. * @throws IllegalArgumentException if olc is not a full code */''' if not olc.isFull(plus_code): raise Exception("Only full OLC supported. Use olc.recoverNearest().") self.code = plus_code.upper() if is_padded(plus_code): code_length = plus_code.find(PADDING_CHARACTER) else: code_length = min(len(plus_code)-1, 10) if code_length == TileSize.GLOBAL.getCodeLength(): self.tile_size = TileSize.GLOBAL elif code_length == TileSize.REGION.getCodeLength(): self.tile_size = TileSize.REGION elif code_length == TileSize.DISTRICT.getCodeLength(): self.tile_size = TileSize.DISTRICT elif code_length == TileSize.NEIGHBORHOOD.getCodeLength(): self.tile_size = TileSize.NEIGHBORHOOD elif code_length == TileSize.PINPOINT.getCodeLength(): self.tile_size = TileSize.PINPOINT else: raise Exception("Too precise, sort this later") def constructTileFromCodeAndSize(self, plus_code, tile_size): ''' Creates a new OpenGeoTile from an existing {@link com.google.openlocationcode.OpenLocationCode}. @param olc OpenLocationCode for the current location @param tile_size tile size to use for this OpenGeoTile @throws IllegalArgumentException when trying to pass a short (non-full) OLC, or if OLC has too much padding for given tile_size ''' if not olc.isFull(plus_code): raise Exception("Only full OLC supported. Use recover().") modified_plus_code = return_code_of_tile_size(plus_code, tile_size) self.code = modified_plus_code.upper() self.tile_size = tile_size def constructTileFromLatLong(self, lat: float, long: float, tile_size=None): '''/** * Creates a new OpenGeoTile from lat/long coordinates. * @param latitude latitude of the location * @param longitude longitude of the location * @param tile_size tile size to use for this OpenGeoTile * @throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(double, double, int)} */''' if not tile_size: tile_size = TileSize.PINPOINT self.code = olc.encode(lat, long, tile_size.getCodeLength()).upper() self.tile_size = tile_size def constructTileFromTileAddress(self, tileAddress): '''/** * Creates a new OpenGeoTile from a tile address. 
* @param tileAddress a tile address is a [2/4/6/8/10]-character string that corresponds to a * valid {@link com.google.openlocationcode.OpenLocationCode} after removing * '+' and an additional number of trailing characters; tile size is * determined by the length of this address * @throws IllegalArgumentException passed through from * {@link OpenLocationCode#OpenLocationCode(String)} or thrown if tileAddress is of * invalid length */''' detectedTileSize = None olcBuilder = "" if len(tileAddress) == TileSize.GLOBAL.getCodeLength(): detectedTileSize = TileSize.GLOBAL olcBuilder += tileAddress + PADDING_6 + SEPARATOR if len(tileAddress) == TileSize.REGION.getCodeLength(): detectedTileSize = TileSize.REGION olcBuilder += tileAddress + PADDING_4 + SEPARATOR if len(tileAddress) == TileSize.DISTRICT.getCodeLength(): detectedTileSize = TileSize.DISTRICT olcBuilder += tileAddress + PADDING_2 + SEPARATOR if len(tileAddress) == TileSize.NEIGHBORHOOD.getCodeLength(): detectedTileSize = TileSize.NEIGHBORHOOD olcBuilder += tileAddress + SEPARATOR if len(tileAddress) == TileSize.PINPOINT.getCodeLength(): detectedTileSize = TileSize.PINPOINT olcBuilder += tileAddress[0:8] + SEPARATOR + tileAddress[8:10] if detectedTileSize == None: print(tileAddress) raise Exception("Invalid tile address") self.tile_size = detectedTileSize self.code = olcBuilder.upper() def getWrappedOpenLocationCode(self): # this code is effectively redundant as python has no wrapping '''/** * The exact {@link com.google.openlocationcode.OpenLocationCode} wrapped by this OpenGeoTile. * For the plus code of the whole tile, see {@link #getTileOpenLocationCode()}. * @return the exact plus code wrapped by this OpenGeoTile */''' return self.code def returnCode(self): return self.code def getTileSize(self): '''/** * Get the {@link TileSize} of this OpenGeoTile. * @return the {@link TileSize} of this OpenGeoTile */''' return self.tile_size def getTileAddress(self): '''/** * A tile address is a string of length 2, 4, 6, 8, or 10, which corresponds to a valid * {@link com.google.openlocationcode.OpenLocationCode} after padding with an appropriate * number of '0' and '+' characters. Example: Address "CVXW" corresponds to OLC "CVXW0000+" * @return the tile address of this OpenGeoTile; */''' return self.tile_address def getTileAddressPrefix(self): '''/** * The prefix of a tile address is the address of the next biggest tile at this location. * @return this tile's address with the final two characters removed. In case of a GLOBAL tile, * returns the empty string. */''' if self.tile_size == TileSize.GLOBAL: return "" else: return self.getTileAddress()[0: self.tile_size.getCodeLength()-2] def getParentTileAddress(self): return self.getTileAddressPrefix() def getTileOpenLocationCode(self): # this code is redundant '''/** * The full {@link com.google.openlocationcode.OpenLocationCode} for this tile. Other than * {@link #getWrappedOpenLocationCode()}, this will return a full plus code for the whole tile. * @return a plus code for the whole tile, probably padded with '0' characters */''' return self.getWrappedOpenLocationCode() def getNeighbors(self, eight_point_direction=None): '''/** * Get an array of the typically 8 neighboring tiles of the same size. * @return an array of the typically 8 neighboring tiles of the same size; * may return less than 8 neighbors for tiles near the poles. 
*/''' # deltas = [20.0, 1.0, 0.05, 0.0025, 0.000125] delta = self.getTileSize().getCoordinateIncrement() code_area = olc.decode(self.code) latitude = code_area.latitudeCenter longitude = code_area.longitudeCenter '''directions_list included to keep ordered data''' directions_list = ["NW", "N", "NE", "E", "SE", "S", "SW", "W"] direction_dict = { "NW": [+1, -1], "N": [+1, 0], "NE": [+1, +1], "W": [ 0, -1], "E": [ 0, +1], "SW": [-1, -1], "S": [-1, 0], "SE": [-1, +1], } #lat_diff = [+1, +1, +1, 0, -1, -1, -1, 0] #long_diff = [-1, 0, +1, +1, +1, 0, -1, -1] if not type(eight_point_direction) in [type(None), list, str]: raise Exception("eight_point_direction must be of type list or str") if eight_point_direction is None: directions = directions_list elif isinstance(eight_point_direction, str): directions = [] if eight_point_direction.upper() in directions_list: directions.append(eight_point_direction.upper()) else: ''' this list construction keeps directions in the order above ''' uppercase_input_directions = [d.upper() for d in eight_point_direction] directions = [direction for direction in directions_list if direction in uppercase_input_directions] neighbors = set() for direction in directions: lat_diff, long_diff = direction_dict.get(direction) ''' //OLC constructor clips and normalizes, //so we don't have to deal with invalid lat/long values directly''' neighborLatitude = latitude + (delta * lat_diff) neighborLongitude = longitude + (delta * long_diff) new_OpenGeoTile = OpenGeoTile(lat=neighborLatitude, long=neighborLongitude, tile_size=self.getTileSize()) if not self.isSameTile(new_OpenGeoTile): '''//don't add tiles that are the same as this one due to clipping near the poles''' neighbors.add(new_OpenGeoTile) return neighbors def isSameTile(self, potentialSameTile): '''/** * Check if a tile describes the same area as this one. * @param potentialSameTile the OpenGeoTile to check * @return true if tile sizes and addresses are the same; false if not */''' if potentialSameTile.getTileSize() != self.getTileSize(): return False return potentialSameTile.getTileAddress() == self.getTileAddress() def isNeighbor(self, potentialNeighbor): '''/** * Check if a tile is neighboring this one. * @param potentialNeighbor the OpenGeoTile to check * @return true if this and potentialNeighbor are adjacent (8-neighborhood); * false if not */''' if potentialNeighbor.getTileSize() == self.getTileSize(): '''//avoid iterating over neighbors for same tile''' if self.isSameTile(potentialNeighbor): return False neighbors = self.getNeighbors() for neighbor in neighbors: if potentialNeighbor.isSameTile(neighbor): return True return False else: '''//tiles of different size are adjacent if at least one neighbor of the smaller tile, //but not the smaller tile itself, is contained within the bigger tile''' if potentialNeighbor.getTileSize().getCodeLength() > self.tile_size.getCodeLength(): smallerTile = potentialNeighbor biggerTile = self else: smallerTile = self biggerTile = potentialNeighbor if biggerTile.contains(smallerTile): return False neighbors = smallerTile.getNeighbors() for neighbor in neighbors: if biggerTile.contains(neighbor): return True return False def contains(self, potentialMember): '''/** * Check if this tile contains another one. 
* @param potentialMember the OpenGeoTile to check * @return true if the area potentialMember falls within the area of this tile, including cases * where both are the same; false if not */''' # //if A contains B, then B's address has A's address as a prefix return potentialMember.getTileAddress().startswith(self.getTileAddress()) def getManhattanTileDistanceTo(self, otherTile): '''/** * Calculates the Manhattan (city block) distance between this and another tile of the same size. * @param otherTile another tile of the same size as this one * @return an integer value corresponding to the number of tiles of the given size that need to * be traversed getting from one to the other tile * @throws IllegalArgumentException thrown if otherTile has different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") return self.getLatitudinalTileDistance(otherTile, True) + self.getLongitudinalTileDistance(otherTile, True) def getChebyshevTileDistanceTo(self, otherTile): '''/** * Calculates the Chebyshev (chessboard) distance between this and another tile of the same size. * @param otherTile another tile of the same size as this one * @return an integer value corresponding to the number of tiles of the given size that need to * be traversed getting from one to the other tile * @throws IllegalArgumentException thrown if otherTile has different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") return max(self.getLatitudinalTileDistance(otherTile, True), self.getLongitudinalTileDistance(otherTile, True)) def getDirection(self, otherTile): '''/** * Returns the approximate direction of the other tile relative to this. The return value can * have a large margin of error, especially for big or far away tiles, so this should only be * interpreted as a very rough approximation and used as such. 
* @param otherTile another tile of the same size as this one * @return an angle in radians, 0 being an eastward direction, +/- PI being westward direction * @throws IllegalArgumentException thrown if otherTile has different {@link TileSize} */''' if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") xDiff = int(self.getLongitudinalTileDistance(otherTile, False)) yDiff = int(self.getLatitudinalTileDistance(otherTile, False)) return math.atan2(yDiff, xDiff) def getEightPointDirectionOfNeighbor(self, neighborTile): ''' returns neighbor's direction, to assist in expanding tile areas ''' if not self.isNeighbor(neighborTile): raise Exception("neighborTile must be neighbor") if neighborTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") self_tile_x = self.getTileAddress()[-2] self_tile_y = self.getTileAddress()[-1] other_tile_x = neighborTile.getTileAddress()[-2] other_tile_y = neighborTile.getTileAddress()[-1] direction = "" north_south = None if self_tile_x != other_tile_x: ''' one tile is above the other ''' if CODE_ALPHABET.find(self_tile_x) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_x) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_x) == 0: ''' other tile is above -> neighborTile is north ''' direction = direction + 'N' else: direction = direction + 'S' else: if CODE_ALPHABET.find(self_tile_x) < CODE_ALPHABET.find(other_tile_x): ''' other tile is above -> neighborTile is north ''' direction = direction + 'N' else: ''' other tile is below -> neighborTile is south ''' direction = direction + 'S' if self_tile_y != other_tile_y: ''' one tile is above the other ''' if CODE_ALPHABET.find(self_tile_y) in [0, len(CODE_ALPHABET)-1] and CODE_ALPHABET.find(other_tile_y) in [0, len(CODE_ALPHABET)-1]: ''' ajacent parent tiles ''' if CODE_ALPHABET.find(other_tile_y) == 0: ''' other tile is right -> neighborTile is east ''' direction = direction + 'E' else: ''' other tile is left -> neighborTile is west ''' direction = direction + 'W' else: if CODE_ALPHABET.find(self_tile_y) < CODE_ALPHABET.find(other_tile_y): ''' other tile is right -> neighborTile is east ''' direction = direction + 'E' else: ''' other tile is left -> neighborTile is west ''' direction = direction + 'W' return direction def getCharacterIndex(self, c): '''//following definitions copied from OpenLocationCode.java''' index = "23456789CFGHJMPQRVWX".find(c.upper()) if index == -1: raise Exception("Character does not exist in alphabet") return index def characterDistance(self, c1, c2): return self.getCharacterIndex(c1) - self.getCharacterIndex(c2) def getLatitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") numIterations = self.tile_size.getCodeLength()/2 #1..5 tileDistance = 0 for i in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2] c2 = otherTile.getTileAddress()[i*2] tileDistance += self.characterDistance(c1,c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def getLongitudinalTileDistance(self, otherTile, absolute_value_bool): if otherTile.getTileSize() != self.getTileSize(): raise Exception("Tile sizes don't match") numIterations = self.tile_size.getCodeLength()/2 #; //1..5 tileDistance = 0 for i in range(int(numIterations)): tileDistance *= 20 c1 = self.getTileAddress()[i*2 + 1] c2 = otherTile.getTileAddress()[i*2 + 1] if i == 0: '''//for the first 
longitudinal value, we need to take care of wrapping - basically, //if it's shorter to go the other way around, do so''' firstDiff = self.characterDistance(c1, c2) NUM_CHARACTERS_USED = 18 #; //360°/20° = 18 if abs(firstDiff) > NUM_CHARACTERS_USED/2: if firstDiff > 0: firstDiff -= NUM_CHARACTERS_USED else: firstDiff += NUM_CHARACTERS_USED tileDistance += firstDiff else: tileDistance += self.characterDistance(c1, c2) if absolute_value_bool: return abs(tileDistance) return tileDistance def returnSetOfSubtiles(self, desired_tile_size=TileSize.PINPOINT): if self.tile_size.getCodeLength() == desired_tile_size.getCodeLength(): ''' tile is desired size ''' return self elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise Exception("OLC padding larger than allowed by desired_tile_size") iterations_needed = desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2 address_set = set([self.getTileAddress()]) for i in range(int(iterations_needed)): address_set = return_set_of_subaddresses(address_set) tile_set = {OpenGeoTile(address) for address in address_set} return tile_set def returnSetOfBorderSubtiles(self, desired_tile_size=TileSize.PINPOINT, eight_point_direction=None): address = self.getTileAddress() if len(address) == TileSize.PINPOINT.getCodeLength(): ''' address already minimum possible size ''' return None elif self.tile_size.getCodeLength() > desired_tile_size.getCodeLength(): 'desired_tile_size is too big' raise Exception("OLC padding larger than allowed by desired_tile_size") iterations_needed = int(desired_tile_size.getCodeLength()/2 - self.tile_size.getCodeLength()/2) north_set = set() east_set = set() south_set = set() west_set = set() if isinstance(eight_point_direction, str): eight_point_direction = eight_point_direction.upper() set_of_border_subaddresses = set() if eight_point_direction is None: ''' all borders ''' ''' traveling salesman problem ''' ''' let's do it once, and try to reduce by swaping digits ''' all_border_set = memoized_digit_dict.get(f"A{iterations_needed}") if not all_border_set: north_base_set = memoized_digit_dict.get(f"N{iterations_needed}") if not north_base_set: self.memoizeDigitDict("N", iterations_needed) north_set = memoized_digit_dict.get(f"N{iterations_needed}") east_set = memoized_digit_dict.get(f"E{iterations_needed}", set()) south_set = memoized_digit_dict.get(f"S{iterations_needed}", set()) west_set = memoized_digit_dict.get(f"W{iterations_needed}", set()) east_exists = east_set != set() south_exists = south_set != set() west_exists = west_set != set() for base in north_set: east_base = "" south_base = "" west_base = "" base_tuple_list = re.findall('..', base) ''' north will be Xd east dX south 2d west d2''' for n_tuple in base_tuple_list: relevant_digit = n_tuple[1] if not east_exists: east_base += relevant_digit + "X" if not south_exists: south_base += "2" + relevant_digit if not west_exists: west_base += relevant_digit + "2" if not east_exists: east_set.add(east_base) if not south_exists: south_set.add(south_base) if not west_exists: west_set.add(west_base) memoized_digit_dict[f"E{iterations_needed}"] = east_set memoized_digit_dict[f"S{iterations_needed}"] = south_set memoized_digit_dict[f"W{iterations_needed}"] = west_set all_border_set = north_set | east_set | south_set | west_set memoized_digit_dict[f"A{iterations_needed}"] = all_border_set return {OpenGeoTile(address+base) for base in all_border_set} elif len(eight_point_direction) == 1: ''' North, South, East, or West ''' 
base_set = memoized_digit_dict.get(f"{eight_point_direction}{iterations_needed}") if not base_set: self.memoizeDigitDict(eight_point_direction, iterations_needed) base_set = memoized_digit_dict.get(f'{eight_point_direction}{iterations_needed}') return {OpenGeoTile(address + base) for base in base_set} elif len(eight_point_direction) == 2: ''' NW, NE, SW, SE... should return only one tile''' ordinal_digit_dict = { 'NW': 'X2', 'NE': 'XX', 'SE': '2X', 'SW': '22' } base = '' for i in range(iterations_needed): base += ordinal_digit_dict.get(eight_point_direction) return {OpenGeoTile(address + base)} def memoizeDigitDict(self, eight_point_direction, iterations_needed): base_set = memoized_digit_dict.get(f"{eight_point_direction}{iterations_needed}") if not base_set: quickest_i = 0 for i in reversed(range(iterations_needed)): if memoized_digit_dict.get(f"{eight_point_direction}{i + 1}"): quickest_i = i break for i in range(quickest_i, iterations_needed): existing_bases = memoized_digit_dict.get(f"{eight_point_direction}{i + 1}") next_set = {existing_base + base for existing_base in existing_bases for base in memoized_digit_dict.get(f"{eight_point_direction}1")} memoized_digit_dict[f"{eight_point_direction}{i + 2}"] = next_set
3
3
deep-rl/lib/python2.7/site-packages/OpenGL/arrays/arraydatatype.py
ShujaKhalid/deep-rl
87
2817
"""Array data-type implementations (abstraction points for GL array types""" import ctypes import OpenGL from OpenGL.raw.GL import _types from OpenGL import plugins from OpenGL.arrays import formathandler, _arrayconstants as GL_1_1 from OpenGL import logs _log = logs.getLog( 'OpenGL.arrays.arraydatatype' ) from OpenGL import acceleratesupport ADT = None if acceleratesupport.ACCELERATE_AVAILABLE: try: from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT except ImportError as err: _log.warn( "Unable to load ArrayDatatype accelerator from OpenGL_accelerate" ) if ADT is None: # Python-coded version class HandlerRegistry( dict ): GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays'] def __init__( self, plugin_match ): self.match = plugin_match self.output_handler = None self.preferredOutput = None self.all_output_handlers = [] def __call__( self, value ): """Lookup of handler for given value""" try: typ = value.__class__ except AttributeError as err: typ = type(value) handler = self.get( typ ) if not handler: if hasattr( typ, '__mro__' ): for base in typ.__mro__: handler = self.get( base ) if not handler: handler = self.match( base ) if handler: handler = handler.load() if handler: handler = handler() if handler: self[ typ ] = handler if hasattr( handler, 'registerEquivalent' ): handler.registerEquivalent( typ, base ) return handler raise TypeError( """No array-type handler for type %s.%s (value: %s) registered"""%( typ.__module__, type.__name__, repr(value)[:50] ) ) return handler def handler_by_plugin_name( self, name ): plugin = plugins.FormatHandler.by_name( name ) if plugin: try: return plugin.load() except ImportError as err: return None else: raise RuntimeError( 'No handler of name %s found'%(name,)) def get_output_handler( self ): """Fast-path lookup for output handler object""" if self.output_handler is None: if self.preferredOutput is not None: self.output_handler = self.handler_by_plugin_name( self.preferredOutput ) if not self.output_handler: for preferred in self.GENERIC_OUTPUT_PREFERENCES: self.output_handler = self.handler_by_plugin_name( preferred ) if self.output_handler: break if not self.output_handler: raise RuntimeError( """Unable to find any output handler at all (not even ctypes/numpy ones!)""" ) return self.output_handler def register( self, handler, types=None ): """Register this class as handler for given set of types""" if not isinstance( types, (list,tuple)): types = [ types ] for type in types: self[ type ] = handler if handler.isOutput: self.all_output_handlers.append( handler ) def registerReturn( self, handler ): """Register this handler as the default return-type handler""" if isinstance( handler, (str,unicode)): self.preferredOutput = handler self.output_handler = None else: self.preferredOutput = None self.output_handler = handler GLOBAL_REGISTRY = HandlerRegistry( plugins.FormatHandler.match) formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY class ArrayDatatype( object ): """Mix-in for array datatype classes The ArrayDatatype marker essentially is used to mark a particular argument as having an "array" type, which means that it is eligible for handling via the arrays sub-package and its registered handlers. 
""" typeConstant = None handler = GLOBAL_REGISTRY getHandler = GLOBAL_REGISTRY.__call__ returnHandler = GLOBAL_REGISTRY.get_output_handler isAccelerated = False @classmethod def getRegistry( cls ): """Get our handler registry""" return cls.handler def from_param( cls, value, typeConstant=None ): """Given a value in a known data-pointer type, convert to a ctypes pointer""" return cls.getHandler(value).from_param( value, cls.typeConstant ) from_param = classmethod( logs.logOnFail( from_param, _log ) ) def dataPointer( cls, value ): """Given a value in a known data-pointer type, return long for pointer""" try: return cls.getHandler(value).dataPointer( value ) except Exception as err: _log.warn( """Failure in dataPointer for %s instance %s""", type(value), value, ) raise dataPointer = classmethod( logs.logOnFail( dataPointer, _log ) ) def voidDataPointer( cls, value ): """Given value in a known data-pointer type, return void_p for pointer""" pointer = cls.dataPointer( value ) try: return ctypes.c_void_p(pointer) except TypeError as err: return pointer voidDataPointer = classmethod( logs.logOnFail( voidDataPointer, _log ) ) def typedPointer( cls, value ): """Return a pointer-to-base-type pointer for given value""" return ctypes.cast( cls.dataPointer(value), ctypes.POINTER( cls.baseType )) typedPointer = classmethod( typedPointer ) def asArray( cls, value, typeCode=None ): """Given a value, convert to preferred array representation""" return cls.getHandler(value).asArray( value, typeCode or cls.typeConstant ) asArray = classmethod( logs.logOnFail( asArray, _log ) ) def arrayToGLType( cls, value ): """Given a data-value, guess the OpenGL type of the corresponding pointer Note: this is not currently used in PyOpenGL and may be removed eventually. """ return cls.getHandler(value).arrayToGLType( value ) arrayToGLType = classmethod( logs.logOnFail( arrayToGLType, _log ) ) def arraySize( cls, value, typeCode = None ): """Given a data-value, calculate dimensions for the array (number-of-units)""" return cls.getHandler(value).arraySize( value, typeCode or cls.typeConstant ) arraySize = classmethod( logs.logOnFail( arraySize, _log ) ) def unitSize( cls, value, typeCode=None ): """Determine unit size of an array (if possible) Uses our local type if defined, otherwise asks the handler to guess... """ return cls.getHandler(value).unitSize( value, typeCode or cls.typeConstant ) unitSize = classmethod( logs.logOnFail( unitSize, _log ) ) def zeros( cls, dims, typeCode=None ): """Allocate a return array of the given dimensions filled with zeros""" return cls.returnHandler().zeros( dims, typeCode or cls.typeConstant ) zeros = classmethod( logs.logOnFail( zeros, _log ) ) def dimensions( cls, value ): """Given a data-value, get the dimensions (assumes full structure info)""" return cls.getHandler(value).dimensions( value ) dimensions = classmethod( logs.logOnFail( dimensions, _log ) ) def arrayByteCount( cls, value ): """Given a data-value, try to determine number of bytes it's final form occupies For most data-types this is arraySize() * atomic-unit-size """ return cls.getHandler(value).arrayByteCount( value ) arrayByteCount = classmethod( logs.logOnFail( arrayByteCount, _log ) ) # the final array data-type classes... 
class GLclampdArray( ArrayDatatype, ctypes.POINTER(_types.GLclampd )): """Array datatype for GLclampd types""" baseType = _types.GLclampd typeConstant = _types.GL_DOUBLE class GLclampfArray( ArrayDatatype, ctypes.POINTER(_types.GLclampf )): """Array datatype for GLclampf types""" baseType = _types.GLclampf typeConstant = _types.GL_FLOAT class GLfloatArray( ArrayDatatype, ctypes.POINTER(_types.GLfloat )): """Array datatype for GLfloat types""" baseType = _types.GLfloat typeConstant = _types.GL_FLOAT class GLdoubleArray( ArrayDatatype, ctypes.POINTER(_types.GLdouble )): """Array datatype for GLdouble types""" baseType = _types.GLdouble typeConstant = _types.GL_DOUBLE class GLbyteArray( ArrayDatatype, ctypes.POINTER(_types.GLbyte )): """Array datatype for GLbyte types""" baseType = _types.GLbyte typeConstant = _types.GL_BYTE class GLcharArray( ArrayDatatype, ctypes.c_char_p): """Array datatype for ARB extension pointers-to-arrays""" baseType = _types.GLchar typeConstant = _types.GL_BYTE GLcharARBArray = GLcharArray class GLshortArray( ArrayDatatype, ctypes.POINTER(_types.GLshort )): """Array datatype for GLshort types""" baseType = _types.GLshort typeConstant = _types.GL_SHORT class GLintArray( ArrayDatatype, ctypes.POINTER(_types.GLint )): """Array datatype for GLint types""" baseType = _types.GLint typeConstant = _types.GL_INT class GLubyteArray( ArrayDatatype, ctypes.POINTER(_types.GLubyte )): """Array datatype for GLubyte types""" baseType = _types.GLubyte typeConstant = _types.GL_UNSIGNED_BYTE GLbooleanArray = GLubyteArray class GLushortArray( ArrayDatatype, ctypes.POINTER(_types.GLushort )): """Array datatype for GLushort types""" baseType = _types.GLushort typeConstant = _types.GL_UNSIGNED_SHORT class GLuintArray( ArrayDatatype, ctypes.POINTER(_types.GLuint )): """Array datatype for GLuint types""" baseType = _types.GLuint typeConstant = _types.GL_UNSIGNED_INT class GLint64Array( ArrayDatatype, ctypes.POINTER(_types.GLint64 )): """Array datatype for GLuint types""" baseType = _types.GLint64 typeConstant = None # TODO: find out what this should be! 
class GLuint64Array( ArrayDatatype, ctypes.POINTER(_types.GLuint64 )): """Array datatype for GLuint types""" baseType = _types.GLuint64 typeConstant = _types.GL_UNSIGNED_INT64 class GLenumArray( ArrayDatatype, ctypes.POINTER(_types.GLenum )): """Array datatype for GLenum types""" baseType = _types.GLenum typeConstant = _types.GL_UNSIGNED_INT class GLsizeiArray( ArrayDatatype, ctypes.POINTER(_types.GLsizei )): """Array datatype for GLsizei types""" baseType = _types.GLsizei typeConstant = _types.GL_INT class GLvoidpArray( ArrayDatatype, ctypes.POINTER(_types.GLvoid )): """Array datatype for GLenum types""" baseType = _types.GLvoidp typeConstant = _types.GL_VOID_P else: # Cython-coded array handler _log.info( 'Using accelerated ArrayDatatype' ) ArrayDatatype = ADT( None, None ) GLclampdArray = ADT( GL_1_1.GL_DOUBLE, _types.GLclampd ) GLclampfArray = ADT( GL_1_1.GL_FLOAT, _types.GLclampf ) GLdoubleArray = ADT( GL_1_1.GL_DOUBLE, _types.GLdouble ) GLfloatArray = ADT( GL_1_1.GL_FLOAT, _types.GLfloat ) GLbyteArray = ADT( GL_1_1.GL_BYTE, _types.GLbyte ) GLcharArray = GLcharARBArray = ADT( GL_1_1.GL_BYTE, _types.GLchar ) GLshortArray = ADT( GL_1_1.GL_SHORT, _types.GLshort ) GLintArray = ADT( GL_1_1.GL_INT, _types.GLint ) GLubyteArray = GLbooleanArray = ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte ) GLushortArray = ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort ) GLuintArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint ) GLint64Array = ADT( None, _types.GLint64 ) GLuint64Array = ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 ) GLenumArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum ) GLsizeiArray = ADT( GL_1_1.GL_INT, _types.GLsizei ) GLvoidpArray = ADT( _types.GL_VOID_P, _types.GLvoidp ) GL_CONSTANT_TO_ARRAY_TYPE = { GL_1_1.GL_DOUBLE : GLclampdArray, GL_1_1.GL_FLOAT : GLclampfArray, GL_1_1.GL_FLOAT : GLfloatArray, GL_1_1.GL_DOUBLE : GLdoubleArray, GL_1_1.GL_BYTE : GLbyteArray, GL_1_1.GL_SHORT : GLshortArray, GL_1_1.GL_INT : GLintArray, GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray, GL_1_1.GL_UNSIGNED_SHORT : GLushortArray, GL_1_1.GL_UNSIGNED_INT : GLuintArray, #GL_1_1.GL_UNSIGNED_INT : GLenumArray, }
2.46875
2
tensorflow_probability/python/build_defs.bzl
jbergmanster/probability
0
2818
<filename>tensorflow_probability/python/build_defs.bzl # Copyright 2019 The TensorFlow Probability Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Build defs for TF/NumPy/JAX-variadic libraries & tests.""" # [internal] load python3.bzl NO_REWRITE_NEEDED = [ "internal:all_util", "internal:docstring_util", "internal:reparameterization", "layers", "platform_google", ] REWRITER_TARGET = "//tensorflow_probability/substrates/meta:rewrite" RUNFILES_ROOT = "tensorflow_probability/" def _substrate_src(src, substrate): """Rewrite a single src filename for the given substrate.""" return "_{}/_generated_{}".format(substrate, src) def _substrate_srcs(srcs, substrate): """Rewrite src filenames for the given substrate.""" return [_substrate_src(src, substrate) for src in srcs] def _substrate_dep(dep, substrate): """Convert a single dep to one appropriate for the given substrate.""" dep_to_check = dep if dep.startswith(":"): dep_to_check = "{}{}".format(native.package_name(), dep) for no_rewrite in NO_REWRITE_NEEDED: if no_rewrite in dep_to_check: return dep if "tensorflow_probability/" in dep or dep.startswith(":"): if "internal/backend" in dep: return dep if ":" in dep: return "{}.{}".format(dep, substrate) return "{}:{}.{}".format(dep, dep.split("/")[-1], substrate) return dep def _substrate_deps(deps, substrate): """Convert deps to those appropriate for the given substrate.""" new_deps = [_substrate_dep(dep, substrate) for dep in deps] backend_dep = "//tensorflow_probability/python/internal/backend/{}".format(substrate) if backend_dep not in new_deps: new_deps.append(backend_dep) return new_deps # This is needed for the transitional period during which we have the internal # py2and3_test and py_test comingling in BUILD files. Otherwise the OSS export # rewrite process becomes irreversible. def py3_test(*args, **kwargs): """Internal/external reversibility, denotes py3-only vs py2+3 tests. Args: *args: Passed to underlying py_test. **kwargs: Passed to underlying py_test. srcs_version and python_version are added (with value `"PY3"`) if not specified. """ kwargs = dict(kwargs) if "srcs_version" not in kwargs: kwargs["srcs_version"] = "PY3" if "python_version" not in kwargs: kwargs["python_version"] = "PY3" native.py_test(*args, **kwargs) def _resolve_omit_dep(dep): """Resolves a `substrates_omit_deps` item to full target.""" if ":" not in dep: dep = "{}:{}".format(dep, dep.split("/")[-1]) if dep.startswith(":"): dep = "{}{}".format(native.package_name(), dep) return dep def _substrate_runfiles_symlinks_impl(ctx): """A custom BUILD rule to generate python runfiles symlinks. A custom build rule which adds runfiles symlinks for files matching a substrate genrule file pattern, i.e. `'_jax/_generated_normal.py'`. This rule will aggregate and pass along deps while adding the given symlinks to the runfiles structure. Build rule attributes: - substrate: One of 'jax' or 'numpy'; which substrate this applies to. 
- deps: A list of py_library labels. These are passed along. Args: ctx: Rule analysis context. Returns: Info objects to propagate deps and add runfiles symlinks. """ # Aggregate the depset inputs to resolve transitive dependencies. transitive_sources = [] uses_shared_libraries = [] imports = [] has_py2_only_sources = [] has_py3_only_sources = [] cc_infos = [] for dep in ctx.attr.deps: if PyInfo in dep: transitive_sources.append(dep[PyInfo].transitive_sources) uses_shared_libraries.append(dep[PyInfo].uses_shared_libraries) imports.append(dep[PyInfo].imports) has_py2_only_sources.append(dep[PyInfo].has_py2_only_sources) has_py3_only_sources.append(dep[PyInfo].has_py3_only_sources) # if PyCcLinkParamsProvider in dep: # DisableOnExport # cc_infos.append(dep[PyCcLinkParamsProvider].cc_info) # DisableOnExport if CcInfo in dep: cc_infos.append(dep[CcInfo]) # Determine the set of symlinks to generate. transitive_sources = depset(transitive = transitive_sources) runfiles_dict = {} substrate = ctx.attr.substrate file_substr = "_{}/_generated_".format(substrate) for f in transitive_sources.to_list(): if "tensorflow_probability" in f.dirname and file_substr in f.short_path: pre, post = f.short_path.split("/python/") out_path = "{}/substrates/{}/{}".format( pre, substrate, post.replace(file_substr, ""), ) runfiles_dict[RUNFILES_ROOT + out_path] = f # Construct the output structures to pass along Python srcs/deps/etc. py_info = PyInfo( transitive_sources = transitive_sources, uses_shared_libraries = any(uses_shared_libraries), imports = depset(transitive = imports), has_py2_only_sources = any(has_py2_only_sources), has_py3_only_sources = any(has_py3_only_sources), ) py_cc_link_info = cc_common.merge_cc_infos(cc_infos = cc_infos) py_runfiles = depset( transitive = [depset(transitive = [ dep[DefaultInfo].data_runfiles.files, dep[DefaultInfo].default_runfiles.files, ]) for dep in ctx.attr.deps], ) runfiles = DefaultInfo(runfiles = ctx.runfiles( transitive_files = py_runfiles, root_symlinks = runfiles_dict, )) return py_info, py_cc_link_info, runfiles # See documentation at: # https://docs.bazel.build/versions/3.4.0/skylark/rules.html substrate_runfiles_symlinks = rule( implementation = _substrate_runfiles_symlinks_impl, attrs = { "substrate": attr.string(), "deps": attr.label_list(), }, ) def multi_substrate_py_library( name, srcs = [], deps = [], substrates_omit_deps = [], jax_omit_deps = [], numpy_omit_deps = [], testonly = 0, srcs_version = "PY2AND3"): """A TFP `py_library` for each of TF, NumPy, and JAX. Args: name: The TF `py_library` name. NumPy and JAX libraries have '.numpy' and '.jax' appended. srcs: As with `py_library`. A `genrule` is used to rewrite srcs for NumPy and JAX substrates. deps: As with `py_library`. The list is rewritten to depend on substrate-specific libraries for substrate variants. substrates_omit_deps: List of deps to omit if those libraries are not rewritten for the substrates. jax_omit_deps: List of deps to omit for the JAX substrate. numpy_omit_deps: List of deps to omit for the NumPy substrate. testonly: As with `py_library`. srcs_version: As with `py_library`. 
""" native.py_library( name = name, srcs = srcs, deps = deps, srcs_version = srcs_version, testonly = testonly, ) remove_deps = [ "//third_party/py/tensorflow", "//third_party/py/tensorflow:tensorflow", ] trimmed_deps = [dep for dep in deps if (dep not in substrates_omit_deps and dep not in remove_deps)] resolved_omit_deps_numpy = [ _resolve_omit_dep(dep) for dep in substrates_omit_deps + numpy_omit_deps ] for src in srcs: native.genrule( name = "rewrite_{}_numpy".format(src.replace(".", "_")), srcs = [src], outs = [_substrate_src(src, "numpy")], cmd = "$(location {}) $(SRCS) --omit_deps={} > $@".format( REWRITER_TARGET, ",".join(resolved_omit_deps_numpy), ), tools = [REWRITER_TARGET], ) native.py_library( name = "{}.numpy.raw".format(name), srcs = _substrate_srcs(srcs, "numpy"), deps = _substrate_deps(trimmed_deps, "numpy"), srcs_version = srcs_version, testonly = testonly, ) # Add symlinks under tfp/substrates/numpy. substrate_runfiles_symlinks( name = "{}.numpy".format(name), substrate = "numpy", deps = [":{}.numpy.raw".format(name)], testonly = testonly, ) resolved_omit_deps_jax = [ _resolve_omit_dep(dep) for dep in substrates_omit_deps + jax_omit_deps ] jax_srcs = _substrate_srcs(srcs, "jax") for src in srcs: native.genrule( name = "rewrite_{}_jax".format(src.replace(".", "_")), srcs = [src], outs = [_substrate_src(src, "jax")], cmd = "$(location {}) $(SRCS) --omit_deps={} --numpy_to_jax > $@".format( REWRITER_TARGET, ",".join(resolved_omit_deps_jax), ), tools = [REWRITER_TARGET], ) native.py_library( name = "{}.jax.raw".format(name), srcs = jax_srcs, deps = _substrate_deps(trimmed_deps, "jax"), srcs_version = srcs_version, testonly = testonly, ) # Add symlinks under tfp/substrates/jax. substrate_runfiles_symlinks( name = "{}.jax".format(name), substrate = "jax", deps = [":{}.jax.raw".format(name)], testonly = testonly, ) def multi_substrate_py_test( name, size = "small", jax_size = None, numpy_size = None, srcs = [], deps = [], tags = [], numpy_tags = [], jax_tags = [], disabled_substrates = [], srcs_version = "PY2AND3", timeout = None, shard_count = None): """A TFP `py2and3_test` for each of TF, NumPy, and JAX. Args: name: Name of the `test_suite` which covers TF, NumPy and JAX variants of the test. Each substrate will have a dedicated `py2and3_test` suffixed with '.tf', '.numpy', or '.jax' as appropriate. size: As with `py_test`. jax_size: A size override for the JAX target. numpy_size: A size override for the numpy target. srcs: As with `py_test`. These will have a `genrule` emitted to rewrite NumPy and JAX variants, writing the test file into a subdirectory. deps: As with `py_test`. The list is rewritten to depend on substrate-specific libraries for substrate variants. tags: Tags global to this test target. NumPy also gets a `'tfp_numpy'` tag, and JAX gets a `'tfp_jax'` tag. A `f'_{name}'` tag is used to produce the `test_suite`. numpy_tags: Tags specific to the NumPy test. (e.g. `"notap"`). jax_tags: Tags specific to the JAX test. (e.g. `"notap"`). disabled_substrates: Iterable of substrates to disable, items from ["numpy", "jax"]. srcs_version: As with `py_test`. timeout: As with `py_test`. shard_count: As with `py_test`. 
""" name_tag = "_{}".format(name) tags = [t for t in tags] tags.append(name_tag) tags.append("multi_substrate") native.py_test( name = "{}.tf".format(name), size = size, srcs = srcs, main = "{}.py".format(name), deps = deps, tags = tags, srcs_version = srcs_version, timeout = timeout, shard_count = shard_count, ) if "numpy" not in disabled_substrates: numpy_srcs = _substrate_srcs(srcs, "numpy") native.genrule( name = "rewrite_{}_numpy".format(name), srcs = srcs, outs = numpy_srcs, cmd = "$(location {}) $(SRCS) > $@".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) py3_test( name = "{}.numpy".format(name), size = numpy_size or size, srcs = numpy_srcs, main = _substrate_src("{}.py".format(name), "numpy"), deps = _substrate_deps(deps, "numpy"), tags = tags + ["tfp_numpy"] + numpy_tags, srcs_version = srcs_version, python_version = "PY3", timeout = timeout, shard_count = shard_count, ) if "jax" not in disabled_substrates: jax_srcs = _substrate_srcs(srcs, "jax") native.genrule( name = "rewrite_{}_jax".format(name), srcs = srcs, outs = jax_srcs, cmd = "$(location {}) $(SRCS) --numpy_to_jax > $@".format(REWRITER_TARGET), tools = [REWRITER_TARGET], ) jax_deps = _substrate_deps(deps, "jax") # [internal] Add JAX build dep py3_test( name = "{}.jax".format(name), size = jax_size or size, srcs = jax_srcs, main = _substrate_src("{}.py".format(name), "jax"), deps = jax_deps, tags = tags + ["tfp_jax"] + jax_tags, srcs_version = srcs_version, python_version = "PY3", timeout = timeout, shard_count = shard_count, ) native.test_suite( name = name, tags = [name_tag], )
1.75
2
src/wikidated/wikidata/wikidata_dump.py
lschmelzeisen/wikidata-history-analyzer
6
2819
<filename>src/wikidated/wikidata/wikidata_dump.py<gh_stars>1-10 # # Copyright 2021 <NAME> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import annotations import json from datetime import date, datetime from logging import getLogger from pathlib import Path from typing import Mapping, MutableSequence, Sequence, Type, TypeVar import requests from pydantic import BaseModel as PydanticModel from pydantic import validator from tqdm import tqdm # type: ignore from typing_extensions import Final from wikidated._utils import RangeMap from wikidated.wikidata.wikidata_dump_file import WikidataDumpFile from wikidated.wikidata.wikidata_dump_pages_meta_history import ( WikidataDumpPagesMetaHistory, ) from wikidated.wikidata.wikidata_dump_sites_table import WikidataDumpSitesTable _LOGGER = getLogger(__name__) _T_WikidataDumpFile = TypeVar("_T_WikidataDumpFile", bound=WikidataDumpFile) class WikidataDump: def __init__( self, dump_dir: Path, version: date, *, mirror: str = "https://dumps.wikimedia.org", ) -> None: self._dump_dir = dump_dir self.version: Final = version self.mirror: Final = mirror self._dump_status = _WikidataDumpStatus.load( self._dump_dir, self.version, self.mirror ) self.sites_table: Final = self._construct_dumps( WikidataDumpSitesTable, "sitestable" )[0] self.pages_meta_history: Final = RangeMap[WikidataDumpPagesMetaHistory]() for dump_file in self._construct_dumps( WikidataDumpPagesMetaHistory, "metahistory7zdump" ): self.pages_meta_history[dump_file.page_ids] = dump_file def download( self, *, sites_table: bool = True, pages_meta_history: bool = True ) -> None: _LOGGER.info( f"Downloading Wikidata dump {self.version:%4Y%2m%2d} from '{self.mirror}'." 
) dump_files: MutableSequence[WikidataDumpFile] = [] if sites_table: dump_files.append(self.sites_table) if pages_meta_history: dump_files.extend(self.pages_meta_history.values()) with tqdm( desc=f"Wikidata dump {self.version:%4Y%2m%2d} files", total=len(dump_files), dynamic_ncols=True, position=1, ) as progress_bar_files, tqdm( desc=f"Wikidata dump {self.version:%4Y%2m%2d} bytes", total=sum(dump_file.size for dump_file in dump_files), dynamic_ncols=True, position=2, unit="B", unit_scale=True, unit_divisor=1024, ) as progress_bar_size: for dump_file in dump_files: dump_file.download() progress_bar_files.update(1) progress_bar_size.update(dump_file.size) _LOGGER.info(f"Done downloading Wikidata dump {self.version:%4Y%2m%2d}.") def _construct_dumps( self, dump_type: Type[_T_WikidataDumpFile], dump_type_id: str ) -> Sequence[_T_WikidataDumpFile]: return [ dump_type( path=self._dump_dir / path, url=self.mirror + dump_status_file.url, sha1=dump_status_file.sha1, size=dump_status_file.size, ) for path, dump_status_file in self._dump_status.jobs[ dump_type_id ].files.items() ] class _WikidataDumpStatusFile(PydanticModel): size: int url: str md5: str sha1: str class _WikidataDumpStatusJob(PydanticModel): status: str updated: datetime files: Mapping[str, _WikidataDumpStatusFile] @validator("updated", pre=True) def _parse_datetime(cls, value: str) -> datetime: # noqa: N805 return datetime.strptime(value, "%Y-%m-%d %H:%M:%S") class _WikidataDumpStatus(PydanticModel): jobs: Mapping[str, _WikidataDumpStatusJob] version: str @classmethod def load(cls, dump_dir: Path, version: date, mirror: str) -> _WikidataDumpStatus: path = dump_dir / f"wikidatawiki-{version:%4Y%2m%2d}-dumpstatus.json" if not path.exists(): url = f"{mirror}/wikidatawiki/{version:%4Y%2m%2d}/dumpstatus.json" _LOGGER.debug(f"Downloading Wikidata dump status from '{url}'.") response = requests.get(url) response.raise_for_status() path.parent.mkdir(exist_ok=True, parents=True) with path.open("w", encoding="UTF-8") as fd: fd.write(json.dumps(response.json(), indent=2) + "\n") _LOGGER.debug("Done downloading Wikidata dump status.") dump_status = _WikidataDumpStatus.parse_file(path) for job_name, job in dump_status.jobs.items(): if job.status != "done": path.unlink() raise Exception(f"Job '{job_name}' is not 'done', but '{job.status}'.") return dump_status
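# A minimal usage sketch, assuming a local dump directory and an arbitrary dump version
# (both values below are illustrative, not taken from this repository):
#
#   from datetime import date
#   from pathlib import Path
#
#   dump = WikidataDump(Path("dumps"), date(2021, 6, 1))
#   dump.download(sites_table=True, pages_meta_history=False)
#   sites_table_file = dump.sites_table  # a WikidataDumpSitesTable instance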
2.203125
2
tcapygen/layoutgen.py
Ahrvo-Trading-Systems/tcapy
189
2820
from __future__ import division, print_function __author__ = 'saeedamen' # <NAME> / <EMAIL> # # Copyright 2017 Cuemacro Ltd. - http//www.cuemacro.com / @cuemacro # # See the License for the specific language governing permissions and limitations under the License. # ## Web server components import dash_core_components as dcc import dash_html_components as html import base64 import os ## Date/time components import pandas as pd import datetime from datetime import timedelta from collections import OrderedDict from pandas.tseries.offsets import * from tcapy.vis.layoutdash import LayoutDash ######################################################################################################################## class LayoutDashImplGen(LayoutDash): """This implements the LayoutDash abstract class, to create the web based GUI for the tcapy application. It creates two web pages - detailed_page - for doing detailed tcapy analysis for a specific currency pair - aggregated_page - for more aggregated style analysis across multiple currency pairs and over multiple time periods """ def __init__(self, app=None, constants=None, url_prefix=''): super(LayoutDashImplGen, self).__init__(app=app, constants=constants, url_prefix=url_prefix) available_dates = pd.date_range( datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window), datetime.datetime.today().date(), freq=BDay()) times = pd.date_range("0:00", "23:59", freq="15min") ### create the possible values for drop down boxes on both pages # Reverse date list (for both detailed and aggregated pages) self.available_dates = [x.date() for x in available_dates[::-1]] # For detailed page only self.available_times = [t.strftime("%H:%M") for t in times] self.available_tickers = self._constants.available_tickers_dictionary['All'] self.available_venues = self._constants.available_venues_dictionary['All'] self.available_brokers = self._constants.available_brokers_dictionary['All'] self.available_algos = self._constants.available_algos_dictionary['All'] self.available_market_data = self._constants.available_market_data self.available_order_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'arrival', 'twap', 'vwap', 'buy trade', 'sell trade'] self.available_execution_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'buy trade', 'sell trade'] self.available_slippage_bounds = ['0.25', '0.5', '1.0', '1.25', '1.5', '2.0', 'bid/ask'] # For aggregated page only self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary) self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary) self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary) self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary) self.available_event_types = self._constants.available_event_types self.available_metrics = self._constants.available_metrics self.available_reload = ['no', 'yes'] self.available_visualization = ['yes', 'no'] self.construct_layout() def _flatten_dictionary(self, dictionary): available = dictionary['All'] available_groups = self._util_func.dict_key_list(dictionary.keys()) return self.flatten_list_of_strings([available_groups, available]) def construct_layout(self): self.page_content = html.Div([ dcc.Location(id='url', refresh=False), html.Div(id='page-content') ]) link_bar_dict = {'Detailed' : 'detailed', 'Aggregated' : 'aggregated', 'Compliance' : 'compliance'} trade_outliers_cols = 
['Date', 'ticker', 'side', 'notional cur', 'benchmark', 'exec not', 'exec not in rep cur', 'slippage'] broker_cols = ['Date', 'by broker notional (rep cur)'] # Main page for detailed analysing of (eg. over the course of a few days) self.pages['detailed'] = html.Div([ self._sc.header_bar('FX: Detailed - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B("Status: ok", id='detailed-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id={'start-date-val' : self.available_dates, 'start-time-val' : self.available_times}, prefix_id='detailed'), self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val', self.available_dates), ('finish-time-val', self.available_times)]), prefix_id='detailed'), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed', drop_down_values=self.available_tickers), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed', drop_down_values=self.available_grouped_brokers), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed', drop_down_values=self.available_grouped_algos), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='detailed', drop_down_values=self.available_grouped_venues), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='detailed', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed', drop_down_values=self.available_metrics) ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'), # self.button(caption = 'Print PDF', id = 'detailed-print-pdf-button', className = 'no-print'), # Orders self._sc.horizontal_bar(), self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot', self.available_order_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='order-candle-timeline-download-link', download_file='download_order_candle_timeline', height=500), self._sc.plot(caption='Orders: Markout', id='order-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Orders: Histogram vs PDF fit', id='order-dist-plot', prefix_id='detailed', height=500), # Execution trades self._sc.horizontal_bar(), self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot', prefix_id='detailed', element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot', self.available_execution_plot_lines), downloadplot_caption='Download CSV', downloadplot_tag='execution-candle-timeline-download-link', download_file='download_execution_candle_timeline.csv', height=500), self._sc.plot(caption='Executions: Markout', id='execution-markout-plot', prefix_id='detailed', height=500), self._sc.plot(caption='Executions: Histogram vs PDF fit', id='execution-dist-plot', prefix_id='detailed', height=500), # Detailed tcapy markout table for executions html.Div([ html.H3('Executions: Markout Table'), html.Div(id='detailed-execution-table') ], style={'width': '1000px', 'display': 'inline-block', 'marginBottom': 5, 'marginTop': 5, 'marginLeft': 5, 'marginRight': 5}), ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ # Secondary page for analysing 
aggregated statistics over long periods of time, eg. who is the best broker? self.pages['aggregated'] = html.Div([ self._sc.header_bar('FX: Aggregated - Trader Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B("Status: ok", id='aggregated-status'), margin_left=5), self._sc.horizontal_bar(), # dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='aggregated', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated', drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='aggregated', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Event Type', id='event-type-val', prefix_id='aggregated', drop_down_values=self.available_event_types), self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated', drop_down_values=self.available_metrics), ]), self._sc.horizontal_bar(), self._sc.button(caption='Calculate', id='calculation-button', prefix_id='aggregated'), # , msg_id='aggregated-status'), self._sc.horizontal_bar(), # self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val', offset=[-7,-1]), self._sc.plot(caption='Aggregated Trader: Summary', id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: Timeline', id='execution-by-ticker-timeline-plot', prefix_id='aggregated', height=500), self._sc.horizontal_bar(), self._sc.plot(caption='Aggregated Trader: PDF fit (' + self._constants.reporting_currency + ' notional)', id=['execution-by-ticker-dist-plot', 'execution-by-venue-dist-plot'], prefix_id='aggregated', height=500), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) ################################################################################################################ self.pages['compliance'] = html.Div([ self._sc.header_bar('FX: Compliance Analysis', img='logo.png'), self._sc.link_bar(link_bar_dict), self._sc.width_row_cell(html.B("Status: ok", id='compliance-status'), margin_left=5), self._sc.horizontal_bar(), # Dropdown selection boxes html.Div([ self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='compliance', drop_down_values=self.available_dates), self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance', drop_down_values=self.available_grouped_tickers, multiselect=True), self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance', 
drop_down_values=self.available_grouped_brokers, multiselect=True), self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance', drop_down_values=self.available_grouped_algos, multiselect=True), self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance', drop_down_values=self.available_grouped_venues, multiselect=True), self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='compliance', drop_down_values=self.available_market_data), self._sc.drop_down(caption='Filter by Time', id='filter-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_reload), self._sc.drop_down(caption='Start Time of Day', id='start-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Finish Time of Day', id='finish-time-of-day-val', prefix_id='compliance', drop_down_values=self.available_times), self._sc.drop_down(caption='Slippage to Mid (bp)', id='slippage-bounds-val', prefix_id='compliance', drop_down_values=self.available_slippage_bounds), self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance', drop_down_values=self.available_visualization) ]), self._sc.horizontal_bar(), html.Div([ self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'), # self.date_picker(caption='Start Date', id='start-date-dtpicker', prefix_id='compliance'), # self.date_picker(caption='Finish Date', id='finish-date-dtpicker', prefix_id='compliance'), ]), self._sc.horizontal_bar(), self._sc.table(caption='Compliance: Trade Outliers', id='execution-by-anomalous-table', prefix_id='compliance', columns=trade_outliers_cols, downloadplot_caption='Trade outliers CSV', downloadplot_tag='execution-by-anomalous-download-link', download_file='download_execution_by_anomalous.csv'), self._sc.table(caption='Compliance: Totals by Broker', id='summary-by-broker-table', prefix_id='compliance', columns=broker_cols, downloadplot_caption='Download broker CSV', downloadplot_tag='summary-by-broker-download-link', download_file='download_broker.csv' ), self._sc.horizontal_bar() ], style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'}) # ID flags self.id_flags = { # Detailed trader page # 'timeline_trade_orders' : {'client-orders': 'order', 'executions': 'trade'}, # 'markout_trade_orders' : {'client-orders': 'order_df', 'executions': 'trade_df'}, 'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df', 'order': 'sparse_market_order_df'}, 'detailed_markout_trade_order': {'execution': 'trade_df', 'order': 'order_df'}, 'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'}, 'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'}, 'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df', 'order-candle-timeline': 'sparse_market_order_df'}, # Aggregated trader page 'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker', 'execution-by-venue': 'bar_trade_df_by/mean/venue'}, 'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker', 'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'}, 'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker', 'execution-by-venue': 'dist_trade_df_by/pdf/venue'}, # Compliance page 
'compliance_metric_table_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'}, 'compliance_download_link_trade_order': {'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all', 'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'}, }
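# Hypothetical wiring sketch: the Dash `app` and tcapy `constants` objects are created
# by the surrounding application and are only assumed here, not defined in this file:
#
#   layout = LayoutDashImplGen(app=app, constants=constants, url_prefix='')
#   app.layout = layout.page_content   # individual pages are served from layout.pages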
2.265625
2
tests/molecular/molecules/molecule/fixtures/cof/periodic_kagome.py
andrewtarzia/stk
21
2821
import pytest import stk from ...case_data import CaseData @pytest.fixture( scope='session', params=( lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), lambda name: CaseData( molecule=stk.ConstructedMolecule( topology_graph=stk.cof.PeriodicKagome( building_blocks=( stk.BuildingBlock( smiles='BrC1=C(Br)[C+]=N1', functional_groups=[stk.BromoFactory()], ), stk.BuildingBlock( smiles=( 'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+](' 'Br)[C+2]1' ), functional_groups=[stk.BromoFactory()], ), ), lattice_size=(2, 2, 1), optimizer=stk.PeriodicCollapser(), ), ), smiles=( '[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]=' 'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N=' '[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C' '+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1' '5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%' '20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21' ')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2][' 'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N' '=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)' '[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]' '5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206[' 'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+' ']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C' '2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N=' '[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%' '211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121' ), name=name, ), ), ) def cof_periodic_kagome(request) -> CaseData: return request.param( f'{request.fixturename}{request.param_index}', )
1.882813
2
projects/MAE/utils/weight_convert.py
Oneflow-Inc/libai
55
2822
<reponame>Oneflow-Inc/libai # coding=utf-8 # Copyright 2021 The OneFlow Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import oneflow as flow import torch logger = logging.getLogger(__name__) def convert_qkv_weight(cfg, value): """ Convert qkv.weight to be compatible with LiBai transformer layer Args: cfg: config file value: qkv.weight in the loaded checkpoint """ num_heads = cfg.model.num_heads hidden_size = cfg.model.embed_dim head_size = int(hidden_size / num_heads) qkv_weight = ( value.view([3, num_heads, head_size, hidden_size]) .permute(1, 0, 2, 3) .contiguous() .view(hidden_size * 3, hidden_size) ) return qkv_weight def convert_qkv_bias(cfg, value): """ Convert qkv.bias to be compatible with LiBai transformer layer Args: cfg: config file value: qkv.bias in the loaded checkpoint """ num_heads = cfg.model.num_heads hidden_size = cfg.model.embed_dim head_size = int(hidden_size / num_heads) qkv_bias = ( value.view(3, num_heads, head_size).permute(1, 0, 2).contiguous().view(hidden_size * 3) ) return qkv_bias def filter_keys(key, value, cfg): """ Filtering the state_dict keys and values to match LiBai's MAE model """ if "norm1" in key: key = key.replace("norm1", "input_layernorm") elif "attn.qkv" in key: key = key.replace("attn.qkv", "self_attention.query_key_value") if "weight" in key: value = convert_qkv_weight(cfg, value) if "bias" in key: value = convert_qkv_bias(cfg, value) elif "attn.proj" in key: key = key.replace("attn.proj", "self_attention.dense") elif "norm2" in key: key = key.replace("norm2", "post_attention_layernorm") elif "mlp.fc1" in key: key = key.replace("mlp.fc1", "mlp.dense_h_to_4h") elif "mlp.fc2" in key: key = key.replace("mlp.fc2", "mlp.dense_4h_to_h") elif "fc_norm" in key: key = key.replace("fc_norm", "norm") return key, value def load_torch_checkpoint(model, cfg, path="./mae_finetuned_vit_base.pth", strict=False): """ Load checkpoint from the given torch weights. Torch weight can be downloaded from the original repo: https://github.com/facebookresearch/mae """ torch_dict = torch.load(path, map_location="cpu")["model"] parameters = torch_dict new_parameters = dict() for key, value in parameters.items(): if "num_batches_tracked" not in key: # to global tensor key, val = filter_keys(key, value, cfg) val = val.detach().cpu().numpy() val = flow.tensor(val).to_global( sbp=flow.sbp.broadcast, placement=flow.placement("cuda", ranks=[0]) ) new_parameters[key] = val model.load_state_dict(new_parameters, strict=strict) print("Successfully load torch mae checkpoint.") return model
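# Hedged usage sketch: the LiBai config object `cfg` and the model constructor belong to
# the surrounding project and are assumed here (`build_model` is a hypothetical name):
#
#   model = build_model(cfg.model)  # a LiBai ViT/MAE instance
#   model = load_torch_checkpoint(model, cfg, path="./mae_finetuned_vit_base.pth", strict=False)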
1.460938
1
dthm4kaiako/events/__init__.py
taskmaker1/dthm4kaiako
3
2823
"""Module for events application."""
1.09375
1
spot/level1.py
K0gata/SGLI_Python_output_tool
1
2824
<filename>spot/level1.py import numpy as np import logging from decimal import Decimal, ROUND_HALF_UP from abc import ABC, abstractmethod, abstractproperty from spot.utility import bilin_2d from spot.config import PROJ_TYPE # ============================= # Level-1 template class # ============================= class L1Interface(ABC): @property @abstractmethod def PROJECTION_TYPE(self): raise NotImplementedError() @property @abstractmethod def ALLOW_PROJECTION_TYPE(self): return NotImplementedError() def __init__(self, h5_file, product_id): self.h5_file = h5_file self.product_id = product_id geo_data_grp_attrs = self.h5_file['Geometry_data'].attrs self.geo_n_pix = geo_data_grp_attrs['Number_of_pixels'][0] self.geo_n_lin = geo_data_grp_attrs['Number_of_lines'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_n_pix = img_data_grp_attrs['Number_of_pixels'][0] self.img_n_lin = img_data_grp_attrs['Number_of_lines'][0] def get_product_data(self, prod_name:str): dset = self.h5_file['Image_data/' + prod_name] # Return uint16 type data if the product is QA_flag or Line_tai93 if 'QA_flag' == prod_name or 'Line_tai93' == prod_name: return dset[:] # Validate data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_DN'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') if 'Maximum_valid_DN' in dset.attrs: data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN if 'Minimum_valid_DN' in dset.attrs: data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN # Convert DN to physical value data = data * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] return data @abstractmethod def get_geometry_data(self, data_name:str, **kwargs): raise NotImplementedError() @abstractmethod def get_geometry_data_list(self): raise NotImplementedError() def get_product_data_list(self): return list(self.h5_file['/Image_data'].keys()) def get_unit(self, prod_name: str): if 'Rt_' in prod_name: return 'NA' # Get attrs set unit_name = 'Unit' attrs = self.h5_file['/Image_data/' + prod_name].attrs # Get unit if unit_name not in attrs: return 'NA' return attrs[unit_name][0].decode('UTF-8') # ============================= # Level-1 map-projection class # ============================= class Scene(L1Interface): PROJECTION_TYPE = PROJ_TYPE.SCENE.name ALLOW_PROJECTION_TYPE = [PROJECTION_TYPE, PROJ_TYPE.EQR.name] def __init__(self, h5_file, product_id): super().__init__(h5_file, product_id) self.scene_number = h5_file['/Global_attributes'].attrs['Scene_number'][0] self.path_number = h5_file['/Global_attributes'].attrs['RSP_path_number'][0] img_data_grp_attrs = self.h5_file['Image_data'].attrs self.img_spatial_reso = img_data_grp_attrs['Grid_interval'][0] def get_geometry_data(self, data_name: str, **kwargs): interval = kwargs['interval'] dset = self.h5_file['Geometry_data/' + data_name] data = dset[:] if 'Latitude' is not data_name and 'Longitude' is not data_name: data = data.astype(np.float32) * dset.attrs['Slope'][0] + dset.attrs['Offset'][0] # Finish if interval is none if interval is None or interval == 'none': return data # Interpolate raw data if interval == 'auto': interp_interval = dset.attrs['Resampling_interval'][0] else: interp_interval = interval lon_mode = False if 'Longitude' == data_name: lon_mode = True if interp_interval > 1: data = bilin_2d(data, interp_interval, lon_mode) # Trim away the excess pixel/line (data_size_lin, data_size_pxl) = data.shape if (kwargs['fit_img_size'] is True) and 
(self.img_n_lin <= data_size_lin) and (self.img_n_pix <= data_size_pxl): data = data[:self.img_n_lin, :self.img_n_pix] return data def get_geometry_data_list(self): return list(self.h5_file['/Geometry_data'].keys()) def get_allow_projection_type(self): return self.ALLOW_PROJECTION_TYPE # ============================= # Level-1 sub-processing level class # ============================= class L1B(Scene): # ----------------------------- # Public # ----------------------------- def get_product_data(self, prod_name:str): if 'Land_water_flag' in prod_name: return self._get_land_water_flag() if 'Lt_' in prod_name: return self._get_Lt(prod_name) if 'Rt_' in prod_name: return self._get_Rt(prod_name) if 'Stray_light_correction_flag_' in prod_name: return self._get_stray_light_correction_flag(prod_name) return super().get_product_data(prod_name) # ----------------------------- # Private # ----------------------------- def _get_land_water_flag(self): dset = self.h5_file['Image_data/Land_water_flag'] data = dset[:].astype(np.float32) if 'Error_DN' in dset.attrs: data[data == dset.attrs['Error_value'][0]] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_value'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_value'][0]] = np.NaN return data def _get_Lt(self, prod_name): dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] mask = dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope'] + dset.attrs['Offset'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN return data def _get_Rt(self, prod_name): prod_name = prod_name.replace('Rt_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] mask = dset.attrs['Mask'][0] data = np.bitwise_and(dn_data, mask).astype(np.float32) data = data * dset.attrs['Slope_reflectance'] + dset.attrs['Offset_reflectance'] data[dn_data == dset.attrs['Error_DN']] = np.NaN with np.warnings.catch_warnings(): np.warnings.filterwarnings('ignore', r'invalid value encountered in (greater|less)') data[data > dset.attrs['Maximum_valid_DN'][0]] = np.NaN data[data < dset.attrs['Minimum_valid_DN'][0]] = np.NaN cos_theta_0 = np.cos(np.deg2rad(self.get_geometry_data('Solar_zenith', interval='auto', fit_img_size=True))) data = data / cos_theta_0 return data def _get_stray_light_correction_flag(self, prod_name): prod_name = prod_name.replace('Stray_light_correction_flag_', 'Lt_') dset = self.h5_file['Image_data/' + prod_name] dn_data = dset[:] data = np.bitwise_and(dn_data, 0x8000) data[dn_data == dset.attrs['Error_DN']] = 0 return data > 0 class VNRL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod in prod_list: if 'Lt_' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list class IRSL1B(L1B): def get_product_data_list(self): prod_list = super().get_product_data_list() for prod in prod_list: if 'Lt_SW' in prod: prod_list.append(prod.replace('Lt', 'Rt')) prod_list.append(prod.replace('Lt', 'Stray_light_correction_flag')) prod_list = sorted(prod_list) return prod_list # EOF
2.171875
2
168. Excel Sheet Column Title.py
Alvin1994/leetcode-python3-
0
2825
class Solution:
    # @return a string
    def convertToTitle(self, n: int) -> str:
        capitals = [chr(x) for x in range(ord('A'), ord('Z')+1)]
        result = []
        while n > 0:
            result.insert(0, capitals[(n-1) % len(capitals)])
            n = (n-1) // len(capitals)  # integer division advances to the next "digit"
        # result.reverse()
        return ''.join(result)
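# Worked example of the bijective base-26 conversion above (why the `// len(capitals)`
# step is needed): for n = 28,
#   (28 - 1) % 26 = 1  -> 'B' is prepended, n becomes (28 - 1) // 26 = 1
#   (1 - 1)  % 26 = 0  -> 'A' is prepended, n becomes 0           -> result "AB"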
3.53125
4
devil/devil/utils/cmd_helper.py
Martijnve23/catapult
1,894
2826
<gh_stars>1000+ # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """A wrapper for subprocess to make calling shell commands easier.""" import codecs import logging import os import pipes import select import signal import string import subprocess import sys import time CATAPULT_ROOT_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), '..', '..', '..')) SIX_PATH = os.path.join(CATAPULT_ROOT_PATH, 'third_party', 'six') if SIX_PATH not in sys.path: sys.path.append(SIX_PATH) import six from devil import base_error logger = logging.getLogger(__name__) _SafeShellChars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./') # Cache the string-escape codec to ensure subprocess can find it # later. Return value doesn't matter. if six.PY2: codecs.lookup('string-escape') def SingleQuote(s): """Return an shell-escaped version of the string using single quotes. Reliably quote a string which may contain unsafe characters (e.g. space, quote, or other special characters such as '$'). The returned value can be used in a shell command line as one token that gets to be interpreted literally. Args: s: The string to quote. Return: The string quoted using single quotes. """ return pipes.quote(s) def DoubleQuote(s): """Return an shell-escaped version of the string using double quotes. Reliably quote a string which may contain unsafe characters (e.g. space or quote characters), while retaining some shell features such as variable interpolation. The returned value can be used in a shell command line as one token that gets to be further interpreted by the shell. The set of characters that retain their special meaning may depend on the shell implementation. This set usually includes: '$', '`', '\', '!', '*', and '@'. Args: s: The string to quote. Return: The string quoted using double quotes. """ if not s: return '""' elif all(c in _SafeShellChars for c in s): return s else: return '"' + s.replace('"', '\\"') + '"' def ShrinkToSnippet(cmd_parts, var_name, var_value): """Constructs a shell snippet for a command using a variable to shrink it. Takes into account all quoting that needs to happen. Args: cmd_parts: A list of command arguments. var_name: The variable that holds var_value. var_value: The string to replace in cmd_parts with $var_name Returns: A shell snippet that does not include setting the variable. """ def shrink(value): parts = (x and SingleQuote(x) for x in value.split(var_value)) with_substitutions = ('"$%s"' % var_name).join(parts) return with_substitutions or "''" return ' '.join(shrink(part) for part in cmd_parts) def Popen(args, stdin=None, stdout=None, stderr=None, shell=None, cwd=None, env=None): # preexec_fn isn't supported on windows. # pylint: disable=unexpected-keyword-arg if sys.platform == 'win32': close_fds = (stdin is None and stdout is None and stderr is None) preexec_fn = None else: close_fds = True preexec_fn = lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL) if six.PY2: return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn ) else: # opens stdout in text mode, so that caller side always get 'str', # and there will be no type mismatch error. # Ignore any decoding error, so that caller will not crash due to # uncaught exception. 
Decoding errors are unavoidable, as we # do not know the encoding of the output, and in some output there # will be multiple encodings (e.g. adb logcat) return subprocess.Popen( args=args, cwd=cwd, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, close_fds=close_fds, env=env, preexec_fn=preexec_fn, universal_newlines=True, encoding='utf-8', errors='ignore' ) def Call(args, stdout=None, stderr=None, shell=None, cwd=None, env=None): pipe = Popen( args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env) pipe.communicate() return pipe.wait() def RunCmd(args, cwd=None): """Opens a subprocess to execute a program and returns its return value. Args: args: A string or a sequence of program arguments. The program to execute is the string or the first item in the args sequence. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. Returns: Return code from the command execution. """ logger.debug(str(args) + ' ' + (cwd or '')) return Call(args, cwd=cwd) def GetCmdOutput(args, cwd=None, shell=False, env=None): """Open a subprocess to execute a program and returns its output. Args: args: A string or a sequence of program arguments. The program to execute is the string or the first item in the args sequence. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. env: If not None, a mapping that defines environment variables for the subprocess. Returns: Captures and returns the command's stdout. Prints the command's stderr to logger (which defaults to stdout). """ (_, output) = GetCmdStatusAndOutput(args, cwd, shell, env) return output def _ValidateAndLogCommand(args, cwd, shell): if isinstance(args, six.string_types): if not shell: raise Exception('string args must be run with shell=True') else: if shell: raise Exception('array args must be run with shell=False') args = ' '.join(SingleQuote(str(c)) for c in args) if cwd is None: cwd = '' else: cwd = ':' + cwd logger.debug('[host]%s> %s', cwd, args) return args def GetCmdStatusAndOutput(args, cwd=None, shell=False, env=None, merge_stderr=False): """Executes a subprocess and returns its exit code and output. Args: args: A string or a sequence of program arguments. The program to execute is the string or the first item in the args sequence. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. Must be True if args is a string and False if args is a sequence. env: If not None, a mapping that defines environment variables for the subprocess. merge_stderr: If True, captures stderr as part of stdout. Returns: The 2-tuple (exit code, stdout). """ status, stdout, stderr = GetCmdStatusOutputAndError( args, cwd=cwd, shell=shell, env=env, merge_stderr=merge_stderr) if stderr: logger.critical('STDERR: %s', stderr) logger.debug('STDOUT: %s%s', stdout[:4096].rstrip(), '<truncated>' if len(stdout) > 4096 else '') return (status, stdout) def StartCmd(args, cwd=None, shell=False, env=None): """Starts a subprocess and returns a handle to the process. Args: args: A string or a sequence of program arguments. The program to execute is the string or the first item in the args sequence. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. Must be True if args is a string and False if args is a sequence. 
env: If not None, a mapping that defines environment variables for the subprocess. Returns: A process handle from subprocess.Popen. """ _ValidateAndLogCommand(args, cwd, shell) return Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell, cwd=cwd, env=env) def GetCmdStatusOutputAndError(args, cwd=None, shell=False, env=None, merge_stderr=False): """Executes a subprocess and returns its exit code, output, and errors. Args: args: A string or a sequence of program arguments. The program to execute is the string or the first item in the args sequence. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. Must be True if args is a string and False if args is a sequence. env: If not None, a mapping that defines environment variables for the subprocess. merge_stderr: If True, captures stderr as part of stdout. Returns: The 3-tuple (exit code, stdout, stderr). """ _ValidateAndLogCommand(args, cwd, shell) stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE pipe = Popen( args, stdout=subprocess.PIPE, stderr=stderr, shell=shell, cwd=cwd, env=env) stdout, stderr = pipe.communicate() return (pipe.returncode, stdout, stderr) class TimeoutError(base_error.BaseError): """Module-specific timeout exception.""" def __init__(self, output=None): super(TimeoutError, self).__init__('Timeout') self._output = output @property def output(self): return self._output def _read_and_decode(fd, buffer_size): data = os.read(fd, buffer_size) if data and six.PY3: data = data.decode('utf-8', errors='ignore') return data def _IterProcessStdoutFcntl(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): """An fcntl-based implementation of _IterProcessStdout.""" # pylint: disable=too-many-nested-blocks import fcntl try: # Enable non-blocking reads from the child's stdout. child_fd = process.stdout.fileno() fl = fcntl.fcntl(child_fd, fcntl.F_GETFL) fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK) end_time = (time.time() + timeout) if timeout else None iter_end_time = (time.time() + iter_timeout) if iter_timeout else None while True: if end_time and time.time() > end_time: raise TimeoutError() if iter_end_time and time.time() > iter_end_time: yield None iter_end_time = time.time() + iter_timeout if iter_end_time: iter_aware_poll_interval = min(poll_interval, max(0, iter_end_time - time.time())) else: iter_aware_poll_interval = poll_interval read_fds, _, _ = select.select([child_fd], [], [], iter_aware_poll_interval) if child_fd in read_fds: data = _read_and_decode(child_fd, buffer_size) if not data: break yield data if process.poll() is not None: # If process is closed, keep checking for output data (because of timing # issues). while True: read_fds, _, _ = select.select([child_fd], [], [], iter_aware_poll_interval) if child_fd in read_fds: data = _read_and_decode(child_fd, buffer_size) if data: yield data continue break break finally: try: if process.returncode is None: # Make sure the process doesn't stick around if we fail with an # exception. process.kill() except OSError: pass process.wait() def _IterProcessStdoutQueue(process, iter_timeout=None, timeout=None, buffer_size=4096, poll_interval=1): """A Queue.Queue-based implementation of _IterProcessStdout. TODO(jbudorick): Evaluate whether this is a suitable replacement for _IterProcessStdoutFcntl on all platforms. 
""" # pylint: disable=unused-argument if six.PY3: import queue else: import Queue as queue import threading stdout_queue = queue.Queue() def read_process_stdout(): # TODO(jbudorick): Pick an appropriate read size here. while True: try: output_chunk = _read_and_decode(process.stdout.fileno(), buffer_size) except IOError: break stdout_queue.put(output_chunk, True) if not output_chunk and process.poll() is not None: break reader_thread = threading.Thread(target=read_process_stdout) reader_thread.start() end_time = (time.time() + timeout) if timeout else None try: while True: if end_time and time.time() > end_time: raise TimeoutError() try: s = stdout_queue.get(True, iter_timeout) if not s: break yield s except queue.Empty: yield None finally: try: if process.returncode is None: # Make sure the process doesn't stick around if we fail with an # exception. process.kill() except OSError: pass process.wait() reader_thread.join() _IterProcessStdout = (_IterProcessStdoutQueue if sys.platform == 'win32' else _IterProcessStdoutFcntl) """Iterate over a process's stdout. This is intentionally not public. Args: process: The process in question. iter_timeout: An optional length of time, in seconds, to wait in between each iteration. If no output is received in the given time, this generator will yield None. timeout: An optional length of time, in seconds, during which the process must finish. If it fails to do so, a TimeoutError will be raised. buffer_size: The maximum number of bytes to read (and thus yield) at once. poll_interval: The length of time to wait in calls to `select.select`. If iter_timeout is set, the remaining length of time in the iteration may take precedence. Raises: TimeoutError: if timeout is set and the process does not complete. Yields: basestrings of data or None. """ def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False, logfile=None, env=None): """Executes a subprocess with a timeout. Args: args: List of arguments to the program, the program to execute is the first element. timeout: the timeout in seconds or None to wait forever. cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. Must be True if args is a string and False if args is a sequence. logfile: Optional file-like object that will receive output from the command as it is running. env: If not None, a mapping that defines environment variables for the subprocess. Returns: The 2-tuple (exit code, output). Raises: TimeoutError on timeout. """ _ValidateAndLogCommand(args, cwd, shell) output = six.StringIO() process = Popen( args, cwd=cwd, shell=shell, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) try: for data in _IterProcessStdout(process, timeout=timeout): if logfile: logfile.write(data) output.write(data) except TimeoutError: raise TimeoutError(output.getvalue()) str_output = output.getvalue() logger.debug('STDOUT+STDERR: %s%s', str_output[:4096].rstrip(), '<truncated>' if len(str_output) > 4096 else '') return process.returncode, str_output def IterCmdOutputLines(args, iter_timeout=None, timeout=None, cwd=None, shell=False, env=None, check_status=True): """Executes a subprocess and continuously yields lines from its output. Args: args: List of arguments to the program, the program to execute is the first element. iter_timeout: Timeout for each iteration, in seconds. timeout: Timeout for the entire command, in seconds. 
cwd: If not None, the subprocess's current directory will be changed to |cwd| before it's executed. shell: Whether to execute args as a shell command. Must be True if args is a string and False if args is a sequence. env: If not None, a mapping that defines environment variables for the subprocess. check_status: A boolean indicating whether to check the exit status of the process after all output has been read. Yields: The output of the subprocess, line by line. Raises: CalledProcessError if check_status is True and the process exited with a non-zero exit status. """ cmd = _ValidateAndLogCommand(args, cwd, shell) process = Popen( args, cwd=cwd, shell=shell, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return _IterCmdOutputLines( process, cmd, iter_timeout=iter_timeout, timeout=timeout, check_status=check_status) def _IterCmdOutputLines(process, cmd, iter_timeout=None, timeout=None, check_status=True): buffer_output = '' iter_end = None cur_iter_timeout = None if iter_timeout: iter_end = time.time() + iter_timeout cur_iter_timeout = iter_timeout for data in _IterProcessStdout( process, iter_timeout=cur_iter_timeout, timeout=timeout): if iter_timeout: # Check whether the current iteration has timed out. cur_iter_timeout = iter_end - time.time() if data is None or cur_iter_timeout < 0: yield None iter_end = time.time() + iter_timeout continue else: assert data is not None, ( 'Iteration received no data despite no iter_timeout being set. ' 'cmd: %s' % cmd) # Construct lines to yield from raw data. buffer_output += data has_incomplete_line = buffer_output[-1] not in '\r\n' lines = buffer_output.splitlines() buffer_output = lines.pop() if has_incomplete_line else '' for line in lines: yield line if iter_timeout: iter_end = time.time() + iter_timeout if buffer_output: yield buffer_output if check_status and process.returncode: raise subprocess.CalledProcessError(process.returncode, cmd)
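# Hedged usage sketch (the commands below are illustrative, not taken from this module's
# callers):
#
#   status, out = GetCmdStatusAndOutput(['ls', '-l'])
#   status, out = GetCmdStatusAndOutputWithTimeout(['sleep', '1'], timeout=5)
#   for line in IterCmdOutputLines(['ping', '-c', '3', 'localhost'], timeout=30):
#     print(line)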
2.359375
2
services/server/server/apps/checkout/migrations/0001_initial.py
AyanSamanta23/moni-moni
0
2827
<filename>services/server/server/apps/checkout/migrations/0001_initial.py # Generated by Django 4.0.2 on 2022-02-26 15:52 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='FundingOptions', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('funding_name', models.CharField(help_text='Required', max_length=255, verbose_name='funding_name')), ('funding_price', models.DecimalField(decimal_places=2, help_text='Required', max_digits=1000, verbose_name='funding price')), ('funding_timeframe', models.CharField(help_text='Required', max_length=255, verbose_name='funding timeframe')), ('funding_window', models.CharField(help_text='Required', max_length=255, verbose_name='funding window')), ], options={ 'verbose_name': 'Funding Option', 'verbose_name_plural': 'Funding Options', }, ), migrations.CreateModel( name='PaymentSelections', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(help_text='Required', max_length=255, verbose_name='name')), ('is_active', models.BooleanField(default=True)), ], options={ 'verbose_name': 'Payment Selection', 'verbose_name_plural': 'Payment Selections', }, ), ]
1.757813
2
api/to_astm.py
urchinpro/L2-forms
0
2828
<filename>api/to_astm.py import itertools from astm import codec from collections import defaultdict from django.utils import timezone import directions.models as directions import directory.models as directory import api.models as api import simplejson as json def get_astm_header() -> list: return ['H|\\^&', None, None, ['1', '2.00'], None, None, None, None, None, None, 'P', '1.00', timezone.now().strftime("%Y%m%d%H%M%S")] def get_leave() -> list: return ['L', 1, 'N'] def get_patient() -> list: return ['P', 1] def get_iss_direction(direction: directions.Napravleniya, analyzer: api.Analyzer, full=False) -> list: r = [] n = 0 iss_list = directions.Issledovaniya.objects.filter(napravleniye=direction) if not full: iss_list = iss_list.filter(doc_confirmation__isnull=True) for i in iss_list: researches = defaultdict(list) for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__analyzer=analyzer, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, analyzer=analyzer) if not rel.exists(): continue rel = rel[0] tube = directions.TubesRegistration.objects.filter(type__fractions=fraction) if not tube.exists(): continue tube = tube[0] researches[tube.pk].append(rel.astm_field) for tpk in researches: n += 1 r.append(['O', n, tpk, None, [[None, x, None, None] for x in researches[tpk]]]) return r def encode(m) -> str: return codec.iter_encode(m) def get_astm(directions_list, analyzer: api.Analyzer, full=False, out=None) -> str: iss = [get_iss_direction(x, analyzer, full) for x in directions_list] m = [get_astm_header(), get_patient()] m = list(itertools.chain(m, *iss)) m.append(get_leave()) if out: out.write(json.dumps(m)) return encode(m) def get_iss_astm(issledovaniya: list, app: api.Application, need_astm=False): m = [get_astm_header(), get_patient()] n = 0 researches = defaultdict(list) for row in issledovaniya: k = row["pk"] i = row["iss"] for fraction in directory.Fractions.objects.filter(research=i.research, relationfractionastm__application_api=app, hide=False): rel = api.RelationFractionASTM.objects.filter(fraction=fraction, application_api=app) if not rel.exists(): continue rel = rel[0] if rel.is_code: researches[k].append([None, None, None, rel.astm_field]) else: researches[k].append([None, rel.astm_field, None, None]) for tpk in researches: n += 1 m.append(['O', n, tpk, None, researches[tpk]]) m.append(get_leave()) return encode(m)
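# Hypothetical call sketch: the direction queryset and analyzer instance come from the
# surrounding Django project and are only assumed here:
#
#   directions_list = directions.Napravleniya.objects.filter(pk__in=[1, 2, 3])
#   analyzer = api.Analyzer.objects.first()
#   message = get_astm(directions_list, analyzer, full=False)  # ASTM-encoded string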
2.265625
2
test/unit/test_som_rom_parser.py
CospanDesign/nysa
15
2829
#!/usr/bin/python import unittest import json import sys import os import string sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) from nysa.cbuilder import sdb_component as sdbc from nysa.cbuilder import sdb_object_model as som from nysa.cbuilder.som_rom_parser import parse_rom_image from nysa.cbuilder.som_rom_generator import generate_rom_image from nysa.cbuilder.sdb import SDBInfo from nysa.cbuilder.sdb import SDBWarning from nysa.cbuilder.sdb import SDBError from nysa.common.status import StatusLevel from nysa.common.status import Status class Test (unittest.TestCase): """Unit test SDB Tree""" def setUp(self): pass ''' def test_simple_rom(self): rom_in = ROM1 som = parse_rom_image(rom_in) rom_out = generate_rom_image(som) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) self.assertEqual(rom_in, rom_out) ''' def test_full_dionysus_read(self): from nysa.host.platform_scanner import PlatformScanner pscanner = PlatformScanner() platform_dict = pscanner.get_platforms() platform_names = platform_dict.keys() if "dionysus" not in platform_names: return s = Status() platform_instance = platform_dict["dionysus"](s) platforms = platform_instance.scan() if len(platforms) == 0: return dionysus = platforms[platforms.keys()[0]] #print "Found Dionysus" s.set_level("fatal") s.Verbose("Read SDB") dionysus.read_sdb() def test_full_bus(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral = sm.insert_bus() peripheral.set_name("peripheral") memory = sm.insert_bus() memory.set_name("memory") d1 = sdbc.create_device_record(name = "device 1", size = 0x100) d2 = sdbc.create_device_record(name = "device 2", size = 0x100) m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000) m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000) peripheral.set_child_spacing(0x0010000000) root.set_child_spacing (0x0100000000) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #rom_in = ROM2 #print_sdb_rom(rom_in) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) self.assertEqual(rom_in, rom_out) def test_full_bus_with_integration(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral = sm.insert_bus() peripheral.set_name("peripheral") memory = sm.insert_bus() memory.set_name("memory") d1 = sdbc.create_device_record(name = "device 1", size = 0x100) d2 = sdbc.create_device_record(name = "device 2", size = 0x100) m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000) m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000) intr = sdbc.create_integration_record("Integration Data", vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #rom_in = ROM2 #print_sdb_rom(rom_in) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral = 
sm.insert_bus() peripheral.set_name("peripheral") memory = sm.insert_bus() memory.set_name("memory") d1 = sdbc.create_device_record(name = "device 1", size = 0x100) d2 = sdbc.create_device_record(name = "device 2", size = 0x100) m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000) m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000) intr = sdbc.create_integration_record("Integration Data", vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) url = sdbc.create_repo_url_record("http://www.geocities.com") sm.insert_component(root, url) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): sm = som.SOM() sm.initialize_root() root = sm.get_root() peripheral = sm.insert_bus() peripheral.set_name("peripheral") memory = sm.insert_bus() memory.set_name("memory") d1 = sdbc.create_device_record(name = "device 1", size = 0x100) d2 = sdbc.create_device_record(name = "device 2", size = 0x100) m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000) m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000) intr = sdbc.create_integration_record("Integration Data", vendor_id = 0x800BEAF15DEADC03, device_id = 0x00000000) url = sdbc.create_repo_url_record("http://www.geocities.com") synthesis = sdbc.create_synthesis_record("Synthesis Name", 123, "cool tool", 1.0, "jeff") sm.insert_component(root, url) sm.insert_component(root, synthesis) peripheral.set_child_spacing(0x0100000000) sm.insert_component(peripheral, intr) sm.insert_component(peripheral, d1) sm.insert_component(peripheral, d2) sm.insert_component(memory, m1) sm.insert_component(memory, m2) rom = generate_rom_image(sm) rom_in = sdbc.convert_rom_to_32bit_buffer(rom) #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) #print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def test_generate_one_sub_bus_with_url(self): rom_in = ROMD #print_sdb(rom) sm = parse_rom_image(rom_in) rom_out = generate_rom_image(sm) rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out) print_sdb_rom(rom_out) #compare_roms(rom_in, rom_out) self.assertEqual(rom_in, rom_out) def compare_roms(rom_in, rom_out): if len(rom_in) != len(rom_out): print "Length of rom is not equal!" return rom_in = rom_in.splitlines() rom_out = rom_out.splitlines() for i in range (0, len(rom_in), 4): if (i % 16 == 0): magic = "0x%s" % (rom_in[i].lower()) last_val = int(rom_in[i + 15], 16) & 0xFF print "" if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val == 0): print "Interconnect" elif last_val == 0x01: print "Device" elif last_val == 0x02: print "Bridge" elif last_val == 0x80: print "Integration" elif last_val == 0x81: print "URL" elif last_val == 0x82: print "Synthesis" elif last_val == 0xFF: print "Empty" else: print "???" 
if rom_in[i] == rom_out[i] and rom_in[i + 1] == rom_out[i + 1] and rom_in[i + 2] == rom_out[i + 2] and rom_in[i + 3] == rom_out[i + 3]: print "%s %s : %s %s" % (rom_in[i], rom_in[i + 1], rom_in[i + 2], rom_in[i + 3]) else: print "%s %s : %s %s != %s %s : %s %s" % (rom_in[i], rom_in[i + 1], rom_in[i + 2], rom_in[i + 3], rom_out[i], rom_out[i + 1], rom_out[i + 2], rom_out[i + 3]) def print_sdb_rom(rom): #rom = sdbc.convert_rom_to_32bit_buffer(rom) rom = rom.splitlines() print "ROM" for i in range (0, len(rom), 4): if (i % 16 == 0): magic = "0x%s" % (rom[i].lower()) last_val = int(rom[i + 15], 16) & 0xFF print "" if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val == 0): print "Interconnect" elif last_val == 0x01: print "Device" elif last_val == 0x02: print "Bridge" elif last_val == 0x80: print "Integration" elif last_val == 0x81: print "URL" elif last_val == 0x82: print "Synthesis" elif last_val == 0xFF: print "Empty" else: print "???" print "%s %s : %s %s" % (rom[i], rom[i + 1], rom[i + 2], rom[i + 3]) ROM1 = "5344422D\n"\ "00010100\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000100\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "746F7000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000207\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000100\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0105\n"\ "64657669\n"\ "63652031\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF" ROM2 = "5344422D\n"\ "00020100\n"\ "00000000\n"\ "00000000\n"\ "03000000\n"\ "00000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "746F7000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000020\n"\ "00000000\n"\ "00000000\n"\ "00000100\n"\ "00000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "70657269\n"\ "70686572\n"\ "616C0000\n"\ "00000000\n"\ "00000002\n"\ "00000000\n"\ "00000040\n"\ "00000100\n"\ "00000000\n"\ "00000200\n"\ "00030000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "6D656D6F\n"\ "72790000\n"\ "00000000\n"\ "00000000\n"\ "00000002\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF\n"\ "5344422D\n"\ "00020100\n"\ "00000000\n"\ "00000000\n"\ "00000100\n"\ "00000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "70657269\n"\ "70686572\n"\ "616C0000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000207\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000100\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0105\n"\ "64657669\n"\ "63652031\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000207\n"\ "00000001\n"\ "00000000\n"\ "00000003\n"\ "00000100\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0105\n"\ "64657669\n"\ "63652032\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF\n"\ "5344422D\n"\ "00020100\n"\ "00000100\n"\ "00000000\n"\ 
"00000200\n"\ "00030000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0105\n"\ "6D656D6F\n"\ "72790000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000207\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00010000\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0105\n"\ "6D656D6F\n"\ "72792031\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000207\n"\ "00000000\n"\ "00010000\n"\ "00000000\n"\ "00030000\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0105\n"\ "6D656D6F\n"\ "72792032\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF" ROMD = "5344422D\n"\ "00020100\n"\ "00000000\n"\ "00000000\n"\ "00000002\n"\ "00000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0106\n"\ "746F7000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000020\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "20000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0106\n"\ "70657269\n"\ "70686572\n"\ "616C0000\n"\ "00000000\n"\ "00000002\n"\ "00000000\n"\ "00000040\n"\ "00000001\n"\ "00000000\n"\ "00000001\n"\ "00800000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0106\n"\ "6D656D6F\n"\ "72790000\n"\ "00000000\n"\ "00000000\n"\ "00000002\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF\n"\ "5344422D\n"\ "00020100\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "20000000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0106\n"\ "70657269\n"\ "70686572\n"\ "616C0000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000207\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000340\n"\ "80000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "140F0106\n"\ "53444200\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000101\n"\ "00000207\n"\ "00000000\n"\ "10000000\n"\ "00000000\n"\ "10000008\n"\ "80000000\n"\ "0000C594\n"\ "00000000\n"\ "00000001\n"\ "140F0107\n"\ "77625F67\n"\ "70696F00\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF\n"\ "5344422D\n"\ "00010100\n"\ "00000001\n"\ "00000000\n"\ "00000001\n"\ "00800000\n"\ "80000000\n"\ "0000C594\n"\ "00000001\n"\ "00000001\n"\ "140F0106\n"\ "6D656D6F\n"\ "72790000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000502\n"\ "00000207\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00800000\n"\ "80000000\n"\ "0000C594\n"\ "00000000\n"\ "00000001\n"\ "140F0107\n"\ "77625F73\n"\ "6472616D\n"\ "00000000\n"\ "00000000\n"\ "00000001\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "00000000\n"\ "000000FF"
2.171875
2
src/TF-gui/tftrain.py
jeetsagar/turbojet
0
2830
#!python3 import os import pandas as pd import tensorflow as tf from tensorflow.keras import layers os.environ["CUDA_VISIBLE_DEVICES"] = "0" # gpu_devices = tf.config.experimental.list_physical_devices("GPU") # for device in gpu_devices: # tf.config.experimental.set_memory_growth(device, True) def trainModel(data_in, params_in): data_in = data_in.take(2048) data_in = data_in.shuffle(24) data_in = data_in.batch(1024) arch = params_in["Architecture"] dropout = params_in["Dropout"] lr = params_in["LearningRate"] attrs = params_in["Attrs"] epochs = params_in["Epochs"] if arch == "BaseCNN": if params_in["BatchNorm"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Flatten(), layers.Dense(50, "relu"), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Flatten(), layers.Dense(50, "relu"), layers.Dense(1) ]) elif arch == "CNN-LSTM": if params_in["BatchNorm"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, "relu"), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=False), layers.Dense(50, "relu"), layers.Dense(1) ]) elif arch == "CNN-2LSTM": if params_in["BatchNorm"]: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.BatchNormalization(), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) else: model = tf.keras.Sequential([ layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)), layers.Dropout(dropout), layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"), layers.Dropout(dropout), layers.Reshape((5, 10)), layers.LSTM(30, return_sequences=True), 
layers.LSTM(30, return_sequences=False), layers.Dense(1) ]) model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True)) filepath = "./checkpoints/Model_in-" + arch + str(attrs) + ".h5" losses = [] class CustomModelCheckPoint(tf.keras.callbacks.Callback): def __init__(self, **kargs): super(CustomModelCheckPoint, self).__init__(**kargs) self.epoch_loss = {} # loss at given epoch def on_epoch_begin(self, epoch, logs={}): # Things done on beginning of epoch. return def on_epoch_end(self, epoch, logs={}): # things done on end of the epoch self.epoch_loss[epoch] = logs.get("loss") losses.append(self.epoch_loss[epoch]) if params_in["ResumeTraining"]: model.load_weights(filepath) checkpoint2 = CustomModelCheckPoint() checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbose=0, save_best_only=True, save_freq='epoch') model.fit(data_in, epochs=epochs, callbacks=[checkpoint, checkpoint2]) df_loss = pd.DataFrame() df_loss["Epochs"] = list(range(1, epochs + 1)) df_loss["Loss"] = losses df_loss.to_csv("./losses/lossTrend.csv", index=False)
2.546875
3
library/kong_api.py
sebastienc/ansible-kong-module
34
2831
<reponame>sebastienc/ansible-kong-module<filename>library/kong_api.py<gh_stars>10-100
#!/usr/bin/python

DOCUMENTATION = '''
---
module: kong
short_description: Configure a Kong API Gateway
'''

EXAMPLES = '''
- name: Register a site
  kong:
    kong_admin_uri: http://127.0.0.1:8001/apis/
    name: "Mockbin"
    upstream_url: "http://mockbin.com"
    request_host: "mockbin.com"
    state: present

- name: Delete a site
  kong:
    kong_admin_uri: http://127.0.0.1:8001/apis/
    name: "Mockbin"
    state: absent
'''

import json, requests, os


class KongAPI:

    def __init__(self, base_url, auth_username=None, auth_password=None):
        self.base_url = base_url
        if auth_username is not None and auth_password is not None:
            self.auth = (auth_username, auth_password)
        else:
            self.auth = None

    def __url(self, path):
        return "{}{}" . format (self.base_url, path)

    def _api_exists(self, name, api_list):
        for api in api_list:
            if name == api.get("name", None):
                return True
        return False

    def add_or_update(self, name, upstream_url, request_host=None, request_path=None, strip_request_path=False, preserve_host=False):
        method = "post"
        url = self.__url("/apis/")
        api_list = self.list().json().get("data", [])
        api_exists = self._api_exists(name, api_list)

        if api_exists:
            method = "patch"
            url = "{}{}" . format (url, name)

        data = {
            "name": name,
            "upstream_url": upstream_url,
            "strip_request_path": strip_request_path,
            "preserve_host": preserve_host
        }
        if request_host is not None:
            data['request_host'] = request_host
        if request_path is not None:
            data['request_path'] = request_path

        return getattr(requests, method)(url, data, auth=self.auth)

    def list(self):
        url = self.__url("/apis")
        return requests.get(url, auth=self.auth)

    def info(self, id):
        url = self.__url("/apis/{}" . format (id))
        return requests.get(url, auth=self.auth)

    def delete_by_name(self, name):
        info = self.info(name)
        id = info.json().get("id")
        return self.delete(id)

    def delete(self, id):
        path = "/apis/{}" . format (id)
        url = self.__url(path)
        return requests.delete(url, auth=self.auth)


class ModuleHelper:

    def __init__(self, fields):
        self.fields = fields

    def get_module(self):
        args = dict(
            kong_admin_uri = dict(required=False, type='str'),
            kong_admin_username = dict(required=False, type='str'),
            kong_admin_password = dict(required=False, type='str'),
            name = dict(required=False, type='str'),
            upstream_url = dict(required=False, type='str'),
            request_host = dict(required=False, type='str'),
            request_path = dict(required=False, type='str'),
            strip_request_path = dict(required=False, default=False, type='bool'),
            preserve_host = dict(required=False, default=False, type='bool'),
            state = dict(required=False, default="present", choices=['present', 'absent', 'latest', 'list', 'info'], type='str'),
        )
        return AnsibleModule(argument_spec=args,supports_check_mode=False)

    def prepare_inputs(self, module):
        url = module.params['kong_admin_uri']
        auth_user = module.params['kong_admin_username']
        auth_password = module.params['kong_admin_password']
        state = module.params['state']
        data = {}
        for field in self.fields:
            value = module.params.get(field, None)
            if value is not None:
                data[field] = value

        return (url, data, state, auth_user, auth_password)

    def get_response(self, response, state):
        if state == "present":
            meta = response.json()
            has_changed = response.status_code in [201, 200]
        if state == "absent":
            meta = {}
            has_changed = response.status_code == 204
        if state == "list":
            meta = response.json()
            has_changed = False

        return (has_changed, meta)


def main():
    fields = [
        'name',
        'upstream_url',
        'request_host',
        'request_path',
        'strip_request_path',
        'preserve_host'
    ]

    helper = ModuleHelper(fields)

    global module # might not need this
    module = helper.get_module()
    base_url, data, state, auth_user, auth_password = helper.prepare_inputs(module)

    api = KongAPI(base_url, auth_user, auth_password)
    if state == "present":
        response = api.add_or_update(**data)
    if state == "absent":
        response = api.delete_by_name(data.get("name"))
    if state == "list":
        response = api.list()

    if response.status_code == 401:
        module.fail_json(msg="Please specify kong_admin_username and kong_admin_password", meta=response.json())
    elif response.status_code == 403:
        module.fail_json(msg="Please check kong_admin_username and kong_admin_password", meta=response.json())
    else:
        has_changed, meta = helper.get_response(response, state)
        module.exit_json(changed=has_changed, meta=meta)


from ansible.module_utils.basic import *
from ansible.module_utils.urls import *

if __name__ == '__main__':
    main()
2.078125
2
src/compas_plotters/artists/lineartist.py
XingxinHE/compas
0
2832
<reponame>XingxinHE/compas<filename>src/compas_plotters/artists/lineartist.py from compas_plotters.artists import Artist from matplotlib.lines import Line2D from compas.geometry import intersection_line_box_xy __all__ = ['LineArtist'] class LineArtist(Artist): """""" zorder = 1000 def __init__(self, line, draw_points=False, draw_as_segment=False, linewidth=1.0, linestyle='solid', color=(0, 0, 0)): super(LineArtist, self).__init__(line) self._mpl_line = None self._start_artist = None self._end_artist = None self._segment_artist = None self._draw_points = draw_points self._draw_as_segment = draw_as_segment self.line = line self.linewidth = linewidth self.linestyle = linestyle self.color = color def clip(self): xlim, ylim = self.plotter.viewbox xmin, xmax = xlim ymin, ymax = ylim box = [[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]] return intersection_line_box_xy(self.line, box) @property def data(self): return [self.line.start[:2], self.line.end[:2]] def draw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1 = self.line.end[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line = self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) else: points = self.clip() if points: p0, p1 = points x0, y0 = p0[:2] x1, y1 = p1[:2] line2d = Line2D([x0, x1], [y0, y1], linewidth=self.linewidth, linestyle=self.linestyle, color=self.color, zorder=self.zorder) self._mpl_line = self.plotter.axes.add_line(line2d) if self._draw_points: self._start_artist = self.plotter.add(self.line.start) self._end_artist = self.plotter.add(self.line.end) def redraw(self): if self._draw_as_segment: x0, y0 = self.line.start[:2] x1, y1 = self.line.end[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth) else: points = self.clip() if points: p0, p1 = points x0, y0 = p0[:2] x1, y1 = p1[:2] self._mpl_line.set_xdata([x0, x1]) self._mpl_line.set_ydata([y0, y1]) self._mpl_line.set_color(self.color) self._mpl_line.set_linewidth(self.linewidth)
2.59375
3
plot2d_artificial_dataset1_silvq.py
manome/python-silvq
0
2833
# -*- encoding: utf8 -*-

import numpy as np
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split

from lvq import SilvqModel
from lvq.utils import plot2d

def main():
    # Load dataset
    dataset = np.loadtxt('data/artificial_dataset1.csv', delimiter=',')
    x = dataset[:, :-1].astype('float64')
    y = dataset[:, -1].astype('int64')
    # Split dataset into training set and test set
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=3, shuffle=True, stratify=y)
    # Generating model
    model = SilvqModel(x.shape[1], theta=0.8, bias_type='ls')
    # Training the model
    model.fit(x_train, y_train, epochs=30)
    # Predict the response for test dataset
    y_predict = model.predict(x_test)
    # Evaluating the model
    print('Accuracy: %.3f' %accuracy_score(y_test, y_predict))
    # Plot prediction results and prototypes
    plot2d(model, x, y, title='Artificial dataset1')

if __name__ == '__main__':
    main()
3.296875
3
classification_experiments/Fine-Tuned-ResNet-50/Fine-Tuned-ResNet-50.py
ifr1m/hyper-kvasir
38
2834
#!/usr/bin/env python # coding: utf-8 # In[ ]: #Importing all required libraries # In[ ]: from __future__ import absolute_import, division, print_function, unicode_literals # In[ ]: #Checking for correct cuda and tf versions from tensorflow.python.platform import build_info as tf_build_info print(tf_build_info.cuda_version_number) # 9.0 in v1.10.0 print(tf_build_info.cudnn_version_number) # 7 in v1.10.0 # In[ ]: import tensorflow as tf import pathlib from tensorflow.keras.models import Sequential from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D from tensorflow.keras.preprocessing.image import ImageDataGenerator import os import numpy as np import matplotlib.pyplot as plt # In[ ]: AUTOTUNE = tf.data.experimental.AUTOTUNE # In[ ]: import IPython.display as display from PIL import Image import numpy as np import matplotlib.pyplot as plt import os # In[ ]: tf.__version__ # In[ ]: #Train and test data folder train_data_dir = "\\hyper-kvasir\\splits\\all\\1" test_data_dir = "\\hyper-kvasir\\splits\\all\\0" # In[ ]: train_data_dir = pathlib.Path(train_data_dir) test_data_dir = pathlib.Path(test_data_dir) # In[ ]: #count how many images are there image_count = len(list(train_data_dir.glob('*/*.jpg'))) image_count # In[ ]: total_train = len(list(train_data_dir.glob('*/*.jpg'))) total_val = len(list(test_data_dir.glob('*/*.jpg'))) # In[ ]: #get the class names CLASS_NAMES = np.array([item.name for item in train_data_dir.glob('*') if item.name != "LICENSE.txt"]) CLASS_NAMES # In[ ]: #Define parameter for training batch_size = 32 IMG_HEIGHT = 224 IMG_WIDTH = 224 STEPS_PER_EPOCH = np.ceil(image_count/batch_size) epochs = 8 num_classes = len(CLASS_NAMES) #23 # In[ ]: #We use image data generators to load the images and prepare them for the training train_image_generator = ImageDataGenerator() # Generator for our training data validation_image_generator = ImageDataGenerator() # Generator for our validation data train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), classes = list(CLASS_NAMES), class_mode='categorical' ) val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir), batch_size=batch_size, shuffle=True, target_size=(IMG_HEIGHT, IMG_WIDTH), class_mode='categorical', classes = list(CLASS_NAMES) ) #get class order from directories print(train_data_gen.class_indices.keys()) print(val_data_gen.class_indices.keys()) # In[ ]: IMG_SIZE = 224 IMG_SHAPE = (IMG_SIZE, IMG_SIZE, 3) # base model from the pre-trained model. 
Resnet 50 in this case base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE, include_top=False, weights='imagenet') base_model.trainable = False # In[ ]: #add new classification layer x = base_model.output x = tf.keras.layers.GlobalAveragePooling2D()(x) x = tf.keras.layers.Dense(num_classes,activation='softmax')(x) model = tf.keras.models.Model(inputs=base_model.input, outputs=x) base_learning_rate = 0.001 model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate), loss='categorical_crossentropy', metrics=['accuracy']) # In[ ]: #fit the model history = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=epochs, validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[ ]: #create training plots history acc = history.history['accuracy'] val_acc = history.history['val_accuracy'] loss = history.history['loss'] val_loss = history.history['val_loss'] epochs_range = range(epochs) plt.figure(figsize=(8, 8)) plt.subplot(1, 2, 1) plt.plot(epochs_range, acc, label='Training Accuracy') plt.plot(epochs_range, val_acc, label='Validation Accuracy') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(1, 2, 2) plt.plot(epochs_range, loss, label='Training Loss') plt.plot(epochs_range, val_loss, label='Validation Loss') plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.show() # In[ ]: base_model.trainable = True #now we want to train the base model # In[ ]: # How many layers are in the base model print("Layers base model: ", len(base_model.layers)) # Fine tune from layer x fine_tune_at = 100 # Freeze all the layers before the fine tune starting layer for layer in base_model.layers[:fine_tune_at]: layer.trainable = False # In[ ]: model.compile(loss='categorical_crossentropy', optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10), metrics=['accuracy']) # In[ ]: model.summary() # In[ ]: #Fine tune step initial_epochs = 7 fine_tune_epochs = 3 total_epochs = initial_epochs + fine_tune_epochs train_batches = total_train // batch_size print(total_val // batch_size) validation_batches = total_val // batch_size history_fine = model.fit_generator( train_data_gen, steps_per_epoch=total_train // batch_size, epochs=total_epochs, initial_epoch = history.epoch[-1], validation_data=val_data_gen, validation_steps=total_val // batch_size ) # In[ ]: acc += history_fine.history['accuracy'] val_acc += history_fine.history['val_accuracy'] loss += history_fine.history['loss'] val_loss += history_fine.history['val_loss'] # In[ ]: #Plot fine tuning plt.figure(figsize=(8, 8)) plt.subplot(2, 1, 1) plt.plot(acc, label='Training Accuracy') plt.plot(val_acc, label='Validation Accuracy') plt.ylim([0.8, 1]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='lower right') plt.title('Training and Validation Accuracy') plt.subplot(2, 1, 2) plt.plot(loss, label='Training Loss') plt.plot(val_loss, label='Validation Loss') plt.ylim([0, 1.0]) plt.plot([initial_epochs-1,initial_epochs-1], plt.ylim(), label='Start Fine Tuning') plt.legend(loc='upper right') plt.title('Training and Validation Loss') plt.xlabel('epoch') plt.show() # In[ ]: #model save and load import os # In[ ]: #some time stamp from datetime import datetime # current date and time. 
now = datetime.now() timestamp = datetime.timestamp(now) print("timestamp =", timestamp) # In[ ]: model_filename = str(timestamp)+'mymodel.h5' model.save(model_filename) # In[ ]: #To apply the model on new data new_model = tf.keras.models.load_model(model_filename) # Show the model architecture new_model.summary() # In[ ]: from tensorflow.keras.preprocessing import image #image directory containing images to test img_dir="\\polyps" for i,img in enumerate(os.listdir(img_dir)): tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE)) tmpimage = np.expand_dims(tmpimage, axis=0).astype('float32') result_class=new_model.predict(tmpimage) print(img,";",CLASS_NAMES[result_class.argmax(axis=-1)])
2.46875
2
tools/android/android_tools.gyp
SlimKatLegacy/android_external_chromium_org
2
2835
<reponame>SlimKatLegacy/android_external_chromium_org
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
  'targets': [
    # Intermediate target grouping the android tools needed to run native
    # unittests and instrumentation test apks.
    {
      'target_name': 'android_tools',
      'type': 'none',
      'dependencies': [
        'adb_reboot/adb_reboot.gyp:adb_reboot',
        'forwarder2/forwarder.gyp:forwarder2',
        'md5sum/md5sum.gyp:md5sum',
        'purge_ashmem/purge_ashmem.gyp:purge_ashmem',
      ],
    },
    {
      'target_name': 'memdump',
      'type': 'none',
      'dependencies': [
        'memdump/memdump.gyp:memdump',
      ],
    },
    {
      'target_name': 'memconsumer',
      'type': 'none',
      'dependencies': [
        'memconsumer/memconsumer.gyp:memconsumer',
      ],
    },
  ],
}
1.164063
1
test/functional/fantasygold_opcall.py
FantasyGold/FantasyGold-Core
13
2836
<reponame>FantasyGold/FantasyGold-Core #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * from test_framework.script import * from test_framework.mininode import * from test_framework.fantasygold import * from test_framework.fantasygoldconfig import * import sys class OpCallTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 self.extra_args = [['-txindex=1']]*2 def send_one_op_call_tx_with_counter_check(self, outputs, counter_should_increase_by=0, input_value=500000000, should_throw=False): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16) inpt = make_vin(self.node, input_value) tx = make_transaction(self.node, [inpt], outputs) if should_throw: try: self.node.sendrawtransaction(tx) assert(False) except JSONRPCException as e: print(e) pass else: self.node.sendrawtransaction(tx) self.node.generate(1) sync_blocks(self.nodes) for i in range(2): # 61bc221a counter() out = int(self.nodes[i].callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) def send_multiple_op_call_txs_with_counter_check(self, num_txs, outputs, counter_should_increase_by): # 61bc221a counter() old_out = int(self.node.callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16) i = 0 unspents = self.node.listunspent() while i < num_txs and len(unspents) > 0: # Select as input a tx which has at least 5 fantasygold spendable for tx_i in range(len(unspents)): if int(unspents[tx_i]['amount']*COIN) == 1000000*FGC_MIN_GAS_PRICE and unspents[tx_i]['spendable']: break else: assert(False) inpt = CTxIn(COutPoint(int(unspents[tx_i]['txid'], 16), unspents[tx_i]['vout']), nSequence=0) tx = make_transaction(self.node, [inpt], outputs) txid = self.node.sendrawtransaction(tx) unspents = self.node.listunspent() i += 1 self.node.generate(1) sync_blocks(self.nodes) for i in range(2): # 61bc221a counter() out = int(self.nodes[i].callcontract(self.contract_address, "61bc221a")['executionResult']['output'], 16) assert(out-old_out == counter_should_increase_by) # Deploy the testing contract def create_contract_test(self): """ pragma solidity ^0.4.10; contract Example { uint public counter; function inc() public { counter += 1; } function getBalance() public { return this.balance; } } """ contract_data = self.node.createcontract("6060604052341561000c57fe5b5b61011e8061001c6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806312065fe0146058578063371303c014607b57806361bc221a14608a578063d0e30db01460ad575bfe5b3415605f57fe5b606560b5565b6040518082815260200191505060405180910390f35b3415608257fe5b608860d5565b005b3415609157fe5b609760e9565b6040518082815260200191505060405180910390f35b60b360ef565b005b60003073ffffffffffffffffffffffffffffffffffffffff163190505b90565b60016000600082825401925050819055505b565b60005481565b5b5600a165627a7a72305820fe93d8cc66557a2a6c8347f481f6d334402a7f90f8b2288668a874c34416a4dc0029", 1000000) self.contract_address = contract_data['address'] block_height = self.node.getblockcount() self.node.generate(1) sync_blocks(self.nodes) for i in range(2): assert(self.nodes[i].getblockcount() == block_height+1) 
assert(len(self.nodes[i].listcontracts()) == 1+NUM_DEFAULT_DGP_CONTRACTS) # Sends a tx containing 2 op_call outputs calling inc() def many_calls_in_same_tx_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=2, input_value=2*1000000*FGC_MIN_GAS_PRICE) # Sends a normal raw op_call tx with a single output. def normal_op_call_output_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", b"\xff\x7f", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=0x7fff*FGC_MIN_GAS_PRICE) # Sends a tx containing 1 op_call output where txfee == gas_price*gas_limit. def gas_equal_to_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=1, input_value=1000000*FGC_MIN_GAS_PRICE) # Sends a tx containing 1 op_call output where txfee < gas_price*gas_limit. def gas_exceeding_tx_fee_100001_1_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(10000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000001*FGC_MIN_GAS_PRICE-1, should_throw=True) # Sends a tx containing 1 op_call output where txfee < gas_price*gas_limit. def gas_exceeding_tx_fee_100001_2_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000001), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=1000000*FGC_MIN_GAS_PRICE, should_throw=True) # Sends a tx containing 2 op_call outputs that has a combined gas_price*gas_limit exceeding the tx fee. # This tx should be rejected since executing such a tx would be unable to pay for its potential execution costs in the same way as a tx with one output where txfee < gas_price*gas_limit. 
def two_calls_in_same_tx_exceeding_tx_fee_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, input_value=2000000*FGC_MIN_GAS_PRICE-1, should_throw=True) # sends a tx containing 1 op_call output with a (if interpreted with a signed integer) negative gas limit calling inc() def gas_limit_signedness_test(self): outputs = [] gas_limit = b"\xff" while len(gas_limit) < 20: outputs.append(make_op_call_output(0, b"\x04", gas_limit, CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=min(max(int(bytes_to_hex_str(gas_limit), 16)*FGC_MIN_GAS_PRICE, 10000000), 1000000000)) gas_limit += b"\xff" # sends a tx containing 1 op_call output with a (if interpreted with a signed integer) negative gas limit calling inc() def gas_limit_signedness_one_valid_test(self): outputs = [] gas_limit = b"\xff" outputs.append(make_op_call_output(0, b"\x04", b"\xff\xff\x00", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) outputs.append(make_op_call_output(0, b"\x04", b"\xff\xff", CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=2*0xffff*FGC_MIN_GAS_PRICE) # sends a tx containing 1 op_call output with a (if interpreted with a signed integer) negative gas price calling inc() def gas_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", b"\x01\x00", b"\xff\xff", bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=10000000) # sends a tx containing 1 op_call output with a possible negative gas limit and price calling inc() def gas_limit_and_price_signedness_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", b"\xff\xff", b"\xff", bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, should_throw=True, input_value=0xff*0xffff) # Sends 100 valid op_call txs def send_100_txs_test(self): outputs = [] outputs.append(make_op_call_output(0, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("371303c0"), bytes.fromhex(self.contract_address))) self.send_multiple_op_call_txs_with_counter_check(100, outputs, 100) def send_tx_with_value_test(self): outputs = [] # d0e30db0 deposit() outputs.append(make_op_call_output(100000000, b"\x04", CScriptNum(1000000), CScriptNum(FGC_MIN_GAS_PRICE), bytes.fromhex("d0e30db0"), bytes.fromhex(self.contract_address))) self.send_one_op_call_tx_with_counter_check(outputs, counter_should_increase_by=0, input_value=100000000+1000000*FGC_MIN_GAS_PRICE) # 12065fe0 getBalance() balance = int(self.node.callcontract(self.contract_address, "12065fe0")['executionResult']['output'], 16) assert(balance == 100000000) def run_test(self): self.node = self.nodes[0] connect_nodes(self.nodes[0], 1) self.nodes[0].generate(200+COINBASE_MATURITY) self.node.sendmany("", {self.node.getnewaddress(): 1000000*FGC_MIN_GAS_PRICE / 
Decimal('100000000') for i in range(200)}) print("Creating contract") self.create_contract_test() print("Calling inc() in two outputs") self.many_calls_in_same_tx_test() print("Calling inc() in one output") self.normal_op_call_output_test() print("Calling inc() in one output with txfee equal to gas_limit*gas_price") self.gas_equal_to_tx_fee_test() print("Calling inc() in one output with txfee < gas_limit*gas_price") self.gas_exceeding_tx_fee_100001_1_test() print("Second test of inc() in one outputs with txfee < gas_limit*gas_price") self.gas_exceeding_tx_fee_100001_2_test() print("Second test of inc() in one output with txfee < gas_limit*gas_price") self.two_calls_in_same_tx_exceeding_tx_fee_test() print("Mining a block with 100 txs each with an output calling inc()") self.send_100_txs_test() print("Checking that the value of txs are correctly updated") self.send_tx_with_value_test() print("Checking gas limit signedness where one tx is valid") self.gas_limit_signedness_one_valid_test() print("Checking gas limit signedness") self.gas_limit_signedness_test() print("Checking gas price signedness") self.gas_price_signedness_test() print("Checking gas limit and gas price signedness") self.gas_limit_and_price_signedness_test() if __name__ == '__main__': OpCallTest().main()
1.914063
2
intake_sklearn/source.py
AlbertDeFusco/intake-sklearn
1
2837
<reponame>AlbertDeFusco/intake-sklearn<gh_stars>1-10 from intake.source.base import DataSource, Schema import joblib import fsspec import sklearn import re from . import __version__ class SklearnModelSource(DataSource): container = 'python' name = 'sklearn' version = __version__ partition_access = False def __init__(self, urlpath, storage_options=None, metadata=None): """ Parameters ---------- urlpath: str, location of model pkl file Either the absolute or relative path to the file or URL to be opened. Some examples: - ``{{ CATALOG_DIR }}/models/model.pkl`` - ``s3://some-bucket/models/model.pkl`` """ self._urlpath = urlpath self._storage_options = storage_options or {} super().__init__(metadata=metadata) def _load(self): with fsspec.open(self._urlpath, mode='rb', **self._storage_options) as f: return f.read() def _get_schema(self): as_binary = self._load() s = re.search(b'_sklearn_versionq(.*\x00)((\d+\.)?(\d+\.)?(\*|\d+))q', as_binary) if s: sklearn_version = s.group(2).decode() else: sklearn_version = None self._schema = Schema( npartitions=1, extra_metadata={ 'sklearn_version':sklearn_version } ) return self._schema def read(self): self._load_metadata() if not self.metadata['sklearn_version'] == sklearn.__version__: msg = ('The model was created with Scikit-Learn version {} ' 'but version {} has been installed in your current environment.' ).format(self.metadata['sklearn_version'], sklearn.__version__) raise RuntimeError(msg) with fsspec.open(self._urlpath, **self._storage_options) as f: return joblib.load(f)
2.203125
2
jedi/evaluate/dynamic.py
hatamov/jedi
0
2838
""" One of the really important features of |jedi| is to have an option to understand code like this:: def foo(bar): bar. # completion here foo(1) There's no doubt wheter bar is an ``int`` or not, but if there's also a call like ``foo('str')``, what would happen? Well, we'll just show both. Because that's what a human would expect. It works as follows: - |Jedi| sees a param - search for function calls named ``foo`` - execute these calls and check the input. This work with a ``ParamListener``. """ from itertools import chain from jedi._compatibility import unicode from jedi.parser import tree as pr from jedi import settings from jedi import debug from jedi.evaluate.cache import memoize_default from jedi.evaluate import imports class ParamListener(object): """ This listener is used to get the params for a function. """ def __init__(self): self.param_possibilities = [] def execute(self, params): self.param_possibilities += params @debug.increase_indent def search_params(evaluator, param): """ A dynamic search for param values. If you try to complete a type: >>> def func(foo): ... foo >>> func(1) >>> func("") It is not known what the type ``foo`` without analysing the whole code. You have to look for all calls to ``func`` to find out what ``foo`` possibly is. """ if not settings.dynamic_params: return [] debug.dbg('Dynamic param search for %s', param) func = param.get_parent_until(pr.Function) # Compare the param names. names = [n for n in search_function_call(evaluator, func) if n.value == param.name.value] # Evaluate the ExecutedParams to types. result = list(chain.from_iterable(n.parent.eval(evaluator) for n in names)) debug.dbg('Dynamic param result %s', result) return result @memoize_default([], evaluator_is_first_arg=True) def search_function_call(evaluator, func): """ Returns a list of param names. """ from jedi.evaluate import representation as er def get_params_for_module(module): """ Returns the values of a param, or an empty array. """ @memoize_default([], evaluator_is_first_arg=True) def get_posibilities(evaluator, module, func_name): try: names = module.used_names[func_name] except KeyError: return [] for name in names: parent = name.parent if pr.is_node(parent, 'trailer'): parent = parent.parent trailer = None if pr.is_node(parent, 'power'): for t in parent.children[1:]: if t == '**': break if t.start_pos > name.start_pos and t.children[0] == '(': trailer = t break if trailer is not None: types = evaluator.goto_definition(name) # We have to remove decorators, because they are not the # "original" functions, this way we can easily compare. # At the same time we also have to remove InstanceElements. undec = [] for escope in types: if escope.isinstance(er.Function, er.Instance) \ and escope.decorates is not None: undec.append(escope.decorates) elif isinstance(escope, er.InstanceElement): undec.append(escope.var) else: undec.append(escope) if er.wrap(evaluator, compare) in undec: # Only if we have the correct function we execute # it, otherwise just ignore it. evaluator.eval_trailer(types, trailer) return listener.param_possibilities return get_posibilities(evaluator, module, func_name) current_module = func.get_parent_until() func_name = unicode(func.name) compare = func if func_name == '__init__': cls = func.get_parent_scope() if isinstance(cls, pr.Class): func_name = unicode(cls.name) compare = cls # add the listener listener = ParamListener() func.listeners.add(listener) try: result = [] # This is like backtracking: Get the first possible result. 
for mod in imports.get_modules_containing_name(evaluator, [current_module], func_name): result = get_params_for_module(mod) if result: break finally: # cleanup: remove the listener; important: should not stick. func.listeners.remove(listener) return result
3.734375
4
steamcheck/views.py
moird/linux-game-report
0
2839
from steamcheck import app from flask import jsonify, render_template import os import steamapi import json @app.route('/') def index(): return render_template("index.html") @app.route('/report/<name>') def report(name=None): """ This will generate the report based on the users Steam ID. Returns JSON :param name: Steam ID (either numerical ID or vanity url: steamcommunity.com/id/moird :return: Json object that contains listing of all linux games and general information about them: { "steamuser": "real steam name", "image": "steam user image url", "games": [{'gametitle', {"linux":true}}] "error": "" } """ process_report = {} try: # See if we are running on heroku or not. Could probably set an environment variable for this as well. if os.path.exists('/app/assets/GAMES.json'): linux_game_list = '/app/assets/GAMES.json' winehq_list = '/app/assets/winehq.json' else: linux_game_list = './assets/GAMES.json' winehq_list = './assets/winehq.json' with open(linux_game_list) as linux_game_list_raw: linux_games = json.load(linux_game_list_raw) with open(winehq_list) as winehq_raw: winehq_apps = json.load(winehq_raw) steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key']) try: user = steamapi.user.SteamUser(userid=int(name)) except ValueError: # When we get further this as a fallback will be taken out, really don't want to do this. user = steamapi.user.SteamUser(userurl=name) process_report['steamuser'] = user.name process_report['image'] = user.avatar process_report['games'] = {} for game in user.games: linux = False winehq = False if str(game.id) in linux_games: linux = True if game.name in winehq_apps: winehq = winehq_apps[game.name] process_report['games'][game.id] = {"name": game.name, "linux": linux, "winehq":winehq} except Exception as e: process_report['error'] = e return jsonify(**process_report)
2.6875
3
validator/delphi_validator/run.py
benjaminysmith/covidcast-indicators
0
2840
<reponame>benjaminysmith/covidcast-indicators<filename>validator/delphi_validator/run.py
# -*- coding: utf-8 -*-
"""Functions to call when running the tool.

This module should contain a function called `run_module`, that is executed
when the module is run with `python -m delphi_validator`.
"""
from delphi_utils import read_params
from .validate import Validator

def run_module():
    """Run the validator as a module."""
    parent_params = read_params()
    params = parent_params['validation']

    validator = Validator(params)
    validator.validate(parent_params["export_dir"]).print_and_exit()
2.109375
2
datasets/validation_folders.py
zenithfang/supervised_dispnet
39
2841
import torch.utils.data as data import numpy as np from imageio import imread from path import Path import pdb def crawl_folders(folders_list): imgs = [] depth = [] for folder in folders_list: current_imgs = sorted(folder.files('*.jpg')) current_depth = [] for img in current_imgs: d = img.dirname()/(img.name[:-4] + '.npy') assert(d.isfile()), "depth file {} not found".format(str(d)) depth.append(d) imgs.extend(current_imgs) depth.extend(current_depth) return imgs, depth def load_as_float(path): return imread(path).astype(np.float32) class ValidationSet(data.Dataset): """A sequence data loader where the files are arranged in this way: root/scene_1/0000000.jpg root/scene_1/0000000.npy root/scene_1/0000001.jpg root/scene_1/0000001.npy .. root/scene_2/0000000.jpg root/scene_2/0000000.npy . transform functions must take in a list a images and a numpy array which can be None """ def __init__(self, root, transform=None): self.root = Path(root) scene_list_path = self.root/'val.txt' self.scenes = [self.root/folder[:-1] for folder in open(scene_list_path)] self.imgs, self.depth = crawl_folders(self.scenes) self.transform = transform def __getitem__(self, index): img = load_as_float(self.imgs[index]) depth = np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace() if self.transform is not None: img, _, _ = self.transform([img], depth, None); #this depth is just used to fill the compose transform that is shared(no need for the result) img = img[0] return img, depth def __len__(self): return len(self.imgs)
2.71875
3
secretpy/ciphers/rot18.py
tigertv/crypt
51
2842
<gh_stars>10-100
#!/usr/bin/python
from .rot13 import Rot13
import secretpy.alphabets as al


class Rot18:
    """
    The Rot18 Cipher
    """
    __rot13 = Rot13()

    def __init__(self):
        alphabet = al.ENGLISH
        half = len(alphabet) >> 1
        self.__alphabet = alphabet[:half] + al.DECIMAL[:5] + alphabet[half:] + al.DECIMAL[5:]

    def __crypt(self, text, alphabet):
        return self.__rot13.encrypt(text, alphabet=self.__alphabet)

    def encrypt(self, text, key=None, alphabet=None):
        """
        Encryption method

        :param text: Text to encrypt
        :param key: is not used
        :param alphabet: is not used
        :type text: string
        :type key: integer
        :type alphabet: string
        :return: text
        :rtype: string
        """
        return self.__crypt(text, self.__alphabet)

    def decrypt(self, text, key=None, alphabet=None):
        """
        Decryption method

        :param text: Text to decrypt
        :param key: is not used
        :param alphabet: is not used
        :type text: string
        :type key: integer
        :type alphabet: string
        :return: text
        :rtype: string
        """
        return self.__crypt(text, self.__alphabet)

    def get_fixed_alphabet(self):
        return self.__alphabet
3.609375
4
pysaurus/database/special_properties.py
notoraptor/pysaurus
0
2843
<reponame>notoraptor/pysaurus
from abc import abstractmethod

from pysaurus.database.properties import PropType
from pysaurus.database.video import Video


class SpecialPropType(PropType):
    __slots__ = ()

    @abstractmethod
    def get(self, video: Video):
        raise NotImplementedError()


class PropError(SpecialPropType):
    __slots__ = ()

    def __init__(self):
        super().__init__("<error>", "", True)

    def get(self, video: Video):
        return sorted(set(video.errors) | set(video.properties.get(self.name, ())))


class SpecialProperties:
    properties = [PropError()]

    @classmethod
    def install(cls, database):
        to_save = False
        for expected in cls.properties:
            if (
                not database.has_prop_type(expected.name)
                or database.get_prop_type(expected.name) != expected
            ):
                database.remove_prop_type(expected.name)
                database.add_prop_type(expected)
                to_save = True
        if to_save:
            database.save()

    @classmethod
    def all_in(cls, video: Video):
        return all(prop.name in video.properties for prop in cls.properties)

    @classmethod
    def set(cls, video: Video):
        for prop in cls.properties:
            video.properties[prop.name] = prop.get(video)
2.5
2
patrole_tempest_plugin/rbac_utils.py
openstack/patrole
14
2844
# Copyright 2017 AT&T Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import contextlib import sys import time from oslo_log import log as logging from oslo_utils import excutils from tempest import config from tempest.lib import exceptions as lib_exc from patrole_tempest_plugin import rbac_exceptions CONF = config.CONF LOG = logging.getLogger(__name__) class _ValidateListContext(object): """Context class responsible for validation of the list functions. This class is used in ``override_role_and_validate_list`` function and the result of a list function must be assigned to the ``ctx.resources`` variable. Example:: with self.override_role_and_validate_list(...) as ctx: ctx.resources = list_function() """ def __init__(self, admin_resources=None, admin_resource_id=None): """Constructor for ``ValidateListContext``. Either ``admin_resources`` or ``admin_resource_id`` should be used, not both. :param list admin_resources: The list of resources received before calling the ``override_role_and_validate_list`` function. To validate will be used the ``_validate_len`` function. :param UUID admin_resource_id: An ID of a resource created before calling the ``override_role_and_validate_list`` function. To validate will be used the ``_validate_resource`` function. :raises RbacValidateListException: if both ``admin_resources`` and ``admin_resource_id`` are set or unset. """ self.resources = None if admin_resources is not None and not admin_resource_id: self._admin_len = len(admin_resources) if not self._admin_len: raise rbac_exceptions.RbacValidateListException( reason="the list of admin resources cannot be empty") self._validate_func = self._validate_len elif admin_resource_id and admin_resources is None: self._admin_resource_id = admin_resource_id self._validate_func = self._validate_resource else: raise rbac_exceptions.RbacValidateListException( reason="admin_resources and admin_resource_id are mutually " "exclusive") def _validate_len(self): """Validates that the number of resources is less than admin resources. """ if not len(self.resources): raise rbac_exceptions.RbacEmptyResponseBody() elif self._admin_len > len(self.resources): raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate_resource(self): """Validates that the admin resource is present in the resources. """ for resource in self.resources: if resource['id'] == self._admin_resource_id: return raise rbac_exceptions.RbacPartialResponseBody(body=self.resources) def _validate(self): """Calls the proper validation function. :raises RbacValidateListException: if the ``ctx.resources`` variable is not assigned. """ if self.resources is None: raise rbac_exceptions.RbacValidateListException( reason="ctx.resources is not assigned") self._validate_func() class RbacUtilsMixin(object): """Utility mixin responsible for switching ``os_primary`` role. Should be used as a mixin class alongside an instance of :py:class:`tempest.test.BaseTestCase` to perform Patrole class setup for a base RBAC class. 
Child classes should not use this mixin. Example:: class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest): @classmethod def setup_clients(cls): super(BaseRbacTest, cls).setup_clients() cls.hosts_client = cls.os_primary.hosts_client ... This class is responsible for overriding the value of the primary Tempest credential's role (i.e. ``os_primary`` role). By doing so, it is possible to seamlessly swap between admin credentials, needed for setup and clean up, and primary credentials, needed to perform the API call which does policy enforcement. The primary credentials always cycle between roles defined by ``CONF.identity.admin_role`` and ``CONF.patrole.rbac_test_roles``. """ credentials = ['primary', 'admin'] def __init__(self, *args, **kwargs): super(RbacUtilsMixin, self).__init__(*args, **kwargs) # Shows if override_role was called. self.__override_role_called = False # Shows if exception raised during override_role. self.__override_role_caught_exc = False _admin_role_id = None _rbac_role_ids = None _project_id = None _user_id = None _role_map = None _role_inferences_mapping = None _orig_roles = [] admin_roles_client = None @classmethod def restore_roles(cls): if cls._orig_roles: LOG.info("Restoring original roles %s", cls._orig_roles) roles_already_present = cls._list_and_clear_user_roles_on_project( cls._orig_roles) if not roles_already_present: cls._create_user_role_on_project(cls._orig_roles) @classmethod def setup_clients(cls): if CONF.identity_feature_enabled.api_v3: admin_roles_client = cls.os_admin.roles_v3_client else: raise lib_exc.InvalidConfiguration( "Patrole role overriding only supports v3 identity API.") cls.admin_roles_client = admin_roles_client cls._project_id = cls.os_primary.credentials.tenant_id cls._user_id = cls.os_primary.credentials.user_id cls._role_inferences_mapping = cls._prepare_role_inferences_mapping() cls._init_roles() # Store the user's original roles and rollback after testing. roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] cls._orig_roles = [role['id'] for role in roles] cls.addClassResourceCleanup(cls.restore_roles) # Change default role to admin cls._override_role(False) super(RbacUtilsMixin, cls).setup_clients() @classmethod def _prepare_role_inferences_mapping(cls): """Preparing roles mapping to support role inferences Making query to `list-all-role-inference-rules`_ keystone API returns all inference rules, which makes it possible to prepare roles mapping. It walks recursively through the raw data:: {"role_inferences": [ { "implies": [{"id": "3", "name": "reader"}], "prior_role": {"id": "2", "name": "member"} }, { "implies": [{"id": "2", "name": "member"}], "prior_role": {"id": "1", "name": "admin"} } ] } and converts it to the mapping:: { "2": ["3"], # "member": ["reader"], "1": ["2", "3"] # "admin": ["member", "reader"] } .. 
_list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules """ # noqa: E501 def process_roles(role_id, data): roles = data.get(role_id, set()) for rid in roles.copy(): roles.update(process_roles(rid, data)) return roles def convert_data(data): res = {} for rule in data: prior_role = rule['prior_role']['id'] implies = {r['id'] for r in rule['implies']} res[prior_role] = implies return res raw_data = cls.admin_roles_client.list_all_role_inference_rules() data = convert_data(raw_data['role_inferences']) res = {} for role_id in data: res[role_id] = process_roles(role_id, data) return res def get_all_needed_roles(self, roles): """Extending given roles with roles from mapping Examples:: ["admin"] >> ["admin", "member", "reader"] ["member"] >> ["member", "reader"] ["reader"] >> ["reader"] ["custom_role"] >> ["custom_role"] :param roles: list of roles :return: extended list of roles """ res = set(r for r in roles) for role in res.copy(): role_id = self.__class__._role_map.get(role) implied_roles = self.__class__._role_inferences_mapping.get( role_id, set()) role_names = {self.__class__._role_map[rid] for rid in implied_roles} res.update(role_names) LOG.debug('All needed roles: %s; Base roles: %s', res, roles) return list(res) @contextlib.contextmanager def override_role(self): """Override the role used by ``os_primary`` Tempest credentials. Temporarily change the role used by ``os_primary`` credentials to: * ``[patrole] rbac_test_roles`` before test execution * ``[identity] admin_role`` after test execution Automatically switches to admin role after test execution. :returns: None .. warning:: This function can alter user roles for pre-provisioned credentials. Work is underway to safely clean up after this function. Example:: @rbac_rule_validation.action(service='test', rules=['a:test:rule']) def test_foo(self): # Allocate test-level resources here. with self.override_role(): # The role for `os_primary` has now been overridden. Within # this block, call the API endpoint that enforces the # expected policy specified by "rule" in the decorator. self.foo_service.bar_api_call() # The role is switched back to admin automatically. Note that # if the API call above threw an exception, any code below this # point in the test is not executed. """ self._set_override_role_called() self._override_role(True) try: # Execute the test. yield finally: # Check whether an exception was raised. If so, remember that # for future validation. exc = sys.exc_info()[0] if exc is not None: self._set_override_role_caught_exc() # This code block is always executed, no matter the result of the # test. Automatically switch back to the admin role for test clean # up. self._override_role(False) @classmethod def _override_role(cls, toggle_rbac_role=False): """Private helper for overriding ``os_primary`` Tempest credentials. :param toggle_rbac_role: Boolean value that controls the role that overrides default role of ``os_primary`` credentials. * If True: role is set to ``[patrole] rbac_test_role`` * If False: role is set to ``[identity] admin_role`` """ LOG.debug('Overriding role to: %s.', toggle_rbac_role) roles_already_present = False try: target_roles = (cls._rbac_role_ids if toggle_rbac_role else [cls._admin_role_id]) roles_already_present = cls._list_and_clear_user_roles_on_project( target_roles) # Do not override roles if `target_role` already exists. 
if not roles_already_present: cls._create_user_role_on_project(target_roles) except Exception as exp: with excutils.save_and_reraise_exception(): LOG.exception(exp) finally: auth_providers = cls.get_auth_providers() for provider in auth_providers: provider.clear_auth() # Fernet tokens are not subsecond aware so sleep to ensure we are # passing the second boundary before attempting to authenticate. # Only sleep if a token revocation occurred as a result of role # overriding. This will optimize test runtime in the case where # ``[identity] admin_role`` == ``[patrole] rbac_test_roles``. if not roles_already_present: time.sleep(1) for provider in auth_providers: provider.set_auth() @classmethod def _init_roles(cls): available_roles = cls.admin_roles_client.list_roles()['roles'] cls._role_map = {r['name']: r['id'] for r in available_roles} LOG.debug('Available roles: %s', cls._role_map.keys()) rbac_role_ids = [] roles = CONF.patrole.rbac_test_roles # TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: if not roles: roles.append(CONF.patrole.rbac_test_role) for role_name in roles: rbac_role_ids.append(cls._role_map.get(role_name)) admin_role_id = cls._role_map.get(CONF.identity.admin_role) if not all([admin_role_id, all(rbac_role_ids)]): missing_roles = [] msg = ("Could not find `[patrole] rbac_test_roles` or " "`[identity] admin_role`, both of which are required for " "RBAC testing.") if not admin_role_id: missing_roles.append(CONF.identity.admin_role) if not all(rbac_role_ids): missing_roles += [role_name for role_name in roles if role_name not in cls._role_map] msg += " Following roles were not found: %s." % ( ", ".join(missing_roles)) msg += " Available roles: %s." % ", ".join(cls._role_map) raise rbac_exceptions.RbacResourceSetupFailed(msg) cls._admin_role_id = admin_role_id cls._rbac_role_ids = rbac_role_ids # Adding backward mapping cls._role_map.update({v: k for k, v in cls._role_map.items()}) @classmethod def _create_user_role_on_project(cls, role_ids): for role_id in role_ids: cls.admin_roles_client.create_user_role_on_project( cls._project_id, cls._user_id, role_id) @classmethod def _list_and_clear_user_roles_on_project(cls, role_ids): roles = cls.admin_roles_client.list_user_roles_on_project( cls._project_id, cls._user_id)['roles'] all_role_ids = [role['id'] for role in roles] # NOTE(felipemonteiro): We do not use ``role_id in all_role_ids`` here # to avoid over-permission errors: if the current list of roles on the # project includes "admin" and "Member", and we are switching to the # "Member" role, then we must delete the "admin" role. Thus, we only # return early if the user's roles on the project are an exact match. if set(role_ids) == set(all_role_ids): return True for role in roles: cls.admin_roles_client.delete_role_from_user_on_project( cls._project_id, cls._user_id, role['id']) return False @contextlib.contextmanager def override_role_and_validate_list(self, admin_resources=None, admin_resource_id=None): """Call ``override_role`` and validate RBAC for a list API action. List actions usually do soft authorization: partial or empty response bodies are returned instead of exceptions. This helper validates that unauthorized roles only return a subset of the available resources. Should only be used for validating list API actions. :param test_obj: Instance of ``tempest.test.BaseTestCase``. :param list admin_resources: The list of resources received before calling the ``override_role_and_validate_list`` function. 
:param UUID admin_resource_id: An ID of a resource created before calling the ``override_role_and_validate_list`` function. :return: py:class:`_ValidateListContext` object. Example:: # the resource created by admin admin_resource_id = ( self.ntp_client.create_dscp_marking_rule() ["dscp_marking_rule"]["id']) with self.override_role_and_validate_list( admin_resource_id=admin_resource_id) as ctx: # the list of resources available for member role ctx.resources = self.ntp_client.list_dscp_marking_rules( policy_id=self.policy_id)["dscp_marking_rules"] """ ctx = _ValidateListContext(admin_resources, admin_resource_id) with self.override_role(): yield ctx ctx._validate() @classmethod def get_auth_providers(cls): """Returns list of auth_providers used within test. Tests may redefine this method to include their own or third party client auth_providers. """ return [cls.os_primary.auth_provider] def _set_override_role_called(self): """Helper for tracking whether ``override_role`` was called.""" self.__override_role_called = True def _set_override_role_caught_exc(self): """Helper for tracking whether exception was thrown inside ``override_role``. """ self.__override_role_caught_exc = True def _validate_override_role_called(self): """Idempotently validate that ``override_role`` is called and reset its value to False for sequential tests. """ was_called = self.__override_role_called self.__override_role_called = False return was_called def _validate_override_role_caught_exc(self): """Idempotently validate that exception was caught inside ``override_role``, so that, by process of elimination, it can be determined whether one was thrown outside (which is invalid). """ caught_exception = self.__override_role_caught_exc self.__override_role_caught_exc = False return caught_exception def is_admin(): """Verifies whether the current test role equals the admin role. :returns: True if ``rbac_test_roles`` contain the admin role. """ roles = CONF.patrole.rbac_test_roles # TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed if CONF.patrole.rbac_test_role: roles.append(CONF.patrole.rbac_test_role) roles = list(set(roles)) # TODO(felipemonteiro): Make this more robust via a context is admin # lookup. return CONF.identity.admin_role in roles
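A minimal usage sketch, not part of the module above: it assumes a Tempest compute base class and the usual `os_primary`/`os_admin` `servers_client` attributes, and shows how a test combines the mixin with `override_role_and_validate_list`.

# Hypothetical test sketch; the base class and client names are assumptions.
from tempest.api.compute import base
from patrole_tempest_plugin import rbac_utils


class ExampleServersRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest):

    @classmethod
    def setup_clients(cls):
        super(ExampleServersRbacTest, cls).setup_clients()
        cls.servers_client = cls.os_primary.servers_client

    def test_list_servers(self):
        # Resources visible to admin before the role is overridden.
        admin_servers = self.os_admin.servers_client.list_servers()['servers']
        with self.override_role_and_validate_list(
                admin_resources=admin_servers) as ctx:
            # Under the overridden (non-admin) role, list and let the context
            # validate that the response is neither empty nor partial.
            ctx.resources = self.servers_client.list_servers()['servers']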
2.109375
2
core/my_widgets/drug_picker.py
kimera1999/pmpktn
0
2845
from initialize import * from core.db.db_func import query_linedrug_list import os import wx class DrugPopup(wx.ComboPopup): def __init__(self, parent): super().__init__() self.lc = None self.mv = parent.mv self.init_d_l = query_linedrug_list(self.mv.sess).all() self.d_l = [] def Create(self, parent): self.lc = wx.ListCtrl( parent, style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER) self.lc.AppendColumn('Thuốc', width=200) self.lc.AppendColumn('Thành phần', width=150) self.lc.AppendColumn('Số lượng') self.lc.AppendColumn('Đơn giá') self.lc.AppendColumn('Cách dùng', width=100) self.lc.Bind(wx.EVT_MOTION, self.OnMotion) self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown) self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Update() return True def Init(self): self.value = -1 self.curitem = -1 def GetControl(self): return self.lc def SetStringValue(self, val): idx = self.lc.FindItem(-1, val) if idx != wx.NOT_FOUND: self.lc.Select(idx) def GetStringValue(self): if self.value >= 0: return self.lc.GetItemText(self.value, col=0) return "" def GetAdjustedSize(self, minWidth, prefHeight, maxHeight): return super().GetAdjustedSize(*popup_size) def Update(self, s=''): self.lc.DeleteAllItems() self.d_l = list(filter( lambda x: s.casefold() in x.name.casefold() or s.casefold() in x.element.casefold(), self.init_d_l)) for index, item in enumerate(self.d_l): self.lc.Append( [item.name, item.element, item.quantity, item.sale_price, item.usage]) if item.quantity <= user_setting["so_luong_thuoc_toi_thieu_de_bao_dong_do"]: self.lc.SetItemTextColour(index, wx.Colour(252, 3, 57, 255)) def OnMotion(self, e): item, flags = self.lc.HitTest(e.GetPosition()) if item >= 0: self.lc.Select(item) self.curitem = item def OnLeftDown(self, e): try: self.value = self.curitem self.ComboCtrl.drugWH = self.d_l[self.value] self.Dismiss() self.ComboCtrl.SelectAll() self.ComboCtrl.SetInsertionPointEnd() except IndexError: self.Dismiss() def OnPopup(self): self.Init() self.Update(self.ComboCtrl.Value) if self.lc.ItemCount > 0: if self.curitem < (self.lc.ItemCount - 1): self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyDown(self): if self.lc.ItemCount > 0: if self.curitem < (self.lc.ItemCount - 1): self.curitem += 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) def KeyUp(self): if self.lc.ItemCount > 0: if self.curitem > 0: self.curitem -= 1 self.lc.Select(self.curitem) self.lc.EnsureVisible(self.curitem) else: self.KeyESC() def KeyESC(self): a = self.ComboCtrl.Value self.Dismiss() self.ComboCtrl.ChangeValue(a) self.ComboCtrl.SetInsertionPointEnd() def KeyReturn(self): self.OnLeftDown(None) def onKeyPress(self, e): c = e.GetKeyCode() if c == wx.WXK_DOWN: self.KeyDown() elif c == wx.WXK_UP: self.KeyUp() elif c == wx.WXK_ESCAPE: self.KeyESC() elif c == wx.WXK_RETURN: self.KeyReturn() class DrugPicker(wx.ComboCtrl): def __init__(self, parent): super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER) self.mv = parent.mv self.drug_popup = DrugPopup(self) self.SetPopupControl(self.drug_popup) self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress) self.Bind(wx.EVT_TEXT, self.onTextChange) self.SetHint("Nhấn Enter để search thuốc") self._drugWH = None self.EnablePopupAnimation(enable=False) @property def drugWH(self): return self._drugWH @drugWH.setter def drugWH(self, dwh): self._drugWH = dwh pg = self.Parent if dwh: pg.usage_unit.Label = dwh.usage_unit + " " pg.sale_unit.Label = dwh.sale_unit + " " else: self.ChangeValue('') pg.dosage_per.ChangeValue('') pg.usage_unit.Label 
= '{Đơn vị} ' pg.times.ChangeValue("") pg.quantity.ChangeValue("") pg.sale_unit.Label = '{Đơn vị} ' pg.usage.ChangeValue("") def onKeyPress(self, e): if os.name == "posix": if e.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_DOWN]: if not self.IsPopupShown(): self.Popup() else: e.Skip() else: if e.GetKeyCode() not in [wx.WXK_RETURN, wx.WXK_UP, wx.WXK_DOWN, wx.WXK_ESCAPE]: if self.IsPopupShown(): a = self.Value self.Dismiss() self.ChangeValue(a) self.SetInsertionPointEnd() e.Skip() def onTextChange(self, e): if os.name == "nt": if e.String == "": self.Clear() elif len(e.String) >= 1: if not self.IsPopupShown(): self.Popup() self.SetInsertionPointEnd() if os.name == "posix": if e.String == "": self.Clear() def Clear(self): self.drugWH = None def refreshPopup(self): self.drug_popup.init_d_l = query_linedrug_list(self.mv.sess).all()
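A hypothetical wiring sketch, not from the original file: `DrugPicker` only requires a parent that exposes an `mv` attribute (whose `sess` is used by the popup's database query); the panel below is an assumption.

# Hypothetical parent panel; `mv` (main view holding the DB session) is assumed.
import wx


class PrescriptionPanel(wx.Panel):
    def __init__(self, parent, mv):
        super().__init__(parent)
        self.mv = mv                        # DrugPicker reads parent.mv
        picker = DrugPicker(self)           # pops up the drug list while typing
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        sizer.Add(picker, 1, wx.EXPAND | wx.ALL, 5)
        self.SetSizer(sizer)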
2.109375
2
em Python/Roteiro4/Roteiro4__grafos.py
GuilhermeEsdras/Grafos
0
2846
from Roteiro4.Roteiro4__funcoes import Grafo


class Grafos:
    # Paraíba graph
    paraiba = Grafo(['J', 'C', 'E', 'P', 'M', 'T', 'Z'])
    for aresta in ['J-C', 'C-E', 'C-E', 'C-P', 'C-P', 'C-M', 'C-T', 'M-T', 'T-Z']:
        paraiba.adicionaAresta(aresta)
    # --- #

    # Complete graph
    grafo_completo = Grafo(['J', 'C', 'E', 'P'])
    for aresta in ['J-C', 'J-P', 'J-E', 'C-E', 'C-P', 'P-E']:
        grafo_completo.adicionaAresta(aresta)
    # --- #

    # K3
    k3 = Grafo(['A', 'B', 'C'])
    for aresta in ['A-B', 'B-C', 'C-A']:
        k3.adicionaAresta(aresta)
    # --- #
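Because the class body builds the graphs at import time, they can be used directly; a small sketch (assuming `Grafo` from Roteiro4__funcoes implements `__str__`):

# Usage sketch; relies only on the attributes defined above.
from Roteiro4.Roteiro4__grafos import Grafos

print(Grafos.paraiba)         # the Paraíba graph, including its parallel edges
print(Grafos.grafo_completo)  # K4 on J, C, E, P
print(Grafos.k3)              # triangle A-B-C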
2.5
2
fuzzybee/joboard/views.py
youtaya/knight
0
2847
<filename>fuzzybee/joboard/views.py # -*- coding: utf-8 -*- from django.shortcuts import get_object_or_404, render_to_response, render from django.http import HttpResponseRedirect, HttpResponse from django.core.urlresolvers import reverse from django.shortcuts import redirect from joboard.models import Factory from joboard.forms import FactoryForm from django.template import RequestContext from django.core.exceptions import ObjectDoesNotExist from urllib import urlopen, urlencode import urllib2 from fuzzybee.conf import b_url, b_ak, geo_table, l_url, app_id, app_key from utils.pack_json import toJSON, fromJSON from django.contrib.auth.decorators import login_required from people.models import People import logging logger = logging.getLogger(__name__) @login_required def index(request): form = None if request.method == 'POST': form = FactoryForm(request.POST) print form if form.is_valid(): factory = form.cleaned_data logger.debug("lat: " + str(factory['fact_lat'])) logger.debug("addr: " + factory['fact_addr']) #save factory in model factmodel = form.save(commit=False) print request.user factmodel.fact_maintainer = People.objects.get(user=request.user) factmodel.save() factid = factmodel.id #save in public server: leancloud and baidu save_factory_cloud(factory, factid) return HttpResponseRedirect(reverse('board:detail', args=(factid,))) else: form = FactoryForm() return render_to_response('board/new.html', {'form': form}, context_instance=RequestContext(request)) @login_required def detail(request, fact_id): print fact_id info = get_object_or_404(Factory, pk=fact_id) return render(request, 'board/detail.html', {'info':info}) @login_required def manager(request): print "manager..." try: people = People.objects.get(user=request.user) factory = Factory.objects.get(fact_maintainer=people) except ObjectDoesNotExist: print 'no hire action...' return redirect(reverse('joboard.views.index', args=[])) return render(request, 'board/manager.html', {'info':factory}) def save_factory_cloud(fact_info, fact_id): title = fact_info['fact_name'] address = fact_info['fact_addr'] lat = fact_info['fact_lat'] lng = fact_info['fact_lng'] num = fact_info['hire_num'] data = { 'title': title.encode("utf-8"), 'address': address.encode("utf-8"), 'latitude': lat, 'longitude': lng, 'job_num': num, 'factory_id': fact_id, } head = { 'X-AVOSCloud-Application-Id': app_id, 'X-AVOSCloud-Application-Key': app_key, 'Content-Type': 'application/json', } req = urllib2.Request(l_url, toJSON(data), head) print str(req) response = urllib2.urlopen(req) #print respone.read() lean_response = fromJSON(response.read()) print lean_response lean_objectId = lean_response['objectId'] # save in Baidu Map params = urlencode({ 'title': title.encode("utf-8"), 'address': address.encode("utf-8"), 'latitude': lat, 'longitude': lng, 'coord_type': 3, 'geotable_id': geo_table, 'ak': b_ak, 'job_num': num, 'lean_id': lean_objectId, }) req = urllib2.Request(b_url, params) #print str(req) response = urllib2.urlopen(req) #print respone.read()
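A possible `joboard/urls.py` wiring for these views; the `board` namespace is implied by the `reverse('board:detail', ...)` call above, while the URL patterns themselves are assumptions.

# Hypothetical urls.py (Django 1.x style, matching the imports used above);
# expected to be included in the project urls with namespace='board'.
from django.conf.urls import url

from joboard import views

urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^(?P<fact_id>\d+)/$', views.detail, name='detail'),
    url(r'^manager/$', views.manager, name='manager'),
]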
1.953125
2
flask/app.py
yatsu/react-flask-graphql-example
21
2848
from flask import Flask
from flask_cors import CORS
from flask_graphql import GraphQLView

from schema import Schema


def create_app(**kwargs):
    app = Flask(__name__)
    app.debug = True
    app.add_url_rule(
        '/graphql',
        view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs)
    )
    return app


if __name__ == '__main__':
    app = create_app(graphiql=True)
    CORS(app, resources={r'/graphql': {'origins': '*'}})
    app.run()
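With the server above running, the endpoint can be exercised from any GraphQL client; the snippet below uses a schema-agnostic introspection query, so it makes no assumption about the fields defined in `schema.Schema`.

# Example client call against the /graphql endpoint started above.
import requests

resp = requests.post(
    'http://127.0.0.1:5000/graphql',
    json={'query': '{ __schema { queryType { name } } }'},
)
print(resp.json())  # e.g. {'data': {'__schema': {'queryType': {'name': 'Query'}}}}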
2.15625
2
mortgagetvm/mortgageOptions.py
AndrewChap/mortgagetvm
0
2849
# Factory-like class for mortgage options class MortgageOptions: def __init__(self,kind,**inputOptions): self.set_default_options() self.set_kind_options(kind = kind) self.set_input_options(**inputOptions) def set_default_options(self): self.optionList = dict() self.optionList['commonDefaults'] = dict( name = None , label = None , color = [0,0,0], houseCost = '100%', # how much you are paying for the house mortgageRate = '0.0%', # Mortgage annual interest rate mortgageLength = '30Y' , # Mortgage length (in years) downPayment = '0%' , # Percentage of house cost paid upfront startingCash = '100%', # Amount of money you have before purchase tvmRate = '7.0%', # Annual rate of return of savings inflationRate = '1.8%', # Annual rate of inflation - NOT IMPLEMENTED appreciationRate = '5.0%', # Annual rate of increase in value of house houseValue = '100%', # how much the house is worth when you bought it originationFees = '0.0%', # Mortgage fees as a percentage of the loan otherMortgageFees = '0.0%', # Other fees as a percentage of the loan otherPurchaseFees = '0.0%', # Other fees as a percentage of home value paymentsPerYear = '12' , # Number of mortgage payments per year taxRate = '0.0%', # Annual taxes as percentage of home value insuranceRate = '0.0%', # Annual insurance as percentage of home value listingFee = '0.0%', # Cost of selling the house capitalGainsTax = '0.0%', # Paid if selling house within two years capitalGainsPeriod = '0' , # Years after which cap gains tax is not applied rentalIncome = '0.0%', # Monthly rental price as percentage of home value rentalPayment = '0.0%', # Monthly rental price as percentage of home value ) self.optionList['mortgageDefaults'] = dict( name = 'mortgage', label = 'Mortgage', mortgageRate = '4.5%', # Mortgage annual interest rate mortgageLength = '30Y' , # Mortgage length (in years) downPayment = '20%' , # Percentage of house cost paid upfront startingCash = '100%', # Amount of money you have before purchase originationFees = '0.5%', # Mortgage fees as a percentage of the loan otherMortgageFees = '0.5%', # Other fees as a percentage of the loan otherPurchaseFees = '0.5%', # Other fees as a percentage of home value paymentsPerYear = '12' , # Number of mortgage payments per year taxRate = '0.6%', # Annual taxes as percentage of home value insuranceRate = '0.4%', # Annual insurance as percentage of home value listingFee = '6.0%', # Cost of selling the house capitalGainsTax = '15%' , # Paid if selling house within two years capitalGainsPeriod = '2' , # Years after which cap gains tax is not applied ) self.optionList['rentalDefaults'] = dict( rentalPayment = '0.6%', # Monthly rental price as percentage of home value ) self.optionList['investmentPropertyDefaults'] = dict( mortgageRate = '4.5%', # Mortgage annual interest rate mortgageLength = '30Y' , # Mortgage length (in years) downPayment = '20%' , # Percentage of house cost paid upfront startingCash = '100%', # Amount of money you have before purchase tvmRate = '7.0%', # Annual rate of return of savings inflationRate = '1.8%', # Annual rate of inflation - NOT IMPLEMENTED appreciationRate = '5.0%', # Annual rate of increase in value of house houseValue = '100%', # how much the house is worth when you bought it originationFees = '0.5%', # Mortgage fees as a percentage of the loan otherMortgageFees = '0.5%', # Other fees as a percentage of the loan otherPurchaseFees = '0.5%', # Other fees as a percentage of home value paymentsPerYear = '12' , # Number of mortgage payments per year taxRate = '0.6%', # Annual 
taxes as percentage of home value insuranceRate = '0.4%', # Annual insurance as percentage of home value listingFee = '6.0%', # Cost of selling the house capitalGainsTax = '15%' , # Paid if selling house within two years capitalGainsPeriod = '2' , # Years after which cap gains tax is not applied rentalIncome = '0.6%', # Monthly rental price as percentage of home value ) def set_kind_options(self,kind,**inputOptions): self.options = self.optionList['commonDefaults'] if kind == None: pass elif kind == 'mortgage': for key,val in self.optionList['mortgageDefaults'].items(): self.options[key] = val elif kind == 'rental': for key,val in self.optionList['rentalDefaults'].items(): self.options[key] = val elif kind == 'investmentProperty': for key,val in self.optionList['investmentPropertyDefaults'].items(): self.options[key] = val def set_input_options(self,**inputOptions): for key,val in inputOptions.items(): self.options[key] = val
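Usage sketch: pick a kind, override any defaults by keyword, and read the merged result from `options`.

# All values keep the string format used by the defaults above.
opts = MortgageOptions('mortgage', mortgageRate='3.75%', downPayment='10%')
print(opts.options['mortgageRate'])   # '3.75%' (overridden)
print(opts.options['taxRate'])        # '0.6%'  (from the mortgage defaults)
print(opts.options['tvmRate'])        # '7.0%'  (from the common defaults)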
2.953125
3
DD/Terrain.py
CodingBullywug/DDreshape
2
2850
from DD.utils import PoolByteArray2NumpyArray, NumpyArray2PoolByteArray
from DD.Entity import Entity
import numpy as np


class Terrain(Entity):
    def __init__(self, json, width, height, scale=4, terrain_types=4):
        super(Terrain, self).__init__(json)
        self._scale = scale
        self.terrain_types = terrain_types
        self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(
            height*self._scale, width*self._scale, self.terrain_types, order='C')

    def get_json(self):
        json = self._json
        json['splat'] = NumpyArray2PoolByteArray(
            self.splat.reshape(np.prod(self.splat.shape), order='C'))
        return json

    def pad(self, top, bottom, left, right):
        self.splat = np.pad(
            self.splat,
            ((top*self._scale, bottom*self._scale),
             (left*self._scale, right*self._scale),
             (0, 0)),
            mode='edge')

    def crop(self, top, bottom, left, right):
        self.splat = self._crop_map_safe(self.splat, top, bottom, left, right, self._scale)

    def fliplr(self, width):
        self.splat = np.fliplr(self.splat)

    def flipud(self, height):
        self.splat = np.flipud(self.splat)

    def rot90(self, width, height):
        self.splat = self._rot90_map(self.splat)

    def rot180(self, width, height):
        self.splat = self._rot180_map(self.splat)

    def rot270(self, width, height):
        self.splat = self._rot270_map(self.splat)
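A small round-trip sketch; the single-key JSON blob is an assumption (the real entity JSON may carry more fields), everything else follows the class above.

# Illustrative only: builds a blank splat map, wraps it, and pads one row of
# tiles on top and bottom (each tile is `scale` splat texels).
import numpy as np

width, height, scale, types = 4, 4, 4, 4
splat = np.zeros(height * scale * width * scale * types, dtype=np.uint8)
blob = {'splat': NumpyArray2PoolByteArray(splat)}

terrain = Terrain(blob, width, height, scale=scale, terrain_types=types)
print(terrain.splat.shape)   # (16, 16, 4)
terrain.pad(top=1, bottom=1, left=0, right=0)
print(terrain.splat.shape)   # (24, 16, 4)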
2.40625
2
bluesky/tests/utils.py
AbbyGi/bluesky
43
2851
from collections import defaultdict
import contextlib
import tempfile
import sys
import threading
import asyncio


@contextlib.contextmanager
def _print_redirect():
    old_stdout = sys.stdout
    try:
        fout = tempfile.TemporaryFile(mode="w+", encoding="utf-8")
        sys.stdout = fout
        yield fout
    finally:
        sys.stdout = old_stdout


class MsgCollector:
    def __init__(self, msg_hook=None):
        self.msgs = []
        self.msg_hook = msg_hook

    def __call__(self, msg):
        self.msgs.append(msg)
        if self.msg_hook:
            self.msg_hook(msg)


class DocCollector:
    def __init__(self):
        self.start = []
        self.stop = {}
        self.descriptor = defaultdict(list)
        self.event = {}

    def insert(self, name, doc):
        if name == "start":
            self.start.append(doc)
        elif name == "stop":
            self.stop[doc["run_start"]] = doc
        elif name == "descriptor":
            self.descriptor[doc["run_start"]].append(doc)
            self.event[doc["uid"]] = []
        elif name == 'bulk_events':
            for k, v in doc.items():
                self.event[k].extend(v)
        else:
            self.event[doc["descriptor"]].append(doc)


def _fabricate_asycio_event(loop):
    th_ev = threading.Event()
    aio_event = None

    def really_make_the_event():
        nonlocal aio_event
        aio_event = asyncio.Event()
        th_ev.set()

    h = loop.call_soon_threadsafe(really_make_the_event)
    if not th_ev.wait(0.1):
        h.cancel()
        raise Exception("failed to make asyncio event")
    return aio_event
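Typical use of `DocCollector` is as a RunEngine subscriber; a sketch assuming the simulated detector shipped with ophyd and the standard `count` plan, neither of which is part of this module.

# Sketch only; `ophyd.sim.det` and the `count` plan are external assumptions.
from bluesky import RunEngine
from bluesky.plans import count
from ophyd.sim import det

dc = DocCollector()
RE = RunEngine({})
RE.subscribe(dc.insert)          # insert(name, doc) matches the callback API
RE(count([det], num=3))
print(len(dc.start), len(dc.stop))                     # 1 1
uid = dc.start[0]['uid']
desc_uid = dc.descriptor[uid][0]['uid']
print(len(dc.event[desc_uid]))                         # 3 events collected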
2.125
2
cli/check_json.py
MJJojo97/openslides-backend
0
2852
import json
import sys

from openslides_backend.models.checker import Checker, CheckException


def main() -> int:
    files = sys.argv[1:]
    if not files:
        print("No files specified.")
        return 1

    possible_modes = tuple(f"--{mode}" for mode in Checker.modes)
    modes = tuple(mode[2:] for mode in possible_modes if mode in files)
    if len(modes) == 0:
        mode = "all"
    elif len(modes) > 1:
        print(f"You can only choose one mode of {', '.join(possible_modes)}.")
        exit(1)
    else:
        mode = modes[0]

    if len(modes):
        files = [x for x in files if x not in possible_modes]

    failed = False
    for f in files:
        with open(f) as data:
            try:
                Checker(
                    json.load(data),
                    mode=mode,
                ).run_check()
            except CheckException as e:
                print(f"Check for {f} failed:\n", e)
                failed = True
            else:
                print(f"Check for {f} successful.")
    return 1 if failed else 0


if __name__ == "__main__":
    sys.exit(main())
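Invocation sketch; the export file name is a placeholder, and the valid `--<mode>` flags are derived from `Checker.modes` at runtime, so none is assumed here.

# Equivalent to running the script from a shell; omitting a --<mode> flag
# falls back to mode "all".
import subprocess

subprocess.run(["python", "cli/check_json.py", "export.json"], check=False)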
2.8125
3
utils/mgmt.py
robinagist/manic
2
2853
from utils.data import load_memfile_configs
from utils.server import plain_response
from sanic import response


def get_mappedfile_configs():
    cfgs = load_memfile_configs()
    return response.json(plain_response(cfgs, 0), status=200)


def created_mapped_file():
    pass


def delete_mapped_file():
    pass
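The helper already returns a Sanic response object, so a route handler can hand it back directly; the app name and URL below are assumptions.

# Hypothetical wiring; only get_mappedfile_configs() comes from this module.
from sanic import Sanic

app = Sanic("manic")


@app.route("/memfiles", methods=["GET"])
async def list_memfiles(request):
    return get_mappedfile_configs()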
1.84375
2
datasets/medicalImage.py
UpCoder/YNe
0
2854
# -*- coding=utf-8 -*- import SimpleITK as itk import pydicom import numpy as np from PIL import Image, ImageDraw import gc from skimage.morphology import disk, dilation import nipy import os from glob import glob import scipy import cv2 from xml.dom.minidom import Document typenames = ['CYST', 'FNH', 'HCC', 'HEM', 'METS'] typeids = [0, 1, 2, 3, 4] def get_voxel_size(file_path): load_image_obj = nipy.load_image(file_path) header = load_image_obj.header x_size = header['srow_x'][0] y_size = header['srow_y'][1] z_size = header['srow_z'][2] return [x_size, y_size, z_size] def read_nii(file_path): return nipy.load_image(file_path).get_data() def read_nii_with_header(file_path): img_obj = nipy.load_image(file_path) header_obj = img_obj.header res_dict = {} res_dict['voxel_spacing'] = [header_obj['srow_x'][0], header_obj['srow_y'][1], header_obj['srow_z'][2]] img_arr = img_obj.get_data() return img_arr, res_dict # 读取文件序列 def read_dicom_series(dir_name): reader = itk.ImageSeriesReader() dicom_series = reader.GetGDCMSeriesFileNames(dir_name) reader.SetFileNames(dicom_series) images = reader.Execute() image_array = itk.GetArrayFromImage(images) return image_array # 将DICOM序列转化成MHD文件 def convert_dicomseries2mhd(dicom_series_dir, save_path): data = read_dicom_series(dicom_series_dir) save_mhd_image(data, save_path) # 读取单个DICOM文件 def read_dicom_file(file_name): header = pydicom.read_file(file_name) image = header.pixel_array image = header.RescaleSlope * image + header.RescaleIntercept return image # 读取mhd文件 def read_mhd_image(file_path, rejust=False): header = itk.ReadImage(file_path) image = np.array(itk.GetArrayFromImage(header)) if rejust: image[image < -70] = -70 image[image > 180] = 180 image = image + 70 return np.array(image) # 保存mhd文件 def save_mhd_image(image, file_name): header = itk.GetImageFromArray(image) itk.WriteImage(header, file_name) # 根据文件名返回期项名 def return_phasename(file_name): phasenames = ['NC', 'ART', 'PV'] for phasename in phasenames: if file_name.find(phasename) != -1: return phasename # 读取DICOM文件中包含的病例ID信息 def read_patientId(dicom_file_path): ds = pydicom.read_file(dicom_file_path) return ds.PatientID # 返回病灶类型和ID的字典类型的数据 key是typename value是typeid def return_type_nameid(): res = {} res['CYST'] = 0 res['FNH'] = 1 res['HCC'] = 2 res['HEM'] = 3 res['METS'] = 4 return res # 返回病灶类型ID和名称的字典类型的数据 key是typeid value是typename def return_type_idname(): res = {} res[0] = 'CYST' res[1] = 'FNH' res[2] = 'HCC' res[3] = 'HEM' res[4] = 'METS' return res # 根据病灶类型的ID返回类型的字符串 def return_typename_byid(typeid): idname_dict = return_type_idname() return idname_dict[typeid] # 根据病灶类型的name返回id的字符串 def return_typeid_byname(typename): nameid_dict = return_type_nameid() return nameid_dict[typename] # 填充图像 def fill_region(image): # image.show() from scipy import ndimage image = ndimage.binary_fill_holes(image).astype(np.uint8) return image def close_operation(binary_image, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_size, kernel_size)) close_r = cv2.morphologyEx(binary_image, cv2.MORPH_CLOSE, kernel) return close_r def open_operation(slice_image, kernel_size=3): opening = cv2.morphologyEx(slice_image, cv2.MORPH_OPEN, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))) return opening def get_kernel_filters(kernel_size): ''' 返回进行kernel操作的5个模版 (1个是正常的dilated操作,还有四个是分别对四个方向进行单独进行dilated的操作) :param kernel_size: :return: [5, kernel_size, kernel_size] ''' kernel_whole = np.ones([kernel_size, kernel_size], np.uint8) half_size = kernel_size // 2 kernel_left = 
np.copy(kernel_whole) kernel_left[:, half_size + 1:] = 0 kernel_right = np.copy(kernel_whole) kernel_right[:, :half_size] = 0 kernel_top = np.copy(kernel_whole) kernel_top[half_size + 1:, :] = 0 kernel_bottom = np.copy(kernel_whole) kernel_bottom[:half_size, :] = 0 return np.concatenate([ np.expand_dims(kernel_whole, axis=0), np.expand_dims(kernel_left, axis=0), np.expand_dims(kernel_right, axis=0), np.expand_dims(kernel_top, axis=0), np.expand_dims(kernel_bottom, axis=0), ], axis=0) def image_erode(img, kernel_size=5): import cv2 import numpy as np kernel = np.ones((kernel_size, kernel_size), np.uint8) erosion = cv2.erode(img, kernel, iterations=1) return erosion def image_expand(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = cv2.dilate(img, kernel) return image def image_erode(img, kernel_size=5): kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size)) image = cv2.erode(img, kernel) return image # 图像膨胀 # def image_expand(image, size): # def find_significant_layer(mask_image): ''' 找到显著层 :param mask_image: [depth, width, height] :return: idx ''' sum_res = np.sum(np.sum(mask_image, axis=1), axis=1) return np.argmax(sum_res) # 将一个矩阵保存为图片 def save_image(image_arr, save_path): image = Image.fromarray(np.asarray(image_arr, np.uint8)) image.save(save_path) def show_image(image): img = np.asarray(image, np.uint8) import matplotlib.pyplot as plt plt.figure("Image") # 这里必须加 cmap='gray' ,否则尽管原图像是灰度图(下图1),但是显示的是伪彩色图像(下图2)(如果不加的话) plt.imshow(img, cmap='gray') plt.axis('on') plt.title('image') plt.show() # 将图像画出来,并且画出标记的病灶 def save_image_with_mask(image_arr, mask_image, save_path): image_arr[image_arr < -70] = -70 image_arr[image_arr > 180] = 180 image_arr = image_arr + 70 shape = list(np.shape(image_arr)) image_arr_rgb = np.zeros(shape=[shape[0], shape[1], 3]) image_arr_rgb[:, :, 0] = image_arr image_arr_rgb[:, :, 1] = image_arr image_arr_rgb[:, :, 2] = image_arr image = Image.fromarray(np.asarray(image_arr_rgb, np.uint8)) image_draw = ImageDraw.Draw(image) [ys, xs] = np.where(mask_image != 0) miny = np.min(ys) maxy = np.max(ys) minx = np.min(xs) maxx = np.max(xs) ROI = image_arr_rgb[miny - 1:maxy + 1, minx - 1:maxx + 1, :] ROI_Image = Image.fromarray(np.asarray(ROI, np.uint8)) for index, y in enumerate(ys): image_draw.point([xs[index], y], fill=(255, 0, 0)) if save_path is None: image.show() else: image.save(save_path) ROI_Image.save(os.path.join(os.path.dirname(save_path), os.path.basename(save_path).split('.')[0] + '_ROI.jpg')) del image, ROI_Image gc.collect() def compress22dim(image): ''' 将一个矩阵如果可能,压缩到三维的空间 ''' shape = list(np.shape(image)) if len(shape) == 3: return np.squeeze(image) return image def extract_ROI(image, mask_image): ''' 提取一幅图像中的ROI ''' xs, ys = np.where(mask_image == 1) xs_min = np.min(xs) xs_max = np.max(xs) ys_min = np.min(ys) ys_max = np.max(ys) return image[xs_min: xs_max + 1, ys_min: ys_max + 1] def resize_image(image, size): image = Image.fromarray(np.asarray(image, np.uint8)) return image.resize((size, size)) # def image_expand(mask_image, r): # return dilation(mask_image, disk(r)) ''' 将形式如(512, 512)格式的图像转化为(1, 512, 512)形式的图片 ''' def expand23D(mask_image): shape = list(np.shape(mask_image)) if len(shape) == 2: mask_image = np.expand_dims(mask_image, axis=0) print('after expand23D', np.shape(mask_image)) return mask_image ''' 返回一个mask图像的中心,是对xyz坐标计算平均值之后的结果 ''' def find_centroid3D(image, flag): [x, y, z] = np.where(image == flag) centroid_x = int(np.mean(x)) centroid_y = 
int(np.mean(y)) centroid_z = int(np.mean(z)) return centroid_x, centroid_y, centroid_z ''' 将[w, h, d]reshape为[d, w, h] ''' def convert2depthfirst(image): image = np.array(image) shape = np.shape(image) new_image = np.zeros([shape[2], shape[0], shape[1]]) for i in range(shape[2]): new_image[i, :, :] = image[:, :, i] return new_image # def test_convert2depthfirst(): # zeros = np.zeros([100, 100, 30]) # after_zeros = convert2depthfirst(zeros) # print np.shape(after_zeros) # test_convert2depthfirst() ''' 将[d, w, h]reshape为[w, h, d] ''' def convert2depthlastest(image): image = np.array(image) shape = np.shape(image) new_image = np.zeros([shape[1], shape[2], shape[0]]) for i in range(shape[0]): new_image[:, :, i] = image[i, :, :] return new_image def read_image_file(file_path): if file_path.endswith('.nii'): return read_nil(file_path) if file_path.endswith('.mhd'): return read_mhd_image(file_path) print('the format of image is not support in this version') return None def processing(image, size_training): image = np.array(image) # numpy_clip bottom = -300. top = 500. image = np.clip(image, bottom, top) # to float minval = -350 interv = 500 - (-350) image -= minval # scale down to 0 - 2 image /= (interv / 2) # zoom desired_size = [size_training, size_training] desired_size = np.asarray(desired_size, dtype=np.int) zooms = desired_size / np.array(image[:, :, 0].shape, dtype=np.float) print(zooms) after_zoom = np.zeros([size_training, size_training, np.shape(image)[2]]) for i in range(np.shape(after_zoom)[2]): after_zoom[:, :, i] = scipy.ndimage.zoom(image[:, :, i], zooms, order=1) # order = 1 => biliniear interpolation return after_zoom def preprocessing_agumentation(image, size_training): image = np.array(image) # numpy_clip c_minimum = -300. c_maximum = 500. s_maximum = 255. image = np.clip(image, c_minimum, c_maximum) interv = float(c_maximum - c_minimum) image = (image - c_minimum) / interv * s_maximum minval = 0. maxval = 255. image -= minval interv = maxval - minval # print('static scaler 0', interv) # scale down to 0 - 2 # image /= (interv / 2) image = np.asarray(image, np.float32) image = image / interv image = image * 2.0 # zoom desired_size = [size_training, size_training] desired_size = np.asarray(desired_size, dtype=np.int) zooms = desired_size / np.array(image[:, :, 0].shape, dtype=np.float) print(zooms) after_zoom = np.zeros([size_training, size_training, np.shape(image)[2]]) for i in range(np.shape(after_zoom)[2]): after_zoom[:, :, i] = scipy.ndimage.zoom(image[:, :, i], zooms, order=1) # order = 1 => biliniear interpolation return after_zoom def MICCAI2018_Iterator(image_dir, execute_func, *parameters): ''' 遍历MICCAI2018文件夹的框架 :param execute_func: :return: ''' for sub_name in ['train', 'val', 'test']: names = os.listdir(os.path.join(image_dir, sub_name)) for name in names: cur_slice_dir = os.path.join(image_dir, sub_name, name) execute_func(cur_slice_dir, *parameters) def dicom2jpg_singlephase(slice_dir, save_dir, phase_name='PV'): mhd_image_path = glob(os.path.join(slice_dir, phase_name+'_Image*.mhd'))[0] mhd_mask_path = glob(os.path.join(slice_dir, phase_name + '_Mask*.mhd'))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_image = np.expand_dims(mhd_image, axis=2) mhd_image = np.concatenate([mhd_image, mhd_image, mhd_image], axis=2) mask_image = np.asarray(np.squeeze(mask_image), np.uint8) max_v = 300. min_v = -350. 
mhd_image[mhd_image > max_v] = max_v mhd_image[mhd_image < min_v] = min_v print(np.mean(mhd_image, dtype=np.float32)) mhd_image -= np.mean(mhd_image) min_v = np.min(mhd_image) max_v = np.max(mhd_image) interv = max_v - min_v mhd_image = (mhd_image - min_v) / interv file_name = os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.jpg') if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape of mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) cv2.imwrite(save_path, mhd_image * 255) xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml') if not os.path.exists(xml_save_dir): os.makedirs(xml_save_dir) evulate_gt_dir = os.path.join(save_dir, phase_name, dataset_name+'_gt') if not os.path.exists(evulate_gt_dir): os.makedirs(evulate_gt_dir) xml_save_path = os.path.join(xml_save_dir, file_name + '.xml') gt_save_path = os.path.join(evulate_gt_dir, file_name + '.txt') # for evulate doc = Document() root_node = doc.createElement('annotation') doc.appendChild(root_node) folder_name = os.path.basename(save_dir) + '/' + phase_name folder_node = doc.createElement('folder') root_node.appendChild(folder_node) folder_txt_node = doc.createTextNode(folder_name) folder_node.appendChild(folder_txt_node) file_name = file_name + '.jpg' filename_node = doc.createElement('filename') root_node.appendChild(filename_node) filename_txt_node = doc.createTextNode(file_name) filename_node.appendChild(filename_txt_node) shape = list(np.shape(mhd_image)) size_node = doc.createElement('size') root_node.appendChild(size_node) width_node = doc.createElement('width') width_node.appendChild(doc.createTextNode(str(shape[0]))) height_node = doc.createElement('height') height_node.appendChild(doc.createTextNode(str(shape[1]))) depth_node = doc.createElement('depth') depth_node.appendChild(doc.createTextNode(str(3))) size_node.appendChild(width_node) size_node.appendChild(height_node) size_node.appendChild(depth_node) mask_image[mask_image != 1] = 0 xs, ys = np.where(mask_image == 1) min_x = np.min(xs) min_y = np.min(ys) max_x = np.max(xs) max_y = np.max(ys) object_node = doc.createElement('object') root_node.appendChild(object_node) name_node = doc.createElement('name') name_node.appendChild(doc.createTextNode('Cyst')) object_node.appendChild(name_node) truncated_node = doc.createElement('truncated') object_node.appendChild(truncated_node) truncated_node.appendChild(doc.createTextNode('0')) difficult_node = doc.createElement('difficult') object_node.appendChild(difficult_node) difficult_node.appendChild(doc.createTextNode('0')) bndbox_node = doc.createElement('bndbox') object_node.appendChild(bndbox_node) xmin_node = doc.createElement('xmin') xmin_node.appendChild(doc.createTextNode(str(min_y))) bndbox_node.appendChild(xmin_node) ymin_node = doc.createElement('ymin') ymin_node.appendChild(doc.createTextNode(str(min_x))) bndbox_node.appendChild(ymin_node) xmax_node = doc.createElement('xmax') xmax_node.appendChild(doc.createTextNode(str(max_y))) bndbox_node.appendChild(xmax_node) ymax_node = doc.createElement('ymax') ymax_node.appendChild(doc.createTextNode(str(max_x))) bndbox_node.appendChild(ymax_node) with open(xml_save_path, 'wb') as f: f.write(doc.toprettyxml(indent='\t', encoding='utf-8')) line = '%s %d %d %d %d\n' % ('Cyst', min_y, min_x, max_y, max_x) print(line) lines = [] lines.append(line) with open(gt_save_path, 'w') as f: 
f.writelines(lines) f.close() def dicom2jpg_multiphase(slice_dir, save_dir, phasenames=['NC', 'ART', 'PV'], target_phase='PV', suffix_name='npy'): target_mask = None mhd_images = [] for phase_name in phasenames: mhd_image_path = glob(os.path.join(slice_dir, 'Image_%s*.mhd' % phase_name))[0] mhd_mask_path = glob(os.path.join(slice_dir, 'Mask_%s*.mhd' % phase_name))[0] mhd_image = read_mhd_image(mhd_image_path) mask_image = read_mhd_image(mhd_mask_path) mhd_image = np.asarray(np.squeeze(mhd_image), np.float32) mhd_images.append(mhd_image) mask_image = np.asarray(np.squeeze(mask_image), np.uint8) if phase_name == target_phase: target_mask = mask_image print(np.shape(mhd_images)) mask_image = target_mask mask_image_shape = list(np.shape(mask_image)) if len(mask_image_shape) == 3: mask_image = mask_image[1, :, :] print('the mask image shape is ', np.shape(mask_image)) if suffix_name == 'jpg': mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0]) mhd_image = mhd_images elif suffix_name == 'npy': mhd_images = np.concatenate(np.asarray(mhd_images, np.float), axis=0) mhd_images = np.transpose(np.asarray(mhd_images, np.float32), axes=[1, 2, 0]) mhd_image = mhd_images else: print('the suffix name does not support') assert False max_v = 300. min_v = -350. mhd_image[mhd_image > max_v] = max_v mhd_image[mhd_image < min_v] = min_v print(np.mean(mhd_image, dtype=np.float32)) mhd_image -= np.mean(mhd_image) min_v = np.min(mhd_image) max_v = np.max(mhd_image) interv = max_v - min_v mhd_image = (mhd_image - min_v) / interv file_name = os.path.basename(slice_dir) dataset_name = os.path.basename(os.path.dirname(slice_dir)) phase_name = ''.join(phasenames) save_path = os.path.join(save_dir, phase_name, dataset_name, file_name+'.' + suffix_name) if not os.path.exists(os.path.dirname(save_path)): os.makedirs(os.path.dirname(save_path)) print('the shape of mhd_image is ', np.shape(mhd_image), np.min(mhd_image), np.max(mhd_image)) #cv2.imwrite(save_path, mhd_image * 255) np.save(save_path, mhd_image * 255) xml_save_dir = os.path.join(save_dir, phase_name, dataset_name+'_xml') if not os.path.exists(xml_save_dir): os.makedirs(xml_save_dir) evulate_gt_dir = os.path.join(save_dir, phase_name, dataset_name+'_gt') if not os.path.exists(evulate_gt_dir): os.makedirs(evulate_gt_dir) xml_save_path = os.path.join(xml_save_dir, file_name + '.xml') gt_save_path = os.path.join(evulate_gt_dir, file_name + '.txt') # for evulate doc = Document() root_node = doc.createElement('annotation') doc.appendChild(root_node) folder_name = os.path.basename(save_dir) + '/' + phase_name folder_node = doc.createElement('folder') root_node.appendChild(folder_node) folder_txt_node = doc.createTextNode(folder_name) folder_node.appendChild(folder_txt_node) file_name = file_name + '.jpg' filename_node = doc.createElement('filename') root_node.appendChild(filename_node) filename_txt_node = doc.createTextNode(file_name) filename_node.appendChild(filename_txt_node) shape = list(np.shape(mhd_image)) size_node = doc.createElement('size') root_node.appendChild(size_node) width_node = doc.createElement('width') width_node.appendChild(doc.createTextNode(str(shape[0]))) height_node = doc.createElement('height') height_node.appendChild(doc.createTextNode(str(shape[1]))) depth_node = doc.createElement('depth') depth_node.appendChild(doc.createTextNode(str(3))) size_node.appendChild(width_node) size_node.appendChild(height_node) size_node.appendChild(depth_node) mask_image[mask_image != 1] = 0 xs, ys = np.where(mask_image == 1) print(xs, 
ys) min_x = np.min(xs) min_y = np.min(ys) max_x = np.max(xs) max_y = np.max(ys) object_node = doc.createElement('object') root_node.appendChild(object_node) name_node = doc.createElement('name') name_node.appendChild(doc.createTextNode('Cyst')) object_node.appendChild(name_node) truncated_node = doc.createElement('truncated') object_node.appendChild(truncated_node) truncated_node.appendChild(doc.createTextNode('0')) difficult_node = doc.createElement('difficult') object_node.appendChild(difficult_node) difficult_node.appendChild(doc.createTextNode('0')) bndbox_node = doc.createElement('bndbox') object_node.appendChild(bndbox_node) xmin_node = doc.createElement('xmin') xmin_node.appendChild(doc.createTextNode(str(min_y))) bndbox_node.appendChild(xmin_node) ymin_node = doc.createElement('ymin') ymin_node.appendChild(doc.createTextNode(str(min_x))) bndbox_node.appendChild(ymin_node) xmax_node = doc.createElement('xmax') xmax_node.appendChild(doc.createTextNode(str(max_y))) bndbox_node.appendChild(xmax_node) ymax_node = doc.createElement('ymax') ymax_node.appendChild(doc.createTextNode(str(max_x))) bndbox_node.appendChild(ymax_node) with open(xml_save_path, 'wb') as f: f.write(doc.toprettyxml(indent='\t', encoding='utf-8')) line = '%s %d %d %d %d\n' % ('Cyst', min_y, min_x, max_y, max_x) print(line) lines = [] lines.append(line) with open(gt_save_path, 'w') as f: f.writelines(lines) f.close() def static_pixel_num(image_dir, target_phase='PV'): # {0: 217784361, 1: 1392043, 2: 209128, 3: 1486676, 4: 458278, 5: 705482} # {0: 1.0, 156, 1041, 146, 475, 308} static_res = { 0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0 } from convert2jpg import extract_bboxs_mask_from_mask from config import pixel2type, type2pixel for sub_name in ['train', 'val', 'test']: names = os.listdir(os.path.join(image_dir, sub_name)) for name in names: cur_slice_dir = os.path.join(image_dir, sub_name, name) mhd_mask_path = glob(os.path.join(cur_slice_dir, 'Mask_%s*.mhd' % target_phase))[0] mask_image = read_mhd_image(mhd_mask_path) min_xs, min_ys, max_xs, max_ys, names, mask = extract_bboxs_mask_from_mask(mask_image, os.path.join(cur_slice_dir, 'tumor_types')) for key in pixel2type.keys(): mask[mask == key] = type2pixel[pixel2type[key]][0] pixel_value_set = np.unique(mask) print pixel_value_set for value in list(pixel_value_set): static_res[value] += np.sum(mask == value) print(static_res) def convertCase2PNGs(volume_path, seg_path, save_dir=None, z_axis=5.0, short_edge=64): ''' 将nii转化成PNG :param volume_path: nii的路径 :param seg_path: :return: ''' from skimage.measure import label volume, header = read_nii_with_header(volume_path) # volume = np.transpose(volume, [1, 0, 2]) volume = np.asarray(volume, np.float32) max_v = 250. min_v = -200. 
# max_v = 180 # min_v = -70 volume[volume > max_v] = max_v volume[volume < min_v] = min_v volume -= np.mean(volume) min_v = np.min(volume) max_v = np.max(volume) interv = max_v - min_v volume = (volume - min_v) / interv z_axis_case = header['voxel_spacing'][-1] slice_num = int(z_axis / z_axis_case) if slice_num == 0: slice_num = 1 seg = read_nii(seg_path) # print np.shape(volume), np.shape(seg) [_, _, channel] = np.shape(volume) imgs = [] names = [] masks = [] tumor_weakly_masks = [] liver_masks = [] i = slice_num + 1 pos_slice_num = np.sum(np.sum(np.sum(seg == 2, axis=0), axis=0) != 0) total_slice_num = np.shape(seg)[-1] print('pos_slice_num is ', pos_slice_num, total_slice_num) neg_rate = (3.0 * pos_slice_num) / total_slice_num # 正样本是负样本的 if neg_rate > 1.0: neg_rate = 1.0 for i in range(channel): seg_slice = seg[:, :, i] mid_slice = np.expand_dims(volume[:, :, i], axis=0) pre_slice = [] # pre_end = i - slice_num / 2 # pre_end = i # for j in range(1, slice_num + 1): # z = pre_end - j # if z < 0: # z = 0 # pre_slice.append(volume[:, :, z]) if (i - 1) < 0: pre_slice = np.expand_dims(volume[:, :, i], axis=0) else: pre_slice = np.expand_dims(volume[:, :, i-1], axis=0) next_slice = [] # next_start = i + slice_num / 2 # next_start = i # for j in range(1, slice_num + 1): # z = next_start + j # if z >= channel: # z = channel - 1 # next_slice.append(volume[:, :, z]) if (i + 1) >= channel: next_slice = np.expand_dims(volume[:, :, i], axis=0) else: next_slice = np.expand_dims(volume[:, :, i+1], axis=0) # pre_slice = np.mean(pre_slice, axis=0, keepdims=True) # next_slice = np.mean(next_slice, axis=0, keepdims=True) imgs.append( np.transpose(np.concatenate([pre_slice, mid_slice, next_slice], axis=0), axes=[1, 2, 0])) names.append(os.path.basename(volume_path).split('.')[0].split('-')[1] + '-' + str(i)) binary_seg_slice = np.asarray(seg_slice == 2, np.uint8) # print np.max(binary_seg_slice) masks.append(binary_seg_slice) labeled_mask = label(binary_seg_slice) weakly_label_mask = np.zeros_like(binary_seg_slice, np.uint8) for idx in range(1, np.max(labeled_mask) + 1): xs, ys = np.where(labeled_mask == idx) min_xs = np.min(xs) max_xs = np.max(xs) min_ys = np.min(ys) max_ys = np.max(ys) weakly_label_mask[min_xs: max_xs, min_ys: max_ys] = 1 liver_masks.append(np.asarray(seg_slice == 1, np.uint8)) tumor_weakly_masks.append(weakly_label_mask) # i += 1 return np.asarray(imgs, np.float32), np.asarray(masks, np.uint8), np.asarray(liver_masks, np.uint8), np.asarray( tumor_weakly_masks, np.uint8) def statics_num_slices_lesion(nii_dir): ''' 统计每个case,有多少slice具有病灶 :param nii_dir: :return: ''' mask_nii_paths = glob(os.path.join(nii_dir, 'segmentation-*.nii')) for mask_nii_path in mask_nii_paths: mask_img = read_nii(mask_nii_path) has_lesion = np.asarray(np.sum(np.sum(mask_img == 2, axis=0), axis=0)>0, np.bool) num_lesion_slices = np.sum(has_lesion) print os.path.basename(mask_nii_path), num_lesion_slices, np.shape(mask_img)[-1] if __name__ == '__main__': # for phasename in ['NC', 'ART', 'PV']: # convert_dicomseries2mhd( # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/' + phasename, # '/home/give/github/Cascaded-FCN-Tensorflow/Cascaded-FCN/tensorflow-unet/z_testdata/304176-2802027/MHD/' + phasename + '.mhd' # ) # names = os.listdir('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2') # for name in names: # path = os.path.join('/home/give/Documents/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2', name) # image = 
read_nil(path) # print(np.shape(image)) # conver2JPG single phase # image_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0' # save_dir = '/home/give/Documents/dataset/MICCAI2018_Detection/SinglePhase' # phase_name = 'NC' # MICCAI2018_Iterator(image_dir, dicom2jpg_singlephase, save_dir, phase_name) # conver2JPG multi phase # image_dir = '/home/give/Documents/dataset/LiverLesionDetection_Splited/0' # static_pixel_num(image_dir, 'PV') statics_num_slices_lesion('/media/give/CBMIR/ld/dataset/ISBI2017/media/nas/01_Datasets/CT/LITS/Training_Batch_2')
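A small reading sketch using the helpers defined above; the .mhd path is a placeholder.

# Read an .mhd slice, clamp/shift it for display, and map lesion type ids.
image = read_mhd_image('PV_Image_1.mhd', rejust=True)
image = compress22dim(image)        # drop a singleton depth axis if present
print(image.shape)
print(return_typename_byid(2))      # 'HCC'
print(return_typeid_byname('CYST')) # 0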
2.328125
2
setup.py
marcus-luck/zohoreader
1
2855
from setuptools import setup


def readme():
    with open('README.rst') as f:
        return f.read()


setup(name='zohoreader',
      version='0.1',
      description='A simple reader for zoho projects API to get all projects, users and timereports',
      long_description=readme(),
      classifiers=[
          'Development Status :: 3 - Alpha',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 3.5',
      ],
      keywords='zoho, API, zoho project',
      url='https://github.com/marcus-luck/zohoreader',
      author='<NAME>',
      author_email='<EMAIL>',
      license='MIT',
      packages=['zohoreader'],
      zip_safe=False,
      install_requires=[
          'requests>=2.12.4',
          'python-dateutil>=2.7.2'
      ],
      test_suite='nose.collector',
      tests_require=['nose', 'nose-cover3'],
      include_package_data=True
      )
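After an install (for example `pip install -e .`), the metadata declared above is visible to packaging tools; a quick check:

# Confirms the distribution described above is installed; version matches setup().
import pkg_resources

dist = pkg_resources.get_distribution("zohoreader")
print(dist.project_name, dist.version)   # zohoreader 0.1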
1.296875
1
web/repositories.bzl
Ubehebe/rules_webtesting
0
2856
<gh_stars>0 # Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Defines external repositories needed by rules_webtesting.""" load("//web/internal:platform_http_file.bzl", "platform_http_file") load("@bazel_gazelle//:deps.bzl", "go_repository") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") load("@bazel_tools//tools/build_defs/repo:java.bzl", "java_import_external") # NOTE: URLs are mirrored by an asynchronous review process. They must # be greppable for that to happen. It's OK to submit broken mirror # URLs, so long as they're correctly formatted. Bazel's downloader # has fast failover. def web_test_repositories(**kwargs): """Defines external repositories required by Webtesting Rules. This function exists for other Bazel projects to call from their WORKSPACE file when depending on rules_webtesting using http_archive. This function makes it easy to import these transitive dependencies into the parent workspace. This will check to see if a repository has been previously defined before defining a new repository. Alternatively, individual dependencies may be excluded with an "omit_" + name parameter. This is useful for users who want to be rigorous about declaring their own direct dependencies, or when another Bazel project is depended upon (e.g. rules_closure) that defines the same dependencies as this one (e.g. com_google_guava.) Alternatively, a whitelist model may be used by calling the individual functions this method references. Please note that while these dependencies are defined, they are not actually downloaded, unless a target is built that depends on them. Args: **kwargs: omit_... parameters used to prevent importing specific dependencies. 
""" if should_create_repository("bazel_skylib", kwargs): bazel_skylib() if should_create_repository("com_github_blang_semver", kwargs): com_github_blang_semver() if should_create_repository("com_github_gorilla_context", kwargs): com_github_gorilla_context() if should_create_repository("com_github_gorilla_mux", kwargs): com_github_gorilla_mux() if should_create_repository("com_github_tebeka_selenium", kwargs): com_github_tebeka_selenium() if should_create_repository("com_github_urllib3", kwargs): com_github_urllib3() if should_create_repository("com_google_code_findbugs_jsr305", kwargs): com_google_code_findbugs_jsr305() if should_create_repository("com_google_code_gson", kwargs): com_google_code_gson() if should_create_repository( "com_google_errorprone_error_prone_annotations", kwargs, ): com_google_errorprone_error_prone_annotations() if should_create_repository("com_google_guava", kwargs): com_google_guava() if should_create_repository("com_squareup_okhttp3_okhttp", kwargs): com_squareup_okhttp3_okhttp() if should_create_repository("com_squareup_okio", kwargs): com_squareup_okio() if should_create_repository("commons_codec", kwargs): commons_codec() if should_create_repository("commons_logging", kwargs): commons_logging() if should_create_repository("junit", kwargs): junit() if should_create_repository("net_bytebuddy", kwargs): net_bytebuddy() if should_create_repository("org_apache_commons_exec", kwargs): org_apache_commons_exec() if should_create_repository("org_apache_httpcomponents_httpclient", kwargs): org_apache_httpcomponents_httpclient() if should_create_repository("org_apache_httpcomponents_httpcore", kwargs): org_apache_httpcomponents_httpcore() if should_create_repository("org_hamcrest_core", kwargs): org_hamcrest_core() if should_create_repository("org_jetbrains_kotlin_stdlib", kwargs): org_jetbrains_kotlin_stdlib() if should_create_repository("org_json", kwargs): org_json() if should_create_repository("org_seleniumhq_py", kwargs): org_seleniumhq_py() if should_create_repository("org_seleniumhq_selenium_api", kwargs): org_seleniumhq_selenium_api() if should_create_repository("org_seleniumhq_selenium_remote_driver", kwargs): org_seleniumhq_selenium_remote_driver() if kwargs.keys(): print("The following parameters are unknown: " + str(kwargs.keys())) def should_create_repository(name, args): """Returns whether the name repository should be created. This allows creation of a repository to be disabled by either an "omit_" _+ name parameter or by previously defining a rule for the repository. The args dict will be mutated to remove "omit_" + name. Args: name: The name of the repository that should be checked. args: A dictionary that contains "omit_...": bool pairs. Returns: boolean indicating whether the repository should be created. """ key = "omit_" + name if key in args: val = args.pop(key) if val: return False if native.existing_rule(name): return False return True def browser_repositories(firefox = False, chromium = False, sauce = False): """Sets up repositories for browsers defined in //browsers/.... This should only be used on an experimental basis; projects should define their own browsers. Args: firefox: Configure repositories for //browsers:firefox-native. chromium: Configure repositories for //browsers:chromium-native. sauce: Configure repositories for //browser/sauce:chrome-win10. 
""" if chromium: org_chromium_chromedriver() org_chromium_chromium() if firefox: org_mozilla_firefox() org_mozilla_geckodriver() if sauce: com_saucelabs_sauce_connect() def bazel_skylib(): http_archive( name = "bazel_skylib", sha256 = "", strip_prefix = "bazel-skylib-e9fc4750d427196754bebb0e2e1e38d68893490a", urls = [ "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz", "https://github.com/bazelbuild/bazel-skylib/archive/e9fc4750d427196754bebb0e2e1e38d68893490a.tar.gz", ], ) def com_github_blang_semver(): go_repository( name = "com_github_blang_semver", importpath = "github.com/blang/semver", sha256 = "3d9da53f4c2d3169bfa9b25f2f36f301a37556a47259c870881524c643c69c57", strip_prefix = "semver-3.5.1", urls = [ "https://mirror.bazel.build/github.com/blang/semver/archive/v3.5.1.tar.gz", "https://github.com/blang/semver/archive/v3.5.1.tar.gz", ], ) def com_github_gorilla_context(): go_repository( name = "com_github_gorilla_context", importpath = "github.com/gorilla/context", sha256 = "2dfdd051c238695bf9ebfed0bf6a8c533507ac0893bce23be5930e973736bb03", strip_prefix = "context-1.1.1", urls = [ "https://mirror.bazel.build/github.com/gorilla/context/archive/v1.1.1.tar.gz", "https://github.com/gorilla/context/archive/v1.1.1.tar.gz", ], ) def com_github_gorilla_mux(): go_repository( name = "com_github_gorilla_mux", importpath = "github.com/gorilla/mux", sha256 = "0dc18fb09413efea7393e9c2bd8b5b442ce08e729058f5f7e328d912c6c3d3e3", strip_prefix = "mux-1.6.2", urls = [ "https://mirror.bazel.build/github.com/gorilla/mux/archive/v1.6.2.tar.gz", "https://github.com/gorilla/mux/archive/v1.6.2.tar.gz", ], ) def com_github_tebeka_selenium(): go_repository( name = "com_github_tebeka_selenium", importpath = "github.com/tebeka/selenium", sha256 = "c506637fd690f4125136233a3ea405908b8255e2d7aa2aa9d3b746d96df50dcd", strip_prefix = "selenium-a49cf4b98a36c2b21b1ccb012852bd142d5fc04a", urls = [ "https://mirror.bazel.build/github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz", "https://github.com/tebeka/selenium/archive/a49cf4b98a36c2b21b1ccb012852bd142d5fc04a.tar.gz", ], ) def com_github_urllib3(): http_archive( name = "com_github_urllib3", build_file = str(Label("//build_files:com_github_urllib3.BUILD")), sha256 = "a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf", strip_prefix = "urllib3-1.23", urls = [ "https://files.pythonhosted.org/packages/3c/d2/dc5471622bd200db1cd9319e02e71bc655e9ea27b8e0ce65fc69de0dac15/urllib3-1.23.tar.gz", ], ) def com_google_code_findbugs_jsr305(): java_import_external( name = "com_google_code_findbugs_jsr305", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar", "https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar", ], jar_sha256 = "766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7", licenses = ["notice"], # BSD 3-clause ) def com_google_code_gson(): java_import_external( name = "com_google_code_gson", jar_sha256 = "233a0149fc365c9f6edbd683cfe266b19bdc773be98eabdaf6b3c924b48e7d81", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar", "https://repo1.maven.org/maven2/com/google/code/gson/gson/2.8.5/gson-2.8.5.jar", ], licenses = ["notice"], # The Apache Software License, Version 2.0 ) def com_google_errorprone_error_prone_annotations(): java_import_external( name = 
"com_google_errorprone_error_prone_annotations", jar_sha256 = "10a5949aa0f95c8de4fd47edfe20534d2acefd8c224f8afea1f607e112816120", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar", "https://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.3.1/error_prone_annotations-2.3.1.jar", ], licenses = ["notice"], # Apache 2.0 ) def com_google_guava(): java_import_external( name = "com_google_guava", jar_sha256 = "a0e9cabad665bc20bcd2b01f108e5fc03f756e13aea80abaadb9f407033bea2c", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.9-jre.jar", "https://repo1.maven.org/maven2/com/google/guava/guava/26.0-jre/guava-26.0-jre.jar", ], licenses = ["notice"], # Apache 2.0 exports = [ "@com_google_code_findbugs_jsr305", "@com_google_errorprone_error_prone_annotations", ], ) def com_saucelabs_sauce_connect(): platform_http_file( name = "com_saucelabs_sauce_connect", licenses = ["by_exception_only"], # SauceLabs EULA amd64_sha256 = "dd53f2cdcec489fbc2443942b853b51bf44af39f230600573119cdd315ddee52", amd64_urls = [ "https://saucelabs.com/downloads/sc-4.5.1-linux.tar.gz", ], macos_sha256 = "920ae7bd5657bccdcd27bb596593588654a2820486043e9a12c9062700697e66", macos_urls = [ "https://saucelabs.com/downloads/sc-4.5.1-osx.zip", ], windows_sha256 = "ec11b4ee029c9f0cba316820995df6ab5a4f394053102e1871b9f9589d0a9eb5", windows_urls = [ "https://saucelabs.com/downloads/sc-4.4.12-win32.zip", ], ) def com_squareup_okhttp3_okhttp(): java_import_external( name = "com_squareup_okhttp3_okhttp", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar", "https://repo1.maven.org/maven2/com/squareup/okhttp3/okhttp/3.9.1/okhttp-3.9.1.jar", ], jar_sha256 = "a0d01017a42bba26e507fc6d448bb36e536f4b6e612f7c42de30bbdac2b7785e", licenses = ["notice"], # Apache 2.0 deps = [ "@com_squareup_okio", "@com_google_code_findbugs_jsr305", ], ) def com_squareup_okio(): java_import_external( name = "com_squareup_okio", jar_sha256 = "79b948cf77504750fdf7aeaf362b5060415136ab6635e5113bd22925e0e9e737", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar", "https://repo1.maven.org/maven2/com/squareup/okio/okio/2.0.0/okio-2.0.0.jar", ], licenses = ["notice"], # Apache 2.0 deps = [ "@com_google_code_findbugs_jsr305", "@org_jetbrains_kotlin_stdlib", ], ) def commons_codec(): java_import_external( name = "commons_codec", jar_sha256 = "e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar", "https://repo1.maven.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar", ], licenses = ["notice"], # Apache License, Version 2.0 ) def commons_logging(): java_import_external( name = "commons_logging", jar_sha256 = "daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar", "https://repo1.maven.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar", ], licenses = ["notice"], # The Apache Software License, Version 2.0 ) def junit(): java_import_external( name = "junit", jar_sha256 = "59721f0805e223d84b90677887d9ff567dc534d7c502ca903c0c2b17f05c116a", jar_urls = [ 
"https://mirror.bazel.build/repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", "https://repo1.maven.org/maven2/junit/junit/4.12/junit-4.12.jar", ], licenses = ["reciprocal"], # Eclipse Public License 1.0 testonly_ = 1, deps = ["@org_hamcrest_core"], ) def net_bytebuddy(): java_import_external( name = "net_bytebuddy", jar_sha256 = "4b87ad52a8f64a1197508e176e84076584160e3d65229ff757efee870cd4a8e2", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar", "https://repo1.maven.org/maven2/net/bytebuddy/byte-buddy/1.8.19/byte-buddy-1.8.19.jar", ], licenses = ["notice"], # Apache 2.0 deps = ["@com_google_code_findbugs_jsr305"], ) def org_apache_commons_exec(): java_import_external( name = "org_apache_commons_exec", jar_sha256 = "cb49812dc1bfb0ea4f20f398bcae1a88c6406e213e67f7524fb10d4f8ad9347b", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar", "https://repo1.maven.org/maven2/org/apache/commons/commons-exec/1.3/commons-exec-1.3.jar", ], licenses = ["notice"], # Apache License, Version 2.0 ) def org_apache_httpcomponents_httpclient(): java_import_external( name = "org_apache_httpcomponents_httpclient", jar_sha256 = "c03f813195e7a80e3608d0ddd8da80b21696a4c92a6a2298865bf149071551c7", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar", "https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar", ], licenses = ["notice"], # Apache License, Version 2.0 deps = [ "@org_apache_httpcomponents_httpcore", "@commons_logging", "@commons_codec", ], ) def org_apache_httpcomponents_httpcore(): java_import_external( name = "org_apache_httpcomponents_httpcore", jar_sha256 = "1b4a1c0b9b4222eda70108d3c6e2befd4a6be3d9f78ff53dd7a94966fdf51fc5", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar", "https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.jar", ], licenses = ["notice"], # Apache License, Version 2.0 ) def org_chromium_chromedriver(): platform_http_file( name = "org_chromium_chromedriver", licenses = ["reciprocal"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT amd64_sha256 = "71eafe087900dbca4bc0b354a1d172df48b31a4a502e21f7c7b156d7e76c95c7", amd64_urls = [ "https://chromedriver.storage.googleapis.com/2.41/chromedriver_linux64.zip", ], macos_sha256 = "fd32a27148f44796a55f5ce3397015c89ebd9f600d9dda2bcaca54575e2497ae", macos_urls = [ "https://chromedriver.storage.googleapis.com/2.41/chromedriver_mac64.zip", ], windows_sha256 = "a8fa028acebef7b931ef9cb093f02865f9f7495e49351f556e919f7be77f072e", windows_urls = [ "https://chromedriver.storage.googleapis.com/2.38/chromedriver_win32.zip", ], ) def org_chromium_chromium(): platform_http_file( name = "org_chromium_chromium", licenses = ["notice"], # BSD 3-clause (maybe more?) 
amd64_sha256 = "6933d0afce6e17304b62029fbbd246cbe9e130eb0d90d7682d3765d3dbc8e1c8", amd64_urls = [ "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/561732/chrome-linux.zip", ], macos_sha256 = "084884e91841a923d7b6e81101f0105bbc3b0026f9f6f7a3477f5b313ee89e32", macos_urls = [ "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/561733/chrome-mac.zip", ], windows_sha256 = "d1bb728118c12ea436d8ea07dba980789e7d860aa664dd1fad78bc20e8d9391c", windows_urls = [ "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Win_x64/540270/chrome-win32.zip", ], ) def org_hamcrest_core(): java_import_external( name = "org_hamcrest_core", jar_sha256 = "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar", "https://repo1.maven.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar", ], licenses = ["notice"], # New BSD License testonly_ = 1, ) def org_jetbrains_kotlin_stdlib(): java_import_external( name = "org_jetbrains_kotlin_stdlib", jar_sha256 = "62eaf9cc6e746cef4593abe7cdb4dd48694ef5f817c852e0d9fbbd11fcfc564e", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar", "https://repo1.maven.org/maven2/org/jetbrains/kotlin/kotlin-stdlib/1.2.61/kotlin-stdlib-1.2.61.jar", ], licenses = ["notice"], # The Apache Software License, Version 2.0 ) def org_json(): java_import_external( name = "org_json", jar_sha256 = "518080049ba83181914419d11a25d9bc9833a2d729b6a6e7469fa52851356da8", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar", "https://repo1.maven.org/maven2/org/json/json/20180813/json-20180813.jar", ], licenses = ["notice"], # MIT-style license ) def org_mozilla_firefox(): platform_http_file( name = "org_mozilla_firefox", licenses = ["reciprocal"], # MPL 2.0 amd64_sha256 = "3a729ddcb1e0f5d63933177a35177ac6172f12edbf9fbbbf45305f49333608de", amd64_urls = [ "https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2", "https://ftp.mozilla.org/pub/firefox/releases/61.0.2/linux-x86_64/en-US/firefox-61.0.2.tar.bz2", ], macos_sha256 = "bf23f659ae34832605dd0576affcca060d1077b7bf7395bc9874f62b84936dc5", macos_urls = [ "https://mirror.bazel.build/ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg", "https://ftp.mozilla.org/pub/firefox/releases/61.0.2/mac/en-US/Firefox%2061.0.2.dmg", ], ) def org_mozilla_geckodriver(): platform_http_file( name = "org_mozilla_geckodriver", licenses = ["reciprocal"], # MPL 2.0 amd64_sha256 = "c9ae92348cf00aa719be6337a608fae8304691a95668e8e338d92623ba9e0ec6", amd64_urls = [ "https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz", "https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-linux64.tar.gz", ], macos_sha256 = "ce4a3e9d706db94e8760988de1ad562630412fa8cf898819572522be584f01ce", macos_urls = [ "https://mirror.bazel.build/github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz", "https://github.com/mozilla/geckodriver/releases/download/v0.21.0/geckodriver-v0.21.0-macos.tar.gz", ], ) def org_seleniumhq_py(): http_archive( name = "org_seleniumhq_py", build_file = str(Label("//build_files:org_seleniumhq_py.BUILD")), sha256 = 
"f9ca21919b564a0a86012cd2177923e3a7f37c4a574207086e710192452a7c40", strip_prefix = "selenium-3.14.0", urls = [ "https://files.pythonhosted.org/packages/af/7c/3f76140976b1c8f8a6b437ccd1f04efaed37bdc2600530e76ba981c677b9/selenium-3.14.0.tar.gz", ], ) def org_seleniumhq_selenium_api(): java_import_external( name = "org_seleniumhq_selenium_api", jar_sha256 = "1fc941f86ba4fefeae9a705c1468e65beeaeb63688e19ad3fcbda74cc883ee5b", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar", "https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-api/3.14.0/selenium-api-3.14.0.jar", ], licenses = ["notice"], # The Apache Software License, Version 2.0 testonly_ = 1, ) def org_seleniumhq_selenium_remote_driver(): java_import_external( name = "org_seleniumhq_selenium_remote_driver", jar_sha256 = "284cb4ea043539353bd5ecd774cbd726b705d423ea4569376c863d0b66e5eaf2", jar_urls = [ "https://mirror.bazel.build/repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar", "https://repo1.maven.org/maven2/org/seleniumhq/selenium/selenium-remote-driver/3.14.0/selenium-remote-driver-3.14.0.jar", ], licenses = ["notice"], # The Apache Software License, Version 2.0 testonly_ = 1, deps = [ "@com_google_code_gson", "@com_google_guava", "@net_bytebuddy", "@com_squareup_okhttp3_okhttp", "@com_squareup_okio", "@commons_codec", "@commons_logging", "@org_apache_commons_exec", "@org_apache_httpcomponents_httpclient", "@org_apache_httpcomponents_httpcore", "@org_seleniumhq_selenium_api", ], )
1.609375
2
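The repositories.bzl record above is meant to be loaded from a consuming project's WORKSPACE, as its docstrings describe. A hypothetical WORKSPACE sketch follows; the archive URL, commit, and sha256 are placeholders, the workspace name io_bazel_rules_webtesting is the conventional one rather than something stated in the file, and only the function names, the omit_<name> convention, and the browser_repositories flags come from the file itself.

load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
    name = "io_bazel_rules_webtesting",
    sha256 = "<sha256 of the chosen release archive>",  # placeholder
    urls = ["https://github.com/bazelbuild/rules_webtesting/archive/<commit>.tar.gz"],  # placeholder
)

load("@io_bazel_rules_webtesting//web:repositories.bzl", "browser_repositories", "web_test_repositories")

# Skip a dependency the consuming workspace already defines, per the omit_<name> convention.
web_test_repositories(omit_com_google_guava = True)

# Experimental browsers defined under //browsers/...
browser_repositories(chromium = True, firefox = True)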
code/tools/run_viz_single_task.py
santomon/taskonomy
789
2857
<gh_stars>100-1000 from __future__ import absolute_import, division, print_function import argparse import importlib import itertools import time from multiprocessing import Pool import numpy as np import os import pdb import pickle import subprocess import sys import tensorflow as tf import tensorflow.contrib.slim as slim import threading import init_paths from models.sample_models import * target_tasks = "autoencoder colorization curvature denoise edge2d edge3d ego_motion fix_pose impainting_whole jigsaw keypoint2d keypoint3d non_fixated_pose point_match reshade rgb2depth rgb2mist rgb2sfnorm room_layout segment25d segment2d vanishing_point_well_defined segmentsemantic_rb class_selected class_1000" list_of_tasks = target_tasks.split(" ") ON_TEST_SET = True IN_TRAIN_MODE = False parser = argparse.ArgumentParser(description='Viz Single Task') parser.add_argument('--idx', dest='idx', help='Task to run', type=int) parser.add_argument('--hs', dest='hs', help='Hidden size to use', type=int) parser.add_argument('--n-parallel', dest='n_parallel', help='Number of models to run in parallel', type=int) parser.set_defaults(n_parallel=1) tf.logging.set_verbosity(tf.logging.ERROR) ipython_std_out = sys.stdout # Disabe def blockPrint(): sys.stdout = open(os.devnull, 'w') # Restore def enablePrint(): sys.stdout = ipython_std_out # Force Print def forcePrint(str): enablePrint() print(str) sys.stdout.flush() blockPrint() def remove_dups(seq): seen = set() seen_add = seen.add return [x for x in seq if not (x in seen or seen_add(x))] pairs = list(itertools.product(list_of_tasks, list_of_tasks)) args = parser.parse_args() idx_to_run = args.idx if idx_to_run == -1: pairs_to_run = pairs else: pairs_to_run = pairs[idx_to_run:idx_to_run+1] def run_to_task(task_to): import general_utils from general_utils import RuntimeDeterminedEnviromentVars import models.architectures as architectures from data.load_ops import resize_rescale_image import utils from data.task_data_loading import load_and_specify_preprocessors_for_representation_extraction import lib.data.load_ops as load_ops tf.logging.set_verbosity(tf.logging.ERROR) all_outputs = {} pickle_dir = 'viz_output_single_task.pkl' import os if os.path.isfile(pickle_dir): with open( pickle_dir, 'rb') as fp: all_outputs = pickle.load(fp) for task in list_of_tasks: if task in all_outputs: print("{} already exists....\n\n\n".format(task)) continue print("Doing {task}".format(task=task)) general_utils = importlib.reload(general_utils) tf.reset_default_graph() training_runners = { 'sess': tf.InteractiveSession(), 'coord': tf.train.Coordinator() } # task = '{f}__{t}__{hs}'.format(f=task_from, t=task_to, hs=args.hs) CONFIG_DIR = '/home/ubuntu/task-taxonomy-331b/experiments/final/{TASK}'.format(TASK=task) ############## Load Configs ############## cfg = utils.load_config( CONFIG_DIR, nopause=True ) RuntimeDeterminedEnviromentVars.register_dict( cfg ) split_file = cfg['test_filenames'] if ON_TEST_SET else cfg['val_filenames'] cfg['train_filenames'] = split_file cfg['val_filenames'] = split_file cfg['test_filenames'] = split_file cfg['num_epochs'] = 1 cfg['randomize'] = False root_dir = cfg['root_dir'] cfg['num_read_threads'] = 1 print(cfg['log_root']) if task == 'jigsaw': continue cfg['model_path'] = os.path.join( cfg['log_root'], task, 'model.permanent-ckpt' ) print( cfg['model_path']) if cfg['model_path'] is None: continue ############## Set Up Inputs ############## # tf.logging.set_verbosity( tf.logging.INFO ) inputs = utils.setup_input( cfg, is_training=ON_TEST_SET, 
use_filename_queue=False ) # is_training determines whether to use train/validaiton RuntimeDeterminedEnviromentVars.load_dynamic_variables( inputs, cfg ) RuntimeDeterminedEnviromentVars.populate_registered_variables() start_time = time.time() # utils.print_start_info( cfg, inputs[ 'max_steps' ], is_training=False ) ############## Set Up Model ############## model = utils.setup_model( inputs, cfg, is_training=IN_TRAIN_MODE ) m = model[ 'model' ] model[ 'saver_op' ].restore( training_runners[ 'sess' ], cfg[ 'model_path' ] ) ############## Start dataloading workers ############## data_prefetch_init_fn = utils.get_data_prefetch_threads_init_fn( inputs, cfg, is_training=ON_TEST_SET, use_filename_queue=False ) prefetch_threads = threading.Thread( target=data_prefetch_init_fn, args=( training_runners[ 'sess' ], training_runners[ 'coord' ] )) prefetch_threads.start() ############## Run First Batch ############## if not hasattr(m, 'masks'): ( input_batch, target_batch, data_idx, predicted, loss, ) = training_runners['sess'].run( [ m.input_images, m.targets, model[ 'data_idxs' ], m.decoder_output, m.total_loss] ) mask_batch = 1. else: ( input_batch, target_batch, mask_batch, data_idx, predicted, loss, ) = training_runners['sess'].run( [ m.input_images, m.targets, m.masks, model[ 'data_idxs' ], m.decoder_output, m.total_loss] ) if task == 'segment2d' or task == 'segment25d': from sklearn.decomposition import PCA x = np.zeros((32,256,256,3), dtype='float') for i in range(predicted.shape[0]): embedding_flattened = np.squeeze(predicted[i]).reshape((-1,64)) pca = PCA(n_components=3) pca.fit(embedding_flattened) lower_dim = pca.transform(embedding_flattened).reshape((256,256,-1)) lower_dim = (lower_dim - lower_dim.min()) / (lower_dim.max() - lower_dim.min()) x[i] = lower_dim predicted = x ############## Clean Up ############## training_runners[ 'coord' ].request_stop() training_runners[ 'coord' ].join() # if os.path.isfile(pickle_dir): # with open(pickle_dir, 'rb') as fp: # all_outputs = pickle.load(fp) ############## Store to dict ############## to_store = { 'input': input_batch, 'target': target_batch, 'mask': mask_batch, 'data_idx':data_idx, 'output':predicted} all_outputs[task] = to_store print("Done: {}".format(task)) # os.system("sudo cp {d} /home/ubuntu/s3/model_log".format(d=pickle_dir)) ############## Reset graph and paths ############## tf.reset_default_graph() training_runners['sess'].close() try: del sys.modules[ 'config' ] except: pass sys.path = remove_dups(sys.path) print("FINISHED: {}\n\n\n\n\n\n".format(task)) pickle_dir = 'viz_output_single_task.pkl' with open( pickle_dir, 'wb') as fp: pickle.dump(all_outputs, fp) try: subprocess.call("aws s3 cp {} s3://task-preprocessing-512-oregon/visualizations/".format(pickle_dir), shell=True) except: subprocess.call("sudo cp {} /home/ubuntu/s3/visualizations/".format(pickle_dir), shell=True) return if __name__ == '__main__': run_to_task(None) # with Pool(args.n_parallel) as p: # p.map(run_to_task, list_of_tasks)
1.953125
2
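For the segment2d/segment25d branches, the script above projects each 64-channel embedding down to three channels with PCA and rescales it to [0, 1] so it can be rendered as an RGB image. A self-contained sketch of that step follows (shapes are illustrative; it mirrors the in-loop code rather than replacing it).

import numpy as np
from sklearn.decomposition import PCA

def embedding_to_rgb(embedding):
    # embedding: (H, W, 64) float array, as produced per image in the loop above.
    h, w, c = embedding.shape
    flat = embedding.reshape(-1, c)
    lower = PCA(n_components=3).fit_transform(flat).reshape(h, w, 3)
    # Min-max rescale so the three principal components display as RGB.
    return (lower - lower.min()) / (lower.max() - lower.min())

if __name__ == "__main__":
    fake = np.random.rand(256, 256, 64).astype("float32")
    print(embedding_to_rgb(fake).shape)  # (256, 256, 3)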
stratum/portage/build_defs.bzl
cholve/stratum
267
2858
<reponame>cholve/stratum<filename>stratum/portage/build_defs.bzl # Copyright 2018 Google LLC # Copyright 2018-present Open Networking Foundation # SPDX-License-Identifier: Apache-2.0 """A portable build system for Stratum P4 switch stack. To use this, load() this file in a BUILD file, specifying the symbols needed. The public symbols are the macros: decorate(path) sc_cc_lib Declare a portable Library. sc_proto_lib Declare a portable .proto Library. sc_cc_bin Declare a portable Binary. sc_package Declare a portable tarball package. and the variables/lists: ALL_ARCHES All known arches. EMBEDDED_ARCHES All embedded arches. EMBEDDED_PPC Name of PowerPC arch - "ppc". EMBEDDED_X86 Name of "x86" arch. HOST_ARCH Name of default "host" arch. HOST_ARCHES All host arches. STRATUM_INTERNAL For declaring Stratum internal visibility. The macros are like cc_library(), proto_library(), and cc_binary(), but with different options and some restrictions. The key difference: you can supply lists of architectures for which they should be compiled - defaults to all if left unstated. Internally, libraries and binaries are generated for every listed architecture. The names are decorated to keep them different and allow all to be generated and addressed independently. This aspect of the system is suboptimal - something along the lines of augmenting context with a user defined configuration fragment would be a much cleaner solution. Currently supported architectures: ppc x86 """ load("//tools/build_defs/label:def.bzl", "parse_label") load( "//devtools/build_cleaner/skylark:build_defs.bzl", "register_extension_info", ) load("@rules_proto//proto:defs.bzl", "proto_library") load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test") # Generic path & label helpers. ============================================ def _normpath(path): """Normalize a path. Normalizes a path by removing unnecessary path-up segments and its corresponding directories. Providing own implementation because import os is not allowed in build defs. For example ../../dir/to/deeply/nested/path/../../../other/path will become ../../dir/to/other/path Args: path: A valid absolute or relative path to normalize. Returns: A path equivalent to the input path with minimal use of path-up segments. Invalid input paths will stay invalid. """ sep = "/" level = 0 result = [] for d in path.split(sep): if d in ("", "."): if result: continue elif d == "..": if level > 0: result.pop() level += -1 continue else: level += 1 result.append(d) return sep.join(result) # Adds a suffix to a label, expanding implicit targets if needed. def decorate(label, suffix): if label.endswith(":"): # .../bar: -> .../bar label = label[:-1] if ":" in label: # .../bar:bat -> .../bar:bat_suffix return "%s_%s" % (label, suffix) elif label.startswith("//"): # //foo/bar -> //foo/bar:bar_suffix return "%s:%s_%s" % (label, label.split("/")[-1], suffix) else: # bar -> bar_suffix return "%s_%s" % (label, suffix) # Creates a relative filename from a label, replacing "//" and ":". def _make_filename(label): if label.startswith("//"): # //foo/bar:bat/baz -> google3_foo/bar/bat/baz return label.replace("//", "google3/").replace(":", "/") elif label.startswith(":"): # :bat/baz -> bat/baz return label[1:] else: # bat/baz -> bat/baz return label # Adds dquotes around a string. def dquote(s): return '"' + s + '"' # Adds squotes around a string. 
def squote(s): return "'" + s + "'" # Emulate Python 2.5+ str(startswith([prefix ...]) def starts_with(s, prefix_list): for prefix in prefix_list: if s.startswith(prefix): return prefix return None def sc_platform_select(host = None, ppc = None, x86 = None, default = None): """Public macro to alter blaze rules based on the platform architecture. Generates a blaze select(...) statement that can be used in most contexts to alter a blaze rule based on the target platform architecture. If no selection is provided for a given platform, {default} is used instead. A specific value or default must be provided for every target platform. Args: host: The value to use for host builds. ppc: The value to use for ppc builds. x86: The value to use for x86 builds. default: The value to use for any of {host,ppc,x86} that isn't specified. Returns: The requested selector. """ if default == None and (host == None or ppc == None or x86 == None): fail("Missing a select value for at least one platform in " + "sc_platform_select. Please add.") config_label_prefix = "//stratum:stratum_" return select({ "//conditions:default": (host or default), config_label_prefix + "ppc": (ppc or default), config_label_prefix + "x86": (x86 or default), }) # Generates an sc_platform_select based on a textual list of arches. def sc_platform_filter(value, default, arches): return sc_platform_select( host = value if "host" in arches else default, ppc = value if "ppc" in arches else default, x86 = value if "x86" in arches else default, ) def sc_platform_alias( name, host = None, ppc = None, x86 = None, default = None, visibility = None): """Public macro to create an alias that changes based on target arch. Generates a blaze alias that will select the appropriate target. If no selection is provided for a given platform and no default is set, a dummy default target is used instead. Args: name: The name of the alias target. host: The result of the alias for host builds. ppc: The result of the alias for ppc builds. x86: The result of the alias for x86 builds. default: The result of the alias for any of {host,ppc,x86} that isn't specified. visibility: The visibility of the alias target. """ native.alias( name = name, actual = sc_platform_select( default = default or "//stratum/portage:dummy", host = host, ppc = ppc, x86 = x86, ), visibility = visibility, ) # Embedded build definitions. ============================================== EMBEDDED_PPC = "ppc" EMBEDDED_X86 = "x86" EMBEDDED_ARCHES = [ EMBEDDED_PPC, EMBEDDED_X86, ] HOST_ARCH = "host" HOST_ARCHES = [HOST_ARCH] ALL_ARCHES = EMBEDDED_ARCHES + HOST_ARCHES # Identify Stratum platform arch for .pb.h shims and other portability hacks. _ARCH_DEFINES = sc_platform_select( default = ["STRATUM_ARCH_HOST"], ppc = ["STRATUM_ARCH_PPC"], x86 = ["STRATUM_ARCH_X86"], ) STRATUM_INTERNAL = [ "//stratum:__subpackages__", ] # # Build options for all embedded architectures # # Set _TRACE_SRCS to show sources in embedded sc_cc_lib compile steps. # This is more general than it may seem: genrule doesn't have hdrs or deps # attributes, so all embedded dependencies appear as a `src'. # TODO(unknown): if useful again then inject from cmdline else kill feature. _TRACE_SRCS = False # Used for all gcc invocations. _EMBEDDED_FLAGS = [ "-O0", # Don't use this for program-sizing build #-- "-Os", # Use this for program-sizing build "-g", # Don't use this for program-sizing build "-Wall", "-Werror", # Warn lots, and force fixing warnings. "-no-canonical-prefixes", # Don't mangle paths and confuse blaze. 
"-fno-builtin-malloc", # We'll use tcmalloc "-fno-builtin-calloc", "-fno-builtin-realloc", "-fno-builtin-free", "-D__STDC_FORMAT_MACROS=1", # TODO(unknown): Figure out how we can use $(CC_FLAGS) instead of this. "-D__GOOGLE_STL_LEGACY_COMPATIBILITY", ] # Used for C and C++ compiler invocations. _EMBEDDED_CFLAGS = [ "-I$(GENDIR)", ] # Used for C++ compiler invocations. _EMBEDDED_CXXFLAGS = [ "-std=gnu++11", # Allow C++11 features _and_ GNU extensions. ] # Used for linking binaries. _EMBEDDED_LDFLAGS = [ # "-static", # Use this for program-sizing build # "-Wl,--gc-sections,--no-wchar-size-warning", # Use this for program-sizing build ] # PPC ====================================================================== _PPC_GRTE = "//unsupported_toolchains/crosstoolng_powerpc32_8540/sysroot" # X86 ====================================================================== _X86_GRTE = "//grte/v4_x86/release/usr/grte/v4" # Portability definitions =================================================== def sc_cc_test( name, size = None, srcs = None, deps = None, data = None, defines = None, copts = None, linkopts = None, visibility = None): """Creates a cc_test rule that interacts safely with Stratum builds. Generates a cc_test rule that doesn't break the build when an embedded arch is selected. During embedded builds this target will generate a dummy binary and will not attempt to build any dependencies. Args: name: Analogous to cc_test name argument. size: Analogous to cc_test size argument. srcs: Analogous to cc_test srcs argument. deps: Analogous to cc_test deps argument. data: Analogous to cc_test data argument. defines: Analogous to cc_test defines argument. copts: Analogous to cc_test copts argument. linkopts: Analogous to cc_test linkopts argument. visibility: Analogous to cc_test visibility argument. """ cc_test( name = name, size = size or "small", srcs = sc_platform_select(host = srcs or [], default = []), deps = sc_platform_select( host = deps or [], default = ["//stratum/portage:dummy_with_main"], ), data = data or [], defines = defines, copts = copts, linkopts = linkopts, visibility = visibility, ) register_extension_info( extension_name = "sc_cc_test", label_regex_for_dep = "{extension_name}", ) def sc_cc_lib( name, deps = None, srcs = None, hdrs = None, arches = None, copts = None, defines = None, includes = None, include_prefix = None, strip_include_prefix = None, data = None, testonly = None, textual_hdrs = None, visibility = None, xdeps = None): """Creates rules for the given portable library and arches. Args: name: Analogous to cc_library name argument. deps: Analogous to cc_library deps argument. srcs: Analogous to cc_library srcs argument. hdrs: Analogous to cc_library hdrs argument. arches: List of architectures to generate this way. copts: Analogous to cc_library copts argument. defines: Symbols added as "-D" compilation options. includes: Paths to add as "-I" compilation options. include_prefix: Analogous to cc_library include_prefix argument. strip_include_prefix: Analogous to cc_library strip_include_prefix argument. data: Files to provide as data at runtime (host builds only). testonly: Standard blaze testonly parameter. textual_hdrs: Analogous to cc_library. visibility: Standard blaze visibility parameter. xdeps: External (file) dependencies of this library - no decorations assumed, used and exported as header, not for flags, libs, etc. 
""" alwayslink = 0 deps = depset(deps or []) srcs = depset(srcs or []) hdrs = depset(hdrs or []) xdeps = depset(xdeps or []) copts = depset(copts or []) includes = depset(includes or []) data = depset(data or []) textual_hdrs = depset(textual_hdrs or []) if srcs: if [s for s in srcs.to_list() if not s.endswith(".h")]: alwayslink = 1 if not arches: arches = ALL_ARCHES defs_plus = (defines or []) + _ARCH_DEFINES textual_plus = textual_hdrs | depset(deps.to_list()) cc_library( name = name, deps = sc_platform_filter(deps, [], arches), srcs = sc_platform_filter(srcs, [], arches), hdrs = sc_platform_filter(hdrs, [], arches), alwayslink = alwayslink, copts = sc_platform_filter(copts, [], arches), defines = defs_plus, includes = sc_platform_filter(includes, [], arches), include_prefix = include_prefix, strip_include_prefix = strip_include_prefix, testonly = testonly, textual_hdrs = sc_platform_filter( textual_plus | xdeps, [], arches, ), data = sc_platform_filter(data, [], arches), visibility = visibility, ) register_extension_info( extension_name = "sc_cc_lib", label_regex_for_dep = "{extension_name}", ) def sc_cc_bin( name, deps = None, srcs = None, arches = None, copts = None, defines = None, includes = None, testonly = None, visibility = None): """Creates rules for the given portable binary and arches. Args: name: Analogous to cc_binary name argument. deps: Analogous to cc_binary deps argument. srcs: Analogous to cc_binary srcs argument. arches: List of architectures to generate this way. copts: Analogous to cc_binary copts argument. defines: Symbols added as "-D" compilation options. includes: Paths to add as "-I" compilation options. testonly: Standard blaze testonly parameter. visibility: Standard blaze visibility parameter. """ deps = depset(deps or []) srcs = depset(srcs or []) if not arches: arches = ALL_ARCHES defs_plus = (defines or []) + _ARCH_DEFINES cc_binary( name = name, deps = sc_platform_filter( deps, ["//stratum/portage:dummy_with_main"], arches, ), srcs = sc_platform_filter(srcs, [], arches), copts = copts, defines = defs_plus, includes = includes, linkopts = ["-ldl", "-lutil"], testonly = testonly, visibility = visibility, ) register_extension_info( extension_name = "sc_cc_bin", label_regex_for_dep = "{extension_name}", ) # Protobuf ================================================================= _SC_GRPC_DEPS = [ "//sandblaze/prebuilt/grpc", "//sandblaze/prebuilt/grpc:grpc++_codegen_base", "//sandblaze/prebuilt/grpc:grpc++_codegen_proto_lib", ] _PROTOC = "@com_google_protobuf//:protobuf:protoc" _PROTOBUF = "@com_google_protobuf//:protobuf" _SC_GRPC_PLUGIN = "//sandblaze/prebuilt/protobuf:grpc_cpp_plugin" _GRPC_PLUGIN = "//grpc:grpc_cpp_plugin" def _loc(target): """Return target location for constructing commands. Args: target: Blaze target name available to this build. Returns: $(location target) """ return "$(location %s)" % target def _gen_proto_lib( name, srcs, hdrs, deps, arch, visibility, testonly, proto_include, grpc_shim_rule): """Creates rules and filegroups for embedded protobuf library. For every given ${src}.proto, generate: :${src}_${arch}.pb rule to run protoc ${src}.proto => ${src}.${arch}.pb.{h,cc} :${src}_${arch}.grpc.pb rule to run protoc w/ erpc plugin: ${src}.proto => ${src}.${arch}.grpc.pb.{h,cc} :${src}_${arch}_proto_rollup collects include options for protoc: ${src}_${arch}_proto_rollup.flags Feed each set into sc_cc_lib to wrap them them up into a usable library; note that ${src}_${arch}_erpc_proto depends on ${src}_${arch}_proto. 
Args: name: Base name for this library. srcs: List of proto files hdrs: More files to build into this library, but also exported for dependent rules to utilize. deps: List of deps for this library arch: Which architecture to build this library for. visibility: Standard blaze visibility parameter, passed through to subsequent rules. testonly: Standard blaze testonly parameter. proto_include: Include path for generated sc_cc_libs. grpc_shim_rule: If needed, the name of the grpc shim for this proto lib. """ bash_vars = ["g3=$${PWD}"] # TODO(unknown): Switch protobuf to using the proto_include mechanism protoc_label = _PROTOC protobuf_label = _PROTOBUF protobuf_hdrs = "%s:well_known_types_srcs" % protobuf_label protobuf_srcs = [protobuf_hdrs] protobuf_include = "$${g3}/protobuf/src" if arch in EMBEDDED_ARCHES: grpc_plugin = _SC_GRPC_PLUGIN else: grpc_plugin = _GRPC_PLUGIN protoc_deps = [] for dep in deps: if dep.endswith("_proto"): protoc_deps.append("%s_%s_headers" % (dep, arch)) name_arch = decorate(name, arch) # We use this filegroup to accumulate the set of .proto files needed to # compile this proto. native.filegroup( name = decorate(name_arch, "headers"), srcs = hdrs + protoc_deps, visibility = visibility, ) my_proto_rollup = decorate(name_arch, "proto_rollup.flags") protoc_srcs_set = (srcs + hdrs + protoc_deps + protobuf_srcs + [my_proto_rollup]) gen_srcs = [] gen_hdrs = [] grpc_gen_hdrs = [] grpc_gen_srcs = [] tools = [protoc_label] grpc_tools = [protoc_label, grpc_plugin] protoc = "$${g3}/%s" % _loc(protoc_label) grpc_plugin = "$${g3}/%s" % _loc(grpc_plugin) cpp_out = "$${g3}/$(GENDIR)/%s/%s" % (native.package_name(), arch) accum_flags = [] full_proto_include = None if proto_include == ".": full_proto_include = native.package_name() elif proto_include: full_proto_include = "%s/%s" % (native.package_name(), proto_include) if full_proto_include: temp_prefix = "%s/%s" % (cpp_out, native.package_name()[len(full_proto_include):]) # We do a bit of extra work with these include flags to avoid generating # warnings. accum_flags.append( "$$(if [[ -e $(GENDIR)/%s ]]; then echo -IG3LOC/$(GENDIR)/%s; fi)" % (full_proto_include, full_proto_include), ) accum_flags.append( "$$(if [[ -e %s ]]; then echo -IG3LOC/%s; fi)" % (full_proto_include, full_proto_include), ) else: temp_prefix = "%s/%s" % (cpp_out, native.package_name()) proto_rollups = [ decorate(decorate(dep, arch), "proto_rollup.flags") for dep in deps if dep.endswith("_proto") ] proto_rollup_cmds = ["printf '%%s\n' %s" % flag for flag in accum_flags] proto_rollup_cmds.append("cat $(SRCS)") proto_rollup_cmd = "{ %s; } | sort -u -o $(@)" % "; ".join(proto_rollup_cmds) native.genrule( name = decorate(name_arch, "proto_rollup"), srcs = proto_rollups, outs = [my_proto_rollup], cmd = proto_rollup_cmd, visibility = visibility, testonly = testonly, ) for src in srcs + hdrs: if src.endswith(".proto"): src_stem = src[0:-6] src_arch = "%s_%s" % (src_stem, arch) temp_stem = "%s/%s" % (temp_prefix, src_stem) gen_stem = "%s.%s" % (src_stem, arch) # We can't use $${PWD} until this step, because our rollup command # might be generated on another forge server. proto_path_cmds = ["rollup=$$(sed \"s,G3LOC,$${PWD},g\" %s)" % _loc(my_proto_rollup)] proto_rollup_flags = ["$${rollup}"] if proto_include: # We'll be cd-ing to another directory before protoc, so # adjust our .proto path accordingly. 
proto_src_loc = "%s/%s" % (native.package_name(), src) if proto_src_loc.startswith(full_proto_include + "/"): proto_src_loc = proto_src_loc[len(full_proto_include) + 1:] else: print("Invalid proto include '%s' doesn't match src %s" % (full_proto_include, proto_src_loc)) # By cd-ing to another directory, we force protoc to produce # different symbols. Careful, our proto might be in GENDIR! proto_path_cmds.append("; ".join([ "if [[ -e %s ]]" % ("%s/%s" % (full_proto_include, proto_src_loc)), "then cd %s" % full_proto_include, "else cd $(GENDIR)/%s" % full_proto_include, "fi", ])) gendir_include = ["-I$${g3}/$(GENDIR)", "-I$${g3}", "-I."] else: proto_src_loc = "%s/%s" % (native.package_name(), src) proto_path_cmds.append("[[ -e %s ]] || cd $(GENDIR)" % proto_src_loc) gendir_include = ["-I$(GENDIR)", "-I."] # Generate messages gen_pb_h = gen_stem + ".pb.h" gen_pb_cc = gen_stem + ".pb.cc" gen_hdrs.append(gen_pb_h) gen_srcs.append(gen_pb_cc) cmds = bash_vars + [ "mkdir -p %s" % temp_prefix, ] + proto_path_cmds + [ " ".join([protoc] + gendir_include + proto_rollup_flags + [ "-I%s" % protobuf_include, "--cpp_out=%s" % cpp_out, proto_src_loc, ]), "cd $${g3}", "cp %s.pb.h %s" % (temp_stem, _loc(gen_pb_h)), "cp %s.pb.cc %s" % (temp_stem, _loc(gen_pb_cc)), ] pb_outs = [gen_pb_h, gen_pb_cc] native.genrule( name = src_arch + ".pb", srcs = protoc_srcs_set, outs = pb_outs, tools = tools, cmd = " && ".join(cmds), heuristic_label_expansion = 0, visibility = visibility, ) # Generate GRPC if grpc_shim_rule: gen_grpc_pb_h = gen_stem + ".grpc.pb.h" gen_grpc_pb_cc = gen_stem + ".grpc.pb.cc" grpc_gen_hdrs.append(gen_grpc_pb_h) grpc_gen_srcs.append(gen_grpc_pb_cc) cmds = bash_vars + [ "mkdir -p %s" % temp_prefix, ] + proto_path_cmds + [ " ".join([ protoc, "--plugin=protoc-gen-grpc-cpp=%s" % grpc_plugin, ] + gendir_include + proto_rollup_flags + [ "-I%s" % protobuf_include, "--grpc-cpp_out=%s" % cpp_out, proto_src_loc, ]), "cd $${g3}", "cp %s.grpc.pb.h %s" % (temp_stem, _loc(gen_grpc_pb_h)), "cp %s.grpc.pb.cc %s" % (temp_stem, _loc(gen_grpc_pb_cc)), ] grpc_pb_outs = [gen_grpc_pb_h, gen_grpc_pb_cc] native.genrule( name = src_arch + ".grpc.pb", srcs = protoc_srcs_set, outs = grpc_pb_outs, tools = grpc_tools, cmd = " && ".join(cmds), heuristic_label_expansion = 0, visibility = visibility, ) dep_set = depset(deps) | [protobuf_label] includes = [] if proto_include: includes = [proto_include] # Note: Public sc_proto_lib invokes this once per (listed) arch; # which then calls sc_cc_lib with same name for each arch; # multiple such calls are OK as long as the arches are disjoint. sc_cc_lib( name = decorate(name, arch), deps = dep_set, srcs = gen_srcs, hdrs = hdrs + gen_hdrs, arches = [arch], copts = [], includes = includes, testonly = testonly, textual_hdrs = gen_hdrs, visibility = visibility, ) if grpc_shim_rule: grpc_name = name[:-6] + "_grpc_proto" grpc_dep_set = dep_set | [name] | _SC_GRPC_DEPS grpc_gen_hdrs_plus = grpc_gen_hdrs + gen_hdrs sc_cc_lib( name = decorate(grpc_name, arch), deps = grpc_dep_set, srcs = grpc_gen_srcs, hdrs = hdrs + grpc_gen_hdrs_plus + [grpc_shim_rule], arches = [arch], copts = [], includes = includes, testonly = testonly, textual_hdrs = grpc_gen_hdrs_plus, visibility = visibility, ) def _gen_proto_shims(name, pb_modifier, srcs, arches, visibility): """Macro to build .pb.h multi-arch master switch for sc_proto_lib. 
For each src path.proto, generates path.pb.h consisting of: #ifdef logic to select path.${arch}.pb.h Also generates an alias that will select the appropriate proto target based on the currently selected platform architecture. Args: name: Base name for this library. pb_modifier: protoc plugin-dependent file extension (e.g.: .pb) srcs: List of proto files. arches: List of arches this shim should support. visibility: The blaze visibility of the generated alias. Returns: Name of shim rule for use in follow-on hdrs and/or src lists. """ outs = [] cmds = [] hdr_ext = pb_modifier + ".h" for src in srcs: pkg, filename = parse_label(src) if not filename.endswith(".proto"): continue hdr_stem = filename[0:-6] new_hdr_name = hdr_stem + hdr_ext outs.append(new_hdr_name) # Generate lines for shim switch file. # Lines expand inside squotes, so quote accordingly. include_fmt = "#include " + dquote(pkg + "/" + hdr_stem + ".%s" + hdr_ext) lines = [ "#if defined(STRATUM_ARCH_%s)" % "PPC", include_fmt % "ppc", "#elif defined(STRATUM_ARCH_%s)" % "X86", include_fmt % "x86", "#elif defined(STRATUM_ARCH_%s)" % "HOST", include_fmt % "host", "#else", "#error Unknown STRATUM_ARCH", "#endif", ] gen_cmds = [("printf '%%s\\n' '%s'" % line) for line in lines] new_hdr_loc = "$(location %s)" % new_hdr_name cmds.append("{ %s; } > %s" % (" && ".join(gen_cmds), new_hdr_loc)) shim_rule = decorate(name, "shims") native.genrule( name = shim_rule, srcs = srcs, outs = outs, cmd = " && ".join(cmds) or "true", ) sc_platform_alias( name = name, host = decorate(name, "host") if "host" in arches else None, ppc = decorate(name, "ppc") if "ppc" in arches else None, x86 = decorate(name, "x86") if "x86" in arches else None, visibility = visibility, ) return shim_rule def _gen_py_proto_lib(name, srcs, deps, visibility, testonly): """Creates a py_proto_library from the given srcs. There's no clean way to make python protos work with sc_proto_lib's proto_include field, so we keep this simple. For library "name", generates: * ${name}_default_pb, a regular proto library. * ${name}_py, a py_proto_library based on ${name}_default_pb. Args: name: Standard blaze name argument. srcs: Standard blaze srcs argument. deps: Standard blaze deps argument. visibility: Standard blaze visibility argument. testonly: Standard blaze testonly argument. """ regular_proto_name = decorate(name, "default_pb") py_name = decorate(name, "py") proto_library( name = regular_proto_name, srcs = srcs, deps = [decorate(dep, "default_pb") for dep in deps], visibility = visibility, testonly = testonly, ) native.py_proto_library( name = py_name, api_version = 2, deps = [regular_proto_name], visibility = visibility, testonly = testonly, ) # TODO(unknown): Add support for depending on normal proto_library rules. def sc_proto_lib( name = None, srcs = [], hdrs = [], deps = [], arches = [], visibility = None, testonly = None, proto_include = None, python_support = False, services = []): """Public macro to build multi-arch library from Message protobuf(s). For library "name", generates: * ${name}_shim aka .pb.h master switch - see _gen_proto_shims, above. * ${name}_${arch}_pb protobuf compile rules - one for each arch. * sc_cc_lib(name) with those as input. * ${name}_py a py_proto_library version of this library. Only generated if python_support == True. Args: name: Base name for this library. srcs: List of .proto files - private to this library. hdrs: As above, but also exported for dependent rules to utilize. 
deps: List of deps for this library arches: Which architectures to build this library for, None => ALL. visibility: Standard blaze visibility parameter, passed through to subsequent rules. testonly: Standard blaze testonly parameter. proto_include: Path to add to include path. This will affect the symbols generated by protoc, as well as the include paths used for both sc_cc_lib and sc_proto_lib rules that depend on this rule. Typically "." python_support: Defaults to False. If True, generate a python proto library from this rule. Any sc_proto_lib with python support may only depend on sc_proto_libs that also have python support, and may not use the proto_include field in this rule. services: List of services to enable {"grpc", "rpc"}; Only "grpc" is supported. So "rpc" and "grpc" are equivalent. """ if not arches: if testonly: arches = HOST_ARCHES else: arches = ALL_ARCHES service_enable = { "grpc": 0, } for service in services or []: if service == "grpc": service_enable["grpc"] = 1 elif service == "rpc": service_enable["grpc"] = 1 else: fail("service='%s' not in (grpc, rpc)" % service) deps = depset(deps or []) shim_rule = _gen_proto_shims( name = name, pb_modifier = ".pb", srcs = srcs + hdrs, arches = arches, visibility = visibility, ) grpc_shim_rule = None if (service_enable["grpc"]): grpc_shim_rule = _gen_proto_shims( name = decorate(name[:-6], "grpc_proto"), pb_modifier = ".grpc.pb", srcs = srcs + hdrs, arches = arches, visibility = visibility, ) for arch in arches: _gen_proto_lib( name = name, srcs = srcs, hdrs = [shim_rule] + hdrs, deps = deps, arch = arch, visibility = visibility, testonly = testonly, proto_include = proto_include, grpc_shim_rule = grpc_shim_rule, ) if python_support: if proto_include: fail("Cannot use proto_include on an sc_proto_lib with python support.") _gen_py_proto_lib( name = name, srcs = depset(srcs + hdrs), deps = deps, visibility = visibility, testonly = testonly, ) register_extension_info( extension_name = "sc_proto_lib", label_regex_for_dep = "{extension_name}", ) def sc_package( name = None, bins = None, data = None, deps = None, arches = None, visibility = None): """Public macro to package binaries and data for deployment. For package "name", generates: * ${name}_${arch}_bin and ${name}_${arch}_data filesets containing respectively all of the binaries and all of the data needed for this package and all dependency packages. * ${name}_${arch} fileset containing the corresponding bin and data filesets, mapped to bin/ and share/ respectively. * ${name}_${arch}_tarball rule builds that .tar.gz package. Args: name: Base name for this package. bins: List of sc_cc_bin rules to be packaged. data: List of files (and file producing rules) to be packaged. deps: List of other sc_packages to add to this package. arches: Which architectures to build this library for, None => EMBEDDED_ARCHES (HOST_ARCHES not generally supported). visibility: Standard blaze visibility parameter, passed through to all filesets. 
""" bins = depset(bins or []) data = depset(data or []) deps = depset(deps or []) if not arches: arches = EMBEDDED_ARCHES fileset_name = decorate(name, "fs") for extension, inputs in [ ("bin", ["%s.stripped" % b for b in bins.to_list()]), ("data", data), ]: native.Fileset( name = decorate(fileset_name, extension), out = decorate(name, extension), entries = [ native.FilesetEntry( files = inputs, ), ] + [ native.FilesetEntry(srcdir = decorate(dep, extension)) for dep in deps.to_list() ], visibility = visibility, ) # Add any platform specific files to the final tarball. platform_entries = sc_platform_select( # We use a different ppc toolchain for Stratum. # This means that we must provide portable shared libs for our ppc # executables. ppc = [native.FilesetEntry( srcdir = "%s:BUILD" % _PPC_GRTE, files = [":libs"], destdir = "lib/stratum", symlinks = "dereference", )], default = [], ) native.Fileset( name = fileset_name, out = name, entries = [ native.FilesetEntry( srcdir = decorate(name, "bin"), destdir = "bin", ), native.FilesetEntry( srcdir = decorate(name, "data"), destdir = "share", ), ] + platform_entries, visibility = visibility, ) outs = ["%s.tar.gz" % name] # Copy our files into a temporary directory and make any necessary changes # before tarballing. cmds = [ "TEMP_DIR=$(@D)/stratum_packaging_temp", "mkdir $${TEMP_DIR}", "cp -r %s $${TEMP_DIR}/tarball" % _loc(fileset_name), "if [[ -e $${TEMP_DIR}/tarball/bin ]]", "then for f in $${TEMP_DIR}/tarball/bin/*.stripped", " do mv $${f} $${f%.stripped}", # rename not available. "done", "fi", "tar czf %s -h -C $${TEMP_DIR}/tarball ." % _loc(name + ".tar.gz"), "rm -rf $${TEMP_DIR}", ] native.genrule( name = decorate(name, "tarball"), srcs = [":%s" % fileset_name], outs = outs, cmd = "; ".join(cmds), visibility = visibility, )
2.015625
2
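The build_defs.bzl record above defines the portable sc_* macros and the decorate() label helper. A hypothetical BUILD-file sketch follows; target names, file names, and deps are placeholders, while the macro names, the arches parameter, and the decorate() behaviour are taken from the file itself.

load("//stratum/portage:build_defs.bzl", "EMBEDDED_ARCHES", "sc_cc_bin", "sc_cc_lib")

sc_cc_lib(
    name = "table_lib",                # placeholder target
    srcs = ["table_lib.cc"],
    hdrs = ["table_lib.h"],
    arches = EMBEDDED_ARCHES,          # srcs/deps are select()ed away on other platforms
    deps = [":platform_utils"],        # placeholder dep
)

sc_cc_bin(
    name = "switch_agent",             # placeholder target
    srcs = ["switch_agent_main.cc"],
    deps = [":table_lib"],
)

# decorate() examples, following its logic above:
#   decorate("//foo/bar", "ppc")      -> "//foo/bar:bar_ppc"
#   decorate("//foo/bar:bat", "ppc")  -> "//foo/bar:bat_ppc"
#   decorate("bar", "ppc")            -> "bar_ppc"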
src/genie/libs/parser/ios/tests/test_show_platform.py
miuvlad/genieparser
0
2859
<filename>src/genie/libs/parser/ios/tests/test_show_platform.py #!/bin/env python import unittest from unittest.mock import Mock from pyats.topology import Device from genie.metaparser.util.exceptions import SchemaEmptyParserError,\ SchemaMissingKeyError from genie.libs.parser.ios.show_platform import ShowVersion,\ Dir,\ ShowRedundancy,\ ShowInventory,\ ShowBootvar, \ ShowProcessesCpuSorted,\ ShowProcessesCpu,\ ShowVersionRp,\ ShowPlatform,\ ShowPlatformPower,\ ShowProcessesCpuHistory,\ ShowProcessesCpuPlatform,\ ShowPlatformSoftwareStatusControl,\ ShowPlatformSoftwareSlotActiveMonitorMem,\ ShowPlatformHardware,\ ShowPlatformHardwarePlim,\ ShowPlatformHardwareQfpBqsOpmMapping,\ ShowPlatformHardwareQfpBqsIpmMapping,\ ShowPlatformHardwareSerdes,\ ShowPlatformHardwareSerdesInternal,\ ShowPlatformHardwareQfpBqsStatisticsChannelAll,\ ShowPlatformHardwareQfpInterfaceIfnameStatistics,\ ShowPlatformHardwareQfpStatisticsDrop,\ ShowEnvironment,\ ShowModule,\ ShowSwitch, ShowSwitchDetail from genie.libs.parser.iosxe.tests.test_show_platform import TestShowPlatform as test_show_platform_iosxe,\ TestShowPlatformPower as test_show_platform_power_iosxe,\ TestShowVersionRp as test_show_version_rp_iosxe,\ TestShowProcessesCpu as test_show_processes_cpu_iosxe,\ TestShowProcessesCpuHistory as test_show_processes_cpu_history_iosxe,\ TestShowProcessesCpuPlatform as test_show_processes_cpu_platform_iosxe,\ TestShowPlatformSoftwareStatusControlProcessorBrief as test_show_platform_software_status_control_processor_brief_iosxe,\ TestShowPlatformSoftwareSlotActiveMonitorMemSwap as test_show_platform_software_slot_active_monitor_Mem_iosxe,\ TestShowPlatformHardware as test_show_platform_hardware_iosxe,\ TestShowPlatformHardwarePlim as test_show_platform_hardware_plim_iosxe,\ TestShowPlatformHardwareQfpBqsOpmMapping as test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe,\ TestShowPlatformHardwareQfpBqsIpmMapping as test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe,\ TestShowPlatformHardwareSerdesStatistics as test_show_platform_hardware_serdes_statistics_iosxe,\ TestShowPlatformHardwareSerdesStatisticsInternal as test_show_platform_hardware_serdes_statistics_internal_iosxe,\ ShowPlatformHardwareQfpBqsStatisticsChannelAll as show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe,\ ShowPlatformHardwareQfpInterface as show_platform_hardware_qfp_interface_iosxe,\ TestShowPlatformHardwareQfpStatisticsDrop as test_show_platform_hardware_qfp_statistics_drop_iosxe,\ TestShowEnv as test_show_env_iosxe,\ TestShowModule as test_show_module_iosxe,\ TestShowSwitch as test_show_switch_iosxe,\ TestShowSwitchDetail as test_show_switch_detail_iosxe class TestShowVersion(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\ ROM: Bootstrap program is IOSv '''} golden_parsed_output_iosv = { "version": { "last_reload_reason": "Unknown reason", "hostname": "N95_1", "os": "IOS", "version_short": "15.6", "number_of_intfs": { "Gigabit Ethernet": "6" }, "version": "15.6(3)M2", "rtr_type": "IOSv", "chassis_sn": "9K66Z7TOKAACDEQA24N7S", "chassis": "IOSv", "image_id": "VIOS-ADVENTERPRISEK9-M", 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 29-Mar-17 14:05', "processor_type": "revision 1.0", "platform": "IOSv", "image_type": "production image", 'processor_board_flash': '10080K', 'returned_to_rom_by': 'reload', "main_mem": "435457", "mem_size": { "non-volatile configuration": "256" }, "system_image": 
"flash0:/vios-adventerprisek9-m", "curr_config_register": "0x0", "rom": "Bootstrap program is IOSv", "uptime": "1 day, 16 hours, 42 minutes" } } golden_output_iosv = {'execute.return_value': '''\ Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2017 by Cisco Systems, Inc. Compiled Wed 29-Mar-17 14:05 by prod_rel_team ROM: Bootstrap program is IOSv N95_1 uptime is 1 day, 16 hours, 42 minutes System returned to ROM by reload System image file is "flash0:/vios-adventerprisek9-m" Last reload reason: Unknown reason This product contains cryptographic features and is subject to United States and local country laws governing import, export, transfer and use. Delivery of Cisco cryptographic products does not imply third-party authority to import, export, distribute or use encryption. Importers, exporters, distributors and users are responsible for compliance with U.S. and local country laws. By using this product you agree to comply with applicable laws and regulations. If you are unable to comply with U.S. and local laws, return this product immediately. A summary of U.S. laws governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us by sending email to <EMAIL>. Cisco IOSv (revision 1.0) with with 435457K/87040K bytes of memory. Processor board ID 9K66Z7TOKAACDEQA24N7S 6 Gigabit Ethernet interfaces DRAM configuration is 72 bits wide with parity disabled. 256K bytes of non-volatile configuration memory. 2097152K bytes of ATA System CompactFlash 0 (Read/Write) 0K bytes of ATA CompactFlash 1 (Read/Write) 0K bytes of ATA CompactFlash 2 (Read/Write) 10080K bytes of ATA CompactFlash 3 (Read/Write) Configuration register is 0x0'''} golden_parsed_output_ios = { 'version': {'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version ' '15.2(3r)E, RELEASE SOFTWARE (fc1)', 'chassis': 'WS-C3750X-24P', 'chassis_sn': 'FDO2028F1WK', 'curr_config_register': '0xF', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Wed 26-Jun-13 09:56', 'hostname': 'R5', 'image_id': 'C3750E-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'main_mem': '262144', 'mem_size': {'flash-simulated non-volatile configuration': '512'}, 'next_reload_license_level': 'ipservices', 'number_of_intfs': {'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2', 'Virtual Ethernet': '2', 'Gigabit Ethernet': '28', 'FastEthernet': '1' }, 'os': 'IOS', 'platform': 'C3750E', 'processor_type': 'PowerPC405', 'returned_to_rom_by': 'power-on', 'rom': 'Bootstrap program is C3750E boot loader', 'rtr_type': 'WS-C3750X-24P', 'system_image': 'flash:c3750e-universalk9-mz', 'system_restarted_at': '12:22:21 PDT Mon Sep 10 2018', 'uptime': '9 weeks, 4 days, 2 hours, 3 minutes', 'version': '12.2(55)SE8', 'version_short': '12.2' } } golden_output_ios = {'execute.return_value': '''\ Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 12.2(55)SE8, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2013 by Cisco Systems, Inc. 
Compiled Wed 26-Jun-13 09:56 by prod_rel_team Image text-base: 0x00003000, data-base: 0x02800000 ROM: Bootstrap program is C3750E boot loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 15.2(3r)E, RELEASE SOFTWARE (fc1) R5 uptime is 9 weeks, 4 days, 2 hours, 3 minutes System returned to ROM by power-on System restarted at 12:22:21 PDT Mon Sep 10 2018 System image file is "flash:c3750e-universalk9-mz" This product contains cryptographic features and is subject to United States and local country laws governing import, export, transfer and use. Delivery of Cisco cryptographic products does not imply third-party authority to import, export, distribute or use encryption. Importers, exporters, distributors and users are responsible for compliance with U.S. and local country laws. By using this product you agree to comply with applicable laws and regulations. If you are unable to comply with U.S. and local laws, return this product immediately. A summary of U.S. laws governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us by sending email to <EMAIL>. License Level: ipservices License Type: Permanent Next reload license Level: ipservices cisco WS-C3750X-24P (PowerPC405) processor (revision W0) with 262144K bytes of memory. Processor board ID FDO2028F1WK Last reset from power-on 2 Virtual Ethernet interfaces 1 FastEthernet interface 28 Gigabit Ethernet interfaces 2 Ten Gigabit Ethernet interfaces The password-recovery mechanism is enabled. 512K bytes of flash-simulated non-volatile configuration memory. Base ethernet MAC Address : 84:3D:C6:FF:F1:B8 Motherboard assembly number : 73-15476-04 Motherboard serial number : FDO202907UH Model revision number : W0 Motherboard revision number : B0 Model number : WS-C3750X-24P-L Daughterboard assembly number : 800-32727-03 Daughterboard serial number : FDO202823P8 System serial number : FDO2028F1WK Top Assembly Part Number : 800-38990-01 Top Assembly Revision Number : F0 Version ID : V07 CLEI Code Number : CMMPP00DRB Hardware Board Revision Number : 0x05 Switch Ports Model SW Version SW Image ------ ----- ----- ---------- ---------- * 1 30 WS-C3750X-24P 12.2(55)SE8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_cat6k = { "version": { "os": "IOS", "version_short": "12.2", "platform": "s72033_rp", "version": "12.2(18)SXF7", "image_id": "s72033_rp-ADVENTERPRISEK9_WAN-M", 'compiled_by': 'kellythw', 'compiled_date': 'Thu 23-Nov-06 06:26', "image_type": "production image", "rom": "System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1)", "bootldr": "s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1)", "hostname": "cat6k_tb1", "uptime": "10 weeks, 5 days, 5 hours, 16 minutes", "system_image": "disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7", "chassis": "WS-C6503-E", "main_mem": "983008", "processor_type": "R7000", 'sp_by': 'power on', 'returned_to_rom_at': '21:57:23 UTC Sat Aug 28 2010', 'returned_to_rom_by': 'power cycle', "rtr_type": "WS-C6503-E", "chassis_sn": "FXS1821Q2H9", "last_reload_reason": "s/w reset", 'processor_board_flash': '65536K', "number_of_intfs": { "Gigabit Ethernet/IEEE 802.3": "50", 'Virtual Ethernet/IEEE 802.3': '1' }, "mem_size": {"non-volatile configuration": "1917", "packet buffer": "8192"}, "curr_config_register": "0x2102", } } golden_output_ios_cat6k = {'execute.return_value': ''' show version Cisco Internetwork Operating System Software 
IOS (tm) s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2006 by cisco Systems, Inc. Compiled Thu 23-Nov-06 06:26 by kellythw Image text-base: 0x40101040, data-base: 0x42D98000 ROM: System Bootstrap, Version 12.2(17r)S4, RELEASE SOFTWARE (fc1) BOOTLDR: s72033_rp Software (s72033_rp-ADVENTERPRISEK9_WAN-M), Version 12.2(18)SXF7, RELEASE SOFTWARE (fc1) cat6k_tb1 uptime is 10 weeks, 5 days, 5 hours, 16 minutes Time since cat6k_tb1 switched to active is 10 weeks, 5 days, 5 hours, 15 minutes System returned to ROM by power cycle at 21:57:23 UTC Sat Aug 28 2010 (SP by power on) System image file is "disk0:s72033-adventerprisek9_wan-mz.122-18.SXF7" This product contains cryptographic features and is subject to United States and local country laws governing import, export, transfer and use. Delivery of Cisco cryptographic products does not imply third-party authority to import, export, distribute or use encryption. Importers, exporters, distributors and users are responsible for compliance with U.S. and local country laws. By using this product you agree to comply with applicable laws and regulations. If you are unable to comply with U.S. and local laws, return this product immediately. A summary of U.S. laws governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us by sending email to <EMAIL>. cisco WS-C6503-E (R7000) processor (revision 1.4) with 983008K/65536K bytes of memory. Processor board ID FXS1821Q2H9 SR71000 CPU at 600Mhz, Implementation 0x504, Rev 1.2, 512KB L2 Cache Last reset from s/w reset SuperLAT software (copyright 1990 by Meridian Technology Corp). X.25 software, Version 3.0.0. Bridging software. TN3270 Emulation software. 1 Virtual Ethernet/IEEE 802.3 interface 50 Gigabit Ethernet/IEEE 802.3 interfaces 1917K bytes of non-volatile configuration memory. 8192K bytes of packet buffer memory. 65536K bytes of Flash internal SIMM (Sector size 512K). Configuration register is 0x2102 '''} golden_output_ios_1 = {'execute.return_value': '''\ Cisco IOS Software, C3750E Software (C3750E-UNIVERSALK9-M), Version 15.2(2)E8, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2018 by Cisco Systems, Inc. Compiled Mon 22-Jan-18 04:07 by prod_rel_team ROM: Bootstrap program is C3750E boot loader BOOTLDR: C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1) sample_switch uptime is 8 weeks, 3 days, 10 hours, 27 minutes System returned to ROM by power-on System restarted at 05:06:40 GMT Tue Sep 10 2019 System image file is "flash:c3750e-universalk9-mz.152-2.E8.bin" Last reload reason: Reload command This product contains cryptographic features and is subject to United States and local country laws governing import, export, transfer and use. Delivery of Cisco cryptographic products does not imply third-party authority to import, export, distribute or use encryption. Importers, exporters, distributors and users are responsible for compliance with U.S. and local country laws. By using this product you agree to comply with applicable laws and regulations. If you are unable to comply with U.S. and local laws, return this product immediately. A summary of U.S. 
laws governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us by sending email to <EMAIL>. License Level: ipservices License Type: Permanent Next reload license Level: ipservices cisco WS-C3750X-24S (PowerPC405) processor (revision A0) with 524288K bytes of memory. Processor board ID FDO1633Q14S Last reset from power-on 14 Virtual Ethernet interfaces 1 FastEthernet interface 28 Gigabit Ethernet interfaces 2 Ten Gigabit Ethernet interfaces The password-recovery mechanism is enabled. 512K bytes of flash-simulated non-volatile configuration memory. Base ethernet MAC Address : AC:F2:C5:FF:55:E7 Motherboard assembly number : 73-13061-04 Motherboard serial number : FDO1633Q14M Model revision number : A0 Motherboard revision number : A0 Model number : WS-C3750X-24S-E Daughterboard assembly number : 800-32727-03 Daughterboard serial number : FDO172217ED System serial number : FDO1633Q14S Top Assembly Part Number : 800-33746-04 Top Assembly Revision Number : B0 Version ID : V03 CLEI Code Number : CMMFF00ARC Hardware Board Revision Number : 0x04 Switch Ports Model SW Version SW Image ------ ----- ----- ---------- ---------- * 1 30 WS-C3750X-24S 15.2(2)E8 C3750E-UNIVERSALK9-M Configuration register is 0xF '''} golden_parsed_output_ios_1 = { 'version': {'version_short': '15.2', 'platform': 'C3750E', 'version': '15.2(2)E8', 'image_id': 'C3750E-UNIVERSALK9-M', 'os': 'IOS', 'image_type': 'production image', 'compiled_date': 'Mon 22-Jan-18 04:07', 'compiled_by': 'prod_rel_team', 'rom': 'Bootstrap program is C3750E boot loader', 'bootldr': 'C3750E Boot Loader (C3750X-HBOOT-M) Version 12.2(58r)SE, RELEASE SOFTWARE (fc1)', 'hostname': 'sample_switch', 'uptime': '8 weeks, 3 days, 10 hours, 27 minutes', 'returned_to_rom_by': 'power-on', 'system_restarted_at': '05:06:40 GMT Tue Sep 10 2019', 'system_image': 'flash:c3750e-universalk9-mz.152-2.E8.bin', 'last_reload_reason': 'power-on', 'license_level': 'ipservices', 'license_type': 'Permanent', 'next_reload_license_level': 'ipservices', 'chassis': 'WS-C3750X-24S', 'main_mem': '524288', 'processor_type': 'PowerPC405', 'rtr_type': 'WS-C3750X-24S', 'chassis_sn': 'FDO1633Q14S', 'number_of_intfs': { 'Virtual Ethernet': '14', 'FastEthernet': '1', 'Gigabit Ethernet': '28', 'Ten Gigabit Ethernet': '2' }, 'mem_size': { 'flash-simulated non-volatile configuration': '512' }, 'curr_config_register': '0xF' } } device_output = {'execute.return_value':''' best-c3945-IOS3#show version Cisco IOS Software, C3900 Software (C3900-UNIVERSALK9-M), Version 15.0(1)M7, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2011 by Cisco Systems, Inc. Compiled Fri 05-Aug-11 00:32 by prod_rel_team ROM: System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1) best-c3945-IOS3 uptime is 1 hour, 20 minutes System returned to ROM by reload at 10:26:47 EST Mon Dec 9 2019 System restarted at 10:27:57 EST Mon Dec 9 2019 System image file is "flash0:c3900-universalk9-mz.SPA.150-1.M7.bin" Last reload type: Normal Reload Last reload reason: Reload Command This product contains cryptographic features and is subject to United States and local country laws governing import, export, transfer and use. Delivery of Cisco cryptographic products does not imply third-party authority to import, export, distribute or use encryption. Importers, exporters, distributors and users are responsible for compliance with U.S. and local country laws. 
By using this product you agree to comply with applicable laws and regulations. If you are unable to comply with U.S. and local laws, return this product immediately. A summary of U.S. laws governing Cisco cryptographic products may be found at: http://www.cisco.com/wwl/export/crypto/tool/stqrg.html If you require further assistance please contact us by sending email to <EMAIL>. Cisco CISCO3945-CHASSIS (revision 1.1) with C3900-SPE150/K9 with 2027520K/69632K bytes of memory. Processor board ID FGL161010K8 2 FastEthernet interfaces 3 Gigabit Ethernet interfaces 1 Virtual Private Network (VPN) Module DRAM configuration is 72 bits wide with parity enabled. 255K bytes of non-volatile configuration memory. 2000880K bytes of ATA System CompactFlash 0 (Read/Write) License Info: License UDI: ------------------------------------------------- Device# PID SN ------------------------------------------------- *0 C3900-SPE150/K9 FOC16050QP6 Technology Package License Information for Module:'c3900' ----------------------------------------------------------------- Technology Technology-package Technology-package Current Type Next reboot ------------------------------------------------------------------ ipbase ipbasek9 Permanent ipbasek9 security securityk9 Permanent securityk9 uc None None None data datak9 Permanent datak9 Configuration register is 0x2102 '''} parsed_output = { 'version': { 'chassis': 'CISCO3945-CHASSIS', 'chassis_sn': 'FGL161010K8', 'compiled_by': 'prod_rel_team', 'compiled_date': 'Fri 05-Aug-11 00:32', 'curr_config_register': '0x2102', 'hostname': 'best-c3945-IOS3', 'image_id': 'C3900-UNIVERSALK9-M', 'image_type': 'production image', 'last_reload_reason': 'Reload Command', 'last_reload_type': 'Normal Reload', 'license_udi': { 'device_num': { '*0': { 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6' } } }, 'license_package': { 'data': { 'license_level': 'datak9', 'license_type': 'Permanent', 'next_reload_license_level': 'datak9', }, 'ipbase': { 'license_level': 'ipbasek9', 'license_type': 'Permanent', 'next_reload_license_level': 'ipbasek9', }, 'security': { 'license_level': 'securityk9', 'license_type': 'Permanent', 'next_reload_license_level': 'securityk9', }, 'uc': { 'license_level': 'None', 'license_type': 'None', 'next_reload_license_level': 'None', }, }, 'main_mem': '2027520', 'mem_size': { 'non-volatile configuration': '255', }, 'number_of_intfs': { 'FastEthernet': '2', 'Gigabit Ethernet': '3', }, 'os': 'IOS', 'platform': 'C3900', 'processor_board_flash': '2000880K', 'processor_type': 'C3900-SPE150/K9', 'returned_to_rom_at': '10:26:47 EST Mon Dec 9 2019', 'returned_to_rom_by': 'reload', 'rom': 'System Bootstrap, Version 15.0(1r)M13, RELEASE SOFTWARE (fc1)', 'rtr_type': 'CISCO3945-CHASSIS', 'system_image': 'flash0:c3900-universalk9-mz.SPA.150-1.M7.bin', 'system_restarted_at': '10:27:57 EST Mon Dec 9 2019', 'uptime': '1 hour, 20 minutes', 'version': '15.0(1)M7', 'version_short': '15.0', }, } def test_empty(self): self.dev1 = Mock(**self.empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(AttributeError): parsered_output = version_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) version_obj = ShowVersion(device=self.dev1) with self.assertRaises(KeyError): parsed_output = version_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, 
self.golden_parsed_output_iosv) def test_golden_ios(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios) def test_golden_ios_cat6k(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_cat6k) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_cat6k) def test_golden_ios_1(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_ios_1) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_ios_1) def test_golden_ios_2(self): self.maxDiff = None self.dev_iosv = Mock(**self.device_output) version_obj = ShowVersion(device=self.dev_iosv) parsed_output = version_obj.parse() self.assertEqual(parsed_output, self.parsed_output) class test_dir(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} semi_empty_output = {'execute.return_value': '''\ Directory of flash:/ '''} golden_parsed_output_iosv = { "dir": { "flash0:/": { "files": { "e1000_bia.txt": { "last_modified_date": "Oct 17 2018 18:57:18 +00:00", "index": "269", "size": "119", "permissions": "-rw-" }, "config": { "last_modified_date": "Oct 14 2013 00:00:00 +00:00", "index": "264", "size": "0", "permissions": "drw-" }, "nvram": { "last_modified_date": "Oct 17 2018 18:57:10 +00:00", "index": "268", "size": "524288", "permissions": "-rw-" }, "boot": { "last_modified_date": "Jan 30 2013 00:00:00 +00:00", "index": "1", "size": "0", "permissions": "drw-" }, "vios-adventerprisek9-m": { "last_modified_date": "Mar 29 2017 00:00:00 +00:00", "index": "267", "size": "147988420", "permissions": "-rw-" } }, "bytes_total": "2142715904", "bytes_free": "1989595136" }, "dir": "flash0:/" } } golden_output_iosv = {'execute.return_value': '''\ Directory of flash0:/ 1 drw- 0 Jan 30 2013 00:00:00 +00:00 boot 264 drw- 0 Oct 14 2013 00:00:00 +00:00 config 267 -rw- 147988420 Mar 29 2017 00:00:00 +00:00 vios-adventerprisek9-m 268 -rw- 524288 Oct 17 2018 18:57:10 +00:00 nvram 269 -rw- 119 Oct 17 2018 18:57:18 +00:00 e1000_bia.txt 2142715904 bytes total (1989595136 bytes free) '''} def test_empty(self): self.dev1 = Mock(**self.empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsered_output = dir_obj.parse() def test_semi_empty(self): self.dev1 = Mock(**self.semi_empty_output) dir_obj = Dir(device=self.dev1) with self.assertRaises(SchemaMissingKeyError): parsed_output = dir_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) dir_obj = Dir(device=self.dev_iosv) parsed_output = dir_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_redundancy(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { "red_sys_info": { "last_switchover_reason": "unsupported", "maint_mode": "Disabled", "switchovers_system_experienced": "0", "available_system_uptime": "0 minutes", "communications": "Down", "hw_mode": "Simplex", "communications_reason": "Failure", "standby_failures": "0" }, "slot": { "slot 0": { "image_ver": "Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE 
(fc2)", "uptime_in_curr_state": "1 day, 16 hours, 42 minutes", "config_register": "0x0", "curr_sw_state": "ACTIVE" } } } golden_output_iosv = {'execute.return_value': '''\ Redundant System Information : ------------------------------ Available system uptime = 0 minutes Switchovers system experienced = 0 Standby failures = 0 Last switchover reason = unsupported Hardware Mode = Simplex Maintenance Mode = Disabled Communications = Down Reason: Failure Current Processor Information : ------------------------------- Active Location = slot 0 Current Software state = ACTIVE Uptime in current state = 1 day, 16 hours, 42 minutes Image Version = Cisco IOS Software, IOSv Software (VIOS-ADVENTERPRISEK9-M), Version 15.6(3)M2, RELEASE SOFTWARE (fc2) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2017 by Cisco Systems, Inc. Compiled Wed 29-Mar-17 14:05 by prod_rel_team Configuration register = 0x0 Peer (slot: 0) information is not available because it is in 'DISABLED' state '''} def test_empty(self): self.dev1 = Mock(**self.empty_output) redundancy_obj = ShowRedundancy(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = redundancy_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) redundancy_obj = ShowRedundancy(device=self.dev_iosv) parsed_output = redundancy_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class TestShowInventory(unittest.TestCase): dev1 = Device(name='empty') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { 'main': { 'chassis': { 'IOSv': { 'descr': 'IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0', 'name': 'IOSv', 'pid': 'IOSv', 'sn': '9K66Z7TOKAACDEQA24N7S', 'vid': '1.0', }, }, }, } golden_output_iosv = {'execute.return_value': '''\ NAME: "IOSv", DESCR: "IOSv chassis, Hw Serial#: 9K66Z7TOKAACDEQA24N7S, Hw Revision: 1.0" PID: IOSv , VID: 1.0, SN: 9K66Z7TOKAACDEQA24N7S '''} golden_parsed_output_2 = { "main": { "chassis": { "WS-C6504-E": { "name": "WS-C6504-E", "descr": "Cisco Systems Cisco 6500 4-slot Chassis System", "pid": "WS-C6504-E", "vid": "V01", "sn": "FXS1712Q1R8", } } }, "slot": { "CLK-7600 1": { "other": { "CLK-7600 1": { "name": "CLK-7600 1", "descr": "OSR-7600 Clock FRU 1", "pid": "CLK-7600", "vid": "", "sn": "FXS170802GL", } } }, "CLK-7600 2": { "other": { "CLK-7600 2": { "name": "CLK-7600 2", "descr": "OSR-7600 Clock FRU 2", "pid": "CLK-7600", "vid": "", "sn": "FXS170802GL", } } }, "FAN-MOD-4HS 1": { "other": { "FAN-MOD-4HS 1": { "name": "FAN-MOD-4HS 1", "descr": "High Speed Fan Module for CISCO7604 1", "pid": "FAN-MOD-4HS", "vid": "V01", "sn": "DCH170900PF", } } }, "PS 1 PWR-2700-AC/4": { "other": { "PS 1 PWR-2700-AC/4": { "name": "PS 1 PWR-2700-AC/4", "descr": "2700W AC power supply for CISCO7604 1", "pid": "PWR-2700-AC/4", "vid": "V03", "sn": "APS1707008Y", } } }, "PS 2 PWR-2700-AC/4": { "other": { "PS 2 PWR-2700-AC/4": { "name": "PS 2 PWR-2700-AC/4", "descr": "2700W AC power supply for CISCO7604 2", "pid": "PWR-2700-AC/4", "vid": "V03", "sn": "APS17070093", } } }, "1": { "rp": { "VS-SUP2T-10G": { "name": "1", "descr": "VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/ CTS Rev. 1.5", "pid": "VS-SUP2T-10G", "vid": "V05", "sn": "SAL17152N0F", "subslot": { "0": { "VS-F6K-MSFC5": { "descr": "VS-F6K-MSFC5 CPU Daughterboard Rev. 
2.0", "name": "msfc sub-module of 1", "pid": "VS-F6K-MSFC5", "sn": "SAL17142D06", "vid": "", }, "VS-F6K-PFC4": { "descr": "VS-F6K-PFC4 Policy Feature Card 4 Rev. 2.0", "name": "VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1", "pid": "VS-F6K-PFC4", "sn": "SAL17163901", "vid": "V03", }, }, "4": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te1/4", "name": "Transceiver Te1/4", "pid": "X2-10GB-SR", "sn": "ONT170202T1", "vid": "V06 ", } }, "5": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te1/5", "name": "Transceiver Te1/5", "pid": "X2-10GB-SR", "sn": "ONT1702033D", "vid": "V06 ", } }, }, } } }, "2": { "lc": { "WS-X6816-10GE": { "name": "2", "descr": "WS-X6816-10GE CEF720 16 port 10GE Rev. 2.0", "pid": "WS-X6816-10GE", "vid": "V02", "sn": "SAL17152QB3", "subslot": { "0": { "WS-F6K-DFC4-E": { "descr": "WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 1.2", "name": "WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of 2", "pid": "WS-F6K-DFC4-E", "sn": "SAL171846RF", "vid": "V02", } }, "1": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/1", "name": "Transceiver Te2/1", "pid": "X2-10GB-SR", "sn": "ONT17020338", "vid": "V06 ", } }, "2": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/2", "name": "Transceiver Te2/2", "pid": "X2-10GB-SR", "sn": "ONT1702020H", "vid": "V06 ", } }, "3": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/3", "name": "Transceiver Te2/3", "pid": "X2-10GB-SR", "sn": "ONT170202UU", "vid": "V06 ", } }, "4": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/4", "name": "Transceiver Te2/4", "pid": "X2-10GB-SR", "sn": "ONT170202T5", "vid": "V06 ", } }, "5": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/5", "name": "Transceiver Te2/5", "pid": "X2-10GB-SR", "sn": "AGA1515XZE2", "vid": "V05 ", } }, "6": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/6", "name": "Transceiver Te2/6", "pid": "X2-10GB-SR", "sn": "FNS153920YJ", "vid": "V06 ", } }, "16": { "X2-10GB-SR": { "descr": "X2 Transceiver 10Gbase-SR Te2/16", "name": "Transceiver Te2/16", "pid": "X2-10GB-SR", "sn": "ONT170201TT", "vid": "V06 ", } }, }, } } }, "3": { "lc": { "WS-X6824-SFP": { "name": "3", "descr": "WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0", "pid": "WS-X6824-SFP", "vid": "V01", "sn": "SAL17152EG9", "subslot": { "0": { "WS-F6K-DFC4-A": { "descr": "WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0", "name": "WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of 3", "pid": "WS-F6K-DFC4-A", "sn": "SAL171848KL", "vid": "V04", } } }, } } }, "4": { "lc": { "WS-X6748-GE-TX": { "name": "4", "descr": "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4", "pid": "WS-X6748-GE-TX", "vid": "V04", "sn": "SAL14017TWF", "subslot": { "0": { "WS-F6700-CFC": { "descr": "WS-F6700-CFC Centralized Forwarding Card Rev. 4.1", "name": "WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4", "pid": "WS-F6700-CFC", "sn": "SAL13516QS8", "vid": "V06", } } }, } } }, }, } golden_output_2 = {'execute.return_value': ''' NAME: "WS-C6504-E", DESCR: "Cisco Systems Cisco 6500 4-slot Chassis System" PID: WS-C6504-E , VID: V01, SN: FXS1712Q1R8 NAME: "CLK-7600 1", DESCR: "OSR-7600 Clock FRU 1" PID: CLK-7600 , VID: , SN: FXS170802GL NAME: "CLK-7600 2", DESCR: "OSR-7600 Clock FRU 2" PID: CLK-7600 , VID: , SN: FXS170802GL NAME: "1", DESCR: "VS-SUP2T-10G 5 ports Supervisor Engine 2T 10GE w/ CTS Rev. 1.5" PID: VS-SUP2T-10G , VID: V05, SN: SAL17152N0F NAME: "msfc sub-module of 1", DESCR: "VS-F6K-MSFC5 CPU Daughterboard Rev. 
2.0" PID: VS-F6K-MSFC5 , VID: , SN: SAL17142D06 NAME: "VS-F6K-PFC4 Policy Feature Card 4 EARL sub-module of 1", DESCR: "VS-F6K-PFC4 Policy Feature Card 4 Rev. 2.0" PID: VS-F6K-PFC4 , VID: V03, SN: SAL17163901 NAME: "Transceiver Te1/4", DESCR: "X2 Transceiver 10Gbase-SR Te1/4" PID: X2-10GB-SR , VID: V06 , SN: ONT170202T1 NAME: "Transceiver Te1/5", DESCR: "X2 Transceiver 10Gbase-SR Te1/5" PID: X2-10GB-SR , VID: V06 , SN: ONT1702033D NAME: "2", DESCR: "WS-X6816-10GE CEF720 16 port 10GE Rev. 2.0" PID: WS-X6816-10GE , VID: V02, SN: SAL17152QB3 NAME: "WS-F6K-DFC4-E Distributed Forwarding Card 4 EARL sub-module of 2", DESCR: "WS-F6K-DFC4-E Distributed Forwarding Card 4 Rev. 1.2" PID: WS-F6K-DFC4-E , VID: V02, SN: SAL171846RF NAME: "Transceiver Te2/1", DESCR: "X2 Transceiver 10Gbase-SR Te2/1" PID: X2-10GB-SR , VID: V06 , SN: ONT17020338 NAME: "Transceiver Te2/2", DESCR: "X2 Transceiver 10Gbase-SR Te2/2" PID: X2-10GB-SR , VID: V06 , SN: ONT1702020H NAME: "Transceiver Te2/3", DESCR: "X2 Transceiver 10Gbase-SR Te2/3" PID: X2-10GB-SR , VID: V06 , SN: ONT170202UU NAME: "Transceiver Te2/4", DESCR: "X2 Transceiver 10Gbase-SR Te2/4" PID: X2-10GB-SR , VID: V06 , SN: ONT170202T5 NAME: "Transceiver Te2/5", DESCR: "X2 Transceiver 10Gbase-SR Te2/5" PID: X2-10GB-SR , VID: V05 , SN: AGA1515XZE2 NAME: "Transceiver Te2/6", DESCR: "X2 Transceiver 10Gbase-SR Te2/6" PID: X2-10GB-SR , VID: V06 , SN: FNS153920YJ NAME: "Transceiver Te2/16", DESCR: "X2 Transceiver 10Gbase-SR Te2/16" PID: X2-10GB-SR , VID: V06 , SN: ONT170201TT NAME: "3", DESCR: "WS-X6824-SFP CEF720 24 port 1000mb SFP Rev. 1.0" PID: WS-X6824-SFP , VID: V01, SN: SAL17152EG9 NAME: "WS-F6K-DFC4-A Distributed Forwarding Card 4 EARL sub-module of 3", DESCR: "WS-F6K-DFC4-A Distributed Forwarding Card 4 Rev. 1.0" PID: WS-F6K-DFC4-A , VID: V04, SN: SAL171848KL NAME: "4", DESCR: "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 3.4" PID: WS-X6748-GE-TX , VID: V04, SN: SAL14017TWF NAME: "WS-F6700-CFC Centralized Forwarding Card EARL sub-module of 4", DESCR: "WS-F6700-CFC Centralized Forwarding Card Rev. 4.1" PID: WS-F6700-CFC , VID: V06, SN: SAL13516QS8 NAME: "FAN-MOD-4HS 1", DESCR: "High Speed Fan Module for CISCO7604 1" PID: FAN-MOD-4HS , VID: V01, SN: DCH170900PF NAME: "PS 1 PWR-2700-AC/4", DESCR: "2700W AC power supply for CISCO7604 1" PID: PWR-2700-AC/4 , VID: V03, SN: APS1707008Y NAME: "PS 2 PWR-2700-AC/4", DESCR: "2700W AC power supply for CISCO7604 2" PID: PWR-2700-AC/4 , VID: V03, SN: APS17070093 '''} golden_parsed_output_3 = { "main": { "chassis": { "WS-C6503-E": { "name": "WS-C6503-E", "descr": "Cisco Systems Catalyst 6500 3-slot Chassis System", "pid": "WS-C6503-E", "vid": "V03", "sn": "FXS1821Q2H9", } } }, "slot": { "CLK-7600 1": { "other": { "CLK-7600 1": { "name": "CLK-7600 1", "descr": "OSR-7600 Clock FRU 1", "pid": "CLK-7600", "vid": "", "sn": "FXS181101V4", } } }, "CLK-7600 2": { "other": { "CLK-7600 2": { "name": "CLK-7600 2", "descr": "OSR-7600 Clock FRU 2", "pid": "CLK-7600", "vid": "", "sn": "FXS181101V4", } } }, "1": { "rp": { "WS-SUP720-3BXL": { "name": "1", "descr": "WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6", "pid": "WS-SUP720-3BXL", "vid": "V05", "sn": "SAL11434P2C", "subslot": { "0": { "WS-SUP720": { "descr": "WS-SUP720 MSFC3 Daughterboard Rev. 3.1", "name": "msfc sub-module of 1", "pid": "WS-SUP720", "sn": "SAL11434N9G", "vid": "", }, "WS-F6K-PFC3BXL": { "descr": "WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 
1.8", "name": "switching engine sub-module of 1", "pid": "WS-F6K-PFC3BXL", "sn": "SAL11434LYG", "vid": "V01", }, } }, } } }, "2": { "lc": { "WS-X6748-GE-TX": { "name": "2", "descr": "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 2.6", "pid": "WS-X6748-GE-TX", "vid": "V02", "sn": "SAL1128UPQ9", "subslot": { "0": { "WS-F6700-DFC3CXL": { "descr": "WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 1.1", "name": "switching engine sub-module of 2", "pid": "WS-F6700-DFC3CXL", "sn": "SAL1214LAG5", "vid": "V01", } } }, } } }, "WS-C6503-E-FAN 1": { "other": { "WS-C6503-E-FAN 1": { "name": "WS-C6503-E-FAN 1", "descr": "Enhanced 3-slot Fan Tray 1", "pid": "WS-C6503-E-FAN", "vid": "V02", "sn": "DCH183500KW", } } }, "PS 1 PWR-1400-AC": { "other": { "PS 1 PWR-1400-AC": { "name": "PS 1 PWR-1400-AC", "descr": "AC power supply, 1400 watt 1", "pid": "PWR-1400-AC", "vid": "V01", "sn": "ABC0830J127", } } }, }, } golden_output_3 = {'execute.return_value': ''' # show inventory NAME: "WS-C6503-E", DESCR: "Cisco Systems Catalyst 6500 3-slot Chassis System" PID: WS-C6503-E , VID: V03, SN: FXS1821Q2H9 NAME: "CLK-7600 1", DESCR: "OSR-7600 Clock FRU 1" PID: CLK-7600 , VID: , SN: FXS181101V4 NAME: "CLK-7600 2", DESCR: "OSR-7600 Clock FRU 2" PID: CLK-7600 , VID: , SN: FXS181101V4 NAME: "1", DESCR: "WS-SUP720-3BXL 2 ports Supervisor Engine 720 Rev. 5.6" PID: WS-SUP720-3BXL , VID: V05, SN: SAL11434P2C NAME: "msfc sub-module of 1", DESCR: "WS-SUP720 MSFC3 Daughterboard Rev. 3.1" PID: WS-SUP720 , VID: , SN: SAL11434N9G NAME: "switching engine sub-module of 1", DESCR: "WS-F6K-PFC3BXL Policy Feature Card 3 Rev. 1.8" PID: WS-F6K-PFC3BXL , VID: V01, SN: SAL11434LYG NAME: "2", DESCR: "WS-X6748-GE-TX CEF720 48 port 10/100/1000mb Ethernet Rev. 2.6" PID: WS-X6748-GE-TX , VID: V02, SN: SAL1128UPQ9 NAME: "switching engine sub-module of 2", DESCR: "WS-F6700-DFC3CXL Distributed Forwarding Card 3 Rev. 
1.1" PID: WS-F6700-DFC3CXL , VID: V01, SN: SAL1214LAG5 NAME: "WS-C6503-E-FAN 1", DESCR: "Enhanced 3-slot Fan Tray 1" PID: WS-C6503-E-FAN , VID: V02, SN: DCH183500KW NAME: "PS 1 PWR-1400-AC", DESCR: "AC power supply, 1400 watt 1" PID: PWR-1400-AC , VID: V01, SN: ABC0830J127 '''} golden_output_4 = {'execute.return_value': ''' NAME: "1", DESCR: "WS-C8888X-88" PID: WS-C0123X-45T-S , VID: V00 , SN: FDO123R12W NAME: "Switch 1 - Power Supply 1", DESCR: "ABC Power Supply" PID: C3KX-PWR-350WAC , VID: V01D , SN: DTN1504L0E9 NAME: "TenGigabitEthernet1/1/1", DESCR: "SFP-10GBase-SR" PID: SFP-10G-SR , VID: V03 , SN: SPC1519005V NAME: "2", DESCR: "WS-C3210X-48" PID: WS-C3210X-48T-S , VID: V02 , SN: FD5678Z90P NAME: "Switch 2 - Power Supply 1", DESCR: "BCA Power Supply" PID: C3KX-PWR-007CBA , VID: V01L , SN: LTP13579L3R NAME: "TenGigabitEthernet2/1/1", DESCR: "SFP-10GBase-LR" PID: SFP-10G-LR , VID: V02 , SN: ONT182746GZ NAME: "1", DESCR: "WS-C1010XR-48FPS-I" PID: WS-C1010XR-48FPS-I, VID: V05 , SN: FD2043B0K3 NAME: "Switch 1 - Power Supply 1", DESCR: "LLL Power Supply" PID: PWR-C2-2929WAC , VID: V02L , SN: LIT03728KKK NAME: "Switch 1 - FlexStackPlus Module", DESCR: "Stacking Module" PID: C1010X-STACK , VID: V02 , SN: FD232323XXZ NAME: "GigabitEthernet1/0/49", DESCR: "1000BaseSX SFP" PID: GLC-SX-MMD , VID: V01 , SN: ACW102938VS '''} golden_parsed_output_4 = { 'slot': { '1': { 'rp': { 'WS-C0123X-45T-S': { 'descr': 'WS-C8888X-88', 'name': '1', 'pid': 'WS-C0123X-45T-S', 'sn': 'FDO123R12W', 'subslot': { '1': { 'C3KX-PWR-350WAC': { 'descr': 'ABC Power Supply', 'name': 'Switch 1 - Power Supply 1', 'pid': 'C3KX-PWR-350WAC', 'sn': 'DTN1504L0E9', 'vid': 'V01D ', }, }, '1/1/1': { 'SFP-10G-SR': { 'descr': 'SFP-10GBase-SR', 'name': 'TenGigabitEthernet1/1/1', 'pid': 'SFP-10G-SR', 'sn': 'SPC1519005V', 'vid': 'V03 ', }, }, }, 'vid': 'V00 ', }, 'WS-C1010XR-48FPS-I': { 'descr': 'WS-C1010XR-48FPS-I', 'name': '1', 'pid': 'WS-C1010XR-48FPS-I', 'sn': 'FD2043B0K3', 'subslot': { '1': { 'C1010X-STACK': { 'descr': 'Stacking Module', 'name': 'Switch 1 - FlexStackPlus Module', 'pid': 'C1010X-STACK', 'sn': 'FD232323XXZ', 'vid': 'V02 ', }, 'PWR-C2-2929WAC': { 'descr': 'LLL Power Supply', 'name': 'Switch 1 - Power Supply 1', 'pid': 'PWR-C2-2929WAC', 'sn': 'LIT03728KKK', 'vid': 'V02L ', }, }, '1/0/49': { 'GLC-SX-MMD': { 'descr': '1000BaseSX SFP', 'name': 'GigabitEthernet1/0/49', 'pid': 'GLC-SX-MMD', 'sn': 'ACW102938VS', 'vid': 'V01 ', }, }, }, 'vid': 'V05 ', }, }, }, '2': { 'rp': { 'WS-C3210X-48T-S': { 'descr': 'WS-C3210X-48', 'name': '2', 'pid': 'WS-C3210X-48T-S', 'sn': 'FD5678Z90P', 'subslot': { '2': { 'C3KX-PWR-007CBA': { 'descr': 'BCA Power Supply', 'name': 'Switch 2 - Power Supply 1', 'pid': 'C3KX-PWR-007CBA', 'sn': 'LTP13579L3R', 'vid': 'V01L ', }, }, '2/1/1': { 'SFP-10G-LR': { 'descr': 'SFP-10GBase-LR', 'name': 'TenGigabitEthernet2/1/1', 'pid': 'SFP-10G-LR', 'sn': 'ONT182746GZ', 'vid': 'V02 ', }, }, }, 'vid': 'V02 ', }, }, }, }, } golden_output_5 = {'execute.return_value': ''' best-c3945-IOS3#show inventory NAME: "CISCO3945-CHASSIS", DESCR: "CISCO3945-CHASSIS" PID: CISCO3945-CHASSIS , VID: V05 , SN: FGL161010K8 NAME: "Cisco Services Performance Engine 150 for Cisco 3900 ISR on Slot 0", DESCR: "Cisco Services Performance Engine 150 for Cisco 3900 ISR" PID: C3900-SPE150/K9 , VID: V05 , SN: FOC16050QP6 NAME: "Two-Port Fast Ethernet High Speed WAN Interface Card on Slot 0 SubSlot 3", DESCR: "Two-Port Fast Ethernet High Speed WAN Interface Card" PID: HWIC-2FE , VID: V02 , SN: FOC16062824 NAME: "C3900 AC Power Supply 1", DESCR: "C3900 
AC Power Supply 1" PID: PWR-3900-AC , VID: V03 , SN: QCS1604P0BT '''} golden_parsed_output_5 = { 'main': { 'chassis': { 'CISCO3945-CHASSIS': { 'descr': 'CISCO3945-CHASSIS', 'name': 'CISCO3945-CHASSIS', 'pid': 'CISCO3945-CHASSIS', 'sn': 'FGL161010K8', 'vid': 'V05 ', }, }, }, 'slot': { '0': { 'rp': { 'C3900-SPE150/K9': { 'descr': 'Cisco Services Performance Engine 150 for Cisco 3900 ISR', 'name': 'Cisco Services Performance Engine 150 for Cisco 3900 ISR on Slot 0', 'pid': 'C3900-SPE150/K9', 'sn': 'FOC16050QP6', 'subslot': { '3': { 'HWIC-2FE': { 'descr': 'Two-Port Fast Ethernet High Speed WAN Interface Card', 'name': 'Two-Port Fast Ethernet High Speed WAN Interface Card on Slot 0 SubSlot 3', 'pid': 'HWIC-2FE', 'sn': 'FOC16062824', 'vid': 'V02 ', }, }, }, 'vid': 'V05 ', }, }, }, 'C3900 AC Power Supply 1': { 'other': { 'C3900 AC Power Supply 1': { 'descr': 'C3900 AC Power Supply 1', 'name': 'C3900 AC Power Supply 1', 'pid': 'PWR-3900-AC', 'sn': 'QCS1604P0BT', 'vid': 'V03 ', }, }, }, }, } golden_output_6 = {'execute.return_value': ''' NAME: "1", DESCR: "SM-ES2-16-P" PID: SM-ES2-16-P , VID: , SN: FOC09876NP3 '''} golden_parsed_output_6 = { 'slot': { '1': { 'lc': { 'SM-ES2-16-P': { 'descr': 'SM-ES2-16-P', 'name': '1', 'pid': 'SM-ES2-16-P', 'sn': 'FOC09876NP3', 'vid': '', }, }, }, }, } golden_output_7 = {'execute.return_value': ''' NAME: "2821 chassis", DESCR: "2821 chassis" PID: CISCO2821 , VID: V07 , SN: FTX1234AMWT NAME: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0", DESCR: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675U0D NAME: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 1", DESCR: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC98675W3E NAME: "Virtual Private Network (VPN) Module on Slot 0", DESCR: "Encryption AIM Element" PID: AIM-VPN/SSL-2 , VID: V01, SN: FOC2837465E '''} golden_parsed_output_7 = { 'main': { 'chassis': { 'CISCO2821': { 'descr': '2821 chassis', 'name': '2821 chassis', 'pid': 'CISCO2821', 'sn': 'FTX1234AMWT', 'vid': 'V07 ', }, }, }, 'slot': { '0': { 'other': { 'AIM-VPN/SSL-2': { 'descr': 'Encryption AIM Element', 'name': 'Virtual Private Network (VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-2', 'sn': 'FOC2837465E', 'vid': 'V01', 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675U0D', 'vid': 'V01 ', }, }, '1': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 1', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC98675W3E', 'vid': 'V01 ', }, }, }, }, }, }, }, } golden_output_8 = {'execute.return_value': ''' NAME: "3825 chassis", DESCR: "3825 chassis" PID: CISCO3825 , VID: V05 , SN: FTX7908A3RQ NAME: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0", DESCR: "VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1" PID: VWIC2-2MFT-T1/E1 , VID: V01 , SN: FOC65428K9F NAME: "Wan Interface Card BRI U (2091, 3086) on Slot 0 SubSlot 1", DESCR: "Wan Interface Card BRI U (2091, 3086)" PID: WIC-1B-U-V2 , VID: V01, SN: 10293847 NAME: "PVDMII DSP SIMM with four DSPs on Slot 0 SubSlot 4", DESCR: "PVDMII DSP SIMM with four DSPs" PID: PVDM2-64 , VID: V01 , SN: FOC63358WSI NAME: "High Density Voice 
Module - 8FXS/DID on Slot 1", DESCR: "High Density Voice Module - 8FXS/DID" PID: EVM-HD-8FXS/DID , VID: V04 , SN: FOC65798TG8 NAME: "Six port FXO voice interface daughtercard on Slot 1 SubSlot 1", DESCR: "Six port FXO voice interface daughtercard" PID: EM-HDA-6FXO , VID: V03 , SN: FOC85389QXB '''} golden_parsed_output_8 = { 'main': { 'chassis': { 'CISCO3825': { 'descr': '3825 chassis', 'name': '3825 chassis', 'pid': 'CISCO3825', 'sn': 'FTX7908A3RQ', 'vid': 'V05 ', }, }, }, 'slot': { '0': { 'rp': { 'CISCO3825': { 'subslot': { '0': { 'VWIC2-2MFT-T1/E1': { 'descr': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1', 'name': 'VWIC2-2MFT-T1/E1 - 2-Port RJ-48 Multiflex Trunk - T1/E1 on Slot 0 SubSlot 0', 'pid': 'VWIC2-2MFT-T1/E1', 'sn': 'FOC65428K9F', 'vid': 'V01 ', }, }, '1': { 'WIC-1B-U-V2': { 'descr': 'Wan Interface Card BRI U (2091, 3086)', 'name': 'Wan Interface Card BRI U (2091, 3086) on Slot 0 SubSlot 1', 'pid': 'WIC-1B-U-V2', 'sn': '10293847', 'vid': 'V01', }, }, '4': { 'PVDM2-64': { 'descr': 'PVDMII DSP SIMM with four DSPs', 'name': 'PVDMII DSP SIMM with four DSPs on Slot 0 SubSlot 4', 'pid': 'PVDM2-64', 'sn': 'FOC63358WSI', 'vid': 'V01 ', }, }, }, }, }, }, '1': { 'other': { 'EVM-HD-8FXS/DID': { 'descr': 'High Density Voice Module - 8FXS/DID', 'name': 'High Density Voice Module - 8FXS/DID on Slot 1', 'pid': 'EVM-HD-8FXS/DID', 'sn': 'FOC65798TG8', 'subslot': { '1': { 'EM-HDA-6FXO': { 'descr': 'Six port FXO voice interface daughtercard', 'name': 'Six port FXO voice interface daughtercard on Slot 1 SubSlot 1', 'pid': 'EM-HDA-6FXO', 'sn': 'FOC85389QXB', 'vid': 'V03 ', }, }, }, 'vid': 'V04 ', }, }, }, }, } golden_output_9 = {'execute.return_value': ''' NAME: "3845 chassis", DESCR: "3845 chassis" PID: CISCO3845 , VID: V05 , SN: FTX6666ARJ9 NAME: "c3845 Motherboard with Gigabit Ethernet on Slot 0", DESCR: "c3845 Motherboard with Gigabit Ethernet" PID: CISCO3845-MB , VID: V09 , SN: FOC729346GQ NAME: "Virtual Private Network (VPN) Module on Slot 0", DESCR: "Encryption AIM Element" PID: AIM-VPN/SSL-3 , VID: V01, SN: FOC758693YO NAME: "Clear/Subrate T3/E3 WAN on Slot 1", DESCR: "Clear/Subrate T3/E3 WAN" PID: NM-1T3/E3= , VID: V01 , SN: FOC28476ADM NAME: "16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2", DESCR: "16 Port 10BaseT/100BaseTX EtherSwitch" PID: NM-16ESW , VID: V01 , SN: FOC135464KO NAME: "Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2 SubSlot 0", DESCR: "Gigabit(1000BaseT) module for EtherSwitch NM" PID: GE-DCARD-ESW , VID: V01 , SN: FOC91864MNN '''} golden_parsed_output_9 = { 'main': { 'chassis': { 'CISCO3845': { 'descr': '3845 chassis', 'name': '3845 chassis', 'pid': 'CISCO3845', 'sn': 'FTX6666ARJ9', 'vid': 'V05 ', }, }, }, 'slot': { '0': { 'lc': { 'CISCO3845-MB': { 'descr': 'c3845 Motherboard with Gigabit Ethernet', 'name': 'c3845 Motherboard with Gigabit Ethernet on Slot 0', 'pid': 'CISCO3845-MB', 'sn': 'FOC729346GQ', 'vid': 'V09 ', }, }, 'other': { 'AIM-VPN/SSL-3': { 'descr': 'Encryption AIM Element', 'name': 'Virtual Private Network (VPN) Module on Slot 0', 'pid': 'AIM-VPN/SSL-3', 'sn': 'FOC758693YO', 'vid': 'V01', }, }, }, '1': { 'lc': { 'NM-1T3/E3=': { 'descr': 'Clear/Subrate T3/E3 WAN', 'name': 'Clear/Subrate T3/E3 WAN on Slot 1', 'pid': 'NM-1T3/E3=', 'sn': 'FOC28476ADM', 'vid': 'V01 ', }, }, }, '16': { 'lc': { 'NM-16ESW': { 'descr': '16 Port 10BaseT/100BaseTX EtherSwitch', 'name': '16 Port 10BaseT/100BaseTX EtherSwitch on Slot 2', 'pid': 'NM-16ESW', 'sn': 'FOC135464KO', 'subslot': { '0': { 'GE-DCARD-ESW': { 'descr': 'Gigabit(1000BaseT) module for EtherSwitch NM', 
'name': 'Gigabit(1000BaseT) module for EtherSwitch NM on Slot 2 SubSlot 0', 'pid': 'GE-DCARD-ESW', 'sn': 'FOC91864MNN', 'vid': 'V01 ', }, }, }, 'vid': 'V01 ', }, }, }, }, } def test_empty(self): self.dev1 = Mock(**self.empty_output) inventory_obj = ShowInventory(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = inventory_obj.parse() def test_golden_iosv(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) inventory_obj = ShowInventory(device=self.dev_iosv) parsed_output = inventory_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) def test_golden_output_2(self): self.maxDiff = None self.device = Mock(**self.golden_output_2) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_2) def test_golden_output_3(self): self.maxDiff = None self.device = Mock(**self.golden_output_3) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_3) def test_golden_output_4(self): self.maxDiff = None self.device = Mock(**self.golden_output_4) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_4) def test_golden_output_5(self): self.maxDiff = None self.device = Mock(**self.golden_output_5) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_5) def test_golden_output_6(self): self.maxDiff = None self.device = Mock(**self.golden_output_6) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_6) def test_golden_output_7(self): self.maxDiff = None self.device = Mock(**self.golden_output_7) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_7) def test_golden_output_8(self): self.maxDiff = None self.device = Mock(**self.golden_output_8) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_8) def test_golden_output_9(self): self.maxDiff = None self.device = Mock(**self.golden_output_9) obj = ShowInventory(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_9) class test_show_bootvar(unittest.TestCase): dev = Device(name='ios') dev_iosv = Device(name='iosv') empty_output = {'execute.return_value': ''} golden_parsed_output_iosv = { "active": { "boot_variable": "disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12", "configuration_register": "0x2012" }, "next_reload_boot_variable": "disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12" } golden_output_iosv = {'execute.return_value': '''\ BOOT variable = disk0:s72033-adventerprisek9-mz.122-33.SRE0a-ssr-nxos-76k-1,12; CONFIG_FILE variable = BOOTLDR variable = Configuration register is 0x2012 Standby not ready to show bootvar '''} def test_empty(self): self.dev = Mock(**self.empty_output) platform_obj = ShowBootvar(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_iosv = Mock(**self.golden_output_iosv) platform_obj = ShowBootvar(device=self.dev_iosv) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_iosv) class test_show_processes_cpu_sorted_CPU(unittest.TestCase): dev = 
Device(name='c3850') empty_output = {'execute.return_value': ''} golden_parsed_output = { "five_sec_cpu_total": 13, "five_min_cpu": 15, "one_min_cpu": 23, "five_sec_cpu_interrupts": 0 } golden_output = {'execute.return_value': '''\ show processes cpu sorted 5min | inc CPU CPU utilization for five seconds: 13%/0%; one minute: 23%; five minutes: 15% '''} golden_parsed_output_1 = { "sort": { 1: { "invoked": 3321960, "usecs": 109, "tty": 0, "one_min_cpu": 0.54, "process": "PIM Process", "five_min_cpu": 0.48, "runtime": 362874, "pid": 368, "five_sec_cpu": 1.03 }, 2: { "invoked": 1466728, "usecs": 2442, "tty": 0, "one_min_cpu": 0.87, "process": "IOSv e1000", "five_min_cpu": 2.77, "runtime": 3582279, "pid": 84, "five_sec_cpu": 0.55 }, 3: { "invoked": 116196, "usecs": 976, "tty": 0, "one_min_cpu": 0.07, "process": "OSPF-1 Hello", "five_min_cpu": 0.07, "runtime": 113457, "pid": 412, "five_sec_cpu": 0.15 } }, "five_sec_cpu_total": 4, "five_min_cpu": 9, "one_min_cpu": 4, "nonzero_cpu_processes": [ "PIM Process", "IOSv e1000", "OSPF-1 Hello" ], "five_sec_cpu_interrupts": 0 } golden_output_1 = {'execute.return_value': ''' CPU utilization for five seconds: 4%/0%; one minute: 4%; five minutes: 9% PID Runtime(ms) Invoked uSecs 5Sec 1Min 5Min TTY Process 368 362874 3321960 109 1.03% 0.54% 0.48% 0 PIM Process 84 3582279 1466728 2442 0.55% 0.87% 2.77% 0 IOSv e1000 412 113457 116196 976 0.15% 0.07% 0.07% 0 OSPF-1 Hello '''} def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowProcessesCpuSorted(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse(key_word='CPU', sort_time='5min') self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.dev = Mock(**self.golden_output_1) obj = ShowProcessesCpuSorted(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_1) class test_show_processes_cpu(test_show_processes_cpu_iosxe): def test_golden(self): self.device = Mock(**self.golden_output) obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.device = Mock(**self.golden_output_1) obj = ShowProcessesCpu(device=self.device) parsed_output = obj.parse(key_word='process') self.assertEqual(parsed_output, self.golden_parsed_output_1) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowProcessesCpu(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_version_rp(test_show_version_rp_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj = ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='active', status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_golden_standby(self): self.device = Mock(**self.golden_output_standby) obj = ShowVersionRp(device=self.device) parsed_output = obj.parse(rp='standby', status='running') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_standby) def test_golden_standby_offline(self): self.device = Mock(**self.golden_output_standby_offline) obj = ShowVersionRp(device=self.device) self.maxDiff = None with 
self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(rp='standby', status='running') def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowVersionRp(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform(test_show_platform_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowPlatform(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_semi_empty(self): self.dev2 = Mock(**self.semi_empty_output) platform_obj = ShowPlatform(device=self.dev2) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden_c3850(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowPlatform(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_c3850) def test_golden_asr1k(self): self.maxDiff = None self.dev_asr1k = Mock(**self.golden_output_asr1k) platform_obj = ShowPlatform(device=self.dev_asr1k) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_asr1k) class test_show_platform_power(test_show_platform_power_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformPower(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj = ShowPlatformPower(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_history(test_show_processes_cpu_history_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowProcessesCpuHistory(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj = ShowProcessesCpuHistory(device=self.device) parsed_output = platform_obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_processes_cpu_platform(test_show_processes_cpu_platform_iosxe): def test_golden(self): self.device = Mock(**self.golden_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device) parsed_output = cpu_platform_obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output) def test_empty(self): self.device1 = Mock(**self.empty_output) cpu_platform_obj = ShowProcessesCpuPlatform(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = cpu_platform_obj.parse() class test_show_platform_software_status_control_processor_brief(test_show_platform_software_status_control_processor_brief_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowPlatformSoftwareStatusControl(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_software_slot_active_monitor_Mem(test_show_platform_software_slot_active_monitor_Mem_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj = 
ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowPlatformSoftwareSlotActiveMonitorMem(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware(test_show_platform_hardware_iosxe): def test_golden_active(self): self.device = Mock(**self.golden_output_active) obj = ShowPlatformHardware(device=self.device) parsed_output = obj.parse() self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardware(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() class test_show_platform_hardware_plim(test_show_platform_hardware_plim_iosxe): def test_golden_port(self): self.device = Mock(**self.golden_output_port) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(port='0/0/0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_port) def test_golden_slot(self): self.device = Mock(**self.golden_output_slot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_slot) def test_golden_subslot(self): self.device = Mock(**self.golden_output_subslot) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(subslot='0/1') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_subslot) def test_golden_slot_internal(self): self.device = Mock(**self.golden_output_slot_internal) obj = ShowPlatformHardwarePlim(device=self.device) parsed_output = obj.parse(slot='0', internal=True) self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_slot_internal) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwarePlim(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(port='0/0/0') class test_show_platform_hardware_qfp_bqs_opm_mapping(test_show_platform_hardware_qfp_bqs_opm_mapping_iosxe): def test_golden_active_opm(self): self.device = Mock(**self.golden_output_active_opm) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsOpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_qfp_bqs_ipm_mapping(test_show_platform_hardware_qfp_bqs_ipm_mapping_iosxe): def test_golden_active_ipm(self): self.device = Mock(**self.golden_output_active_ipm) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device) parsed_output = obj.parse(status='active', slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareQfpBqsIpmMapping(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(status='active', slot='0') class test_show_platform_hardware_serdes_statistics(test_show_platform_hardware_serdes_statistics_iosxe): 
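    # Note: the test classes from here on subclass the IOSXE test cases imported
    # above (the *_iosxe aliases), reusing their golden device outputs and golden
    # parsed dictionaries while running them through the IOS parser classes.
    # A minimal sketch of that shared pattern (names such as `golden_cli_output`
    # and `expected_parsed` are placeholders, not attributes defined here):
    #
    #     device = Mock(**{'execute.return_value': golden_cli_output})
    #     parsed = ShowPlatformHardwareSerdes(device=device).parse(slot='0')
    #     self.assertEqual(parsed, expected_parsed)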
def test_golden_serdes(self): self.device = Mock(**self.golden_output_serdes) obj = ShowPlatformHardwareSerdes(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual(parsed_output, self.golden_parsed_output_serdes) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdes(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class test_show_platform_hardware_serdes_statistics_internal(test_show_platform_hardware_serdes_statistics_internal_iosxe): def test_golden(self): self.device = Mock(**self.golden_output_serdes_internal) obj = ShowPlatformHardwareSerdesInternal(device=self.device) parsed_output = obj.parse(slot='0') self.maxDiff = None self.assertEqual( parsed_output, self.golden_parsed_output_serdes_internal) def test_empty(self): self.device1 = Mock(**self.empty_output) obj = ShowPlatformHardwareSerdesInternal(device=self.device1) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(slot='0') class show_platform_hardware_qfp_bqs_statistics_channel_all(show_platform_hardware_qfp_bqs_statistics_channel_all_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm') def test_golden_active_ipm(self): self.maxDiff = None self.device = Mock(**self.golden_output_active_ipm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active', slot='0', iotype='ipm') self.assertEqual(parsed_output, self.golden_parsed_output_active_ipm) def test_golden_active_opm(self): self.maxDiff = None self.device = Mock(**self.golden_output_active_opm) platform_obj = ShowPlatformHardwareQfpBqsStatisticsChannelAll( device=self.device) parsed_output = platform_obj.parse( status='active', slot='0', iotype='opm') self.assertEqual(parsed_output, self.golden_parsed_output_active_opm) class show_platform_hardware_qfp_interface(show_platform_hardware_qfp_interface_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) platform_obj = ShowPlatformHardwareQfpInterfaceIfnameStatistics( device=self.device) parsed_output = platform_obj.parse( status='active', interface='gigabitEthernet 0/0/0') self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_platform_hardware_qfp_statistics_drop(test_show_platform_hardware_qfp_statistics_drop_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse(status='active') def test_golden_active(self): self.maxDiff = None self.device = Mock(**self.golden_output_active) platform_obj = ShowPlatformHardwareQfpStatisticsDrop( device=self.device) parsed_output = platform_obj.parse(status='active') self.assertEqual(parsed_output, self.golden_parsed_output_active) class test_show_env(test_show_env_iosxe): def test_empty(self): self.dev = Mock(**self.empty_output) obj = 
ShowEnvironment(device=self.dev) with self.assertRaises(SchemaEmptyParserError): parsered_output = obj.parse() def test_golden(self): self.maxDiff = None self.dev = Mock(**self.golden_output) obj = ShowEnvironment(device=self.dev) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) class test_show_module(test_show_module_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowModule(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowModule(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch(test_show_switch_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitch(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitch(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) class test_show_switch_detail(test_show_switch_detail_iosxe): def test_empty(self): self.dev1 = Mock(**self.empty_output) platform_obj = ShowSwitchDetail(device=self.dev1) with self.assertRaises(SchemaEmptyParserError): parsed_output = platform_obj.parse() def test_golden(self): self.maxDiff = None self.dev_c3850 = Mock(**self.golden_output_c3850) platform_obj = ShowSwitchDetail(device=self.dev_c3850) parsed_output = platform_obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_c3850) if __name__ == '__main__': unittest.main()
1.585938
2
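Each test class in the record above follows the same mock-device pattern: build a Mock whose execute() returns either empty or golden output, then assert SchemaEmptyParserError or compare the parsed dict against the expected one. A minimal, self-contained sketch of that pattern, using a stand-in parser and exception rather than the real genie classes (all names below are illustrative only):

import unittest
from unittest.mock import Mock

class SchemaEmptyParserError(Exception):
    """Stand-in for genie's SchemaEmptyParserError (assumed for this sketch)."""

class FakeShowParser(object):
    """Stand-in for a ShowPlatform-style parser: reads device output, refuses empty output."""
    def __init__(self, device):
        self.device = device
    def parse(self, **kwargs):
        output = self.device.execute()
        if not output.strip():
            raise SchemaEmptyParserError('parser output is empty')
        return {'raw': output}

class TestFakeShowParser(unittest.TestCase):
    empty_output = {'execute.return_value': ''}
    golden_output = {'execute.return_value': 'Slot 0: ok'}

    def test_empty(self):
        device = Mock(**self.empty_output)
        with self.assertRaises(SchemaEmptyParserError):
            FakeShowParser(device=device).parse()

    def test_golden(self):
        device = Mock(**self.golden_output)
        self.assertEqual(FakeShowParser(device=device).parse(), {'raw': 'Slot 0: ok'})

if __name__ == '__main__':
    unittest.main()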
autumn/projects/covid_19/sri_lanka/sri_lanka/project.py
emmamcbryde/AuTuMN-1
0
2860
<filename>autumn/projects/covid_19/sri_lanka/sri_lanka/project.py import numpy as np from autumn.calibration.proposal_tuning import perform_all_params_proposal_tuning from autumn.core.project import Project, ParameterSet, load_timeseries, build_rel_path, get_all_available_scenario_paths, \ use_tuned_proposal_sds from autumn.calibration import Calibration from autumn.calibration.priors import UniformPrior, BetaPrior,TruncNormalPrior from autumn.calibration.targets import ( NormalTarget, get_dispersion_priors_for_gaussian_targets, ) from autumn.models.covid_19 import base_params, build_model from autumn.settings import Region, Models from autumn.projects.covid_19.sri_lanka.sri_lanka.scenario_builder import get_all_scenario_dicts # Load and configure model parameters. default_path = build_rel_path("params/default.yml") #scenario_paths = [build_rel_path(f"params/scenario-{i}.yml") for i in range(7, 9)] mle_path = build_rel_path("params/mle-params.yml") baseline_params = base_params.update(default_path).update(mle_path, calibration_format=True) all_scenario_dicts = get_all_scenario_dicts("LKA") #scenario_params = [baseline_params.update(p) for p in scenario_paths] scenario_params = [baseline_params.update(sc_dict) for sc_dict in all_scenario_dicts] param_set = ParameterSet(baseline=baseline_params, scenarios=scenario_params) ts_set = load_timeseries(build_rel_path("timeseries.json")) notifications_ts = ts_set["notifications"].rolling(7).mean().loc[350::7] death_ts = ts_set["infection_deaths"].loc[350:] targets = [ NormalTarget(notifications_ts), NormalTarget(death_ts), ] priors = [ # Dispersion parameters based on targets *get_dispersion_priors_for_gaussian_targets(targets), *get_dispersion_priors_for_gaussian_targets(targets), # Regional parameters UniformPrior("contact_rate", [0.024, 0.027]), UniformPrior("infectious_seed", [275.0, 450.0]), # Detection UniformPrior("testing_to_detection.assumed_cdr_parameter", [0.009, 0.025]), UniformPrior("infection_fatality.multiplier", [0.09, 0.13]), #VoC UniformPrior("voc_emergence.alpha_beta.start_time", [370, 410]), UniformPrior("voc_emergence.alpha_beta.contact_rate_multiplier", [3.2, 4.5]), UniformPrior("voc_emergence.delta.start_time", [475, 530]), UniformPrior("voc_emergence.delta.contact_rate_multiplier", [8.5, 11.5]), ] # Load proposal sds from yml file # use_tuned_proposal_sds(priors, build_rel_path("proposal_sds.yml")) calibration = Calibration(priors, targets) # FIXME: Replace with flexible Python plot request API. import json plot_spec_filepath = build_rel_path("timeseries.json") with open(plot_spec_filepath) as f: plot_spec = json.load(f) project = Project( Region.SRI_LANKA, Models.COVID_19, build_model, param_set, calibration, plots=plot_spec ) #perform_all_params_proposal_tuning(project, calibration, priors, n_points=50, relative_likelihood_reduction=0.2)
1.859375
2
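The calibration targets in the record above are built by smoothing and thinning the loaded timeseries (rolling(7).mean().loc[350::7]). A small pandas sketch of that step on synthetic data, since the project's timeseries.json is not reproduced here (the integer day index is an assumption suggested by the .loc slice):

import numpy as np
import pandas as pd

# Synthetic daily notification counts indexed by integer day number.
days = pd.RangeIndex(300, 500)
notifications = pd.Series(np.random.poisson(200, len(days)), index=days)

# 7-day rolling mean, then every 7th point from day 350 onwards,
# mirroring notifications_ts in the project definition above.
notifications_ts = notifications.rolling(7).mean().loc[350::7]
print(notifications_ts.head())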
Analytics/resources/themes/test_subthemes.py
thanosbnt/SharingCitiesDashboard
4
2861
import unittest from http import HTTPStatus from unittest import TestCase import bcrypt from flask.ctx import AppContext from flask.testing import FlaskClient from app import create_app from models.theme import Theme, SubTheme from models.users import Users class TestSubTemes(TestCase): """ Unittest for the creation, renaming and deleting of Themes """ def setUp(self): """ Setup a FlaskClient for testing, creates an admin user and creates the authorization header for requests to the Flask Client and a dummy theme """ self.client, self.app_context = self.create_test_client() self.user = self.create_admin_user() self.auth_header = self.get_auth_header() self.theme = Theme.get_by_name("_test_add_Subtheme_") if not self.theme: self.theme = Theme("_test_add_Subtheme_") self.theme.save() self.theme.commit() self.theme = Theme.get_by_name("_test_add_Subtheme_") self.subtheme = self.create_dummy_subtheme() def create_test_client(self) -> (FlaskClient, AppContext): """ Create flask testing client :return: FlaskClient for tests and AppContext """ test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True) testing_client = test_app.test_client() test_app_context = test_app.app_context() test_app_context.push() return testing_client, test_app_context def create_dummy_subtheme(self) -> SubTheme: """ Create SubTheme for tests :return: SubTheme for tests """ subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') if not subtheme: subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_') subtheme.save() subtheme.commit() subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_') return subtheme def create_admin_user(self) -> Users: """ Create Admin user for tests :return: an admin user for tests """ password_hash = bcrypt.hashpw("<PASSWORD>".encode("utf-8"), bcrypt.gensalt()) user = Users.find_by_email("<EMAIL>") if not user: user = Users("Admin", "<EMAIL>", password_hash.decode("utf8"), True, True) try: user.save() user.commit() except Exception as e: pass return user def get_auth_header(self) -> {str: str}: """ Create an Authorization header for test :return: An authorization header """ response_login = self.client.post('/login', data=dict(email=self.user.email, password="<PASSWORD>", remember=True), follow_redirects=True) response_login_json = response_login.get_json() return {'Authorization': 'Bearer {}'.format(response_login_json["access_token"])} def test_add_subtheme(self): """ Create a new SubTheme and check the client response status code for http status 200 (OK) Check JSON response data for the expected message 'New theme created' and Theme name """ response = self.client.post('/admin/themes/add_subtheme', json={"theme_id": self.theme.id, "subtheme": "_TEST_SUB_THEME_2"}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) json_response = response.get_json() self.assertEqual(json_response["message"], "sub theme created") self.assertEqual(json_response["theme_id"], self.theme.id) self.assertEqual(json_response["subtheme"], "_TEST_SUB_THEME_2") def test_rename_subtheme_theme_id(self): """ Rename a SubTheme by theme_id and check the clients response status code for http status 200 (OK) Check response data for the expected message 'Subtheme renamed' and the Subtheme name has been changed """ if not self.subtheme: self.subtheme = self.create_dummy_subtheme() current_name = self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={"theme_id": self.subtheme.t_id, "current_name": current_name, "new_name": "new_name_not_1" }, headers=self.auth_header) 
self.assertEqual(response.status_code, HTTPStatus.OK) response = response.get_json() self.assertEqual(response["id"], self.subtheme.id) self.assertEqual(response["message"], "Subtheme renamed") self.assertEqual(response["old_name"], current_name) self.assertEqual(response["new_name"], "new_name_not_1") def test_rename_subtheme_id(self): """ Rename a SubTheme by id and check the clients response status code for http status 200 (OK) Check response data for the expected message 'Subtheme renamed' and the Subtheme name has been changed """ if not self.subtheme: self.subtheme = self.create_dummy_subtheme() current_name = self.subtheme.name response = self.client.post('/admin/themes/rename_subtheme', json={"id": self.subtheme.id, "current_name": current_name, "new_name": "new_name_not_1" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.OK) response = response.get_json() self.assertEqual(response["id"], self.subtheme.id) self.assertEqual(response["message"], "Subtheme renamed") self.assertEqual(response["old_name"], current_name) self.assertEqual(response["new_name"], "new_name_not_1") def test_rename_non_existant_subtheme(self): """ Rename a SubTheme that does not exist and check the clients response status code for http status 404 (OK) """ response = self.client.post('/admin/themes/rename_subtheme', json={"theme_id": -1, "current_name": "a3d4f5g6h7j8k0", "new_name": "new_name_not_1" }, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_non_exsitant_subtheme(self): """ Delete a SubTheme that does not exist and check the client response status code for http status 404 """ if not self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={"name": "weA_gfj24fhurtyui", "theme_id": -1}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND) def test_delete_subtheme_by_id(self): """ Delete a SubTheme by id and check the client response status code for http status 204 (NO_CONTENT) """ if not self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={"id": self.subtheme.id}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def test_delete_subtheme_by_theme_id_and_name(self): """ Delete a SubTheme by theme_id and name: check the client response status code for http status 204 (NO_CONTENT) """ if not self.subtheme: self.subtheme = self.create_dummy_subtheme() response = self.client.post('/admin/themes/delete_subtheme', json={"theme_id": self.subtheme.t_id, "name": self.subtheme.name}, headers=self.auth_header) self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT) def tearDown(self): """ Handle the cleanup after tests""" self.subtheme = SubTheme.get_by_name("new_name_not_1") if not self.subtheme: self.subtheme = SubTheme.get_by_name("_TEST_SUB_THEME_") if self.subtheme: self.subtheme.delete() self.subtheme.commit() test_sub = SubTheme.get_by_name("_TEST_SUB_THEME_2") if test_sub: test_sub.delete() test_sub.commit() if self.theme: self.theme.delete() self.theme.commit() self.client.post('/logout', headers=self.auth_header) if self.user: self.user.delete() self.user.commit() self.app_context.pop() if __name__ == '__main__': unittest.main()
2.75
3
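get_auth_header() in the record above captures a common pattern: log in through the Flask test client, then reuse the returned token as a Bearer Authorization header on later requests. A throwaway sketch of just that pattern (the app, routes, and token below are placeholders, not the SharingCitiesDashboard API):

from flask import Flask, jsonify, request

app = Flask(__name__)

@app.route('/login', methods=['POST'])
def login():
    # Placeholder login view that always issues the same token.
    return jsonify(access_token='dummy-token')

@app.route('/admin/ping')
def ping():
    auth = request.headers.get('Authorization', '')
    if auth != 'Bearer dummy-token':
        return jsonify(message='unauthorized'), 401
    return jsonify(message='ok')

client = app.test_client()
token = client.post('/login').get_json()['access_token']
auth_header = {'Authorization': 'Bearer {}'.format(token)}
assert client.get('/admin/ping', headers=auth_header).status_code == 200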
selfdrive/sensord/rawgps/structs.py
TC921/openpilot
0
2862
from struct import unpack_from, calcsize LOG_GNSS_POSITION_REPORT = 0x1476 LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477 LOG_GNSS_CLOCK_REPORT = 0x1478 LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480 LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756 LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886 LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1 LOG_GNSS_ME_DPO_STATUS = 0x1838 LOG_GNSS_CD_DB_REPORT = 0x147B LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488 LOG_GNSS_CONFIGURATION_STATE = 0x1516 glonass_measurement_report = """ uint8_t version; uint32_t f_count; uint8_t glonass_cycle_number; uint16_t glonass_number_of_days; uint32_t milliseconds; float time_bias; float clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; """ glonass_measurement_report_sv = """ uint8_t sv_id; int8_t frequency_index; uint8_t observation_state; // SVObservationStates uint8_t observations; uint8_t good_observations; uint8_t hemming_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t predetect_interval; uint16_t postdetections; uint32_t unfiltered_measurement_integral; float unfiltered_measurement_fraction; float unfiltered_time_uncertainty; float unfiltered_speed; float unfiltered_speed_uncertainty; uint32_t measurement_status; uint8_t misc_status; uint32_t multipath_estimate; float azimuth; float elevation; int32_t carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction; float fine_speed; float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; """ gps_measurement_report = """ uint8_t version; uint32_t f_count; uint16_t week; uint32_t milliseconds; float time_bias; float clock_time_uncertainty; float clock_frequency_bias; float clock_frequency_uncertainty; uint8_t sv_count; """ gps_measurement_report_sv = """ uint8_t sv_id; uint8_t observation_state; // SVObservationStates uint8_t observations; uint8_t good_observations; uint16_t parity_error_count; uint8_t filter_stages; uint16_t carrier_noise; int16_t latency; uint8_t predetect_interval; uint16_t postdetections; uint32_t unfiltered_measurement_integral; float unfiltered_measurement_fraction; float unfiltered_time_uncertainty; float unfiltered_speed; float unfiltered_speed_uncertainty; uint32_t measurement_status; uint8_t misc_status; uint32_t multipath_estimate; float azimuth; float elevation; int32_t carrier_phase_cycles_integral; uint16_t carrier_phase_cycles_fraction; float fine_speed; float fine_speed_uncertainty; uint8_t cycle_slip_count; uint32_t pad; """ position_report = """ uint8 u_Version; /* Version number of DM log */ uint32 q_Fcount; /* Local millisecond counter */ uint8 u_PosSource; /* Source of position information */ /* 0: None 1: Weighted least-squares 2: Kalman filter 3: Externally injected 4: Internal database */ uint32 q_Reserved1; /* Reserved memory field */ uint16 w_PosVelFlag; /* Position velocity bit field: (see DM log 0x1476 documentation) */ uint32 q_PosVelFlag2; /* Position velocity 2 bit field: (see DM log 0x1476 documentation) */ uint8 u_FailureCode; /* Failure code: (see DM log 0x1476 documentation) */ uint16 w_FixEvents; /* Fix events bit field: (see DM log 0x1476 documentation) */ uint32 _fake_align_week_number; uint16 w_GpsWeekNumber; /* GPS week number of position */ uint32 q_GpsFixTimeMs; /* GPS fix time of week of in milliseconds */ uint8 u_GloNumFourYear; /* Number of Glonass four year cycles */ uint16 w_GloNumDaysInFourYear; /* Glonass calendar day in four year cycle */ 
uint32 q_GloFixTimeMs; /* Glonass fix time of day in milliseconds */ uint32 q_PosCount; /* Integer count of the number of unique positions reported */ uint64 t_DblFinalPosLatLon[2]; /* Final latitude and longitude of position in radians */ uint32 q_FltFinalPosAlt; /* Final height-above-ellipsoid altitude of position */ uint32 q_FltHeadingRad; /* User heading in radians */ uint32 q_FltHeadingUncRad; /* User heading uncertainty in radians */ uint32 q_FltVelEnuMps[3]; /* User velocity in east, north, up coordinate frame. In meters per second. */ uint32 q_FltVelSigmaMps[3]; /* Gaussian 1-sigma value for east, north, up components of user velocity */ uint32 q_FltClockBiasMeters; /* Receiver clock bias in meters */ uint32 q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma value for receiver clock bias in meters */ uint32 q_FltGGTBMeters; /* GPS to Glonass time bias in meters */ uint32 q_FltGGTBSigmaMeters; /* Gaussian 1-sigma value for GPS to Glonass time bias uncertainty in meters */ uint32 q_FltGBTBMeters; /* GPS to BeiDou time bias in meters */ uint32 q_FltGBTBSigmaMeters; /* Gaussian 1-sigma value for GPS to BeiDou time bias uncertainty in meters */ uint32 q_FltBGTBMeters; /* BeiDou to Glonass time bias in meters */ uint32 q_FltBGTBSigmaMeters; /* Gaussian 1-sigma value for BeiDou to Glonass time bias uncertainty in meters */ uint32 q_FltFiltGGTBMeters; /* Filtered GPS to Glonass time bias in meters */ uint32 q_FltFiltGGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to Glonass time bias uncertainty in meters */ uint32 q_FltFiltGBTBMeters; /* Filtered GPS to BeiDou time bias in meters */ uint32 q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to BeiDou time bias uncertainty in meters */ uint32 q_FltFiltBGTBMeters; /* Filtered BeiDou to Glonass time bias in meters */ uint32 q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for BeiDou to Glonass time bias uncertainty in meters */ uint32 q_FltSftOffsetSec; /* SFT offset as computed by WLS in seconds */ uint32 q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma value for SFT offset in seconds */ uint32 q_FltClockDriftMps; /* Clock drift (clock frequency bias) in meters per second */ uint32 q_FltClockDriftSigmaMps; /* Gaussian 1-sigma value for clock drift in meters per second */ uint32 q_FltFilteredAlt; /* Filtered height-above-ellipsoid altitude in meters as computed by WLS */ uint32 q_FltFilteredAltSigma; /* Gaussian 1-sigma value for filtered height-above-ellipsoid altitude in meters */ uint32 q_FltRawAlt; /* Raw height-above-ellipsoid altitude in meters as computed by WLS */ uint32 q_FltRawAltSigma; /* Gaussian 1-sigma value for raw height-above-ellipsoid altitude in meters */ uint32 align_Flt[14]; uint32 q_FltPdop; /* 3D position dilution of precision as computed from the unweighted uint32 q_FltHdop; /* Horizontal position dilution of precision as computed from the unweighted least-squares covariance matrix */ uint32 q_FltVdop; /* Vertical position dilution of precision as computed from the unweighted least-squares covariance matrix */ uint8 u_EllipseConfidence; /* Statistical measure of the confidence (percentage) associated with the uncertainty ellipse values */ uint32 q_FltEllipseAngle; /* Angle of semimajor axis with respect to true North, with increasing angles moving clockwise from North. In units of degrees. */ uint32 q_FltEllipseSemimajorAxis; /* Semimajor axis of final horizontal position uncertainty error ellipse. In units of meters. 
*/ uint32 q_FltEllipseSemiminorAxis; /* Semiminor axis of final horizontal position uncertainty error ellipse. In units of meters. */ uint32 q_FltPosSigmaVertical; /* Gaussian 1-sigma value for final position height-above-ellipsoid altitude in meters */ uint8 u_HorizontalReliability; /* Horizontal position reliability 0: Not set 1: Very Low 2: Low 3: Medium 4: High */ uint8 u_VerticalReliability; /* Vertical position reliability */ uint16 w_Reserved2; /* Reserved memory field */ uint32 q_FltGnssHeadingRad; /* User heading in radians derived from GNSS only solution */ uint32 q_FltGnssHeadingUncRad; /* User heading uncertainty in radians derived from GNSS only solution */ uint32 q_SensorDataUsageMask; /* Denotes which additional sensor data were used to compute this position fix. BIT[0] 0x00000001 <96> Accelerometer BIT[1] 0x00000002 <96> Gyro 0x0000FFFC - Reserved A bit set to 1 indicates that certain fields as defined by the SENSOR_AIDING_MASK were aided with sensor data*/ uint32 q_SensorAidMask; /* Denotes which component of the position report was assisted with additional sensors defined in SENSOR_DATA_USAGE_MASK BIT[0] 0x00000001 <96> Heading aided with sensor data BIT[1] 0x00000002 <96> Speed aided with sensor data BIT[2] 0x00000004 <96> Position aided with sensor data BIT[3] 0x00000008 <96> Velocity aided with sensor data 0xFFFFFFF0 <96> Reserved */ uint8 u_NumGpsSvsUsed; /* The number of GPS SVs used in the fix */ uint8 u_TotalGpsSvs; /* Total number of GPS SVs detected by searcher, including ones not used in position calculation */ uint8 u_NumGloSvsUsed; /* The number of Glonass SVs used in the fix */ uint8 u_TotalGloSvs; /* Total number of Glonass SVs detected by searcher, including ones not used in position calculation */ uint8 u_NumBdsSvsUsed; /* The number of BeiDou SVs used in the fix */ uint8 u_TotalBdsSvs; /* Total number of BeiDou SVs detected by searcher, including ones not used in position calculation */ """ def name_to_camelcase(nam): ret = [] i = 0 while i < len(nam): if nam[i] == "_": ret.append(nam[i+1].upper()) i += 2 else: ret.append(nam[i]) i += 1 return ''.join(ret) def parse_struct(ss): st = "<" nams = [] for l in ss.strip().split("\n"): typ, nam = l.split(";")[0].split() #print(typ, nam) if typ == "float" or '_Flt' in nam: st += "f" elif typ == "double" or '_Dbl' in nam: st += "d" elif typ in ["uint8", "uint8_t"]: st += "B" elif typ in ["int8", "int8_t"]: st += "b" elif typ in ["uint32", "uint32_t"]: st += "I" elif typ in ["int32", "int32_t"]: st += "i" elif typ in ["uint16", "uint16_t"]: st += "H" elif typ in ["int16", "int16_t"]: st += "h" elif typ == "uint64": st += "Q" else: print("unknown type", typ) assert False if '[' in nam: cnt = int(nam.split("[")[1].split("]")[0]) st += st[-1]*(cnt-1) for i in range(cnt): nams.append("%s[%d]" % (nam.split("[")[0], i)) else: nams.append(nam) return st, nams def dict_unpacker(ss, camelcase = False): st, nams = parse_struct(ss) if camelcase: nams = [name_to_camelcase(x) for x in nams] sz = calcsize(st) return lambda x: dict(zip(nams, unpack_from(st, x))), sz
1.679688
2
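dict_unpacker() in the record above compiles each C-style declaration block into a struct format string plus a list of field names. A short usage sketch, assuming the file is importable under the path shown in its repository (selfdrive/sensord/rawgps/structs.py):

from struct import calcsize

# Assumed import path, matching the file's location in the repository.
from selfdrive.sensord.rawgps.structs import dict_unpacker, gps_measurement_report

unpack_report, size = dict_unpacker(gps_measurement_report)

# uint8 / uint32 / uint16 / uint32 / four floats / uint8 -> "<BIHIffffB", 28 bytes, no padding.
assert size == calcsize("<BIHIffffB") == 28

# A zeroed buffer of the right length is enough to see the field names.
report = unpack_report(b"\x00" * size)
print(sorted(report))  # ['clock_frequency_bias', 'clock_frequency_uncertainty', ...]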
python2.7libs/hammer_tools/content_browser.py
anvdev/Hammer-Tools
19
2863
from __future__ import print_function try: from PyQt5.QtWidgets import * from PyQt5.QtGui import * from PyQt5.QtCore import * except ImportError: from PySide2.QtWidgets import * from PySide2.QtGui import * from PySide2.QtCore import * import hou from hammer_tools.utils import createAction def isRevertToDefaultEvent(event): return event.modifiers() == Qt.ControlModifier and event.button() == Qt.MiddleButton class Slider(QSlider): def __init__(self, orientation=Qt.Horizontal, parent=None): super(Slider, self).__init__(orientation, parent) self.defaultValue = 0 self.valueLadderMode = False def revertToDefault(self): self.setValue(self.defaultValue) def setDefaultValue(self, value, reset=True): self.defaultValue = value if reset: self.revertToDefault() def mousePressEvent(self, event): if False: # Type hint event = QMouseEvent if event.button() == Qt.MiddleButton: return elif event.button() == Qt.LeftButton: event = QMouseEvent(QEvent.MouseButtonPress, event.pos(), Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier) super(Slider, self).mousePressEvent(event) def mouseMoveEvent(self, event): if False: # Type hint event = QMouseEvent if not self.valueLadderMode and event.buttons() == Qt.MiddleButton: try: hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int) except hou.OperationFailed: return else: self.valueLadderMode = True elif self.valueLadderMode: hou.ui.updateValueLadder(event.globalX(), event.globalY(), bool(event.modifiers() & Qt.AltModifier), bool(event.modifiers() & Qt.ShiftModifier)) else: super(Slider, self).mouseMoveEvent(event) def mouseReleaseEvent(self, event): if False: # Type hint event = QMouseEvent if self.valueLadderMode and event.button() == Qt.MiddleButton: hou.ui.closeValueLadder() self.valueLadderMode = False elif isRevertToDefaultEvent(event): self.revertToDefault() else: super(Slider, self).mouseReleaseEvent(event) class SearchField(QComboBox): def __init__(self, parent=None): super(SearchField, self).__init__(parent) self.setEditable(True) edit = self.lineEdit() edit.setPlaceholderText('Search...') edit.installEventFilter(self) edit.setFont(QFont('Segoe UI')) self.setFixedHeight(26) comp = self.completer() comp.setCompletionMode(QCompleter.PopupCompletion) comp.setFilterMode(Qt.MatchContains) comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel) comp.setMaxVisibleItems(5) popup = comp.popup() popup.setStyleSheet(hou.qt.styleSheet()) def mouseReleaseEvent(self, event): if False: # Type hint event = QMouseEvent if isRevertToDefaultEvent(event): self.clearEditText() def eventFilter(self, watched, event): if False: # Type hint watched = QObject event = QEvent if watched == self.lineEdit(): if event.type() == QEvent.MouseButtonRelease and isRevertToDefaultEvent(event): self.clearEditText() event.accept() return True return False def keyPressEvent(self, event): if False: # Type hint event = QKeyEvent key = event.key() mod = event.modifiers() if mod == Qt.NoModifier and key == Qt.Key_Escape: self.clearEditText() else: super(SearchField, self).keyPressEvent(event) def hidePopup(self): super(SearchField, self).hidePopup() self.lineEdit().setFocus() link_or_state_icon = 'BUTTONS_link' embedded_icon = 'BUTTONS_pinned' class BrowserMode(QStandardItemModel): def __init__(self): super(BrowserMode, self).__init__() class BrowserTreeView(QTreeView): def __init__(self, parent=None): super(BrowserTreeView, self).__init__(parent) self.setAlternatingRowColors(True) class BrowserTableView(QListView): def __init__(self, parent=None): 
super(BrowserTableView, self).__init__(parent) self.setViewMode(QListView.IconMode) self.setResizeMode(QListView.Adjust) self.setSelectionMode(QAbstractItemView.ExtendedSelection) self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel) self.setIconSize(QSize(120, 90)) self.setUniformItemSizes(True) self.setContextMenuPolicy(Qt.CustomContextMenu) class ContentBrowser(QWidget): def __init__(self, parent=None): super(ContentBrowser, self).__init__(parent) self.setWindowTitle('Content Browser') self.setProperty('houdiniStyle', True) topLayout = QHBoxLayout() topLayout.setContentsMargins(4, 4, 4, 2) topLayout.setSpacing(2) self.refreshButton = QPushButton() self.refreshButton.setFixedSize(26, 26) self.refreshButton.setToolTip('Update\tF5') self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18)) self.refreshButton.setIconSize(QSize(18, 18)) topLayout.addWidget(self.refreshButton) sep = hou.qt.Separator() if False: # Type hint sep = QFrame sep.setFixedWidth(2) sep.setFrameShape(QFrame.VLine) topLayout.addWidget(sep) viewModeButtonGroup = QButtonGroup(self) viewModeButtonGroup.setExclusive(True) self.treeViewButton = QPushButton() self.treeViewButton.setFixedSize(26, 26) self.treeViewButton.setToolTip('Tree View\t\tCtrl+1') self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18)) self.treeViewButton.setIconSize(QSize(18, 18)) self.treeViewButton.setCheckable(True) viewModeButtonGroup.addButton(self.treeViewButton) topLayout.addWidget(self.treeViewButton) self.tableViewButton = QPushButton() self.tableViewButton.setFixedSize(26, 26) self.tableViewButton.setToolTip('Table View\tCtrl+2') self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18)) self.tableViewButton.setIconSize(QSize(18, 18)) self.tableViewButton.setCheckable(True) self.tableViewButton.toggle() viewModeButtonGroup.addButton(self.tableViewButton) topLayout.addWidget(self.tableViewButton) topLayout.addWidget(sep) self.searchField = SearchField() self.searchField.setToolTip('Search\tCtrl+F, F3') topLayout.addWidget(self.searchField) searchModeButtonGroup = QButtonGroup(self) searchModeButtonGroup.setExclusive(True) self.wholeSearchButton = QPushButton() self.wholeSearchButton.setFixedSize(26, 26) self.wholeSearchButton.setCheckable(True) self.wholeSearchButton.setToolTip('Whole word search') self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18)) self.wholeSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.wholeSearchButton) topLayout.addWidget(self.wholeSearchButton) self.fuzzySearchButton = QPushButton() self.fuzzySearchButton.setFixedSize(26, 26) self.fuzzySearchButton.setCheckable(True) self.fuzzySearchButton.toggle() self.fuzzySearchButton.setToolTip('Fuzzy search') self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18)) self.fuzzySearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.fuzzySearchButton) topLayout.addWidget(self.fuzzySearchButton) self.patternSearchButton = QPushButton() self.patternSearchButton.setFixedSize(26, 26) self.patternSearchButton.setCheckable(True) self.patternSearchButton.setToolTip('Search by Pattern') self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18)) self.patternSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.patternSearchButton) topLayout.addWidget(self.patternSearchButton) self.regexSearchButton = QPushButton() self.regexSearchButton.setFixedSize(26, 26) self.regexSearchButton.setCheckable(True) self.regexSearchButton.setToolTip('Search by 
Regular Expression') self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18)) self.regexSearchButton.setIconSize(QSize(18, 18)) searchModeButtonGroup.addButton(self.regexSearchButton) topLayout.addWidget(self.regexSearchButton) topLayout.addWidget(sep) topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\tF1')) middleLayout = QHBoxLayout() middleLayout.setContentsMargins(4, 0, 0, 4) middleLayout.setSpacing(4) self.viewLayout = QStackedLayout(middleLayout) model = QFileSystemModel() model.setRootPath('C:/') treeView = BrowserTreeView() treeView.setModel(model) treeView.setRootIndex(model.index('C:/')) self.viewLayout.addWidget(treeView) tableView = BrowserTableView() tableView.setModel(model) tableView.setRootIndex(model.index('C:/')) tableView.setSelectionModel(treeView.selectionModel()) self.viewLayout.addWidget(tableView) self.viewLayout.setCurrentIndex(1) self.treeViewButton.clicked.connect(self.switchToTreeView) self.addAction(createAction(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1')) self.tableViewButton.clicked.connect(self.switchToTableView) self.addAction(createAction(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2')) bottomLayout = QHBoxLayout() bottomLayout.setContentsMargins(4, 0, 4, 4) bottomLayout.setSpacing(2) settingsButton = QPushButton() settingsButton.setFixedSize(26, 26) settingsButton.setToolTip('Settings') settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18)) settingsButton.setIconSize(QSize(18, 18)) bottomLayout.addWidget(settingsButton) spacer = QSpacerItem(0, 0, QSizePolicy.Expanding, QSizePolicy.Ignored) bottomLayout.addSpacerItem(spacer) self.scaleSlider = Slider() self.scaleSlider.setDefaultValue(50) self.scaleSlider.setFixedWidth(120) self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120, 90) * v / 100)) bottomLayout.addWidget(self.scaleSlider) mainLayout = QVBoxLayout(self) mainLayout.setContentsMargins(0, 0, 0, 0) mainLayout.setSpacing(4) mainLayout.addLayout(topLayout) mainLayout.addLayout(middleLayout) mainLayout.addLayout(bottomLayout) def switchToTreeView(self): self.viewLayout.setCurrentIndex(0) self.scaleSlider.hide() self.treeViewButton.setChecked(True) def switchToTableView(self): self.viewLayout.setCurrentIndex(1) self.scaleSlider.show() self.tableViewButton.setChecked(True) def keyPressEvent(self, event): if False: # Type hint event = QKeyEvent key = event.key() mod = event.modifiers() if mod == Qt.NoModifier and key == Qt.Key_F5: pass elif mod == Qt.ControlModifier and key == Qt.Key_F: self.searchField.setFocus() elif mod == Qt.NoModifier and key == Qt.Key_F3: self.searchField.setFocus() elif mod == Qt.ControlModifier and key == Qt.Key_Equal: pass elif mod == Qt.ControlModifier and key == Qt.Key_Minus: pass elif mod == Qt.ControlModifier and key == Qt.Key_1: pass elif mod == Qt.ControlModifier and key == Qt.Key_2: pass elif mod == Qt.NoModifier and key == Qt.Key_F1: pass else: super(ContentBrowser, self).keyPressEvent(event) if __name__ == '__main__': app = QApplication([]) window = ContentBrowser() window.show() app.exec_()
2.375
2
rt-thread/applications/server/udp_sender.py
luhuadong/stm32f769-disco-demo
0
2864
#!/usr/bin/python3 """ UDP sender """ import socket import time import sys smsg = b'\xaa\x08\xfe\x00\xc9\xe6\x5f\xee' def main(): ip_port = ('192.168.3.188', 8888) if len(sys.argv) < 2: port = 8888 else: port = int(sys.argv[1]) # 1. Create the UDP socket udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 2. Bind the local address udp_socket.bind(('', port)) cnt = 100 loop = 4 print("send %d..." % (cnt*loop)) # 3. Send the data while cnt > 0: #loop = 10 #while loop > 0: for i in range(0, loop): udp_socket.sendto(smsg, ip_port) print('.', end=' ') #loop = loop -1 #recv_data = udp_socket.recvfrom(1024) #print(recv_data.decode('gbk')) #print(recv_data.decode('utf-8')) #print('.', end=' ') #data = recv_data.decode('utf-8') #print('0x%x'%data) cnt = cnt - 1 time.sleep(0.005) print("") print("finished") # 7. Close the socket udp_socket.close() print("close") if __name__ == '__main__': main()
2.8125
3
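For completeness, a minimal receiver that pairs with the sender above: it binds the same UDP port and prints each 8-byte frame as hex. This is a sketch only; the original demo does not include a receiver file.

#!/usr/bin/python3
"""Minimal UDP receiver matching the sender above (illustrative sketch)."""
import socket

def main(port=8888):
    udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    udp_socket.bind(('', port))
    print("listening on udp/%d" % port)
    try:
        while True:
            data, addr = udp_socket.recvfrom(1024)
            # The sender transmits an 8-byte payload; show it as hex.
            print("%s:%d -> %s" % (addr[0], addr[1], data.hex()))
    finally:
        udp_socket.close()

if __name__ == '__main__':
    main()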
yudzuki/role.py
LunaProject-Discord/yudzuki.py
6
2865
<gh_stars>1-10 __all__ = ( "Role", ) class Role: def __init__(self, data): self.data = data self._update(data) def _get_json(self): return self.data def __repr__(self): return ( f"<Role id={self.id} name={self.name}>" ) def __str__(self): return ( f"{self.name}" ) def _update(self, data): self._id = data["id"] self._color = data["color"] self._managed = data["managed"] self._name = data["name"] self._guild_id = data["guild_id"] self._mentionable = data["mentionable"] self._position = data["potition"] self._hoisted = data["hoisted"] @property def id(self): return self._id @property def color(self): return self._color @property def managed(self): return self._managed @property def name(self): return self._name @property def guild_id(self): return self._guild_id @property def mentionable(self): return self._mentionable @property def position(self): return self._position @property def hoisted(self): return self._hoisted
2.65625
3
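A short usage sketch for the Role wrapper above, showing the payload keys that _update() actually reads (note it expects the key spelled "potition", exactly as in the code; the import path is an assumption based on the file location):

from yudzuki.role import Role  # assumed import path

payload = {
    "id": 1234,
    "color": 0x5865F2,
    "managed": False,
    "name": "Moderator",
    "guild_id": 42,
    "mentionable": True,
    "potition": 3,   # spelled exactly as read by _update() above
    "hoisted": True,
}

role = Role(payload)
print(repr(role))      # <Role id=1234 name=Moderator>
print(role.position)   # 3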
jassen/django/project/project/urls.py
cabilangan112/intern-drf-blog
0
2866
"""project URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.conf.urls import url, include from rest_framework import routers from blog import views from blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet from django.conf import settings from django.conf.urls.static import static router = routers.DefaultRouter() router.register(r'hide',HideViewSet, base_name='hiddinn') router.register(r'draft',DraftViewSet, base_name='draft') router.register(r'post', PostViewSet, base_name='post') router.register(r'comment', CommentViewSet, base_name='comment') router.register(r'tags', TagViewSet, base_name='tags') router.register(r'category', CategoryViewSet, base_name='category') from django.contrib import admin from django.urls import path urlpatterns = [ path('admin/', admin.site.urls), url(r'^', include(router.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) ] urlpatterns.extend( static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) )
2.640625
3
deep-learning-app/src/models/__init__.py
everbrez/Deep-Learning-based-Chemical-Graphics-Analysis-Platform
1
2867
<reponame>everbrez/Deep-Learning-based-Chemical-Graphics-Analysis-Platform print('init')
0.820313
1
chemmltoolkit/tensorflow/callbacks/variableScheduler.py
Andy-Wilkinson/ChemMLToolk
1
2868
<reponame>Andy-Wilkinson/ChemMLToolk import tensorflow as tf class VariableScheduler(tf.keras.callbacks.Callback): """Schedules an arbitrary variable during training. Arguments: variable: The variable to modify the value of. schedule: A function that takes an epoch index (integer, indexed from 0) and current variable value as input and returns a new value to assign to the variable as output. verbose: int. 0: quiet, 1: update messages. """ def __init__(self, variable, schedule, verbose=0): super(VariableScheduler, self).__init__() self.variable = variable self.schedule = schedule self.verbose = verbose def on_epoch_begin(self, epoch, logs=None): value = self.variable.read_value() value = self.schedule(epoch, value) self.variable.assign(value) if self.verbose > 0: print(f'\nEpoch {epoch + 1}: VariableScheduler assigning ' f'variable {self.variable.name} to {value}.')
3.3125
3
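A usage sketch for the VariableScheduler callback above: it anneals a (hypothetical) KL-loss weight over the first epochs of fit(). The model and data are throwaway, and the import path is assumed from the file's location in the repository.

import tensorflow as tf
from chemmltoolkit.tensorflow.callbacks.variableScheduler import VariableScheduler  # assumed path

kl_weight = tf.Variable(0.0, trainable=False, name='kl_weight')

# Ramp the weight linearly to 1.0 over the first 10 epochs, then hold it.
scheduler = VariableScheduler(
    kl_weight,
    schedule=lambda epoch, value: min(1.0, (epoch + 1) / 10.0),
    verbose=1,
)

# Throwaway model and data just to drive the callback; in real use kl_weight
# would feed into a custom loss term.
model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
model.compile(optimizer='adam', loss='mse')
x = tf.random.normal((32, 4))
y = tf.random.normal((32, 1))
model.fit(x, y, epochs=3, callbacks=[scheduler], verbose=0)
print(float(kl_weight.numpy()))  # 0.3 after three epochs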
join_peaks.py
nijibabulu/chip_tools
0
2869
#! /usr/bin/env python import os import sys import math import csv import collections import docopt import peakzilla_qnorm_mapq_patched as pz __doc__ = ''' Usage: join_peaks.py [options] PEAKS CHIP INPUT [ (PEAKS CHIP INPUT) ... ] This script finds peaks in common between multiple ChIP experiments determined by peakzilla. For each ChIP experiment, input a PEAKS file as otuput by peakzilla, and 2 BED files (CHIP and INPUT) as input to peakzilla. This will output a table with 3 columns identifying the peaks (Chromosome, Start, End, Name,'NPeaks','Spread','ChipSE','EnrichSE'). NPeaks signifies the number of peaks that were called among all the ChIP experiments, Spread is the difference between the biggest and smallest ChIP peak, ChipSE and EnrichSE are the standard error on the mean among the ChIP and Enrich values for the peaks. For each experinent "X", information about the peaks are output: 'XPZName','XPZScore', 'XPZChip','XPZInput','XPZEnrich','XPZFDR','XChip','XInput','XEnrich','XMapq'. All 'PZ' columns are the original output from peakzilla and the remaining columns are re-calculated in this script (also output regardless of the presence of a peak). Options: --max-distance=DIST maximum summit distance to join peaks [default: 10] ''' args = docopt.docopt(__doc__) #np.set_printoptions(precision=1,suppress=True) def stddev(l): mean = sum(l)/float(len(l)) variance = sum((x-mean)**2 for x in l)/(len(l)-1) return math.sqrt(variance) def std_err(l): return stddev(l)/math.sqrt(len(l)) class Peak(object): def dist(self,other): if self.chrom == other.chrom: return abs(self.center-other.center) else: return -1 def compute_fold_enrichment(self): self.computed_fold_enrichment = float(self.computed_chip )/self.computed_control class SlavePeak(Peak): def __init__(self,set_name,center): self.name = 'Slave' self.set_name = set_name self.center = center class PZPeak(Peak): def __init__(self,set_name,chrom,start,end,name,summit,score,chip,control, fold_enrichment,distribution_score,fdr): self.set_name = set_name self.chrom = chrom self.start = int(start) self.end = int(end) self.name = name self.center = int(summit) self.score = float(score) self.chip = float(chip) self.control = float(control) self.fold_enrichment = float(fold_enrichment) self.distribution_score = float(distribution_score) self.fdr = float(fdr) def width(self): return self.end-self.start+1 class JoinedPeak(Peak): WIDTH = 0 HEADER = ['#Chromosome','Start','End','Name','NPeaks','Spread','ChipSE','EnrichSE'] HEADER_TYPES = set() def __init__(self,pzpeak): self.chrom = pzpeak.chrom self.peaks = {} self.center = self.add(pzpeak) #pzpeak.center def can_add(self,pzpeak): return not pzpeak.set_name in self.peaks def add(self,pzpeak): self.HEADER_TYPES.add(pzpeak.set_name) self.peaks[pzpeak.set_name] = pzpeak return sum(p.center for p in self.peaks.values())/len(self.peaks) def name(self): return '%s_%d' % (self.chrom,self.center) @classmethod def header(cls): s = '\t'.join(cls.HEADER) + '\t' #'#Chromosome\tPosition\tNPeaks\tSpread\t' for htype in cls.HEADER_TYPES: s += '\t'.join( htype + '_' + x for x in [ 'PZName','PZScore','PZChip','PZInput','PZEnrich','PZFDR','Chip','Input','Enrich','Mapq'] ) + '\t' return s def __str__(self): s = '' called_peaks = 0 peak_signals = [] peak_enrichs = [] for set_name,peak in self.peaks.items(): if hasattr(peak,'score'): s += peak.name + '\t' + '\t'.join('%.2f' % x for x in [peak.score,peak.chip,peak.control,peak.fold_enrichment,peak.fdr]) + '\t' called_peaks += 1 #s += '%.1f\t%.1f\t%.1f\t%.1f\t' % ( 
#peak.score,peak.chip,peak.control,peak.fold_enrichment) else: s += 'NA\tNA\tNA\tNA\tNA\tNA\t' if hasattr(peak,'pzpeak'): s += '\t'.join('%.2f' % x for x in [ peak.pzpeak.nrom_signal,peak.pzpeak.norm_background,peak.pzpeak.fold_enrichment,peak.pzpeak.mapq_score ]) + '\t' peak_signals.append(peak.pzpeak.nrom_signal) peak_enrichs.append(peak.pzpeak.fold_enrichment) else: s += 'NA\tNA\tNA\tNA\tNA\t' #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment #s += '%.1f\t%.1f\t%.1f\t' % ( #peak.computed_chip,peak.computed_control,peak.computed_fold_enrichment) #s += '\t'.join([str(x) for x in #[peak.score,peak.chip,peak.fold_enrichment]]) try: if len(peak_signals): s = '\t'.join([self.chrom,str(self.center-self.WIDTH/2),str(self.center+self.WIDTH/2), self.chrom+'_'+str(self.center),str(called_peaks)]) +\ '\t%.2f\t%.2f\t%.2f\t' % ( max(peak_signals)/(min(peak_signals) + sys.float_info.epsilon), std_err(peak_signals), std_err(peak_enrichs), ) + s else: s = '\t'.join([self.chrom,str(self.center), self.chrom+'_'+str(self.center),str(called_peaks)]) +\ '\tNA\tNA\tNA\t' + s except: print max(peak_signals),min(peak_signals) raise return s class PeakScorer(pz.PeakContainer): def __init__(self, ip_tags, control_tags, peak_size, plus_model, minus_model): self.ip_tags = ip_tags self.control_tags = control_tags self.peak_size = peak_size self.peak_shift = (peak_size - 1) / 2 self.score_threshold = 10 self.plus_model = plus_model self.minus_model = minus_model self.peaks = collections.defaultdict(list) self.peak_count = 0 self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.position = 0 def fill_scores(self,chrom,libtype,scoretype): plus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '+')) plus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '+')) minus_tags = collections.deque(getattr(self,'%s_tags' % libtype).get_tags(chrom, '-')) minus_mapq = collections.deque(getattr(self,'%s_tags' % libtype).get_mapq(chrom, '-')) self.plus_window = collections.deque([]) self.minus_window = collections.deque([]) self.plus_mapq = collections.deque([]) self.minus_mapq = collections.deque([]) for peak in self.peaks[chrom]: # fill windows while plus_tags and plus_tags[0] <= (peak.position + self.peak_shift): self.plus_window.append(plus_tags.popleft()) self.plus_mapq.append(plus_mapq.popleft()) while minus_tags and minus_tags[0] <= (peak.position + self.peak_shift): self.minus_window.append(minus_tags.popleft()) self.minus_mapq.append(minus_mapq.popleft()) # get rid of old tags not fitting in the window any more while self.plus_window and self.plus_window[0] < (peak.position - self.peak_shift): self.plus_window.popleft() self.plus_mapq.popleft() while self.minus_window and self.minus_window[0] < (peak.position - self.peak_shift): self.minus_window.popleft() self.minus_mapq.popleft() # calculate normalized background level # add position to region if over threshold self.position = peak.position if libtype == 'ip': peak.mapq_score = float(sum(self.plus_mapq) + sum(self.minus_mapq) )/max(1,(len(self.plus_mapq) + len(self.minus_mapq))) #if peak.name == 'Peak_12869': #print zip(self.plus_window,self.plus_mapq) #print zip(self.minus_window,self.minus_mapq) #print sum(self.plus_mapq) , sum(self.minus_mapq), len(self.plus_mapq) , len(self.minus_mapq) #print peak.mapq_score setattr(peak,scoretype,self.calculate_score()) def score_peaks(self,peak_dict): for chrom,peaks in peak_dict.items(): for jp in peaks: jp.pzpeak = pz.Peak() 
jp.pzpeak.size = self.peak_size jp.pzpeak.shift = self.peak_shift jp.pzpeak.position = jp.center jp.pzpeak.name = jp.name self.peaks[chrom].append(jp.pzpeak) self.peak_count += 1 for chrom,peaks in self.peaks.items(): self.peaks[chrom] = sorted(self.peaks[chrom], lambda a,b: cmp(a.position,b.position)) self.fill_scores(chrom,'ip','score') self.fill_scores(chrom,'control','background') self.determine_fold_enrichment(chrom) self.determine_signal_over_background(chrom) class FileSet(object): def __init__(self,peakfile,chipfile,controlfile): self.peakfile = peakfile self.chip_file = chipfile self.chip_tags = pz.TagContainer(store_mapq=True) self.chip_tags(chipfile,True) self.control_file = controlfile self.control_tags = pz.TagContainer(store_mapq=True) self.control_tags(controlfile,True) #print self.chip_tags, self.control_tags def get_file(self,type): return getattr(self, '%s_file' % type) def get_tagcount(self,type): return getattr(self, '%s_tags' % type) maxdist = int(args['--max-distance']) peaksets = {} filesets = {} for peakfile,chipfile,controlfile in zip(args['PEAKS'],args['CHIP'],args['INPUT']): set_name = os.path.basename(peakfile).split('.')[0] peaksets[set_name] = collections.defaultdict(list) filesets[set_name] = FileSet(peakfile,chipfile,controlfile) r = csv.reader(open(peakfile),delimiter='\t') r.next() # header ''' #XXX: limit peaks maxpeaks = 20 peakcounter = 0 for row in r: if float(row[5]) >= 100 and float(row[8]) >= 10: peakcounter += 1 if peakcounter > maxpeaks: break peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) ''' for row in r: peaksets[set_name][row[0]].append(PZPeak(set_name,*row)) JoinedPeak.WIDTH += peaksets[set_name].itervalues().next()[0].width() JoinedPeak.WIDTH /= len(peaksets) # find closest peak to each peak in the new set # make new peaks when there's no qualifying one npeaks = 0 joined_peaks = collections.defaultdict(list) for set_name,peakset in peaksets.items(): for chrom,peaks in peakset.items(): for peak in peaks: closest = None for jp in joined_peaks[chrom]: dist = jp.dist(peak) if dist >= 0 and dist <= maxdist: if closest is None or closest.dist(peak) > dist: closest = jp if closest is None or not closest.can_add(peak): npeaks += 1 joined_peaks[chrom].append(JoinedPeak(peak)) else: closest.add(peak) plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) for set_name,fileset in filesets.items(): scorer = PeakScorer(fileset.chip_tags,fileset.control_tags, JoinedPeak.WIDTH,plus_model,minus_model) peaks_to_score = collections.defaultdict(list) for chrom,peaks in joined_peaks.items(): for jp in peaks: if set_name not in jp.peaks: jp.peaks[set_name] = SlavePeak(set_name,jp.center) peaks_to_score[chrom].append(jp.peaks[set_name]) scorer.score_peaks(peaks_to_score) print JoinedPeak.header() for chrom,peaks in joined_peaks.items(): for peak in peaks: print peak #plus_model,minus_model = pz.generate_ideal_model(JoinedPeak.WIDTH) #def get_coverage(fileset,type,jp,pseudocount=0): #score = 0 #start = max(0,jp.center-JoinedPeak.WIDTH/2) #for aln in fileset.get_file(type).fetch( #reference = jp.chrom, start = start, #end = jp.center+JoinedPeak.WIDTH/2): #if aln.is_reverse: #score += minus_model[aln.pos-start] #else: #score += plus_model[aln.pos-start] #return (score+pseudocount)*10.**6/fileset.get_tagcount(type) #return 10.**6*fileset.get_file(type).count( #reference = jp.chrom, #start = max(0,jp.center-JoinedPeak.WIDTH/2), #end = jp.center+JoinedPeak.WIDTH/2)/fileset.get_tagcount(type) #start = jp.center, #end = jp.center+1) #matrix = 
np.zeros((npeaks,len(peaksets)*2)) #i = 0 #for chrom,peaks in joined_peaks.items(): #for jp in peaks: #for j,set_name in enumerate(peaksets.keys()): #control_coverage = get_coverage(filesets[set_name],'control',jp,pseudocount=1) #chip_coverage = get_coverage(filesets[set_name],'chip',jp) #matrix[i][j] = float(chip_coverage) #matrix[i][j+len(peaksets)] = float(control_coverage) #i += 1 #quantile_normalize.quantile_norm(matrix) #i = 0 #for chrom,peaks in joined_peaks.items(): #for jp in peaks: #for j,set_name in enumerate(peaksets.keys()): #if set_name not in jp.peaks: #jp.peaks[set_name] = SlavePeak( #set_name,matrix[i][j],matrix[i][j + len(peaksets)]) #else: #jp.peaks[set_name].computed_chip = matrix[i][j] #jp.peaks[set_name].computed_control = matrix[i][j+len(peaksets)] #jp.peaks[set_name].compute_fold_enrichment() #print jp #i += 1 ''' i = 0 for chrom,peaks in joined_peaks.items(): for jp in peaks: for j,set_name in enumerate(filesets.keys()): matrix[i][j] = float(jp.peaks[set_name].computed_chip) matrix[i][j+len(peaksets)] = float(jp.peaks[set_name].computed_control) i += 1 '''
2.859375
3
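The joining rule in join_peaks.py above is greedy: each incoming summit is attached to the closest existing joined peak within --max-distance (if that peak can still accept its set), otherwise it seeds a new joined peak. A toy sketch of that rule on bare summit positions, ignoring tags, scoring, and the one-peak-per-set constraint:

def join_summits(summits_per_set, max_distance=10):
    """Greedy join of summit positions from several peak sets (toy version of JoinedPeak)."""
    joined = []  # each entry is a list of member summit positions
    for summits in summits_per_set:
        for pos in summits:
            best = None
            best_dist = None
            for group in joined:
                center = sum(group) // len(group)
                dist = abs(center - pos)
                if dist <= max_distance and (best_dist is None or dist < best_dist):
                    best, best_dist = group, dist
            if best is None:
                joined.append([pos])
            else:
                best.append(pos)
    return [sum(group) // len(group) for group in joined]

print(join_summits([[100, 250], [104, 500], [99, 251]]))  # -> [101, 250, 500]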
django_town/rest_swagger/views.py
uptown/django-town
0
2870
<filename>django_town/rest_swagger/views.py from django_town.rest import RestApiView, rest_api_manager from django_town.http import http_json_response from django_town.cache.utlis import SimpleCache from django_town.oauth2.swagger import swagger_authorizations_data from django_town.social.oauth2.permissions import OAuth2Authenticated, OAuth2AuthenticatedOrReadOnly from django_town.social.permissions import Authenticated, AuthenticatedOrReadOnly class ApiDocsView(RestApiView): def read(self, request, api_version): def load_cache(api_version="alpha"): manager = rest_api_manager(api_version) ret = {'title': manager.name, 'description': manager.description, 'apiVersion': manager.api_version, 'swaggerVersion': "1.2", 'basePath': manager.base_url, 'resourcePath': manager.base_url, 'info': manager.info, 'authorizations': swagger_authorizations_data()} apis = [] models = { "Error": { "id": "Error", "required": ['error'], "properties": { "error": { "type": "string" }, "field": { "type": "string" }, "message": { "type": "string" }, "resource": { "type": "string" } } } } for view_cls in manager.api_list: operations = [] global_params = [] path = view_cls.path() if path == "": continue if '{}' in path: path = path.replace('{}', '{pk}') global_params.append( { "paramType": "path", "name": 'pk', "description": 'primary key for object', "dataType": 'integer', "format": 'int64', "required": True, } ) responseMessages = [ { 'code': 404, "message": "not_found", "responseModel": "Error" }, { 'code': 500, "message": "internal_error", "responseModel": "Error" }, { 'code': 409, "message": "method_not_allowed", "responseModel": "Error" }, { 'code': 409, "message": "conflict", "responseModel": "Error" }, { 'code': 403, "message": "forbidden", "responseModel": "Error" }, { 'code': 401, "message": "permission_denied", "responseModel": "Error" }, { 'code': 401, "message": "unauthorized", "responseModel": "Error" }, { 'code': 400, "message": "form_invalid", "responseModel": "Error" }, { 'code': 400, "message": "form_required", "responseModel": "Error" }, { 'code': 400, "message": "bad_request", "responseModel": "Error" }, ] current_api = { 'path': path, 'description': view_cls.__doc__, } operations = [] if 'create' in view_cls.crud_method_names and hasattr(view_cls, 'create'): create_op = { 'method': 'POST', 'parameters': global_params, 'responseMessages': responseMessages, 'nickname': 'create ' + path, } operations.append(create_op) if 'read' in view_cls.crud_method_names and hasattr(view_cls, 'read'): op = { 'method': 'GET', 'responseMessages': responseMessages, 'nickname': 'read ' + path } params = global_params.copy() for each_permission in view_cls.permission_classes: if issubclass(each_permission, OAuth2Authenticated): params.append( { "paramType": "query", "name": 'access_token', "dataType": 'string', "required": True, } ) if hasattr(view_cls, 'read_safe_parameters'): for each in view_cls.read_safe_parameters: if isinstance(each, tuple): if each[1] == int: params.append( { "paramType": "query", "name": each[0], "dataType": 'int', "format": 'int64', "required": True, } ) elif each[1] == float: params.append( { "paramType": "query", "name": each[0], "dataType": 'float', "format": 'float', "required": True, } ) else: params.append( { "paramType": "query", "name": each[0], "dataType": 'string', "required": True, } ) else: params.append( { "paramType": "query", "name": each, "dataType": 'string', "required": True, } ) pass pass op['parameters'] = params operations.append(op) if 'update' in 
view_cls.crud_method_names and hasattr(view_cls, 'update'): op = { 'method': 'UPDATE', 'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses': [], 'nickname': 'read ' + path, } operations.append(op) if 'delete' in view_cls.crud_method_names and hasattr(view_cls, 'delete'): op = { 'method': 'DELETE', 'parameters': global_params, 'responseMessages': responseMessages, 'errorResponses': [], 'nickname': 'read ' + path, } operations.append(op) current_api['operations'] = operations apis.append(current_api) ret['apis'] = apis ret["models"] = models return ret ret = SimpleCache(key_format="api-doc:%(api_version)s", duration=60 * 60 * 24, load_callback=load_cache).get(api_version=api_version) response = http_json_response(ret) response["Access-Control-Allow-Origin"] = "*" response["Access-Control-Allow-Methods"] = "GET" response["Access-Control-Max-Age"] = "1000" response["Access-Control-Allow-Headers"] = "*" return response
2.125
2
components/dash-core-components/tests/integration/dropdown/test_dynamic_options.py
mastermind88/dash
0
2871
from dash import Dash, Input, Output, dcc, html
from dash.exceptions import PreventUpdate


def test_dddo001_dynamic_options(dash_dcc):
    dropdown_options = [
        {"label": "New York City", "value": "NYC"},
        {"label": "Montreal", "value": "MTL"},
        {"label": "San Francisco", "value": "SF"},
    ]
    app = Dash(__name__)
    app.layout = dcc.Dropdown(id="my-dynamic-dropdown", options=[])

    @app.callback(
        Output("my-dynamic-dropdown", "options"),
        [Input("my-dynamic-dropdown", "search_value")],
    )
    def update_options(search_value):
        if not search_value:
            raise PreventUpdate
        return [o for o in dropdown_options if search_value in o["label"]]

    dash_dcc.start_server(app)

    # Get the inner input used for search value.
    input_ = dash_dcc.find_element("#my-dynamic-dropdown input")

    # Focus on the input to open the options menu
    input_.send_keys("x")

    # No options to be found with `x` in them, should show the empty message.
    dash_dcc.wait_for_text_to_equal(".Select-noresults", "No results found")

    input_.clear()
    input_.send_keys("o")

    options = dash_dcc.find_elements("#my-dynamic-dropdown .VirtualizedSelectOption")

    # Should show all options.
    assert len(options) == 3

    # Searching for `on`
    input_.send_keys("n")
    options = dash_dcc.find_elements("#my-dynamic-dropdown .VirtualizedSelectOption")
    assert len(options) == 1
    print(options)
    assert options[0].text == "Montreal"

    assert dash_dcc.get_logs() == []


def test_dddo002_array_comma_value(dash_dcc):
    app = Dash(__name__)
    dropdown = dcc.Dropdown(
        options=["New York, NY", "Montreal, QC", "San Francisco, CA"],
        value=["San Francisco, CA"],
        multi=True,
    )
    app.layout = html.Div(dropdown)
    dash_dcc.start_server(app)

    dash_dcc.wait_for_text_to_equal("#react-select-2--value-0", "San Francisco, CA\n ")

    assert dash_dcc.get_logs() == []


def test_dddo003_value_no_options(dash_dcc):
    app = Dash(__name__)
    app.layout = html.Div(
        [
            dcc.Dropdown(value="foobar", id="dropdown"),
        ]
    )
    dash_dcc.start_server(app)
    assert dash_dcc.get_logs() == []
    dash_dcc.wait_for_element("#dropdown")
2.515625
3
Server.py
dipghoshraj/live-video-streming-with-web-socket
3
2872
<filename>Server.py<gh_stars>1-10
import cv2
import io
import socket
import struct
import time
import pickle
import zlib

client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 8485))
connection = client_socket.makefile('wb')

cam = cv2.VideoCapture("E:/songs/Attention <NAME>(GabbarWorld.com) 1080p.mp4")
cam.set(3, 320)
cam.set(4, 240)

img_counter = 0

encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]

while True:
    ret, frame = cam.read()
    result, frame = cv2.imencode('.jpg', frame, encode_param)
    # data = zlib.compress(pickle.dumps(frame, 0))
    data = pickle.dumps(frame, 0)
    size = len(data)

    print("{}: {}".format(img_counter, size))
    client_socket.sendall(struct.pack(">L", size) + data)
    img_counter += 1

cam.release()
2.609375
3
hal/agent/tf2_utils.py
gunpowder78/google-research
1
2873
<reponame>gunpowder78/google-research # coding=utf-8 # Copyright 2022 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utilities for Tensorflow 2.0. Partially adapted from: https://www.tensorflow.org/tutorials/text/image_captioning """ # Lint as: python3 # pylint: disable=invalid-name from __future__ import absolute_import from __future__ import division import tensorflow as tf def film_params(sentence_embedding, n_layer_channel): """Generate FiLM parameters from a sentence embedding. Generate FiLM parameters from a sentence embedding. This method assumes a batch dimension exists. Args: sentence_embedding: a tensor containing batched sentenced embedding to be transformed n_layer_channel: a list of integers specifying how many channels are at each hidden layer to be FiLM'ed Returns: a tuple of tensors the same length as n_layer_channel. Each element contains all gamma_i and beta_i for a single hidden layer. """ n_total = sum(n_layer_channel) * 2 all_params = tf.layers.dense(sentence_embedding, n_total) all_params = tf.keras.layers.Dense( 2 * sum * (n_layer_channel), activation=tf.nn.relu) return tf.split(all_params, [c * 2 for c in n_layer_channel], 1) def stack_conv_layer(layer_cfg, padding='same'): """Stack convolution layers per layer_cfg. Args: layer_cfg: list of integer tuples specifying the parameter each layer; each tuple should be (channel, kernel size, strides) padding: what kind of padding the conv layers use Returns: the keras model with stacked conv layers """ layers = [] for cfg in layer_cfg[:-1]: layers.append( tf.keras.layers.Conv2D( filters=cfg[0], kernel_size=cfg[1], strides=cfg[2], activation=tf.nn.relu, padding=padding)) final_cfg = layer_cfg[-1] layers.append( tf.keras.layers.Conv2D( final_cfg[0], final_cfg[1], final_cfg[2], padding=padding)) return tf.keras.Sequential(layers) def stack_dense_layer(layer_cfg): """Stack Dense layers. Args: layer_cfg: list of integer specifying the number of units at each layer Returns: the keras model with stacked dense layers """ layers = [] for cfg in layer_cfg[:-1]: layers.append(tf.keras.layers.Dense(cfg, activation=tf.nn.relu)) layers.append(tf.keras.layers.Dense(layer_cfg[-1])) return tf.keras.Sequential(layers) def soft_variables_update(source_variables, target_variables, polyak_rate=1.0): """Update the target variables using exponential moving average. Specifically, v_s' = v_s * polyak_rate + (1-polyak_rate) * v_t Args: source_variables: the moving average variables target_variables: the new observations polyak_rate: rate of moving average Returns: Operation that does the update """ updates = [] for (v_s, v_t) in zip(source_variables, target_variables): v_t.shape.assert_is_compatible_with(v_s.shape) def update_fn(v1, v2): """Update variables.""" # For not trainable variables do hard updates. 
return v1.assign(polyak_rate * v1 + (1 - polyak_rate) * v2) update = update_fn(v_t, v_s) updates.append(update) return updates def vector_tensor_product(a, b): """"Returns keras layer that perfrom a outer product between a and b.""" # a shape: [B, ?, d], b shape: [B, ?, d] shape_layer = tf.keras.layers.Lambda(tf.shape) shape = shape_layer(b) shape_numpy = b.get_shape() variable_length = shape[1] # variable_len = ? expand_dims_layer_1 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) expand_dims_layer_2 = tf.keras.layers.Reshape((-1, 1, shape_numpy[-1])) a = expand_dims_layer_1(a) # a shape: [B, ?, 1, d] b = expand_dims_layer_2(b) # a shape: [B, ?, 1, d] tile_layer = tf.keras.layers.Lambda( lambda inputs: tf.tile(inputs[0], multiples=inputs[1])) a = tile_layer((a, [1, 1, variable_length, 1])) # a shape: [B, ?, ?, d] b = tile_layer((b, [1, 1, variable_length, 1])) # b shape: [B, ?, ?, d] b = tf.keras.layers.Permute((2, 1, 3))(b) # b shape: [B, ?, ?, d] return tf.keras.layers.concatenate([a, b]) # shape: [B, ?, ?, 2*d] class BahdanauAttention(tf.keras.Model): """Bahdanau Attention Layer. Attributes: w1: weights that process the feature w2: weights that process the memory state v: projection layer that project score vector to scalar """ def __init__(self, units): """Initialize Bahdanau attention layer. Args: units: size of the dense layers """ super(BahdanauAttention, self).__init__() self.W1 = tf.keras.layers.Dense(units) self.W2 = tf.keras.layers.Dense(units) self.V = tf.keras.layers.Dense(1) def call(self, features, hidden): # features(CNN_encoder output) shape == (batch_size, 64, embedding_dim) # hidden shape == (batch_size, hidden_size) # hidden_with_time_axis shape == (batch_size, 1, hidden_size) hidden_with_time_axis = tf.expand_dims(hidden, 1) # score shape == (batch_size, 64, hidden_size) score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis)) # attention_weights shape == (batch_size, 64, 1) # you get 1 at the last axis because you are applying score to self.V attention_weights = tf.nn.softmax(self.V(score), axis=1) # context_vector shape after sum == (batch_size, hidden_size) context_vector = attention_weights * features context_vector = tf.reduce_sum(context_vector, axis=1) return context_vector, attention_weights class GRUEnecoder(tf.keras.Model): """TF2.0 GRE encoder. Attributes: embedding: word embedding matrix gru: the GRU layer """ def __init__(self, embedding_dim, units, vocab_size): """Initialize the GRU encoder. Args: embedding_dim: dimension of word emebdding units: number of units of the memory state vocab_size: total number of vocabulary """ super(GRUEnecoder, self).__init__() self._units = units self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim) self.gru = tf.keras.layers.GRU( self.units, return_sequences=True, return_state=True, recurrent_initializer='glorot_uniform') def call(self, x, hidden): # x shape after passing through embedding == (batch_size, 1, embedding_dim) x = self.embedding(x) # passing the concatenated vector to the GRU output, state = self.gru(x) return output, state def reset_state(self, batch_size): return tf.zeros((batch_size, self._units))
2.65625
3
wolk/logger_factory.py
Wolkabout/WolkConnect-Python-
6
2874
<reponame>Wolkabout/WolkConnect-Python-<gh_stars>1-10 """LoggerFactory Module.""" # Copyright 2020 WolkAbout Technology s.r.o. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from typing import List from typing import Optional class LoggerFactory: """Factory for issuing ready to use loggers in other modules.""" def __init__(self, level=logging.INFO, console=True, log_file=None): # type: ignore """ Create a factory that will give loggers through calls to get_logger(). :param level: Set the desired logging level :type level: int or None :param console: Should the log messages be outputted to the console :type console: bool or None :param log_file: Name of the log file to output to :type log_file: str or None """ self.level = level self.device_key = None self.console = console self.log_file = log_file self.loggers: List[logging.Logger] = [] def set_device_key(self, device_key: str) -> None: """ Set device key. :param device_key: Device key :type device_key: str """ self.device_key = device_key def get_logger( self, name: str, level: Optional[int] = None ) -> logging.Logger: """ Return a ready to use logger instance. :param name: Name of the logger :type name: str :param level: Override the log level :type level: int or None :returns: Logger instance :rtype: logger """ logger = logging.getLogger(name) if level is not None: logger.setLevel(level) else: logger.setLevel(self.level) if self.device_key is not None: formatter = logging.Formatter( "%(asctime)s - '" + str(self.device_key) + "' - %(levelname)s [%(filename)s:%(lineno)s" + " - %(funcName)s()] - %(message)s" ) else: formatter = logging.Formatter( "%(asctime)s - %(levelname)s [%(filename)s:%(lineno)s" + " - %(funcName)s()] - %(message)s" ) if self.console: console_handler = logging.StreamHandler() if level is not None: console_handler.setLevel(level) else: console_handler.setLevel(self.level) console_handler.setFormatter(formatter) logger.addHandler(console_handler) if self.log_file is not None: file_handler = logging.FileHandler(self.log_file) if level is not None: file_handler.setLevel(level) else: file_handler.setLevel(self.level) file_handler.setFormatter(formatter) logger.addHandler(file_handler) self.loggers.append(logger) return logger # Logging levels available: NOTSET, INFO, DEBUG logger_factory = LoggerFactory(level=logging.INFO) LEVELS = { "debug": logging.DEBUG, "info": logging.INFO, "warning": logging.WARNING, "error": logging.ERROR, "critical": logging.CRITICAL, "notset": logging.NOTSET, } def logging_config(level: str, log_file: Optional[str] = None) -> None: """ Set desired log level and designate a log file. 
:param level: Available levels : debug, info, notset :type level: str :param log_file: path to log file :type log_file: str or None """ if log_file is not None: logger_factory.log_file = log_file if level not in LEVELS: print(f"Invalid level '{level}'") return if LEVELS[level] == logger_factory.level: return logger_factory.level = LEVELS[level] for logger in logger_factory.loggers: logger.setLevel(logger_factory.level) for handler in logger.handlers: handler.setLevel(logger_factory.level)
2.609375
3
raw.py
andre-marcos-perez/data-pipeline-demo
3
2875
import json
import gzip
import requests
from datetime import datetime

import pendulum
import boto3
from botocore.exceptions import ClientError

from util.log import Log
from settings.aws_settings import AWSSettings
from settings.telegram_settings import TelegramSettings


def lambda_handler(event: dict, context: dict) -> dict:

    log = Log.setup(name='logger')
    aws_settings = AWSSettings()
    telegram_settings = TelegramSettings()

    timezone = pendulum.timezone('America/Sao_Paulo')
    date = datetime.now(tz=timezone).strftime('%Y-%m-%d')
    timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S')

    try:
        token = telegram_settings.access_token
        base_url = f"https://api.telegram.org/bot{token}"

        data = json.loads(event["body"])
        chat_id = data["message"]["chat"]["id"]

        if chat_id == telegram_settings.chat_id:
            client = boto3.client('s3')
            bucket = aws_settings.raw_bucket
            root_path = aws_settings.root_path

            try:
                with open(f"{root_path}/{timestamp}.json", mode='w', encoding='utf8') as fp:
                    json.dump(data, fp)
                client.upload_file(f"{root_path}/{timestamp}.json", bucket, f"{date}/{timestamp}.json")
            except ClientError as exc:
                raise exc
        else:
            text = "I can't talk to strangers, sorry mate!"
            data = {"text": text, "chat_id": chat_id}
            data = gzip.compress(json.dumps(data).encode('utf-8'))
            headers = {'content-type': 'application/json', 'content-encoding': 'gzip'}
            url = base_url + "/sendMessage"
            requests.post(url=url, data=data, headers=headers)

    except Exception as exc:
        log.error(msg=exc)

    finally:
        return dict(statusCode="200")
2.109375
2
v2_hier/site_stat.py
ruslan-ok/ruslan
0
2876
"""Collecting statistics of site visits.""" import collections from datetime import datetime from functools import reduce from django.utils.translation import gettext_lazy as _ from hier.models import IPInfo, AccessLog, SiteStat from v2_hier.utils import APPS def get_site_stat(user): """Processing a new portion of log file records. The site applications that users have visited and information about their IP addresses will be shown. """ TOTAL_IP = _('total different').capitalize() + ' IP' TOTAL_LOG = _('total log records').capitalize() NEW_LOG = _('new log records').capitalize() cnt = collections.Counter() cnt[TOTAL_IP] = len(IPInfo.objects.all()) cnt[TOTAL_LOG] = len(AccessLog.objects.all()) #Determining the last previously processed log file entry last = datetime.min site_stat = None if SiteStat.objects.filter(user=user.id).exists(): site_stat = SiteStat.objects.filter(user = user.id).get() if site_stat.record and site_stat.record.event: last = site_stat.record.event # New records records = AccessLog.objects.filter(event__gt=last).order_by('-event') cnt[NEW_LOG] += len(records) # Save last processed log record last_rec = None if (len(records) > 0): last_rec = records[0] if site_stat: site_stat.record = last_rec site_stat.save() else: SiteStat.objects.create(user=user, record=last_rec) #raise Exception(last_rec.event) apps = {} for rec in records: uri = valid_uri(rec) if not uri: continue # Determining the access to the site application a_app = list(filter(lambda x: '/{}/'.format(x) in uri, APPS)) if not a_app: continue app = a_app[0] if not app in apps: apps[app] = {} host = str(rec.host.info()) #raise Exception('aaa = ', aaa) if not host in apps[app]: apps[app][host] = [] page = '{} {}'.format(rec.method, uri) if not page in apps[app][host]: apps[app][host].append(page) return cnt.most_common(), apps def valid_uri(rec): if (rec.status >= 400) or (rec.status == 301): return None if 'favicon.ico' in rec.uri or '/static/' in rec.uri or '/jsi18n/' in rec.uri or '/photo/get_mini/' in rec.uri: return None if ('/?' in rec.uri) and (rec.method != 'POST'): uri = rec.uri.split('?')[0] else: uri = rec.uri uri = uri.replace('/ru/', '/').replace('/en/', '/') if (uri == '/'): return None return uri
2.578125
3
cli/pcluster/utils.py
mkosmo/cfncluster
1
2877
<gh_stars>1-10 # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with # the License. A copy of the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "LICENSE.txt" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES # OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and # limitations under the License. # fmt: off from __future__ import absolute_import, print_function # isort:skip from future import standard_library # isort:skip standard_library.install_aliases() # fmt: on import json import logging import os import sys import time import urllib.request import zipfile from io import BytesIO import boto3 import pkg_resources from botocore.exceptions import ClientError LOGGER = logging.getLogger(__name__) PCLUSTER_STACK_PREFIX = "parallelcluster-" PCLUSTER_ISSUES_LINK = "https://github.com/aws/aws-parallelcluster/issues" def get_stack_name(cluster_name): return PCLUSTER_STACK_PREFIX + cluster_name def get_region(): """Get AWS_DEFAULT_REGION from the environment.""" return os.environ.get("AWS_DEFAULT_REGION") def get_partition(): """Get partition for the AWS_DEFAULT_REGION set in the environment.""" return "aws-us-gov" if get_region().startswith("us-gov") else "aws" def paginate_boto3(method, **kwargs): """ Return a generator for a boto3 call, this allows pagination over an arbitrary number of responses. :param method: boto3 method :param kwargs: arguments to method :return: generator with boto3 results """ client = method.__self__ paginator = client.get_paginator(method.__name__) for page in paginator.paginate(**kwargs).result_key_iters(): for result in page: yield result def create_s3_bucket(bucket_name, region): """ Create a new S3 bucket. :param bucket_name: name of the S3 bucket to create :param region: aws region """ s3_client = boto3.client("s3") """ :type : pyboto3.s3 """ try: if region != "us-east-1": s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={"LocationConstraint": region}) else: s3_client.create_bucket(Bucket=bucket_name) except s3_client.exceptions.BucketAlreadyOwnedByYou: print("Bucket already exists") def delete_s3_bucket(bucket_name): """ Delete an S3 bucket together with all stored objects. :param bucket_name: name of the S3 bucket to delete """ try: bucket = boto3.resource("s3").Bucket(bucket_name) bucket.objects.all().delete() bucket.delete() except boto3.client("s3").exceptions.NoSuchBucket: pass except ClientError: print("Failed to delete bucket %s. Please delete it manually." % bucket_name) def zip_dir(path): """ Create a zip archive containing all files and dirs rooted in path. The archive is created in memory and a file handler is returned by the function. :param path: directory containing the resources to archive. :return file handler pointing to the compressed archive. """ file_out = BytesIO() with zipfile.ZipFile(file_out, "w", zipfile.ZIP_DEFLATED) as ziph: for root, _, files in os.walk(path): for file in files: ziph.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), start=path)) file_out.seek(0) return file_out def upload_resources_artifacts(bucket_name, root): """ Upload to the specified S3 bucket the content of the directory rooted in root path. All dirs contained in root dir will be uploaded as zip files to $bucket_name/$dir_name/artifacts.zip. 
All files contained in root dir will be uploaded to $bucket_name. :param bucket_name: name of the S3 bucket where files are uploaded :param root: root directory containing the resources to upload. """ bucket = boto3.resource("s3").Bucket(bucket_name) for res in os.listdir(root): if os.path.isdir(os.path.join(root, res)): bucket.upload_fileobj(zip_dir(os.path.join(root, res)), "%s/artifacts.zip" % res) elif os.path.isfile(os.path.join(root, res)): bucket.upload_file(os.path.join(root, res), res) def _get_json_from_s3(region, file_name): """ Get pricing file (if none) and parse content as json. :param region: AWS Region :param file_name the object name to get :return: a json object representing the file content :raises ClientError if unable to download the file :raises ValueError if unable to decode the file content """ bucket_name = "{0}-aws-parallelcluster".format(region) file_contents = boto3.resource("s3").Object(bucket_name, file_name).get()["Body"].read().decode("utf-8") return json.loads(file_contents) def get_supported_features(region, feature): """ Get a json object containing the attributes supported by a feature, for example. { "Features": { "efa": { "instances": ["c5n.18xlarge", "p3dn.24xlarge", "i3en.24xlarge"], "baseos": ["alinux", "centos7"], "schedulers": ["sge", "slurm", "torque"] }, "batch": { "instances": ["r3.8xlarge", ..., "m5.4xlarge"] } } } :param region: AWS Region :param feature: the feature to search for, i.e. "efa" "awsbatch" :return: json object containing all the attributes supported by feature """ try: features = _get_json_from_s3(region, "features/feature_whitelist.json") supported_features = features.get("Features").get(feature) except (ValueError, ClientError, KeyError) as e: if isinstance(e, ClientError): code = e.response.get("Error").get("Code") if code == "InvalidAccessKeyId": error(e.response.get("Error").get("Message")) error( "Failed validate {0}. This is probably a bug on our end. " "Please submit an issue {1}".format(feature, PCLUSTER_ISSUES_LINK) ) return supported_features def get_instance_vcpus(region, instance_type): """ Get number of vcpus for the given instance type. :param region: AWS Region :param instance_type: the instance type to search for. :return: the number of vcpus or -1 if the instance type cannot be found or the pricing file cannot be retrieved/parsed """ try: instances = _get_json_from_s3(region, "instances/instances.json") vcpus = int(instances[instance_type]["vcpus"]) except (KeyError, ValueError, ClientError): vcpus = -1 return vcpus def get_supported_os(scheduler): """ Return a tuple of the os supported by parallelcluster for the specific scheduler. :param scheduler: the scheduler for which we want to know the supported os :return: a tuple of strings of the supported os """ return "alinux" if scheduler == "awsbatch" else "alinux", "centos6", "centos7", "ubuntu1604", "ubuntu1804" def get_supported_schedulers(): """ Return a tuple of the scheduler supported by parallelcluster. :return: a tuple of strings of the supported scheduler """ return "sge", "torque", "slurm", "awsbatch" def get_stack_output_value(stack_outputs, output_key): """ Get output value from Cloudformation Stack Output. 
:param stack_outputs: Cloudformation Stack Outputs :param output_key: Output Key :return: OutputValue if that output exists, otherwise None """ return next((o.get("OutputValue") for o in stack_outputs if o.get("OutputKey") == output_key), None) def get_stack(stack_name, cfn_client=None): """ Get the output for a DescribeStacks action for the given Stack. :param stack_name: the CFN Stack name :param cfn_client: boto3 cloudformation client :return: the Stack data type """ try: if not cfn_client: cfn_client = boto3.client("cloudformation") return cfn_client.describe_stacks(StackName=stack_name).get("Stacks")[0] except (ClientError, IndexError) as e: error(e.response.get("Error").get("Message")) def verify_stack_creation(stack_name, cfn_client): """ Wait for the stack creation to be completed and notify if the stack creation fails. :param stack_name: the stack name that we should verify :param cfn_client: the CloudFormation client to use to verify stack status :return: True if the creation was successful, false otherwise. """ status = get_stack(stack_name, cfn_client).get("StackStatus") resource_status = "" while status == "CREATE_IN_PROGRESS": status = get_stack(stack_name, cfn_client).get("StackStatus") events = cfn_client.describe_stack_events(StackName=stack_name).get("StackEvents")[0] resource_status = ("Status: %s - %s" % (events.get("LogicalResourceId"), events.get("ResourceStatus"))).ljust( 80 ) sys.stdout.write("\r%s" % resource_status) sys.stdout.flush() time.sleep(5) # print the last status update in the logs if resource_status != "": LOGGER.debug(resource_status) if status != "CREATE_COMPLETE": LOGGER.critical("\nCluster creation failed. Failed events:") events = cfn_client.describe_stack_events(StackName=stack_name).get("StackEvents") for event in events: if event.get("ResourceStatus") == "CREATE_FAILED": LOGGER.info( " - %s %s %s", event.get("ResourceType"), event.get("LogicalResourceId"), event.get("ResourceStatusReason"), ) return False return True def get_templates_bucket_path(): """Return a string containing the path of bucket.""" region = get_region() s3_suffix = ".cn" if region.startswith("cn") else "" return "https://s3.{REGION}.amazonaws.com{S3_SUFFIX}/{REGION}-aws-parallelcluster/templates/".format( REGION=region, S3_SUFFIX=s3_suffix ) def get_installed_version(): """Get the version of the installed aws-parallelcluster package.""" return pkg_resources.get_distribution("aws-parallelcluster").version def check_if_latest_version(): """Check if the current package version is the latest one.""" try: latest = json.loads(urllib.request.urlopen("https://pypi.python.org/pypi/aws-parallelcluster/json").read())[ "info" ]["version"] if get_installed_version() < latest: print("Info: There is a newer version %s of AWS ParallelCluster available." % latest) except Exception: pass def warn(message): """Print a warning message.""" print("WARNING: {0}".format(message)) def error(message, fail_on_error=True): """Print an error message and Raise SystemExit exception to the stderr if fail_on_error is true.""" if fail_on_error: sys.exit("ERROR: {0}".format(message)) else: print("ERROR: {0}".format(message)) def get_cfn_param(params, key_name): """ Get parameter value from Cloudformation Stack Parameters. 
:param params: Cloudformation Stack Parameters :param key_name: Parameter Key :return: ParameterValue if that parameter exists, otherwise None """ param_value = next((i.get("ParameterValue") for i in params if i.get("ParameterKey") == key_name), "NONE") return param_value.strip() def get_efs_mount_target_id(efs_fs_id, avail_zone): """ Search for a Mount Target Id in given availability zone for the given EFS file system id. :param efs_fs_id: EFS file system Id :param avail_zone: Availability zone to verify :return: the mount_target_id or None """ mount_target_id = None if efs_fs_id: mount_targets = boto3.client("efs").describe_mount_targets(FileSystemId=efs_fs_id) for mount_target in mount_targets.get("MountTargets"): # Check to see if there is an existing mt in the az of the stack mount_target_subnet = mount_target.get("SubnetId") if avail_zone == get_avail_zone(mount_target_subnet): mount_target_id = mount_target.get("MountTargetId") return mount_target_id def get_avail_zone(subnet_id): avail_zone = None try: avail_zone = ( boto3.client("ec2").describe_subnets(SubnetIds=[subnet_id]).get("Subnets")[0].get("AvailabilityZone") ) except ClientError as e: LOGGER.debug( "Unable to detect availability zone for subnet {0}.\n{1}".format( subnet_id, e.response.get("Error").get("Message") ) ) return avail_zone def get_latest_alinux_ami_id(): """Get latest alinux ami id.""" try: alinux_ami_id = ( boto3.client("ssm") .get_parameters_by_path(Path="/aws/service/ami-amazon-linux-latest") .get("Parameters")[0] .get("Value") ) except ClientError as e: error("Unable to retrieve Amazon Linux AMI id.\n{0}".format(e.response.get("Error").get("Message"))) return alinux_ami_id def list_ec2_instance_types(): """Return a list of all the instance types available on EC2, independent by the region.""" return boto3.client("ec2").meta.service_model.shape_for("InstanceType").enum def get_master_server_id(stack_name): """Return the physical id of the master server, or [] if no master server.""" try: resources = boto3.client("cloudformation").describe_stack_resource( StackName=stack_name, LogicalResourceId="MasterServer" ) return resources.get("StackResourceDetail").get("PhysicalResourceId") except ClientError as e: error(e.response.get("Error").get("Message")) def _get_master_server_ip(stack_name): """ Get the IP Address of the MasterServer. :param stack_name: The name of the cloudformation stack :param config: Config object :return private/public ip address """ ec2 = boto3.client("ec2") master_id = get_master_server_id(stack_name) if not master_id: error("MasterServer not running. 
Can't SSH") instance = ec2.describe_instances(InstanceIds=[master_id]).get("Reservations")[0].get("Instances")[0] ip_address = instance.get("PublicIpAddress") if ip_address is None: ip_address = instance.get("PrivateIpAddress") state = instance.get("State").get("Name") if state != "running" or ip_address is None: error("MasterServer: %s\nCannot get ip address.", state.upper()) return ip_address def get_master_ip_and_username(cluster_name): cfn = boto3.client("cloudformation") try: stack_name = get_stack_name(cluster_name) stack_result = cfn.describe_stacks(StackName=stack_name).get("Stacks")[0] stack_status = stack_result.get("StackStatus") valid_status = ["CREATE_COMPLETE", "UPDATE_COMPLETE", "UPDATE_ROLLBACK_COMPLETE"] invalid_status = ["DELETE_COMPLETE", "DELETE_IN_PROGRESS"] if stack_status in invalid_status: error("Unable to retrieve master_ip and username for a stack in the status: {0}".format(stack_status)) elif stack_status in valid_status: outputs = stack_result.get("Outputs") master_ip = get_stack_output_value(outputs, "MasterPublicIP") or _get_master_server_ip(stack_name) username = get_stack_output_value(outputs, "ClusterUser") else: # Stack is in CREATING, CREATED_FAILED, or ROLLBACK_COMPLETE but MasterServer is running master_ip = _get_master_server_ip(stack_name) template = cfn.get_template(StackName=stack_name) mappings = template.get("TemplateBody").get("Mappings").get("OSFeatures") base_os = get_cfn_param(stack_result.get("Parameters"), "BaseOS") username = mappings.get(base_os).get("User") if not master_ip: error("Failed to get cluster {0} ip.".format(cluster_name)) if not username: error("Failed to get cluster {0} username.".format(cluster_name)) except ClientError as e: error(e.response.get("Error").get("Message")) return master_ip, username def get_cli_log_file(): return os.path.expanduser(os.path.join("~", ".parallelcluster", "pcluster-cli.log"))
2.0625
2
thinkutils_plus/eventbus/sample/myeventbus.py
ThinkmanWang/thinkutils_plus
0
2878
__author__ = 'Xsank'
import time

from thinkutils_plus.eventbus.eventbus import EventBus

from myevent import GreetEvent
from myevent import ByeEvent
from mylistener import MyListener


if __name__=="__main__":
    eventbus=EventBus()
    eventbus.register(MyListener())
    ge=GreetEvent('world')
    be=ByeEvent('world')
    eventbus.async_post(be)
    eventbus.post(ge)
    time.sleep(0.1)
    eventbus.unregister(MyListener())
    eventbus.destroy()
2.109375
2
tools/telemetry/telemetry/core/platform/android_device_unittest.py
kjthegod/chromium
1
2879
<filename>tools/telemetry/telemetry/core/platform/android_device_unittest.py # Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import unittest from telemetry import benchmark from telemetry.core import browser_options from telemetry.core.platform import android_device from telemetry.core.platform import android_platform_backend from telemetry.unittest_util import system_stub class AndroidDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands']) def testGetAllAttachedAndroidDevices(self): self._android_device_stub.adb_commands.attached_devices = [ '01', '02'] self.assertEquals( set(['01', '02']), set(device.device_id for device in android_device.AndroidDevice.GetAllConnectedDevices() )) def tearDown(self): self._android_device_stub.Restore() class GetDeviceTest(unittest.TestCase): def setUp(self): self._android_device_stub = system_stub.Override( android_device, ['adb_commands', 'os', 'subprocess', 'logging']) self._apb_stub = system_stub.Override( android_platform_backend, ['adb_commands']) def tearDown(self): self._android_device_stub.Restore() self._apb_stub.Restore() def testNoAdbReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() def NoAdb(*_, **__): raise OSError('not found') self._android_device_stub.subprocess.Popen = NoAdb self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbNoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self.assertEquals([], self._android_device_stub.logging.warnings) self.assertIsNone(android_device.GetDevice(finder_options)) def testAdbPermissionsErrorReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.subprocess.Popen.communicate_result = ( 'List of devices attached\n????????????\tno permissions\n', '* daemon not running. starting it now on port 5037 *\n' '* daemon started successfully *\n') device = android_device.GetDevice(finder_options) self.assertEquals([ 'adb devices gave a permissions error. Consider running adb as root:', ' adb kill-server', ' sudo `which adb` devices\n\n'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbTwoDevicesReturnsNone(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '015d14fec128220d'] device = android_device.GetDevice(finder_options) self.assertEquals([ 'Multiple devices attached. 
Please specify one of the following:\n' ' --device=015d14fec128220c\n' ' --device=015d14fec128220d'], self._android_device_stub.logging.warnings) self.assertIsNone(device) def testAdbPickOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() finder_options.android_device = '555d14fecddddddd' # pick one self._android_device_stub.adb_commands.attached_devices = [ '015d14fec128220c', '555d14fecddddddd'] device = android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('555d14fecddddddd', device.device_id) def testAdbOneDeviceReturnsDeviceInstance(self): finder_options = browser_options.BrowserFinderOptions() self._android_device_stub.adb_commands.attached_devices = ( ['015d14fec128220c']) device = android_device.GetDevice(finder_options) self.assertEquals([], self._android_device_stub.logging.warnings) self.assertEquals('015d14fec128220c', device.device_id)
2.09375
2
logger.py
bekaaa/xgboost_tuner
0
2880
<reponame>bekaaa/xgboost_tuner<gh_stars>0
#! /usr/bin/env python
import logging
#---------------------------------------
class logger :
    '''
    A ready to use logging class.
    All you need to do is set an object with the parameters (log_filename,
    directory to save it) then whenever you want to add text, type
    obj.add("some text").
    The function obj.close() is not important, I just added it for coverage.
    You can edit any of the below configuration to whatever you like.
    '''
    def __init__(self, filename, log_dir='../data/log'):
        self.log = None
        self.handler = None
        LOG_PATH = log_dir
        assert type(filename) == str or filename != ''
        self.logger = logging.getLogger();
        self.logger.setLevel(logging.INFO)
        filename = LOG_PATH + str(filename)
        self.handler = logging.FileHandler(filename)
        self.handler.setLevel(logging.INFO)
        formatter = logging.Formatter(
            fmt='%(asctime)s : %(message)s',
            datefmt='%d-%m %H:%M' )
        self.handler.setFormatter(formatter)
        self.logger.addHandler(self.handler)
        return
    #------------------------------------
    def add(self, message):
        assert type(message) == str
        self.logger.info(message);
        return
    #------------------------------------
    def close(self):
        self.logger.removeHandler(self.handler)
        return
#----------------------------------------
2.640625
3
baselines/prep_baseline.py
lessleslie/slm-code-generation
64
2881
import json import multiprocessing as mp import re from argparse import ArgumentParser from enum import Enum, auto import javalang from functools import partial PRED_TOKEN = 'PRED' modifiers = ['public', 'private', 'protected', 'static'] class TargetType(Enum): seq = auto() tree = auto() @staticmethod def from_string(s): try: return TargetType[s] except KeyError: raise ValueError() target_type = TargetType.seq RE_WORDS = re.compile(r''' # Find words in a string. Order matters! [A-Z]+(?=[A-Z][a-z]) | # All upper case before a capitalized word [A-Z]?[a-z]+ | # Capitalized words / all lower case [A-Z]+ | # All upper case \d+ | # Numbers _ | \" | .+ ''', re.VERBOSE) TREE_SPLIT = re.compile(r'([(),])') def split_subtokens(str): return [subtok for subtok in RE_WORDS.findall(str) if not subtok == '_'] def subtokenize(s): failed = False try: tokens = list(javalang.tokenizer.tokenize(s)) except: try: tokens = list(javalang.tokenizer.tokenize(s + '()'))[:-2] except: try: tokens = list(javalang.tokenizer.tokenize('(' + s + ')'))[1:-1] except: tokens = s.split() failed = True if failed: return [' _ '.join(split_subtokens(i)) for i in tokens if not i in modifiers] else: return [' _ '.join(split_subtokens(i.value)) for i in tokens if not i.value in modifiers] def subtokenize_tree(s): return ' '.join([sub for sub in re.split(TREE_SPLIT, s) if len(sub) > 0]) def process_line(target_type, max_targets, max_nodes, line): obj = json.loads(line) left_context = obj['left_context'] right_context = obj['right_context'] target_seq = obj['target_seq'] num_targets = obj['num_targets'] num_nodes = obj['num_nodes'] if max_targets is not None and num_targets > max_targets: return None, None if max_nodes is not None and num_nodes > max_nodes: return None, None if target_type is TargetType.seq: target_pred = ' '.join(subtokenize(target_seq)).lower() elif target_type is TargetType.tree: target_pred = subtokenize_tree(obj['linearized_tree']) source = '{} {} {}'.format(' '.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN, ' '.join(subtokenize(right_context)[:200]).lower()) return source, target_pred def process_file(file_path, data_file_role, dataset_name, target_type, max_targets, max_nodes): total_examples = 0 source_output_path = '{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role) target_output_path = '{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role) with open(source_output_path, 'w') as source_output_file: with open(target_output_path, 'w') as target_output_file: with open(file_path, 'r') as file: subtokenize_line = partial(process_line, target_type, max_targets, max_nodes) with mp.Pool(64) as pool: if data_file_role in ['test', 'val']: examples = [process_line(target_type, max_targets, max_nodes, line) for line in file] else: examples = pool.imap_unordered(subtokenize_line, file, chunksize=100) #examples = [process_line(target_type, max_targets, max_nodes, line) for line in file] for source_seq, target_seq in examples: if source_seq is None or target_seq is None: continue source_output_file.write(source_seq + '\n') target_output_file.write(target_seq + '\n') total_examples += 1 #print(source_seq, target_seq) print('File: ' + file_path) print('Total examples: ' + str(total_examples)) if __name__ == '__main__': parser = ArgumentParser() parser.add_argument("-trd", "--train_data", dest="train_data_path", help="path to training data file", required=True) parser.add_argument("-ted", "--test_data", dest="test_data_path", help="path to test data file", required=True) 
parser.add_argument("-vd", "--val_data", dest="val_data_path", help="path to validation data file", required=True) parser.add_argument("-o", "--output_name", dest="output_name", help="output name - the base name for the created dataset", metavar="FILE", required=True, default='data') parser.add_argument("--target_type", dest="target_type", type=TargetType.from_string, choices=list(TargetType), required=True) parser.add_argument("--max_targets", dest="max_targets", type=int, required=False, default=40) parser.add_argument("--max_nodes", dest="max_nodes", type=int, required=False, default=None) parser.add_argument('--local', action='store_true') args = parser.parse_args() train_data_path = args.train_data_path test_data_path = args.test_data_path val_data_path = args.val_data_path for data_file_path, data_role in zip([train_data_path, test_data_path, val_data_path], ['train', 'test', 'val']): process_file(file_path=data_file_path, data_file_role=data_role, dataset_name=args.output_name, target_type=args.target_type, max_targets=args.max_targets, max_nodes=args.max_nodes)
2.4375
2
var/spack/repos/builtin/packages/r-multicool/package.py
varioustoxins/spack
0
2882
<gh_stars>0 # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class RMulticool(RPackage): """Permutations of multisets in cool-lex order A set of tools to permute multisets without loops or hash tables and to generate integer partitions. The permutation functions are based on C code from <NAME>. Cool-lex order is similar to colexicographical order. The algorithm is described in <NAME>. (2009) <DOI:10.1145/1496770.1496877> Loopless Generation of Multiset Permutations by Prefix Shifts. Symposium on Discrete Algorithms, New York, United States. The permutation code is distributed without restrictions. The code for stable and efficient computation of multinomial coefficients comes from <NAME>. The code can be download from <http://tamivox.org/dave/multinomial/code.html> and is distributed without conditions. The package also generates the integer partitions of a positive, non-zero integer n. The C++ code for this is based on Python code from <NAME> which can be found here <https://jeromekelleher.net/tag/integer-partitions.html>. The C++ code and Python code are distributed without conditions.""" homepage = "https://cloud.r-project.org/package=multicool" url = "https://cloud.r-project.org/src/contrib/Archive/multicool/multicool_0.1-9.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/multicool/Archive/multicool" version('0.1-10', sha256='5bb0cb0d9eb64420c862877247a79bb0afadacfe23262ec8c3fa26e5e34d6ff9') version('0.1-9', sha256='bdf92571cef1b649952d155395a92b8683099ee13114f73a9d41fc5d7d49d329') depends_on('[email protected]:', type=('build', 'run'))
1.601563
2
updatetranslations.py
erincerys/ergo
1,122
2883
<reponame>erincerys/ergo<gh_stars>1000+ #!/usr/bin/env python3 # updatetranslations.py # # tl;dr this script updates our translation file with the newest, coolest strings we've added! # it manually searches the source code, extracts strings and then updates the language files. # Written in 2018 by <NAME> <<EMAIL>> # # To the extent possible under law, the author(s) have dedicated all copyright # and related and neighboring rights to this software to the public domain # worldwide. This software is distributed without any warranty. # # You should have received a copy of the CC0 Public Domain Dedication along # with this software. If not, see # <http://creativecommons.org/publicdomain/zero/1.0/>. """updatetranslations.py Usage: updatetranslations.py run <irc-dir> <languages-dir> updatetranslations.py --version updatetranslations.py (-h | --help) Options: <irc-dir> Oragono's irc subdirectory where the Go code is kept. <languages-dir> Languages directory.""" import os import re import json from docopt import docopt import yaml ignored_strings = [ 'none', 'saset' ] if __name__ == '__main__': arguments = docopt(__doc__, version="0.1.0") if arguments['run']: # general IRC strings irc_strings = [] for subdir, dirs, files in os.walk(arguments['<irc-dir>']): for fname in files: filepath = subdir + os.sep + fname if filepath.endswith('.go'): content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\.t\("((?:[^"]|\\")+)"\)', content) for match in matches: if match not in irc_strings: irc_strings.append(match) matches = re.findall(r'\.t\(\`([^\`]+)\`\)', content) for match in matches: if match not in irc_strings: irc_strings.append(match) for s in ignored_strings: try: irc_strings.remove(s) except ValueError: # ignore any that don't exist ... print("irc strings:", len(irc_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'irc.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in irc_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\n') for string in irc_strings: if 1 < string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string) # help entries help_strings = [] for subdir, dirs, files in os.walk(arguments['<irc-dir>']): for fname in files: filepath = subdir + os.sep + fname if fname == 'help.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\`([^\`]+)\`', content) for match in matches: if '\n' in match and match not in help_strings: help_strings.append(match) for s in ignored_strings: try: help_strings.remove(s) except ValueError: # ignore any that don't exist ... print("help strings:", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'help.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\n') for string in help_strings: if 1 < string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string.split('\n')[0]) # nickserv help entries help_strings = [] for subdir, dirs, files in os.walk(arguments['<irc-dir>']): for fname in files: filepath = subdir + os.sep + fname if fname == 'nickserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\`([^\`]+)\`', content) for match in matches: if match not in help_strings: help_strings.append(match) for s in ignored_strings: try: help_strings.remove(s) except ValueError: # ignore any that don't exist ... 
print("nickserv help strings:", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'nickserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\n') for string in help_strings: if 1 < string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string) # chanserv help entries help_strings = [] for subdir, dirs, files in os.walk(arguments['<irc-dir>']): for fname in files: filepath = subdir + os.sep + fname if fname == 'chanserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\`([^\`]+)\`', content) for match in matches: if match not in help_strings: help_strings.append(match) for s in ignored_strings: try: help_strings.remove(s) except ValueError: # ignore any that don't exist ... print("chanserv help strings:", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'chanserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\n') for string in help_strings: if 1 < string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string) # hostserv help entries help_strings = [] for subdir, dirs, files in os.walk(arguments['<irc-dir>']): for fname in files: filepath = subdir + os.sep + fname if fname == 'hostserv.go': content = open(filepath, 'r', encoding='UTF-8').read() matches = re.findall(r'\`([^\`]+)\`', content) for match in matches: if match not in help_strings: help_strings.append(match) for s in ignored_strings: try: help_strings.remove(s) except ValueError: # ignore any that don't exist ... print("hostserv help strings:", len(help_strings)) with open(os.path.join(arguments['<languages-dir>'], 'example', 'hostserv.lang.json'), 'w') as f: f.write(json.dumps({k:k for k in help_strings}, sort_keys=True, indent=2, separators=(',', ': '))) f.write('\n') for string in help_strings: if 1 < string.count('%s') + string.count('%d') + string.count('%f'): print(' confirm:', string)
2.1875
2
processing_tools/number_of_tenants.py
apanda/modeling
3
2884
<filename>processing_tools/number_of_tenants.py<gh_stars>1-10
import sys
from collections import defaultdict

def Process (fnames):
    tenant_time = defaultdict(lambda: defaultdict(lambda: 0.0))
    tenant_run = defaultdict(lambda: defaultdict(lambda:0))
    for fname in fnames:
        f = open(fname)
        for l in f:
            if l.startswith("tenant"):
                continue
            parts = l.strip().split()
            tenants = int(parts[0])
            priv = int(parts[1])
            pub = int(parts[2])
            num_machines = tenants * priv * pub
            int_checks = (tenants * tenants * priv * (priv - 1)) / 2
            int_time = int_checks * float(parts[3])
            ext_checks = (tenants * priv) * ((tenants - 1) * pub)
            ext_time = ext_checks * float(parts[4])
            oext_check = (tenants * priv) * (tenants * pub)
            oext_time = oext_check * float(parts[5])
            total = int_time + ext_time + oext_time
            tenant_time[(priv, pub)][tenants] += total
            tenant_run[(priv, pub)][tenants] += 1
    for k in sorted(tenant_run.keys()):
        print "# ----%s------"%(str(k))
        for k2 in sorted(tenant_run[k].keys()):
            print "%d %d %f"%(k2, tenant_run[k][k2], \
                    tenant_time[k][k2]/float(tenant_run[k][k2]))
        print
        print
        #print "%d %d %f"%(k, runs[k], machines[k]/float(runs[k]))

if __name__ == "__main__":
    Process(sys.argv[1:])
2.609375
3
pyfisher/mpi.py
borisbolliet/pyfisher
7
2885
from __future__ import print_function import numpy as np import os,sys,time """ Copied from orphics.mpi """ try: disable_mpi_env = os.environ['DISABLE_MPI'] disable_mpi = True if disable_mpi_env.lower().strip() == "true" else False except: disable_mpi = False """ Use the below cleanup stuff only for intel-mpi! If you use it on openmpi, you will have no traceback for errors causing hours of endless confusion and frustration! - Sincerely, past frustrated Mat """ # From Sigurd's enlib.mpi: # Uncaught exceptions don't cause mpi to abort. This can lead to thousands of # wasted CPU hours # def cleanup(type, value, traceback): # sys.__excepthook__(type, value, traceback) # MPI.COMM_WORLD.Abort(1) # sys.excepthook = cleanup class fakeMpiComm: """ A Simple Fake MPI implementation """ def __init__(self): pass def Get_rank(self): return 0 def Get_size(self): return 1 def Barrier(self): pass def Abort(self,dummy): pass try: if disable_mpi: raise from mpi4py import MPI except: if not(disable_mpi): print("WARNING: mpi4py could not be loaded. Falling back to fake MPI. This means that if you submitted multiple processes, they will all be assigned the same rank of 0, and they are potentially doing the same thing.") class template: pass MPI = template() MPI.COMM_WORLD = fakeMpiComm() def mpi_distribute(num_tasks,avail_cores,allow_empty=False): # copied to mapsims.convert_noise_templates if not(allow_empty): assert avail_cores<=num_tasks min_each, rem = divmod(num_tasks,avail_cores) num_each = np.array([min_each]*avail_cores) # first distribute equally if rem>0: num_each[-rem:] += 1 # add the remainder to the last set of cores (so that rank 0 never gets extra jobs) task_range = list(range(num_tasks)) # the full range of tasks cumul = np.cumsum(num_each).tolist() # the end indices for each task task_dist = [task_range[x:y] for x,y in zip([0]+cumul[:-1],cumul)] # a list containing the tasks for each core assert sum(num_each)==num_tasks assert len(num_each)==avail_cores assert len(task_dist)==avail_cores return num_each,task_dist def distribute(njobs,verbose=True,**kwargs): comm = MPI.COMM_WORLD rank = comm.Get_rank() numcores = comm.Get_size() num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs) if rank==0: print ("At most ", max(num_each) , " tasks...") my_tasks = each_tasks[rank] return comm,rank,my_tasks
2.28125
2
ebay.py
SpironoZeppeli/Magic-The-Scannening
0
2886
<reponame>SpironoZeppeli/Magic-The-Scannening import requests import urllib.request import urllib.parse import PIL import re import configparser import json from PIL import Image from ebaysdk.trading import Connection as Trading from ebaysdk.exception import ConnectionError from yaml import load from PyQt5.QtWidgets import QMessageBox class EbaySeller: def __init__(self): self.api = Trading() config = configparser.ConfigParser() config.read('config.ini') with open('details.yaml', 'r') as file: self.yaml_config = load(file) def upload_card(self, card_name, eu_card_price, us_card_price, card_id): if us_card_price != 0: card_price = us_card_price * 0.8 else: card_price = eu_card_price if card_price < 1: card_price = 1 card_price = str(round(card_price, 2)) try: card_image = 'http://gatherer.wizards.com/Handlers/Image.ashx?multiverseid=' + card_id + '&type=card' except: self.msg = QMessageBox() self.msg.setWindowTitle("Upload Failed") self.msg.setText("Upload Failed, wizards gatherer error") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() urllib.request.urlretrieve(card_image, 'temp.jpg') # Resize card base_height = 500 img = Image.open('temp.jpg') height_percent = (base_height / float(img.size[1])) wsize = int((float(img.size[0]) * float(height_percent))) img = img.resize((wsize, base_height), PIL.Image.ANTIALIAS) img.save('temp.png') # Upload to PictShare files = {'file': open('temp.png', 'rb')} try: r = requests.post('https://pictshare.net/api/upload.php', files=files) except: self.msg = QMessageBox() self.msg.setWindowTitle("Upload Failed") self.msg.setText("Upload Failed, PictShare error") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() print(r) r = r.text r = json.loads(r) print(r) r = r['url'] # Fix using regular expression, may not be needed at a later date r = re.sub('\\.net', '.net/', r) r = re.sub('\\.net//', '.net/', r) print(r) try: image = self.api.execute('UploadSiteHostedPictures', {'ExternalPictureURL': r}) image = image.dict() image = image['SiteHostedPictureDetails']['FullURL'] print(image) # Upload to ebay response = self.api.execute('AddFixedPriceItem', { 'Item': {'Title': card_name + ' MTG - NM/M', 'Description': card_name + ' MTG - NM/M', 'Quantity': '1', 'PictureDetails': {'PictureURL': image}, 'ReturnPolicy': {'ReturnsAcceptedOption': 'ReturnsNotAccepted'}, 'DispatchTimeMax': '3', 'ConditionID': '1000', 'StartPrice': card_price, 'PostalCode': self.yaml_config["PostalCode"], 'Currency': self.yaml_config["Currency"], 'Country': 'GB', 'ListingDuration': 'Days_30', 'PaymentMethods': 'PayPal', 'PayPalEmailAddress': self.yaml_config["PayPalEmailAddress"], 'PrimaryCategory': {'CategoryID': '38292'}, 'ShippingDetails': {'ShippingType': 'Flat', 'ShippingServiceOptions': {'ShippingServicePriority': '1', 'ShippingService': self.yaml_config[ "ShippingService"], 'ShippingServiceCost': '1'}}}}) print(response.dict()) print(response.reply) self.msg = QMessageBox() if response.reply.Ack == 'Failure': self.msg.setWindowTitle("Upload Failed") self.msg.setText("Upload Complete, please check log.txt") self.msg.setStandardButtons(QMessageBox.Ok) with open('log.txt', 'a+') as log_file: log_file.write(response.reply) else: self.msg.setWindowTitle("Upload Complete") self.msg.setText("Upload Complete, please check your ebay account to confirm") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() except ConnectionError as e: print(e) print(e.response.dict()) def get_multiverse_id(self, name): try: name = re.sub(' ', '%20', name) r = 
requests.get('https://api.scryfall.com/cards/named?exact=' + name) r = json.loads(r.text) return r['multiverse_ids'][0] except: self.msg = QMessageBox() self.msg.setWindowTitle("Upload Failed") self.msg.setText("Upload Failed, scryfall error") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec() def get_card_info_and_sell(self, name): try: multiverse_id = self.get_multiverse_id(name) r = requests.get('http://api.cardsearch.nl/v1/prices?key=W00dw0rk$&mids[]=' + str(multiverse_id)) r = json.loads(r.text) r = r[0] card_name = r.get('name') eu_card_price = r.get('price_normal') us_card_price = r.get('us_normal') card_set = r.get('set_id') card_set_name = r.get('set_name') card_id = r.get('multiverse_id') # Display card info in CLI print('Name: ' + card_name) print('Set: ' + card_set) print('Set name: ' + card_set_name) print('Card ID: ' + str(card_id)) self.upload_card(card_name, eu_card_price, us_card_price, card_id) except: self.msg = QMessageBox() self.msg.setWindowTitle("Upload Failed") self.msg.setText("Upload Failed, card name not valid") self.msg.setStandardButtons(QMessageBox.Ok) self.msg.exec()
2.609375
3
bot/exts/github/github.py
v1nam/gurkbot
24
2887
import typing from bot.constants import BOT_REPO_URL from discord import Embed from discord.ext import commands from discord.ext.commands.cooldowns import BucketType from . import _issues, _profile, _source class Github(commands.Cog): """ Github Category cog, which contains commands related to github. Commands: ├ profile Fetches a user's GitHub information. ├ issue Command to retrieve issue(s) from a GitHub repository. └ source Displays information about the bot's source code. """ def __init__(self, bot: commands.Bot) -> None: self.bot = bot @commands.group(name="github", aliases=("gh",), invoke_without_command=True) async def github_group(self, ctx: commands.Context) -> None: """Commands for Github.""" await ctx.send_help(ctx.command) @github_group.command(name="profile") @commands.cooldown(1, 10, BucketType.user) async def profile(self, ctx: commands.Context, username: str) -> None: """ Fetches a user's GitHub information. Username is optional and sends the help command if not specified. """ github_profile = _profile.GithubInfo(self.bot.http_session) embed = await github_profile.get_github_info(username) await ctx.send(embed=embed) @github_group.command(name="issue", aliases=("pr",)) async def issue( self, ctx: commands.Context, numbers: commands.Greedy[int], repository: typing.Optional[str] = None, ) -> None: """Command to retrieve issue(s) from a GitHub repository.""" github_issue = _issues.Issues(self.bot.http_session) if not numbers: raise commands.MissingRequiredArgument(ctx.command.clean_params["numbers"]) if repository is None: user = "gurkult" else: user, _, repository = repository.rpartition("/") if user == "": user = "gurkult" embed = await github_issue.issue(ctx.message.channel, numbers, repository, user) await ctx.send(embed=embed) @github_group.command(name="source", aliases=("src", "inspect")) async def source_command( self, ctx: commands.Context, *, source_item: typing.Optional[str] = None ) -> None: """Displays information about the bot's source code.""" if source_item is None: embed = Embed(title="Gurkbot's GitHub Repository") embed.add_field(name="Repository", value=f"[Go to GitHub]({BOT_REPO_URL})") embed.set_thumbnail(url=self.bot.user.avatar_url) await ctx.send(embed=embed) return elif not ctx.bot.get_command(source_item): raise commands.BadArgument( f"Unable to convert `{source_item}` to valid command or Cog." ) github_source = _source.Source(self.bot.http_session, self.bot.user.avatar_url) embed = await github_source.inspect(cmd=ctx.bot.get_command(source_item)) await ctx.send(embed=embed) def setup(bot: commands.Bot) -> None: """Load the Github cog.""" bot.add_cog(Github(bot))
2.6875
3
log/slack_sender.py
SmashKs/BarBarian
0
2888
<gh_stars>0 from slackclient import SlackClient from external import SLACK_API_KEY class SlackBot: API_CHAT_MSG = 'chat.postMessage' BOT_NAME = 'News Bot' DEFAULT_CHANNEL = 'news_notification' def __new__(cls, *p, **k): if '_the_instance' not in cls.__dict__: cls._the_instance = object.__new__(cls) return cls._the_instance def __init__(self): self.__slack_client = SlackClient(SLACK_API_KEY) def send_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, channel=channel, text=text) def send_formatted_msg_to(self, text='', channel=DEFAULT_CHANNEL): self.__slack_client.api_call(SlackBot.API_CHAT_MSG, username=SlackBot.BOT_NAME, mrkdwn=True, channel=channel, text=text) if __name__ == '__main__': SlackBot().send_msg_to('hello world!!')
2.5625
3
src/pytezos/block/forge.py
miracle2k/pytezos
98
2889
from typing import Any, Dict, List, Tuple from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp def bump_fitness(fitness: Tuple[str, str]) -> Tuple[str, str]: if len(fitness) == 0: major = 0 minor = 1 else: major = int.from_bytes(bytes.fromhex(fitness[0]), 'big') minor = int.from_bytes(bytes.fromhex(fitness[1]), 'big') + 1 return major.to_bytes(1, 'big').hex(), minor.to_bytes(8, 'big').hex() def forge_int_fixed(value: int, length: int) -> bytes: return value.to_bytes(length, 'big') def forge_command(command: str) -> bytes: if command == 'activate': return b'\x00' raise NotImplementedError(command) def forge_fitness(fitness: List[str]) -> bytes: return forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)), fitness))) def forge_priority(priority: int) -> bytes: return priority.to_bytes(2, 'big') def forge_content(content: Dict[str, Any]) -> bytes: res = b'' res += forge_command(content['command']) res += forge_base58(content['hash']) res += forge_fitness(content['fitness']) res += bytes.fromhex(content['protocol_parameters']) return res def forge_protocol_data(protocol_data: Dict[str, Any]) -> bytes: res = b'' if protocol_data.get('content'): res += forge_content(protocol_data['content']) else: res += forge_priority(protocol_data['priority']) res += bytes.fromhex(protocol_data['proof_of_work_nonce']) if protocol_data.get('seed_nonce_hash'): res += b'\xFF' res += forge_base58(protocol_data['seed_nonce_hash']) else: res += b'\x00' res += b'\xFF' if protocol_data['liquidity_baking_escape_vote'] else b'\x00' return res def forge_block_header(shell_header: Dict[str, Any]) -> bytes: res = forge_int_fixed(shell_header['level'], 4) res += forge_int_fixed(shell_header['proto'], 1) res += forge_base58(shell_header['predecessor']) res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8) res += forge_int_fixed(shell_header['validation_pass'], 1) res += forge_base58(shell_header['operations_hash']) res += forge_fitness(shell_header['fitness']) res += forge_base58(shell_header['context']) res += bytes.fromhex(shell_header['protocol_data']) return res
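# Illustrative examples of the helpers above (values computed from this code, not taken from pytezos docs):
# bump_fitness(()) -> ('00', '0000000000000001')                          # empty fitness starts at major 0, minor 1
# bump_fitness(('01', '0000000000000005')) -> ('01', '0000000000000006')  # only the minor component is incremented
# forge_priority(1) -> b'\x00\x01'                                        # priority is a 2-byte big-endian int
# forge_int_fixed(7, 4) -> b'\x00\x00\x00\x07'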
2.296875
2
python/paddle/fluid/tests/unittests/test_roi_pool_op.py
jichangjichang/Paddle
9
2890
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import unittest import numpy as np import math import sys import paddle.compat as cpt from op_test import OpTest class TestROIPoolOp(OpTest): def set_data(self): self.init_test_case() self.make_rois() self.calc_roi_pool() self.inputs = {'X': self.x, 'ROIs': (self.rois[:, 1:5], self.rois_lod)} self.attrs = { 'spatial_scale': self.spatial_scale, 'pooled_height': self.pooled_height, 'pooled_width': self.pooled_width } self.outputs = {'Out': self.outs, 'Argmax': self.argmaxes} def init_test_case(self): self.batch_size = 3 self.channels = 3 self.height = 6 self.width = 4 # n, c, h, w self.x_dim = (self.batch_size, self.channels, self.height, self.width) self.spatial_scale = 1.0 / 4.0 self.pooled_height = 2 self.pooled_width = 2 self.x = np.random.random(self.x_dim).astype('float32') def calc_roi_pool(self): out_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) argmax_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) for i in range(self.rois_num): roi = self.rois[i] roi_batch_id = roi[0] roi_start_w = int(cpt.round(roi[1] * self.spatial_scale)) roi_start_h = int(cpt.round(roi[2] * self.spatial_scale)) roi_end_w = int(cpt.round(roi[3] * self.spatial_scale)) roi_end_h = int(cpt.round(roi[4] * self.spatial_scale)) roi_height = int(max(roi_end_h - roi_start_h + 1, 1)) roi_width = int(max(roi_end_w - roi_start_w + 1, 1)) x_i = self.x[roi_batch_id] bin_size_h = float(roi_height) / float(self.pooled_height) bin_size_w = float(roi_width) / float(self.pooled_width) for c in range(self.channels): for ph in range(self.pooled_height): for pw in range(self.pooled_width): hstart = int(math.floor(ph * bin_size_h)) wstart = int(math.floor(pw * bin_size_w)) hend = int(math.ceil((ph + 1) * bin_size_h)) wend = int(math.ceil((pw + 1) * bin_size_w)) hstart = min(max(hstart + roi_start_h, 0), self.height) hend = min(max(hend + roi_start_h, 0), self.height) wstart = min(max(wstart + roi_start_w, 0), self.width) wend = min(max(wend + roi_start_w, 0), self.width) is_empty = (hend <= hstart) or (wend <= wstart) if is_empty: out_data[i, c, ph, pw] = 0 else: out_data[i, c, ph, pw] = -sys.float_info.max argmax_data[i, c, ph, pw] = -1 for h in range(hstart, hend): for w in range(wstart, wend): if x_i[c, h, w] > out_data[i, c, ph, pw]: out_data[i, c, ph, pw] = x_i[c, h, w] argmax_data[i, c, ph, pw] = h * self.width + w self.outs = out_data.astype('float32') self.argmaxes = argmax_data.astype('int64') def make_rois(self): rois = [] self.rois_lod = [[]] for bno in range(self.batch_size): self.rois_lod[0].append(bno + 1) for i in range(bno + 1): x1 = np.random.random_integers( 0, self.width // self.spatial_scale - self.pooled_width) y1 = np.random.random_integers( 0, self.height // self.spatial_scale - self.pooled_height) x2 = np.random.random_integers(x1 + self.pooled_width, self.width // self.spatial_scale) y2 = 
np.random.random_integers( y1 + self.pooled_height, self.height // self.spatial_scale) roi = [bno, x1, y1, x2, y2] rois.append(roi) self.rois_num = len(rois) self.rois = np.array(rois).astype("int64") def setUp(self): self.op_type = "roi_pool" self.set_data() def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') if __name__ == '__main__': unittest.main()
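# Worked example of the reference bin arithmetic in calc_roi_pool above (illustrative values):
# with pooled_height = 2 and roi_height = 3, bin_size_h = 1.5, so
#   ph = 0: hstart = floor(0 * 1.5) = 0, hend = ceil(1 * 1.5) = 2  -> rows 0..1
#   ph = 1: hstart = floor(1 * 1.5) = 1, hend = ceil(2 * 1.5) = 3  -> rows 1..2
# i.e. neighbouring bins may overlap by one row, and each bin is max-pooled by the nested loops.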
1.953125
2
testproject/testapp/tests/__init__.py
movermeyer/django-firestone
1
2891
<reponame>movermeyer/django-firestone from test_proxy import * from test_serializers import * from test_deserializers import * from test_exceptions import * from test_authentication import * from test_whole_flow import * from test_handlers_metaclass_magic import * from test_handlers_serialize_to_python import * from test_handlers_is_method_allowed import * from test_handlers_data_control import * from test_handlers_package import * from test_handlers_finalize_pending import * from test_handlers_cleanse_body import * from test_handlers_validate import * from test_handlers_clean_models import * from test_handlers_get import * from test_handlers_is_catastrophic import * from test_handlers_post import * from test_handlers_put import * from test_handlers_delete import * from test_handlers_patch_response import * from test_handlers_authentication_hook import * from test_handlers_filter_data import * from test_handlers_order import * from test_handlers_order_data import * from test_handlers_paginate import * from test_handlers_paginate_data import * from test_handlers_inject_data_hook import * from test_handlers_handle_exception import * from test_handlers_deserialize_body import *
1.585938
2
Day20.py
SheepiCagio/Advent-of-Code-2021
0
2892
<gh_stars>0 import numpy as np raw = open("inputs/20.txt","r").readlines() input_array= [(i.replace('\n', '').replace('.','0').replace('#', '1')) for i in raw] test_raw = open("inputs/20_test.txt","r").readlines() test_array= [(i.replace('\n', '').replace('.','0').replace('#', '1')) for i in test_raw] def addLayerZero(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.zeros(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.zeros(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.zeros(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:]) > 0: grid = np.vstack((grid, np.zeros(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def addLayerOnes(grid): #if sum(np.asarray(grid)[:,0]) > 0: grid = np.hstack((np.ones(len(grid), dtype=int)[:, np.newaxis],grid)) #if sum(np.asarray(grid)[0,:]) > 0: grid = np.vstack((np.ones(len(grid[0]), dtype=int)[np.newaxis,:],grid)) # if sum(np.asarray(grid)[:,-1]) > 0: grid = np.hstack((grid,np.ones(len(grid), dtype=int)[:, np.newaxis])) # if sum(np.asarray(grid)[-1,:]) > 0: grid = np.vstack((grid, np.ones(len(grid[0]), dtype=int)[np.newaxis,:])) return grid def pictureEnhancer(input_array,iter): splitvalue = False index_string = '' grid = [] for i in input_array: if i == '': splitvalue = True continue if not splitvalue: index_string += i else: grid.append(list(i)) grid = [[int(i) for i in row] for row in grid] for x in range(1,iter+1): grid = enhancer(grid, index_string,x) print('The number of lit pixels is:', sum(sum(grid))) def enhancer(grid, index_string,iter): print(iter) if iter == 1 or index_string[0] == '0' or (iter % 2 == 1 and index_string[511] == '0'): grid = addLayerZero(grid) output_grid = np.zeros((len(grid),len(grid[0])),dtype=int) grid = addLayerZero(grid) elif (index_string[0] == '1' and index_string [511] == '1') or (iter % 2 == 0 and index_string[511] == '0'): grid = addLayerOnes(grid) output_grid = np.ones((len(grid),len(grid[0])),dtype=int) grid = addLayerOnes(grid) for i in range(1,len(grid)-1): for j in range(1, len(grid[i])-1): binStr = '' for k in range(-1,2): for l in range(-1,2): binStr += str(grid[i+k][j+l]) output_grid[i-1][j-1] = index_string[int(binStr,2)] return output_grid #pictureEnhancer(test_array,2) #pictureEnhancer(input_array,2) pictureEnhancer(test_array,50) pictureEnhancer(input_array,50)
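# Worked example of the 9-bit lookup in enhancer() above (the classic AoC 2021 day 20 case):
# a 3x3 neighbourhood of
#   0 0 0
#   1 0 0
#   0 1 0
# is read row by row into binStr = '000100010', so int(binStr, 2) == 34
# and the output pixel is index_string[34].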
2.578125
3
questions/53349623/main.py
sesu089/stackoverflow
302
2893
<gh_stars>100-1000 import sys from PyQt5 import QtCore, QtGui, QtWidgets class Demo(QtWidgets.QWidget): def __init__(self): super(Demo, self).__init__() self.button = QtWidgets.QPushButton() self.label = QtWidgets.QLabel(alignment=QtCore.Qt.AlignCenter) self.combo = QtWidgets.QComboBox(self) self.combo.currentIndexChanged.connect(self.change_func) self.trans = QtCore.QTranslator(self) self.v_layout = QtWidgets.QVBoxLayout(self) self.v_layout.addWidget(self.combo) self.v_layout.addWidget(self.button) self.v_layout.addWidget(self.label) options = ([('English', ''), ('français', 'eng-fr' ), ('中文', 'eng-chs'), ]) for i, (text, lang) in enumerate(options): self.combo.addItem(text) self.combo.setItemData(i, lang) self.retranslateUi() @QtCore.pyqtSlot(int) def change_func(self, index): data = self.combo.itemData(index) if data: self.trans.load(data) QtWidgets.QApplication.instance().installTranslator(self.trans) else: QtWidgets.QApplication.instance().removeTranslator(self.trans) def changeEvent(self, event): if event.type() == QtCore.QEvent.LanguageChange: self.retranslateUi() super(Demo, self).changeEvent(event) def retranslateUi(self): self.button.setText(QtWidgets.QApplication.translate('Demo', 'Start')) self.label.setText(QtWidgets.QApplication.translate('Demo', 'Hello, World')) if __name__ == '__main__': app = QtWidgets.QApplication(sys.argv) demo = Demo() demo.show() sys.exit(app.exec_())
2.390625
2
tests/test_error_descriptions_from_raises.py
iterait/apistrap
6
2894
import pytest from apistrap.flask import FlaskApistrap from apistrap.schemas import ErrorResponse @pytest.fixture() def app_with_raises(app): oapi = FlaskApistrap() @app.route("/", methods=["GET"]) def view(): """ Something something. :raises KeyError: KeyError description """ oapi.init_app(app) @pytest.fixture() def app_with_raises_and_handler(app): oapi = FlaskApistrap() oapi.add_error_handler(KeyError, 515, lambda e: ErrorResponse()) @app.route("/", methods=["GET"]) def view(): """ Something something. :raises KeyError: KeyError description """ oapi.init_app(app) def test_error_descriptions_from_raises(app_with_raises, client): response = client.get("/spec.json") assert response.json["paths"]["/"]["get"]["responses"] == { "500": { "description": "KeyError description", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } } def test_http_code_from_handler(app_with_raises_and_handler, client): response = client.get("/spec.json") assert response.json["paths"]["/"]["get"]["responses"] == { "515": { "description": "KeyError description", "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ErrorResponse" } } } } }
2.4375
2
projects/api/UsersApi.py
chamathshashika/projects-python-wrappers
0
2895
<reponame>chamathshashika/projects-python-wrappers #$Id$ from projects.util.ZohoHttpClient import ZohoHttpClient from projects.api.Api import Api from projects.parser.UsersParser import UsersParser base_url = Api().base_url zoho_http_client = ZohoHttpClient() parser = UsersParser() class UsersApi: """Users Api class is used to 1.Get all the users in the given project. """ def __init__(self, authtoken, portal_id): """Initialize Users api using user's authtoken and portal id. Args: authtoken(str): User's authtoken. portal_id(str): User's portal id. """ self.details = { 'authtoken': authtoken } self.portal_id = portal_id def get_users(self, project_id): """Get all the users in the given project. Args: project_id(long): Project id. Returns: list of instance: List of users object. """ url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/users/' response = zoho_http_client.get(url, self.details) return parser.get_users(response)
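# Illustrative usage (the authtoken, portal id and project id below are placeholders, not real values):
# api = UsersApi('my_zoho_authtoken', '123456789')
# users = api.get_users(987654321)   # list of user objects built by UsersParser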
2.765625
3
useless/tuck_arms.py
leader1313/Baxter_teleoperation_system
0
2896
#!/usr/bin/env python # Copyright (c) 2013-2015, Rethink Robotics # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the Rethink Robotics nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """ Tool to tuck/untuck Baxter's arms to/from the shipping pose """ import argparse from copy import deepcopy import rospy from std_msgs.msg import ( Empty, Bool, ) import baxter_interface from baxter_core_msgs.msg import ( CollisionAvoidanceState, ) from baxter_interface import CHECK_VERSION class Tuck(object): def __init__(self, tuck_cmd): self._done = False self._limbs = ('left', 'right') self._arms = { 'left': baxter_interface.Limb('left'), 'right': baxter_interface.Limb('right'), } self._tuck = tuck_cmd self._tuck_rate = rospy.Rate(20.0) # Hz self._tuck_threshold = 0.2 # radians self._peak_angle = -1.6 # radians self._arm_state = { 'tuck': {'left': 'none', 'right': 'none'}, 'collide': {'left': False, 'right': False}, 'flipped': {'left': False, 'right': False} } self._joint_moves = { 'tuck': { 'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0], 'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0] }, 'untuck': { 'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50], 'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50] } } self._collide_lsub = rospy.Subscriber( 'robot/limb/left/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'left') self._collide_rsub = rospy.Subscriber( 'robot/limb/right/collision_avoidance_state', CollisionAvoidanceState, self._update_collision, 'right') self._disable_pub = { 'left': rospy.Publisher( 'robot/limb/left/suppress_collision_avoidance', Empty, queue_size=10), 'right': rospy.Publisher( 'robot/limb/right/suppress_collision_avoidance', Empty, queue_size=10) } self._rs = baxter_interface.RobotEnable(CHECK_VERSION) self._enable_pub = rospy.Publisher('robot/set_super_enable', Bool, queue_size=10) def _update_collision(self, data, limb): self._arm_state['collide'][limb] = len(data.collision_object) > 0 self._check_arm_state() def _check_arm_state(self): """ Check for goals and behind collision field. 
If s1 joint is over the peak, collision will need to be disabled to get the arm around the head-arm collision force-field. """ diff_check = lambda a, b: abs(a - b) <= self._tuck_threshold for limb in self._limbs: angles = [self._arms[limb].joint_angle(joint) for joint in self._arms[limb].joint_names()] # Check if in a goal position untuck_goal = map(diff_check, angles, self._joint_moves['untuck'][limb]) tuck_goal = map(diff_check, angles[0:2], self._joint_moves['tuck'][limb][0:2]) if all(untuck_goal): self._arm_state['tuck'][limb] = 'untuck' elif all(tuck_goal): self._arm_state['tuck'][limb] = 'tuck' else: self._arm_state['tuck'][limb] = 'none' # Check if shoulder is flipped over peak self._arm_state['flipped'][limb] = ( self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle) def _prepare_to_tuck(self): # If arms are in "tucked" state, disable collision avoidance # before enabling robot, to avoid arm jerking from "force-field". head = baxter_interface.Head() start_disabled = not self._rs.state().enabled at_goal = lambda: (abs(head.pan()) <= baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE) rospy.loginfo("Moving head to neutral position") while not at_goal() and not rospy.is_shutdown(): if start_disabled: [pub.publish(Empty()) for pub in self._disable_pub.values()] if not self._rs.state().enabled: self._enable_pub.publish(True) head.set_pan(0.0, 0.5, timeout=0) self._tuck_rate.sleep() if start_disabled: while self._rs.state().enabled == True and not rospy.is_shutdown(): [pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def _move_to(self, tuck, disabled): if any(disabled.values()): [pub.publish(Empty()) for pub in self._disable_pub.values()] while (any(self._arm_state['tuck'][limb] != goal for limb, goal in tuck.viewitems()) and not rospy.is_shutdown()): if self._rs.state().enabled == False: self._enable_pub.publish(True) for limb in self._limbs: if disabled[limb]: self._disable_pub[limb].publish(Empty()) if limb in tuck: self._arms[limb].set_joint_positions(dict(zip( self._arms[limb].joint_names(), self._joint_moves[tuck[limb]][limb]))) self._check_arm_state() self._tuck_rate.sleep() if any(self._arm_state['collide'].values()): self._rs.disable() return def supervised_tuck(self): # Update our starting state to check if arms are tucked self._prepare_to_tuck() self._check_arm_state() # Tuck Arms if self._tuck == True: # If arms are already tucked, report this to user and exit. 
if all(self._arm_state['tuck'][limb] == 'tuck' for limb in self._limbs): rospy.loginfo("Tucking: Arms already in 'Tucked' position.") self._done = True return else: rospy.loginfo("Tucking: One or more arms not Tucked.") any_flipped = not all(self._arm_state['flipped'].values()) if any_flipped: rospy.loginfo( "Moving to neutral start position with collision %s.", "on" if any_flipped else "off") # Move to neutral pose before tucking arms to avoid damage self._check_arm_state() actions = dict() disabled = {'left': True, 'right': True} for limb in self._limbs: if not self._arm_state['flipped'][limb]: actions[limb] = 'untuck' disabled[limb] = False self._move_to(actions, disabled) # Disable collision and Tuck Arms rospy.loginfo("Tucking: Tucking with collision avoidance off.") actions = {'left': 'tuck', 'right': 'tuck'} disabled = {'left': True, 'right': True} self._move_to(actions, disabled) self._done = True return # Untuck Arms else: # If arms are tucked disable collision and untuck arms if any(self._arm_state['flipped'].values()): rospy.loginfo("Untucking: One or more arms Tucked;" " Disabling Collision Avoidance and untucking.") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions = {'left': 'untuck', 'right': 'untuck'} self._move_to(actions, suppress) self._done = True return # If arms already untucked, move to neutral location else: rospy.loginfo("Untucking: Arms already Untucked;" " Moving to neutral position.") self._check_arm_state() suppress = deepcopy(self._arm_state['flipped']) actions = {'left': 'untuck', 'right': 'untuck'} self._move_to(actions, suppress) self._done = True return def clean_shutdown(self): """Handles ROS shutdown (Ctrl-C) safely.""" if not self._done: rospy.logwarn('Aborting: Shutting down safely...') if any(self._arm_state['collide'].values()): while self._rs.state().enabled != False: [pub.publish(Empty()) for pub in self._disable_pub.values()] self._enable_pub.publish(False) self._tuck_rate.sleep() def main(): parser = argparse.ArgumentParser() tuck_group = parser.add_mutually_exclusive_group(required=True) tuck_group.add_argument("-t","--tuck", dest="tuck", action='store_true', default=False, help="tuck arms") tuck_group.add_argument("-u", "--untuck", dest="untuck", action='store_true', default=False, help="untuck arms") args = parser.parse_args(rospy.myargv()[1:]) tuck = args.tuck rospy.loginfo("Initializing node... ") rospy.init_node("rsdk_tuck_arms") rospy.loginfo("%sucking arms" % ("T" if tuck else "Unt",)) tucker = Tuck(tuck) rospy.on_shutdown(tucker.clean_shutdown) tucker.supervised_tuck() rospy.loginfo("Finished tuck") if __name__ == "__main__": main()
1.539063
2
django-system/src/tsm_api/serializers.py
Deepak-Kharah/ioe-project
0
2897
<filename>django-system/src/tsm_api/serializers.py from rest_framework import serializers from .models import Measurement class MeasurementSerializer(serializers.ModelSerializer): class Meta: model = Measurement fields = '__all__'
1.492188
1
src/GalaxyDynamicsFromVc/units.py
pabferde/galaxy_dynamics_from_Vc
0
2898
<gh_stars>0 _Msun_kpc3_to_GeV_cm3_factor = 0.3/8.0e6 def Msun_kpc3_to_GeV_cm3(value): return value*_Msun_kpc3_to_GeV_cm3_factor
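# Worked example (pure arithmetic on the factor above, 0.3/8.0e6 = 3.75e-8):
# Msun_kpc3_to_GeV_cm3(8.0e6) == 0.3, i.e. 8.0e6 Msun/kpc^3 corresponds to 0.3 GeV/cm^3,
# and Msun_kpc3_to_GeV_cm3(1.0e7) == 0.375.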
1.382813
1
Python/leetcode.031.next-permutation.py
tedye/leetcode
4
2899
<reponame>tedye/leetcode<gh_stars>1-10 class Solution(object): def nextPermutation(self, nums): """ :type nums: List[int] :rtype: void Do not return anything, modify nums in-place instead. """ if not nums: return n = len(nums)-1 while n > 0 and nums[n-1] >= nums[n]: n -= 1 t = n if t == 0: nums[:] = nums[::-1] return x = nums[n-1] while t < len(nums) and x < nums[t]: t += 1 temp = nums[t-1] nums[t-1] = nums[n-1] nums[n-1] = temp nums[n:] = nums[n:][::-1] return
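# Worked trace of the algorithm above (illustrative):
# nums = [1, 3, 2]
#   scan from the right for the first descent: nums[0] = 1 < nums[1] = 3, so n stops at 1
#   scan right from index n while elements exceed nums[n-1] = 1: 3 > 1, 2 > 1, so t stops at 3
#   swap nums[t-1] = 2 with nums[n-1] = 1  -> [2, 3, 1]
#   reverse the suffix nums[1:]            -> [2, 1, 3], the next permutation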
3.203125
3