Dataset schema (fields repeated for each record below, in this order):

  max_stars_repo_path    string, 4-245 chars
  max_stars_repo_name    string, 7-115 chars
  max_stars_count        int64, 101-368k
  id                     string, 2-8 chars
  content                string, 6-1.03M chars
library/oci_fast_connect_provider_service_facts.py
slmjy/oci-ansible-modules
106
141655
#!/usr/bin/python
# Copyright (c) 2019, Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.

from __future__ import absolute_import, division, print_function

__metaclass__ = type

ANSIBLE_METADATA = {
    "metadata_version": "1.1",
    "status": ["preview"],
    "supported_by": "community",
}

DOCUMENTATION = """
---
module: oci_fast_connect_provider_service_facts
short_description: Fetches details of one or more OCI Fast Connect Provider Services
description:
    - Fetches details of the OCI Fast Connect Provider Services
version_added: "2.5"
options:
    compartment_id:
        description: Identifier of the compartment under which the specified fast connect provider service exists
        required: false
    provider_service_id:
        description: Identifier of the fast connect provider service whose details need to be fetched.
        required: false
        aliases: [ 'id' ]
author:
    - "<NAME>(@debayan_gupta)"
extends_documentation_fragment: [ oracle ]
"""

EXAMPLES = """
# Note: These examples do not set authentication details.
# Fetch All Fast Connect Provider Services under a specific compartment
- name: Fetch All Fast Connect Provider Services under a specific compartment
  oci_fast_connect_provider_service_facts:
      compartment_id: 'ocid1.compartment.oc1.iad.xxxxxEXAMPLExxxxx'

# Fetch a specific Fast Connect Provider Service
- name: Fetch a specific Fast Connect Provider Service
  oci_fast_connect_provider_service_facts:
      provider_service_id: 'ocid1.serviceprovider.oc1.iad.xxxxxEXAMPLExxxxx'
"""

RETURN = """
oci_fast_connect_provider_services:
    description: Attributes of the Fast Connect Provider Service.
    returned: success
    type: complex
    contains:
        description:
            description: A description of the service offered by the provider.
            returned: always
            type: string
            sample: https://megaport.al/
        id:
            description: Identifier of the Fast Connect Provider Service
            returned: always
            type: string
            sample: ocid1.providerservice.oc1.iad.xxxxxEXAMPLExxxxx
        private_peering_bgp_management:
            description: Who is responsible for managing the private peering BGP information.
            returned: always
            type: string
            sample: CUSTOMER_MANAGED
        provider_name:
            description: The name of the provider.
            returned: always
            type: string
            sample: Megaport
        provider_service_name:
            description: The name of the service offered by the provider.
            returned: always
            type: string
            sample: Service
        public_peering_bgp_management:
            description: Who is responsible for managing the public peering BGP information.
            returned: always
            type: string
            sample: ORACLE_MANAGED
        supported_virtual_circuit_types:
            description: An array of virtual circuit types supported by this service.
            returned: always
            type: list
            sample: ["PRIVATE", "PUBLIC"]
        type:
            description: Provider service type.
            returned: always
            type: string
            sample: LAYER2
    sample: [{
            "description":"https://megaport.al/",
            "id":"ocid1.providerservice.oc1.iad.xxxxxEXAMPLExxxxx",
            "private_peering_bgp_management":"CUSTOMER_MANAGED",
            "provider_name":"Megaport",
            "provider_service_name":"Service",
            "public_peering_bgp_management":"ORACLE_MANAGED",
            "supported_virtual_circuit_types":null,
            "type":"LAYER2"
            },
            {
            "description":"https://marketplaceportal.com/web/guest/login",
            "id":"ocid1.providerservice.oc1.iad.xxxxxEXAMPLExxxxx",
            "private_peering_bgp_management":"CUSTOMER_MANAGED",
            "provider_name":"Digital Realty",
            "provider_service_name":"Service Exchange",
            "public_peering_bgp_management":"ORACLE_MANAGED",
            "supported_virtual_circuit_types":[
                "PRIVATE",
                "PUBLIC"
            ],
            "type":"LAYER2"
            }]
"""

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.oracle import oci_utils

try:
    from oci.core import VirtualNetworkClient
    from oci.exceptions import ServiceError
    from oci.util import to_dict

    HAS_OCI_PY_SDK = True
except ImportError:
    HAS_OCI_PY_SDK = False


def list_fast_connect_provider_services(virtual_network_client, module):
    result = dict(fast_connect_provider_services="")
    compartment_id = module.params.get("compartment_id")
    provider_service_id = module.params.get("provider_service_id")
    try:
        if compartment_id:
            existing_fast_connect_provider_services = oci_utils.list_all_resources(
                virtual_network_client.list_fast_connect_provider_services,
                compartment_id=compartment_id,
            )
        elif provider_service_id:
            response = oci_utils.call_with_backoff(
                virtual_network_client.get_fast_connect_provider_service,
                provider_service_id=provider_service_id,
            )
            existing_fast_connect_provider_services = [response.data]
    except ServiceError as ex:
        module.fail_json(msg=ex.message)
    result["fast_connect_provider_services"] = to_dict(
        existing_fast_connect_provider_services
    )
    return result


def main():
    module_args = oci_utils.get_common_arg_spec()
    module_args.update(
        dict(
            compartment_id=dict(type="str", required=False),
            provider_service_id=dict(type="str", required=False, aliases=["id"]),
        )
    )
    module = AnsibleModule(
        argument_spec=module_args, mutually_exclusive=[["compartment_id", "id"]]
    )
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module")
    virtual_network_client = oci_utils.create_service_client(
        module, VirtualNetworkClient
    )
    result = list_fast_connect_provider_services(virtual_network_client, module)
    module.exit_json(**result)


if __name__ == "__main__":
    main()
Python/owo.py
PushpneetSingh/Hello-world
1428
141657
import time

print("OwO What's This!!!")
print("Here's my favorite activity in Japoneeese: プログラミング (yes google translate is good)")

furry = "Rawr x3 nuzzles how are you pounces on you you're so warm o3o notices you have a bulge o: someone's happy ;) nuzzles your necky wecky~ murr~ hehehe rubbies your bulgy wolgy you're so big :oooo rubbies more on your bulgy wolgy it doesn't stop growing -///- kisses you and lickies you"
furry = furry.split()

for f in furry:
    time.sleep(0.1)
    print(f)
plugins/vsphere/resoto_plugin_vsphere/config.py
someengineering/resoto
126
141665
from dataclasses import dataclass, field
from typing import ClassVar, Optional


@dataclass
class VSphereConfig:
    kind: ClassVar[str] = "vsphere"
    user: Optional[str] = field(default=None, metadata={"description": "User name"})
    password: Optional[str] = field(default=None, metadata={"description": "Password"})
    host: Optional[str] = field(
        default=None, metadata={"description": "Host name/address"}
    )
    port: int = field(default=443, metadata={"description": "TCP port"})
    insecure: bool = field(
        default=True,
        metadata={
            "description": "Allow insecure connection. Do not verify certificates."
        },
    )
dataset/doc_text.py
jamaalhay/Final_Proj
104
141683
# -*- coding: utf-8 -*-

import re
import logging
import torch
import numpy as np

logger = logging.getLogger(__name__)


class DocText:
    """
    define one sample text, like one context or one question
    """

    def __init__(self, nlp, text, config):
        doc = nlp(text)
        self.config = config
        self.token = []
        self.lemma = []
        self.pos = []
        self.ent = []
        self.em = []
        self.em_lemma = []
        self.right_space = []  # record whether the right side of every token is a white space

        for t in doc:
            if t.is_space:
                continue

            self.token.append(t.text)
            end_idx = t.idx + len(t.text)
            if end_idx < len(text) and text[end_idx] in Space.WHITE_SPACE:
                self.right_space.append(1)
            else:
                self.right_space.append(0)

            if config['use_em_lemma']:
                self.lemma.append(t.lemma_)
                self.em_lemma.append(0)

            if config['use_pos']:
                self.pos.append(t.tag_)  # also be t.pos_

            if config['use_ent']:
                self.ent.append(t.ent_type_)

            if config['use_em']:
                self.em.append(0)

    def __len__(self):
        return len(self.token)

    def update_em(self, doc_text2):
        """
        set the exact match and exact match on lemma features
        :param doc_text2: the doc text to match
        :return:
        """
        for i in range(len(self.em)):
            if self.config['use_em'] and self.token[i] in doc_text2.token:
                self.em[i] = 1
            if self.config['use_em_lemma'] and self.lemma[i] in doc_text2.lemma:
                self.em_lemma[i] = 1

    def to_id(self, feature_dict):
        """
        transform raw text to feature vector representation.
        it's slow, only used for interactive mode.
        :param feature_dict: ['id2word', 'id2char', 'id2pos', 'id2ent']
        :return:
        """
        sen_id = []
        add_features = {}
        feature_dict = {k: v.tolist() for k, v in feature_dict.items()}
        seq_len = len(self.token)

        if self.config['use_pos']:
            add_features['pos'] = torch.zeros((seq_len, len(feature_dict['id2pos'])), dtype=torch.float)
        if self.config['use_ent']:
            add_features['ent'] = torch.zeros((seq_len, len(feature_dict['id2ent'])), dtype=torch.float)
        if self.config['use_em']:
            add_features['em'] = torch.tensor(self.em, dtype=torch.float).unsqueeze(-1)
        if self.config['use_em_lemma']:
            add_features['em_lemma'] = torch.tensor(self.em_lemma, dtype=torch.float).unsqueeze(-1)

        for i in range(seq_len):
            # word
            word = self.token[i]
            if word in feature_dict['id2word']:
                sen_id.append(feature_dict['id2word'].index(word))
            else:
                sen_id.append(0)  # id=0 means padding value in preprocess
                logger.warning("word '%s' out of vocabulary" % word)

            # pos
            if self.config['use_pos']:
                pos = self.pos[i]
                if pos in feature_dict['id2pos']:
                    add_features['pos'][i][feature_dict['id2pos'].index(pos)] = 1
                else:
                    logging.warning("pos '%s' out of vocabulary" % pos)

            # ent
            if self.config['use_ent']:
                ent = self.ent[i]
                if ent in feature_dict['id2ent']:
                    add_features['ent'][i][feature_dict['id2ent'].index(ent)] = 1
                else:
                    logging.warning("ent '%s' out of vocabulary" % ent)

        rtn_features = None
        if len(add_features) > 0:
            rtn_features = torch.cat(list(add_features.values()), dim=-1)

        rtn_sen_id = torch.tensor(sen_id, dtype=torch.long)

        return rtn_sen_id, rtn_features


class Space:
    WHITE_SPACE = ' \t\n\r\u00A0\u1680​\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a' \
                  '​​\u202f\u205f​\u3000\u2028\u2029'

    @staticmethod
    def is_white_space(c):
        return c in Space.WHITE_SPACE

    @staticmethod
    def remove_white_space(s):
        return re.sub('[' + Space.WHITE_SPACE + ']', '', s)
CTFd/api/v1/submissions.py
omertuc/CTFd
3,592
141686
from typing import List

from flask_restx import Namespace, Resource

from CTFd.api.v1.helpers.request import validate_args
from CTFd.api.v1.helpers.schemas import sqlalchemy_to_pydantic
from CTFd.api.v1.schemas import (
    APIDetailedSuccessResponse,
    PaginatedAPIListSuccessResponse,
)
from CTFd.cache import clear_standings
from CTFd.constants import RawEnum
from CTFd.models import Submissions, db
from CTFd.schemas.submissions import SubmissionSchema
from CTFd.utils.decorators import admins_only
from CTFd.utils.helpers.models import build_model_filters

submissions_namespace = Namespace(
    "submissions", description="Endpoint to retrieve Submission"
)

SubmissionModel = sqlalchemy_to_pydantic(Submissions)
TransientSubmissionModel = sqlalchemy_to_pydantic(Submissions, exclude=["id"])


class SubmissionDetailedSuccessResponse(APIDetailedSuccessResponse):
    data: SubmissionModel


class SubmissionListSuccessResponse(PaginatedAPIListSuccessResponse):
    data: List[SubmissionModel]


submissions_namespace.schema_model(
    "SubmissionDetailedSuccessResponse", SubmissionDetailedSuccessResponse.apidoc()
)

submissions_namespace.schema_model(
    "SubmissionListSuccessResponse", SubmissionListSuccessResponse.apidoc()
)


@submissions_namespace.route("")
class SubmissionsList(Resource):
    @admins_only
    @submissions_namespace.doc(
        description="Endpoint to get submission objects in bulk",
        responses={
            200: ("Success", "SubmissionListSuccessResponse"),
            400: (
                "An error occurred processing the provided or stored data",
                "APISimpleErrorResponse",
            ),
        },
    )
    @validate_args(
        {
            "challenge_id": (int, None),
            "user_id": (int, None),
            "team_id": (int, None),
            "ip": (str, None),
            "provided": (str, None),
            "type": (str, None),
            "q": (str, None),
            "field": (
                RawEnum(
                    "SubmissionFields",
                    {
                        "challenge_id": "challenge_id",
                        "user_id": "user_id",
                        "team_id": "team_id",
                        "ip": "ip",
                        "provided": "provided",
                        "type": "type",
                    },
                ),
                None,
            ),
        },
        location="query",
    )
    def get(self, query_args):
        q = query_args.pop("q", None)
        field = str(query_args.pop("field", None))
        filters = build_model_filters(model=Submissions, query=q, field=field)

        args = query_args
        schema = SubmissionSchema(many=True)

        submissions = (
            Submissions.query.filter_by(**args)
            .filter(*filters)
            .paginate(max_per_page=100)
        )

        response = schema.dump(submissions.items)
        if response.errors:
            return {"success": False, "errors": response.errors}, 400

        return {
            "meta": {
                "pagination": {
                    "page": submissions.page,
                    "next": submissions.next_num,
                    "prev": submissions.prev_num,
                    "pages": submissions.pages,
                    "per_page": submissions.per_page,
                    "total": submissions.total,
                }
            },
            "success": True,
            "data": response.data,
        }

    @admins_only
    @submissions_namespace.doc(
        description="Endpoint to create a submission object. Users should interact with the attempt endpoint to submit flags.",
        responses={
            200: ("Success", "SubmissionListSuccessResponse"),
            400: (
                "An error occurred processing the provided or stored data",
                "APISimpleErrorResponse",
            ),
        },
    )
    @validate_args(TransientSubmissionModel, location="json")
    def post(self, json_args):
        req = json_args
        Model = Submissions.get_child(type=req.get("type"))
        schema = SubmissionSchema(instance=Model())
        response = schema.load(req)

        if response.errors:
            return {"success": False, "errors": response.errors}, 400

        db.session.add(response.data)
        db.session.commit()

        response = schema.dump(response.data)
        db.session.close()

        # Delete standings cache
        clear_standings()

        return {"success": True, "data": response.data}


@submissions_namespace.route("/<submission_id>")
@submissions_namespace.param("submission_id", "A Submission ID")
class Submission(Resource):
    @admins_only
    @submissions_namespace.doc(
        description="Endpoint to get a specific submission object",
        responses={
            200: ("Success", "SubmissionDetailedSuccessResponse"),
            400: (
                "An error occurred processing the provided or stored data",
                "APISimpleErrorResponse",
            ),
        },
    )
    def get(self, submission_id):
        submission = Submissions.query.filter_by(id=submission_id).first_or_404()
        schema = SubmissionSchema()
        response = schema.dump(submission)

        if response.errors:
            return {"success": False, "errors": response.errors}, 400

        return {"success": True, "data": response.data}

    @admins_only
    @submissions_namespace.doc(
        description="Endpoint to delete a submission object",
        responses={
            200: ("Success", "APISimpleSuccessResponse"),
            400: (
                "An error occurred processing the provided or stored data",
                "APISimpleErrorResponse",
            ),
        },
    )
    def delete(self, submission_id):
        submission = Submissions.query.filter_by(id=submission_id).first_or_404()
        db.session.delete(submission)
        db.session.commit()
        db.session.close()

        # Delete standings cache
        clear_standings()

        return {"success": True}
lldb/test/API/tools/lldb-server/TestGdbRemoteAttachWait.py
Machiry/checkedc-clang
250
141688
import os
from time import sleep

import gdbremote_testcase
import lldbgdbserverutils
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil


class TestGdbRemoteAttachWait(gdbremote_testcase.GdbRemoteTestCaseBase):

    mydir = TestBase.compute_mydir(__file__)

    def test_attach_with_vAttachWait(self):
        exe = '%s_%d' % (self.testMethodName, os.getpid())

        def launch_inferior():
            inferior = self.launch_process_for_attach(
                inferior_args=["sleep:60"],
                exe_path=self.getBuildArtifact(exe))
            self.assertIsNotNone(inferior)
            self.assertTrue(inferior.pid > 0)
            self.assertTrue(
                lldbgdbserverutils.process_is_running(
                    inferior.pid, True))
            return inferior

        self.build(dictionary={'EXE': exe})
        self.set_inferior_startup_attach_manually()

        server = self.connect_to_debug_monitor()
        self.assertIsNotNone(server)

        # Launch the first inferior (we shouldn't attach to this one).
        launch_inferior()

        self.add_no_ack_remote_stream()
        self.test_sequence.add_log_lines([
            # Do the attach.
            "read packet: $vAttachWait;{}#00".format(
                lldbgdbserverutils.gdbremote_hex_encode_string(exe)),
        ], True)
        # Run the stream until attachWait.
        context = self.expect_gdbremote_sequence()
        self.assertIsNotNone(context)

        # Sleep so we're sure that the inferior is launched after we ask for the attach.
        sleep(1)

        # Launch the second inferior (we SHOULD attach to this one).
        inferior_to_attach = launch_inferior()

        # Make sure the attach succeeded.
        self.test_sequence.add_log_lines([
            {"direction": "send",
             "regex": r"^\$T([0-9a-fA-F]{2})[^#]*#[0-9a-fA-F]{2}$",
             "capture": {1: "stop_signal_hex"}},
        ], True)
        self.add_process_info_collection_packets()

        # Run the stream sending the response..
        context = self.expect_gdbremote_sequence()
        self.assertIsNotNone(context)

        # Gather process info response.
        process_info = self.parse_process_info_response(context)
        self.assertIsNotNone(process_info)

        # Ensure the process id matches what we expected.
        pid_text = process_info.get('pid', None)
        self.assertIsNotNone(pid_text)
        reported_pid = int(pid_text, base=16)
        self.assertEqual(reported_pid, inferior_to_attach.pid)
python/pycylon/test/test_frame.py
deHasara/cylon
229
141695
##
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##

import os
import numpy as np
import pandas as pd
import pycylon as cn
import pyarrow as pa
from pycylon import Series
from pycylon.frame import DataFrame
from pycylon import Table
from pycylon.io import CSVReadOptions
from pycylon.io import read_csv
from pycylon import CylonContext
from pycylon.indexing.cyindex import BaseArrowIndex
import operator


def test_initialization_1():
    d1 = [[1, 2, 3], [4, 5, 6]]
    d2 = [np.array([1, 2, 3]), np.array([4, 5, 6])]
    d3 = {'0': [1, 2, 3], '1': [4, 5, 6]}
    d4 = pd.DataFrame(d3)
    d5 = pa.Table.from_pydict(d3)

    cdf1 = DataFrame(d1)
    cdf2 = DataFrame(d2)
    cdf3 = DataFrame(d3)
    cdf4 = DataFrame(d4)
    cdf5 = DataFrame(d5)

    assert cdf1.shape == cdf2.shape == cdf3.shape == cdf4.shape == cdf5.shape


def test_get_set_item():
    d1 = [[1, 2, 3], [4, 5, 6]]
    cdf1 = DataFrame(d1)
    print(cdf1)
    print(cdf1.columns)
    c1 = cdf1['0']
    print(c1.shape)
    d1 = DataFrame([[10, 20, 30]])
    print(d1.shape)
    print(cdf1)
    cdf1['0'] = d1
    print(cdf1)


def test_filter():
    ctx: CylonContext = CylonContext(config=None, distributed=False)

    table1_path = 'data/input/user_usage_tm_1.csv'
    table2_path = 'data/input/user_usage_tm_2.csv'

    assert os.path.exists(table1_path) and os.path.exists(table2_path)

    csv_read_options = CSVReadOptions().use_threads(True).block_size(1 << 30)

    tb: Table = read_csv(ctx, table1_path, csv_read_options)
    df: DataFrame = DataFrame(tb)

    column_name = 'monthly_mb'

    ops = [operator.__or__, operator.__and__]
    or_limits = [600, 5000, 15000]
    and_limits = [0, 5000, 1000]
    comp_op_or = [operator.__gt__, operator.__le__, operator.__gt__]
    comp_op_and = [operator.__gt__, operator.__le__, operator.__gt__]
    limits = [or_limits, and_limits]
    comp_ops = [comp_op_or, comp_op_and]

    for op, limit, comp_op in zip(ops, limits, comp_ops):
        print("Op ", op)
        tb_cond_1 = comp_op[0](df[column_name], limit[0])
        tb_cond_2 = comp_op[1](df[column_name], limit[1])
        tb_cond_3 = comp_op[2](df[column_name], limit[2])

        res_1_op = op(tb_cond_1, tb_cond_2)
        res_2_op = op(res_1_op, tb_cond_3)

        res_1 = df[res_1_op]
        res_2 = df[res_2_op]

        column_pdf_1 = res_1[column_name].to_pandas()
        column_pdf_2 = res_2[column_name].to_pandas()

        column_1 = column_pdf_1[column_name]
        for col in column_1:
            assert op(comp_op[0](col, limit[0]), comp_op[1](col, limit[1]))

        column_2 = column_pdf_2[column_name]
        for col in column_2:
            assert op(op(comp_op[0](col, limit[0]), comp_op[1](col, limit[1])),
                      comp_op[2](col, limit[2]))


def test_invert():
    # Bool Invert Test
    data_list = [[False, True, False, True, True], [False, True, False, True, True]]
    pdf = pd.DataFrame(data_list)
    cdf = DataFrame(pdf)

    invert_cdf = ~cdf
    invert_pdf = ~pdf

    assert invert_cdf.to_pandas().values.tolist() == invert_pdf.values.tolist()


def test_neg():
    npr = np.array([[1, 2, 3, 4, 5, -6, -7], [-1, -2, -3, -4, -5, 6, 7]])
    pdf = pd.DataFrame(npr)
    cdf = DataFrame(pdf)

    neg_cdf = -cdf
    neg_pdf = -pdf

    assert neg_cdf.to_pandas().values.tolist() == neg_pdf.values.tolist()


def test_setitem():
    npr = np.array([[1, 2, 3, 4, 5], [-1, -2, -3, -4, -5]])
    pdf = pd.DataFrame(npr)
    cdf = DataFrame(pdf)

    # replacing an existing column
    cdf['0'] = cdf['4']
    assert cdf['0'].to_pandas().values.tolist() == cdf['4'].to_pandas().values.tolist()

    # adding a new column at the end
    cdf['5'] = cdf['4']
    assert cdf['5'].to_pandas().values.tolist() == cdf['4'].to_pandas().values.tolist()


def test_math_ops_for_scalar():
    npr = np.array([[20, 2, 3, 4, 5], [10, -20, -30, -40, -50],
                    [10.2, 13.2, 16.4, 12.2, 10.8]])
    pdf = pd.DataFrame(npr)
    cdf = DataFrame(pdf)

    from operator import add, sub, mul, truediv
    ops = [add, sub, mul, truediv]

    for op in ops:
        cdf_1 = cdf
        pdf_1 = pdf

        # test column division
        cdf_1['0'] = op(cdf_1['0'], 2)
        pdf_1[0] = op(pdf_1[0], 2)

        assert pdf_1.values.tolist() == cdf_1.to_pandas().values.tolist()

        # test table division
        cdf_2 = cdf
        pdf_2 = pdf

        cdf_2 = op(cdf_2, 2)
        pdf_2 = op(pdf, 2)

        assert pdf_2.values.tolist() == cdf_2.to_pandas().values.tolist()


def test_i_bitwise_ops():
    # TODO: Improve test and functionality: https://github.com/cylondata/cylon/issues/229
    npr = np.array([[20, 2, 3, 4, 5], [10, -20, -30, -40, -50],
                    [36.2, 13.2, 16.4, 12.2, 10.8]])
    pdf = pd.DataFrame(npr)
    cdf = DataFrame(pdf)

    a = cdf['0'] > 10
    b = cdf['1'] > 2

    a_pdf = pdf[0] > 10
    b_pdf = pdf[1] > 2

    d = a & b
    a &= b

    d_pdf = a_pdf & b_pdf
    a_pdf &= b_pdf

    assert d.to_pandas().values.tolist() == a.to_pandas().values.tolist()
    assert a.to_pandas().values.flatten().tolist() == a_pdf.values.tolist()

    ## OR
    a = cdf['0'] > 10
    b = cdf['1'] > 2

    a_pdf = pdf[0] > 10
    b_pdf = pdf[1] > 2

    d = a | b
    a |= b

    d_pdf = a_pdf | b_pdf
    a_pdf |= b_pdf

    assert d.to_pandas().values.tolist() == a.to_pandas().values.tolist()
    assert a.to_pandas().values.flatten().tolist() == a_pdf.values.tolist()


def test_math_i_ops_for_scalar():
    npr = np.array([[20, 2, 3, 4, 5], [10, -20, -30, -40, -50],
                    [12.2, 13.2, 16.4, 12.2, 10.8]])
    pdf = pd.DataFrame(npr)
    cdf = DataFrame(pdf)

    cdf_1 = cdf
    pdf_1 = pdf

    # test column addition
    cdf_1['0'] += 2
    pdf_1[0] += 2
    assert pdf_1.values.tolist() == cdf_1.to_pandas().values.tolist()

    cdf_1['0'] -= 2
    pdf_1[0] -= 2
    assert pdf_1.values.tolist() == cdf_1.to_pandas().values.tolist()

    cdf_1['0'] *= 2
    pdf_1[0] *= 2
    assert pdf_1.values.tolist() == cdf_1.to_pandas().values.tolist()

    cdf_1['0'] /= 2
    pdf_1[0] /= 2
    assert pdf_1.values.tolist() == cdf_1.to_pandas().values.tolist()

    # test table division
    cdf_2 = cdf_1
    pdf_2 = pdf

    cdf_2 += 2
    pdf += 2
    assert pdf_2.values.tolist() == cdf_2.to_pandas().values.tolist()

    cdf_2 -= 2
    pdf -= 2
    assert pdf_2.values.tolist() == cdf_2.to_pandas().values.tolist()

    cdf_2 *= 2
    pdf *= 2
    assert pdf_2.values.tolist() == cdf_2.to_pandas().values.tolist()

    cdf_2 /= 2
    pdf /= 2
    assert pdf_2.values.tolist() == cdf_2.to_pandas().values.tolist()


def test_drop():
    ctx: CylonContext = CylonContext(config=None, distributed=False)
    table1_path = 'data/input/user_usage_tm_1.csv'
    assert os.path.exists(table1_path)
    csv_read_options = CSVReadOptions().use_threads(True).block_size(1 << 30)
    tb: Table = read_csv(ctx, table1_path, csv_read_options)
    cdf = DataFrame(tb)
    drop_column = 'outgoing_sms_per_month'
    cdf_new = cdf.drop([drop_column])
    assert not cdf_new.columns.__contains__(drop_column)


def test_fillna():
    data_list_numeric = [[1, 2, None, 4, 5], [6, 7, 8, 9, None]]
    fill_value = 0
    cdf_numeric = DataFrame(data_list_numeric)
    cn_tb_numeric_fillna = cdf_numeric.fillna(fill_value)
    data_list = list(cn_tb_numeric_fillna.to_dict().values())
    for col in data_list:
        assert not col.__contains__(None)
        assert col.__contains__(fill_value)


def test_notna():
    data = [[1, 2, 3, 4, 5, None], [None, 7, 8, 9, 10, 11]]
    cdf = DataFrame(data)
    df = cdf.to_pandas()
    assert df.notna().values.tolist() == cdf.notna().to_pandas().values.tolist()


def test_notnull():
    data = [[1, 2, 3, 4, 5, None], [None, 7, 8, 9, 10, 11]]
    cdf = DataFrame(data)
    df = cdf.to_pandas()
    assert df.notnull().values.tolist() == cdf.notnull().to_pandas().values.tolist()


def test_isin():
    pdf = pd.DataFrame({'num_legs': [2, 4], 'num_wings': [2, 0]})
    cdf = DataFrame(pdf)
    arr = [0, 2]
    assert (pdf.isin(arr).values.tolist() == cdf.isin(arr).to_pandas().values.tolist())


def test_isna():
    data = [[1, 2, 3, 4, 5, None], [None, 7, 8, 9, 10, 11]]
    cdf = DataFrame(data)
    df = cdf.to_pandas()
    assert df.isna().values.tolist() == cdf.isna().to_pandas().values.tolist()


def test_isnull():
    data = [[1, 2, 3, 4, 5, None], [None, 7, 8, 9, 10, 11]]
    cdf = DataFrame(data)
    df = cdf.to_pandas()
    assert df.isnull().values.tolist() == cdf.isnull().to_pandas().values.tolist()


def test_rename():
    col_names = ['col1', 'col2', 'col3', 'col4']
    data_list_numeric = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [11, 12, 13, 14, 15],
                         [16, 17, 18, 19, 20]]
    ctx: CylonContext = CylonContext(config=None, distributed=False)
    index_values = [0, 1, 2, 3, 4]
    cn_tb = cn.Table.from_list(ctx, col_names, data_list_numeric)
    cn_tb.set_index(index_values)
    cdf = DataFrame(cn_tb)
    prev_col_names = cn_tb.column_names
    # with dictionary
    columns = {'col1': 'col-1', 'col3': 'col-3'}

    cdf.rename(columns)

    new_col_names = cdf.columns

    for key in columns:
        value = columns[key]
        assert prev_col_names.index(key) == new_col_names.index(value)

    # with list
    cn_tb_list = cn.Table.from_list(ctx, col_names, data_list_numeric)
    cn_tb_list.set_index(index_values)
    cdf_list = DataFrame(cn_tb_list)
    prev_col_names = cdf_list.columns
    new_column_names = ['col-1', 'col-2', 'col-3', 'col-4']
    cdf_list.rename(new_column_names)
    assert cdf_list.columns == new_column_names


def test_applymap():
    pdf = pd.DataFrame([[1, 2.12], [3.356, 4.567]])
    cdf = DataFrame(pdf)
    print(cdf.applymap(lambda x: len(str(x))))
    assert (pdf.applymap(lambda x: len(str(x))).values.tolist() ==
            cdf.applymap(lambda x: len(str(x))).to_pandas().values.tolist())


def test_data_frame():
    df = {'col-0': [1, 2, 3], 'col-1': [4, 5, 6]}
    cdf = DataFrame(df)
    pdf = pd.DataFrame(df)
    assert len(cdf) == len(pdf)


def test_iloc():
    df = {'col-0': [1, 2, 3], 'col-1': [4, 5, 6]}
    cdf = DataFrame(df)
    pdf = pd.DataFrame(df)
    assert DataFrame(cdf.iloc[0:2]).values.tolist() == pdf.iloc[0:2].values.tolist()


def test_index():
    df = {'col-0': [1, 2, 3], 'col-1': [4, 5, 6]}
    cdf = DataFrame(df)
    assert isinstance(cdf.index, BaseArrowIndex)


def test_get_hash_object():
    df = {'col-0': [1, 2, 3], 'col-1': [4, 5, 6]}
    cdf = DataFrame(df)
    pdf = pd.DataFrame(df)
    assert cdf.get_hash_object().values.tolist() == pd.util.hash_pandas_object(pdf).values.tolist()


def test_values():
    df = {'col-0': [1, 2, 3], 'col-1': [4, 5, 6]}
    cdf = DataFrame(df)
    pdf = pd.DataFrame(df)
    assert cdf.values.tolist() == pdf.values.tolist()


def test_dtypes():
    df = {'col-0': [1, 2, 3], 'col-1': [4, 5, 6]}
    cdf = DataFrame(df)
    pdf = pd.DataFrame(df)
    print(cdf.dtypes)
    print(pdf.dtypes.to_dict())


def test_select_dtypes():
    df = {'a': [1, 2] * 3, 'b': ['a', 'b'] * 3, 'c': [1.0, 2.0] * 3}
    cdf = DataFrame(df)
    pdf = DataFrame(df)
    assert cdf.select_dtypes(include='object').values.tolist() == \
           [i for j in pdf.select_dtypes(include='object').values.tolist() for i in j]
    assert cdf.select_dtypes(exclude=['int64', 'object']).values.tolist() == \
           pdf.select_dtypes(exclude=['int64', 'object']).values.tolist()
    assert len(cdf.select_dtypes(include='bool').values.tolist()) == 0
ParlAI/projects/convai2/interactive.py
UmaTaru/run
163
141696
#!/usr/bin/env python3

# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""Basic script which allows local human keyboard input to talk to a trained model.

Examples
--------

.. code-block:: shell

  python projects/convai2/interactive.py -mf models:convai2/kvmemnn/model

When prompted, chat with the bot; you will both be assigned personalities!
Use "[DONE]" to indicate you are done with that chat partner, and want a new one.
"""
from parlai.core.params import ParlaiParser
from parlai.core.agents import create_agent
from parlai.core.worlds import create_task
from parlai.agents.repeat_label.repeat_label import RepeatLabelAgent
from parlai.agents.local_human.local_human import LocalHumanAgent

import random


def setup_args(parser=None):
    if parser is None:
        parser = ParlaiParser(True, True, 'Interactive chat with a model')
    parser.add_argument('-d', '--display-examples', type='bool', default=False)
    parser.add_argument(
        '--display-prettify',
        type='bool',
        default=False,
        help='Set to use a prettytable when displaying '
        'examples with text candidates',
    )
    parser.add_argument(
        '--display-ignore-fields',
        type=str,
        default='label_candidates,text_candidates',
        help='Do not display these fields',
    )
    parser.set_defaults(model_file='models:convai2/kvmemnn/model')
    LocalHumanAgent.add_cmdline_args(parser)
    return parser


def interactive(opt, print_parser=None):
    if print_parser is not None:
        if print_parser is True and isinstance(opt, ParlaiParser):
            print_parser = opt
        elif print_parser is False:
            print_parser = None
    if isinstance(opt, ParlaiParser):
        print('[ Deprecated Warning: interactive should be passed opt not Parser ]')
        opt = opt.parse_args()
    opt['task'] = 'parlai.agents.local_human.local_human:LocalHumanAgent'

    # Create model and assign it to the specified task
    agent = create_agent(opt, requireModelExists=True)
    world = create_task(opt, agent)
    if print_parser:
        # Show arguments after loading model
        print_parser.opt = agent.opt
        print_parser.print_args()

    # Create ConvAI2 data so we can assign personas.
    convai2_opt = opt.copy()
    convai2_opt['task'] = 'convai2:both'
    convai2_agent = RepeatLabelAgent(convai2_opt)
    convai2_world = create_task(convai2_opt, convai2_agent)

    def get_new_personas():
        # Find a new episode
        while True:
            convai2_world.parley()
            msg = convai2_world.get_acts()[0]
            if msg['episode_done']:
                convai2_world.parley()
                msg = convai2_world.get_acts()[0]
                break
        txt = msg.get('text', '').split('\n')
        bot_persona = ""
        for t in txt:
            if t.startswith("partner's persona:"):
                print(t.replace("partner's ", 'your '))
            if t.startswith('your persona:'):
                bot_persona += t + '\n'
        print("Enter [DONE] if you want a new partner at any time.")
        return bot_persona

    # Now run interactive mode, chatting with personas!
    cnt = 0
    while True:
        if cnt == 0:
            bot_persona = get_new_personas()
        # Run the parts of world.parley() in turn,
        # but insert persona into user message.
        acts = world.acts
        agents = world.agents
        acts[0] = agents[0].act()
        # add the persona on to the first message
        if cnt == 0:
            acts[0]['text'] = bot_persona + acts[0].get('text', 'hi')
        agents[1].observe(acts[0])
        acts[1] = agents[1].act()
        agents[0].observe(acts[1])
        world.update_counters()
        cnt = cnt + 1

        if opt.get('display_examples'):
            print("---")
            print(world.display())
        if world.episode_done():
            print("CHAT DONE ")
            print("In case you were curious you were talking to this bot:")
            print(bot_persona.split('\n'))
            print("\n... preparing new chat... \n")
            cnt = 0


if __name__ == '__main__':
    random.seed(42)
    parser = setup_args()
    interactive(parser.parse_args(print_args=False), print_parser=parser)
modules/dbnd/test_dbnd/scenarios/pipelines/pipelines_with_errors.py
ipattarapong/dbnd
224
141699
import logging

from time import sleep

from dbnd import PipelineTask, band, output, parameter, pipeline, task
from dbnd.tasks.basics import SimplestTask
from dbnd.tasks.basics.simplest import SimpleTask
from dbnd_test_scenarios.test_common.task.factories import TTask
from targets import target


def raise_failure(failure):
    if failure == "missing_params":
        return TTaskMissingParamsMultiple()
    elif failure == "read_error":
        return target("not_exists").read()
    else:
        raise Exception("just an error")


def raise_2(failure):
    raise_failure(failure)


def raise_3(failure):
    raise_2(failure)


class ETaskFails(SimplestTask):
    def run(self):
        raise TypeError("Some user error")


class EPipeWithTaskFails(PipelineTask):
    def band(self):
        return ETaskFails()


class ENoAssignedOutput(PipelineTask):
    t1 = output
    t2 = output
    t_pipeline = output

    def band(self):
        self.t1 = SimplestTask()
        # t2 is missing
        # self.t2 = SimplestTask()


class EWrongOutputAssignment(PipelineTask):
    t1 = output

    def band(self):
        # we should assign Task instances only or their outputs
        self.t1 = ENoAssignedOutput


class EBandWithError(PipelineTask):
    t1 = output
    t2 = output

    def band(self):
        raise Exception("User exception in band method")


@pipeline
def e_band_raise():
    raise Exception("User exception in band method")


@task
def e_task_fails():
    raise_3(None)
    # raise Exception("User exception in band method")


@pipeline
def e_wrong_task_constructor():
    return SimplestTask(not_existing_param=1)


@task
def e_task_with_kwargs(a=2, **kwargs):
    # (int, **Any) -> int
    logging.info("KWARGS: %s", kwargs)
    return a


@pipeline
def e_task_with_kwargs_pipeline():
    return e_task_with_kwargs(10, kwarg=1)


class TErrorRunTask(TTask):
    def run(self):
        raise TypeError("Some user error")


class TError2RunTask(TTask):
    def run(self):
        raise Exception("Some user error")


class TLongTimeRunning(TTask):
    sleep = parameter.value(default=0)

    def run(self):
        if self.sleep:
            sleep(self.sleep)
        super(TLongTimeRunning, self).run()
        raise Exception("Some user error")


class TNestedPipeline(PipelineTask):
    long_time_run = output

    def band(self):
        self.long_time_run = TLongTimeRunning().simplest_output


class TPipeWithErrors(PipelineTask):
    t1 = output
    t2 = output
    t_pipeline = output

    def band(self):
        self.t1 = TErrorRunTask()
        self.t2 = TError2RunTask()
        self.t_pipeline = TNestedPipeline().long_time_run


class TPipelineWrongAssignment(PipelineTask):
    some_output = output

    def band(self):
        self.some_output = PipelineTask


class TTaskMissingParamsMultiple(TTask):
    p1 = parameter[int]
    p2 = parameter[int]
    p3 = parameter[int]


@band
def pipe_bad_band(failure="missing_params"):
    if failure == "missing_params":
        return TTaskMissingParamsMultiple()
    elif failure == "read_error":
        return target("not_exists").read()
    elif failure == "task_run":
        return TErrorRunTask()
    else:
        raise Exception("just an error")


@band
def pipe_bad_band2():
    return pipe_bad_band()


@band
def pipe_bad_band3():
    return pipe_bad_band2()


class TaskBadBand1(PipelineTask):
    failure = parameter[str]

    def band(self):
        raise_failure(self.failure)


class TaskBadBand2(PipelineTask):
    def band(self):
        return TaskBadBand1()


class TaskBadBand3(PipelineTask):
    def band(self):
        return TaskBadBand2()


@pipeline
def pipe_with_task_with_params():
    return SimpleTask()


@pipeline
def p2_with_task_with_params():
    return pipe_with_task_with_params()
orchestra/tests/test_dashboard.py
code-review-doctor/orchestra
444
141705
import json

from django.test import override_settings

from orchestra.models import Step
from orchestra.models import Task
from orchestra.models import Project
from orchestra.models import TaskAssignment
from orchestra.tests.helpers import OrchestraTransactionTestCase
from orchestra.tests.helpers.fixtures import setup_models
from orchestra.tests.helpers.iterations import verify_iterations
from orchestra.utils.load_json import load_encoded_json
from orchestra.utils.task_lifecycle import assign_task
from orchestra.utils.task_lifecycle import create_subsequent_tasks


class DashboardTestCase(OrchestraTransactionTestCase):

    def setUp(self):
        super().setUp()
        setup_models(self)

    @override_settings(MACHINE_STEP_SCHEDULER={
        'path': ('orchestra.utils.machine_step_scheduler.'
                 'SynchronousMachineStepScheduler'),
    })
    def test_task_creation(self):
        """
        Test human and machine task creation
        """
        Task.objects.filter(status=Task.Status.AWAITING_PROCESSING).delete()
        project = self.projects['test_human_and_machine']
        self.assertEqual(Task.objects.filter(project=project).count(), 0)

        create_subsequent_tasks(project)

        # Human Task was created
        self.assertEqual(Task.objects.filter(project=project).count(), 1)

        human_step = self.workflow_steps['test_workflow_2']['step4']
        task = Task.objects.get(step=human_step, project=project)
        data = {'submit_key1': 'submit_val1'}
        assign_task(self.workers[0].id, task.id)

        # user 0 submits a task
        response = self._submit_assignment(self.clients[0], task.id, data=data)
        self.assertEqual(response.status_code, 200)

        # Machine Task was created
        self.assertEqual(Task.objects.filter(project=project).count(), 2)
        machine_step = self.workflow_steps['test_workflow_2']['simple_machine']
        machine_task_assignment = (
            TaskAssignment.objects
            .filter(task__step=machine_step, task__project=project)[0])

        self.assertTrue(machine_task_assignment.status,
                        TaskAssignment.Status.SUBMITTED)

        self.assertTrue(machine_task_assignment.in_progress_task_data,
                        {'simple': 'json'})

        self.assertTrue(machine_task_assignment.task.status,
                        Task.Status.COMPLETE)

    def test_index(self):
        response = self.clients[0].get('/orchestra/app/')
        self.assertEqual(response.status_code, 200)

    def test_status(self):
        response = self.clients[0].get('/orchestra/status/')
        self.assertEqual(response.status_code, 200)

    def test_dashboard_tasks(self):
        self._check_client_dashboard_state(self.clients[0], 'pending_review')

    def test_dashboard_tasks_tags(self):
        url = '/orchestra/api/interface/dashboard_tasks/'
        response = self.clients[0].get(url)
        returned = load_encoded_json(response.content)
        self.assertEqual(len(returned['tasks'][0]['tags']), 0)

        # set tags for this task
        bad_formatted_tags = {'tags': [{'foo': 'bar'}]}
        valid_tags = {'tags': [{'label': 'foo', 'status': 'default'}]}
        task = TaskAssignment.objects.filter(
            worker=self.workers[0])[0].task
        with self.assertRaises(AttributeError):
            task.tags = bad_formatted_tags
            task.tags.save()

        task.tags = valid_tags
        task.save()

        response2 = self.clients[0].get(url)
        returned2 = load_encoded_json(response2.content)
        tags = returned2['tasks'][0]['tags']
        self.assertEqual(len(tags), 1)
        self.assertEqual(tags[0]['label'], 'foo')
        self.assertEqual(tags[0]['status'], 'default')

    def test_entry_level_task_assignment(self):
        task = self.tasks['awaiting_processing']
        assign_task(self.workers[0].id, task.id)

        # task assignment for invalid id should give bad request
        self._verify_bad_task_assignment_information(
            self.clients[0], {'task_id': -1},
            'Task matching query does not exist.')

        # task assignment for user3 not assigned to a task
        self._verify_bad_task_assignment_information(
            self.clients[2], {'task_id': task.id},
            'Worker is not associated with task')

        # task assignment is assigned to user 0
        self._verify_good_task_assignment_information(
            self.clients[0], {'task_id': task.id},
            task.project.short_description,
            'Processing', 'Processing', False,
            False, {}, self.workers[0])

    def _check_client_dashboard_state(self, client, non_empty_status):
        response = client.get('/orchestra/api/interface/dashboard_tasks/')
        returned = load_encoded_json(response.content)
        non_empty_tasks = [task for task in returned['tasks']
                           if task['state'] == non_empty_status]
        empty_tasks = [task for task in returned['tasks']
                       if task['state'] != non_empty_status]
        self.assertGreater(len(non_empty_tasks), 0)
        self.assertEqual(len(empty_tasks), 0)

    def test_reviewer_task_assignment(self):
        task = self.tasks['review_task']
        assign_task(
            self.workers[1].id, task.id)
        self._verify_good_task_assignment_information(
            self.clients[1], {'task_id': task.id},
            task.project.short_description,
            'Processing', 'Reviewing', True,
            False, {'test_key': 'test_value'}, self.workers[1])

    def test_save_entry_level_task_assignment(self):
        task = self.tasks['awaiting_processing']
        assign_task(self.workers[0].id, task.id)

        # incorrect task id
        response = self.clients[0].post(
            '/orchestra/api/interface/save_task_assignment/',
            json.dumps({'task_id': -1, 'task_data': 'test'}),
            content_type='application/json')
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'], 'No task for given id')

        # user does not have a permission to save
        response = self.clients[1].post(
            '/orchestra/api/interface/save_task_assignment/',
            json.dumps({'task_id': task.id, 'task_data': 'test'}),
            content_type='application/json')
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'],
                         'Worker is not associated with task')

        # save new info
        new_data = {'new_test_key': 'new_test_value'}
        response = self.clients[0].post(
            '/orchestra/api/interface/save_task_assignment/',
            json.dumps({'task_id': task.id, 'task_data': new_data}),
            content_type='application/json')
        self.assertEqual(response.status_code, 200)
        self._verify_good_task_assignment_information(
            self.clients[0], {'task_id': task.id},
            task.project.short_description,
            'Processing', 'Processing', False,
            False, new_data, self.workers[0])

    def test_save_reviewer_task_assignment(self):
        new_data = {'new_test_key': 'new_test_value'}
        task = self.tasks['review_task']
        assign_task(self.workers[1].id, task.id)

        # entry level worker can't update the data
        response = self.clients[0].post(
            '/orchestra/api/interface/save_task_assignment/',
            json.dumps({'task_id': task.id, 'task_data': new_data}),
            content_type='application/json')
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'],
                         'Worker is not allowed to save')

        reviewer_data = {'reviewer_key': 'reviewer_value'}

        # reviewer can update the data
        response = self.clients[1].post(
            '/orchestra/api/interface/save_task_assignment/',
            json.dumps({'task_id': task.id, 'task_data': reviewer_data}),
            content_type='application/json')
        self.assertEqual(response.status_code, 200)
        self._verify_good_task_assignment_information(
            self.clients[1], {'task_id': task.id},
            task.project.short_description,
            'Processing', 'Reviewing', True,
            False, reviewer_data, self.workers[1])

    def test_submit_entry_level_task_assignment(self):
        task = self.tasks['awaiting_processing']
        assign_task(self.workers[0].id, task.id)
        verify_iterations(task.id)

        # user is not assigned to a task
        response = self._submit_assignment(self.clients[1], task.id)
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'],
                         'Task assignment with worker is in broken state.')

        # task does not exist
        response = self._submit_assignment(self.clients[1], -1)
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'], 'No task for given id')

        # user 0 can only submit a task not reject
        response = self._submit_assignment(
            self.clients[0], task.id, command='reject')
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'], 'Task not in rejectable state.')

        # user 0 can't call illegal commands
        response = self._submit_assignment(
            self.clients[0], task.id, command='approve')
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'], 'Illegal command')

        data = {'submit_key1': 'submit_val1'}

        # user 0 can't submit a task if its submission prerequisites aren't
        # complete
        step = task.step
        step.submission_depends_on.set([
            Step.objects.create(
                workflow_version=step.workflow_version,
                slug='imaginary_test_step',
                is_human=True,
            )
        ])
        step.save()
        response = self._submit_assignment(
            self.clients[0], task.id)
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'],
                         'Submission prerequisites are not complete.')

        step.submission_depends_on.set([])
        step.save()
        data = {'submit_key1': 'submit_val1'}

        # user 0 submits a task
        response = self._submit_assignment(
            self.clients[0], task.id, data=data)
        self.assertEqual(response.status_code, 200)
        self._verify_good_task_assignment_information(
            self.clients[0], {'task_id': task.id},
            task.project.short_description,
            'Submitted', 'Pending Review', False,
            True, data, self.workers[0])

        # Check that iteration has correct submitted state
        verify_iterations(task.id)

        # user cannot resubmit a task
        response = self._submit_assignment(
            self.clients[0], task.id)
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'],
                         'Worker is not allowed to submit')

    def test_submit_reviewer_task_assignment(self):
        data = {'submit_key1': 'submit_val1'}
        task = self.tasks['review_task']
        assign_task(self.workers[1].id, task.id)
        verify_iterations(task.id)

        rejected_data = {'rejected_key': 'rejected_val'}

        # user 0 can retrieve data, but should see a read-only interface
        self._verify_good_task_assignment_information(
            self.clients[0], {'task_id': task.id},
            task.project.short_description,
            'Submitted', 'Reviewing', False,
            True, {'test_key': 'test_value'}, self.workers[0])

        # user 1 should be able to review the post
        self._verify_good_task_assignment_information(
            self.clients[1], {'task_id': task.id},
            task.project.short_description,
            'Processing', 'Reviewing', True,
            False, {'test_key': 'test_value'}, self.workers[1])

        # user 1 rejects a task
        response = self._submit_assignment(
            self.clients[1], task.id, data=rejected_data, command='reject')
        self.assertEqual(response.status_code, 200)
        verify_iterations(task.id)

        # user 0 should have the task back
        self._verify_good_task_assignment_information(
            self.clients[0], {'task_id': task.id},
            task.project.short_description,
            'Processing', 'Post-review Processing', False,
            False, rejected_data, self.workers[0])

        # user 1 should no longer be able to modify the post
        self._verify_good_task_assignment_information(
            self.clients[1], {'task_id': task.id},
            task.project.short_description,
            'Submitted', 'Post-review Processing', True,
            True, rejected_data, self.workers[1])

        # user 0 submits an updated data
        response = self._submit_assignment(
            self.clients[0], task.id, data=data)
        self.assertEqual(response.status_code, 200)
        verify_iterations(task.id)

        # check if the data is saved
        self._verify_good_task_assignment_information(
            self.clients[1], {'task_id': task.id},
            task.project.short_description,
            'Processing', 'Reviewing', True,
            False, data, self.workers[1])

        accepted_data = {'accepted_key': 'accepted_val'}

        # user 1 accepts a task
        response = self._submit_assignment(
            self.clients[1], task.id, data=accepted_data, command='accept')
        self.assertEqual(response.status_code, 200)
        verify_iterations(task.id)

        # check if the accepted_data is saved
        # and task is pending for a second review.
        self._verify_good_task_assignment_information(
            self.clients[1], {'task_id': task.id},
            task.project.short_description,
            'Submitted', 'Pending Review', True,
            True, accepted_data, self.workers[1])

        # make sure a task can't be submitted twice
        response = self._submit_assignment(
            self.clients[1], task.id, command='accept')
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'],
                         'Worker is not allowed to submit')

        # user 3 is picking up a task as a reviewer
        assign_task(self.workers[3].id, task.id)
        verify_iterations(task.id)

        rejected_data = {'rejected_key': 'rejected_val'}

        # user 3 rejects a task
        response = self._submit_assignment(
            self.clients[3], task.id, data=rejected_data, command='reject')
        self.assertEqual(response.status_code, 200)
        returned = load_encoded_json(response.content)
        verify_iterations(task.id)

        # check if the rejected_data is saved
        self._verify_good_task_assignment_information(
            self.clients[3], {'task_id': task.id},
            task.project.short_description,
            'Submitted', 'Post-review Processing', True,
            True, rejected_data, self.workers[3])

        # check client dashboards
        self._check_client_dashboard_state(self.clients[0], 'pending_review')
        self._check_client_dashboard_state(self.clients[1], 'returned')
        self._check_client_dashboard_state(self.clients[3],
                                           'pending_processing')

        response = self._submit_assignment(
            self.clients[1], task.id)
        self.assertEqual(response.status_code, 200)
        verify_iterations(task.id)

        # check if client dashboards were updated
        self._check_client_dashboard_state(self.clients[0], 'pending_review')
        self._check_client_dashboard_state(self.clients[1], 'pending_review')
        self._check_client_dashboard_state(self.clients[3], 'in_progress')

        # check if the accepted_data is saved
        response = self._submit_assignment(
            self.clients[3], task.id, data=accepted_data, command='accept')
        self.assertEqual(response.status_code, 200)
        returned = load_encoded_json(response.content)
        verify_iterations(task.id)

        # check if task is complete
        self._verify_good_task_assignment_information(
            self.clients[3], {'task_id': task.id},
            task.project.short_description,
            'Submitted', 'Complete', True,
            True, accepted_data, self.workers[3])

        # check that reviewer cannot reaccept task
        response = self._submit_assignment(
            self.clients[3], task.id, data=accepted_data, command='accept')
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'], 'Task already completed')

    def _verify_bad_task_assignment_information(
            self, client, post_data, error_message):
        response = client.post(
            '/orchestra/api/interface/task_assignment_information/',
            json.dumps(post_data),
            content_type='application/json')
        self.assertEqual(response.status_code, 400)
        returned = load_encoded_json(response.content)
        self.assertEqual(returned['message'], error_message)

    def _verify_good_task_assignment_information(
            self, client, post_data, project_description,
            assignment_status, task_status, is_reviewer,
            is_read_only, task_data, worker):
        response = client.post(
            '/orchestra/api/interface/task_assignment_information/',
            json.dumps(post_data),
            content_type='application/json')
        self.assertEqual(response.status_code, 200)

        task = Task.objects.get(id=post_data['task_id'])
        returned = load_encoded_json(response.content)
        expected = {
            'project': {'details': project_description,
                        'id': task.project.id,
                        'project_data': {},
                        'status': dict(
                            Project.STATUS_CHOICES)[task.project.status],
                        'scratchpad_url': None},
            'status': assignment_status,
            'task': {'data': task_data, 'status': task_status},
            'task_id': task.id,
            'assignment_id': task.assignments.get(worker=worker).id,
            'workflow': {
                'slug': 'w1', 'name': 'Workflow One',
            },
            'workflow_version': {
                'slug': 'test_workflow', 'name': 'The workflow'},
            'prerequisites': {},
            'step': {'slug': 'step1', 'name': 'The first step'},
            'is_reviewer': is_reviewer,
            'is_read_only': is_read_only,
            'is_project_admin': False,
            'worker': {
                'username': worker.user.username,
                'first_name': worker.user.first_name,
                'last_name': worker.user.last_name,
            }
        }
        self.assertEqual(returned, expected)
dataset/coco_karpathy_dataset.py
zengyan-97/X-VLM
107
141710
import os
import json
import random
from collections import Counter

import torch
from torch.utils.data import Dataset
from torchvision.datasets.utils import download_url
from PIL import Image

from dataset.utils import pre_caption


class coco_karpathy_train(Dataset):
    def __init__(self, transform, image_root, ann_rpath, max_words=30, prompt=''):
        self.annotation = []
        for f in ann_rpath:
            self.annotation += json.load(open(f, 'r'))

        self.transform = transform
        self.image_root = image_root
        self.max_words = max_words
        self.prompt = prompt

        self.img_ids = {}
        n = 0
        for ann in self.annotation:
            img_id = ann['image_id']
            if img_id not in self.img_ids.keys():
                self.img_ids[img_id] = n
                n += 1

    def __len__(self):
        return len(self.annotation)

    def __getitem__(self, index):
        ann = self.annotation[index]

        image_path = os.path.join(self.image_root, ann['image'])
        image = Image.open(image_path).convert('RGB')
        image = self.transform(image)

        caption = self.prompt + pre_caption(ann['caption'], self.max_words)

        return image, caption, self.img_ids[ann['image_id']]


class coco_karpathy_train_scst(Dataset):
    def __init__(self, transform, image_root, ann_rpath, max_words=30, prompt=''):
        self.annotation = []
        self.image_captions_map = {}

        for f in ann_rpath:
            for ann in json.load(open(f, 'r')):
                self.annotation.append(ann)

                if ann['image'] in self.image_captions_map.keys():
                    self.image_captions_map[ann['image']].append(ann['caption'])
                else:
                    self.image_captions_map[ann['image']] = [ann['caption']]

        counter = Counter()
        for _, v in self.image_captions_map.items():
            counter[len(v)] += 1
        print("### image_captions_map, ", counter, flush=True)

        self.transform = transform
        self.image_root = image_root
        self.max_words = max_words
        self.prompt = prompt

        self.img_ids = {}
        n = 0
        for ann in self.annotation:
            img_id = ann['image_id']
            if img_id not in self.img_ids.keys():
                self.img_ids[img_id] = n
                n += 1

    def __len__(self):
        return len(self.annotation)

    def __getitem__(self, index):
        ann = self.annotation[index]

        image_path = os.path.join(self.image_root, ann['image'])
        image = Image.open(image_path).convert('RGB')
        image = self.transform(image)

        # w/o prompt
        captions_gt = [pre_caption(c, self.max_words)
                       for c in self.image_captions_map[ann['image']]]

        return image, random.sample(captions_gt, 5)

    def collate_fn(self, batch_sample):
        batch = []
        for x in zip(*batch_sample):
            batch.append(x)

        image_list, captions_gt_list = batch

        images = torch.stack(image_list)

        return images, captions_gt_list


class coco_karpathy_caption_eval(Dataset):
    def __init__(self, transform, image_root, ann_rpath, split):
        self.annotation = json.load(open(ann_rpath, 'r'))
        self.transform = transform
        self.image_root = image_root

    def __len__(self):
        return len(self.annotation)

    def __getitem__(self, index):
        ann = self.annotation[index]

        image_path = os.path.join(self.image_root, ann['image'])
        image = Image.open(image_path).convert('RGB')
        image = self.transform(image)

        img_id = ann['image'].split('/')[-1].strip('.jpg').split('_')[-1]

        return image, int(img_id)
tests/api/test_text_impl.py
JosephWardDotTech/helium
1,656
141723
from helium._impl import TextImpl
from helium._impl.selenium_wrappers import WebDriverWrapper
from tests.api import BrowserAT


class TextImplTest(BrowserAT):
    def get_page(self):
        return 'test_text_impl.html'

    def test_empty_search_text_xpath(self):
        xpath = TextImpl(WebDriverWrapper(self.driver))._get_search_text_xpath()
        text_elements = self.driver.find_elements_by_xpath(xpath)
        texts = [w.get_attribute('innerHTML') for w in text_elements]
        self.assertEqual(
            ["A paragraph", "A paragraph inside a div",
             "Another paragraph inside the div"],
            sorted(texts)
        )
benchmarks/sanic/micro.py
kzh3ka/japronto
9,472
141730
from sanic import Sanic
from sanic.response import text

app = Sanic(__name__)


@app.route("/")
async def hello(request):
    return text("Hello world!")


app.run(host="0.0.0.0", port=8080)
sensor.py
cynicer/hass-smartthinq
252
141746
<gh_stars>100-1000
import datetime
import logging
import time

import voluptuous as vol

from custom_components.smartthinq import (
    CONF_LANGUAGE, KEY_SMARTTHINQ_DEVICES, LGDevice)
import homeassistant.helpers.config_validation as cv
from homeassistant.const import CONF_REGION, CONF_TOKEN, TIME_HOURS, PERCENTAGE
from homeassistant.helpers.entity import Entity

import wideq
from wideq import dishwasher

ATTR_DW_STATE = 'state'
ATTR_DW_REMAINING_TIME = 'remaining_time'
ATTR_DW_REMAINING_TIME_IN_MINUTES = 'remaining_time_in_minutes'
ATTR_DW_INITIAL_TIME = 'initial_time'
ATTR_DW_INITIAL_TIME_IN_MINUTES = 'initial_time_in_minutes'
ATTR_DW_RESERVE_TIME = 'reserve_time'
ATTR_DW_RESERVE_TIME_IN_MINUTES = 'reserve_time_in_minutes'
ATTR_DW_COURSE = 'course'
ATTR_DW_ERROR = 'error'
ATTR_DW_DEVICE_TYPE = 'device_type'

MAX_RETRIES = 5

KEY_DW_OFF = 'Off'
KEY_DW_DISCONNECTED = 'Disconnected'

LOGGER = logging.getLogger(__name__)


def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the LG dishwasher entities."""
    refresh_token = hass.data[CONF_TOKEN]
    region = hass.data[CONF_REGION]
    language = hass.data[CONF_LANGUAGE]

    client = wideq.Client.from_token(refresh_token, region, language)

    sensors = []
    for device_id in hass.data[KEY_SMARTTHINQ_DEVICES]:
        device = client.get_device(device_id)
        model = client.model_info(device)

        if device.type == wideq.DeviceType.DISHWASHER:
            base_name = "lg_dishwasher_" + device.id
            LOGGER.debug("Creating new LG DishWasher: %s", base_name)
            try:
                sensors.append(LGDishWasherDevice(client, device, base_name))
            except wideq.NotConnectedError:
                # Dishwashers are only connected when in use. Ignore
                # NotConnectedError on platform setup.
                pass

        if device.type == wideq.DeviceType.AC:
            fahrenheit = hass.config.units.temperature_unit != '°C'
            LOGGER.debug("Creating new LG AC: %s", device.name)
            try:
                sensors.append(LGACFilterChangePeriod(client, device, fahrenheit))
                sensors.append(LGACFilterUseTime(client, device, fahrenheit))
                sensors.append(LGACFilterRemainingTime(client, device, fahrenheit))
                sensors.append(LGACFilterHealth(client, device, fahrenheit))
            except wideq.NotConnectedError:
                pass

    if sensors:
        add_entities(sensors, True)

    return True


class LGDishWasherDevice(LGDevice):
    def __init__(self, client, device, name):
        """Initialize an LG DishWasher Device."""
        super().__init__(client, device)

        # This constructor is called during platform creation. It must not
        # involve any API calls that actually need the dishwasher to be
        # connected, otherwise the device construction will fail and the
        # entity will not get created. Specifically, calls that depend on
        # dishwasher interaction should only happen in update(...),
        # including the start of the monitor task.
        self._dishwasher = dishwasher.DishWasherDevice(client, device)
        self._name = name
        self._status = None
        self._failed_request_count = 0

    @property
    def state_attributes(self):
        """Return the optional state attributes for the dishwasher."""
        data = {}
        data[ATTR_DW_REMAINING_TIME] = self.remaining_time
        data[ATTR_DW_REMAINING_TIME_IN_MINUTES] = self.remaining_time_in_minutes
        data[ATTR_DW_INITIAL_TIME] = self.initial_time
        data[ATTR_DW_INITIAL_TIME_IN_MINUTES] = self.initial_time_in_minutes
        data[ATTR_DW_RESERVE_TIME] = self.reserve_time
        data[ATTR_DW_RESERVE_TIME_IN_MINUTES] = self.reserve_time_in_minutes
        data[ATTR_DW_COURSE] = self.course
        data[ATTR_DW_ERROR] = self.error

        # For convenience, include the state as an attribute.
        data[ATTR_DW_STATE] = self.state
        return data

    @property
    def name(self):
        return self._name

    @property
    def state(self):
        if self._status:
            # Process is a more refined string to use for state; if it's
            # present, use it instead.
            return self._status.readable_process or self._status.readable_state
        return dishwasher.DISHWASHER_STATE_READABLE[
            dishwasher.DishWasherState.OFF.name]

    @property
    def remaining_time(self):
        minutes = self.remaining_time_in_minutes if self._status else 0
        return str(datetime.timedelta(minutes=minutes))[:-3]

    @property
    def remaining_time_in_minutes(self):
        # The API (indefinitely) returns 1 minute remaining when a cycle is
        # either in state off or complete, or process night-drying. Return 0
        # minutes remaining in these instances, which is more reflective of
        # reality.
        if (self._status and
            (self._status.process == dishwasher.DishWasherProcess.NIGHT_DRYING
             or self._status.state == dishwasher.DishWasherState.OFF
             or self._status.state == dishwasher.DishWasherState.COMPLETE)):
            return 0
        return self._status.remaining_time if self._status else 0

    @property
    def initial_time(self):
        minutes = self.initial_time_in_minutes if self._status else 0
        return str(datetime.timedelta(minutes=minutes))[:-3]

    @property
    def initial_time_in_minutes(self):
        # When in state OFF, the dishwasher still returns the initial program
        # length of the previously run cycle. Instead, return 0, which is more
        # reflective of the dishwasher being off.
        if (self._status and
                self._status.state == dishwasher.DishWasherState.OFF):
            return 0
        return self._status.initial_time if self._status else 0

    @property
    def reserve_time(self):
        minutes = self.reserve_time_in_minutes if self._status else 0
        return str(datetime.timedelta(minutes=minutes))[:-3]

    @property
    def reserve_time_in_minutes(self):
        return self._status.reserve_time if self._status else 0

    @property
    def course(self):
        if self._status:
            if self._status.smart_course != KEY_DW_OFF:
                return self._status.smart_course
            else:
                return self._status.course
        return KEY_DW_OFF

    @property
    def error(self):
        if self._status:
            return self._status.error
        return KEY_DW_DISCONNECTED

    def _restart_monitor(self):
        try:
            self._dishwasher.monitor_start()
        except wideq.NotConnectedError:
            self._status = None
        except wideq.NotLoggedInError:
            LOGGER.info('Session expired. Refreshing.')
            self._client.refresh()

    def update(self):
        """Poll for dishwasher state updates."""
        # This method is polled, so try to avoid sleeping in here. If an error
        # occurs, it will naturally be retried on the next poll.
        LOGGER.debug('Updating %s.', self.name)

        # On initial construction, the dishwasher monitor task
        # will not have been created. If so, start monitoring here.
        if getattr(self._dishwasher, 'mon', None) is None:
            self._restart_monitor()

        try:
            status = self._dishwasher.poll()
        except wideq.NotConnectedError:
            self._status = None
            return
        except wideq.NotLoggedInError:
            LOGGER.info('Session expired. Refreshing.')
            self._client.refresh()
            self._restart_monitor()
            return

        if status:
            LOGGER.debug('Status updated.')
            self._status = status
            self._failed_request_count = 0
            return

        LOGGER.debug('No status available yet.')
        self._failed_request_count += 1

        if self._failed_request_count >= MAX_RETRIES:
            # We tried several times but got no result. This might happen
            # when the monitoring request gets into a bad state, so we
            # restart the task.
            self._restart_monitor()
            self._failed_request_count = 0


class LGACFilter(Entity):
    def __init__(self, client, device, fahrenheit=True):
        self._client = client
        self._device = device
        self._fahrenheit = fahrenheit

        self._ac = wideq.ACDevice(client, device)
        self._ac.monitor_start()

        self._change_period = -1
        self._use_time = -1
        self._remaining_filter_time = -1

    @property
    def unit_of_measurement(self):
        return TIME_HOURS

    def update(self):
        """Poll for updated device status.

        Set the `_state` field to a new data mapping.
        """
        LOGGER.info('Updating %s.', self.name)
        for iteration in range(MAX_RETRIES):
            LOGGER.info('Polling...')

            try:
                state = self._ac.poll()
            except wideq.NotLoggedInError:
                LOGGER.info('Session expired. Refreshing.')
                self._client.refresh()
                self._ac.monitor_start()
                continue
            except wideq.NotConnectedError:
                LOGGER.info('Device not available.')
                return

            if state:
                filter_state = self._ac.get_filter_state()
                self._change_period = int(filter_state["ChangePeriod"])
                self._use_time = int(filter_state["UseTime"])
                self._remaining_filter_time = self._change_period - self._use_time
                LOGGER.info('Status updated.')
                return

            LOGGER.info('No status available yet.')
            time.sleep(2 ** iteration)  # Exponential backoff.

        # We tried several times but got no result. This might happen
        # when the monitoring request gets into a bad state, so we
        # restart the task.
        LOGGER.warning('Status update failed.')
        self._ac.monitor_stop()
        self._ac.monitor_start()


class LGACFilterChangePeriod(LGACFilter):
    @property
    def name(self):
        return self._device.name + "_ac.filter_change_period"

    @property
    def state(self):
        return self._change_period


class LGACFilterUseTime(LGACFilter):
    @property
    def name(self):
        return self._device.name + "_ac.filter_use_time"

    @property
    def state(self):
        return self._use_time


class LGACFilterRemainingTime(LGACFilter):
    @property
    def name(self):
        return self._device.name + "_ac.filter_remaining_time"

    @property
    def state(self):
        return self._remaining_filter_time


class LGACFilterHealth(LGACFilter):
    @property
    def name(self):
        return self._device.name + "_ac.filter_health"

    @property
    def state(self):
        return round(100.0 * self._remaining_filter_time / self._change_period, 2)

    @property
    def unit_of_measurement(self):
        return PERCENTAGE
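# Worked example of the LGACFilterHealth calculation above, with
# hypothetical filter values (ChangePeriod=4800 h, UseTime=1200 h):
#
#   remaining = 4800 - 1200            # 3600 h
#   round(100.0 * 3600 / 4800, 2)      # -> 75.0 (PERCENTAGE)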
leetcode.com/python/841_Keys_and_Rooms.py
vansh-tiwari/coding-interview-gym
713
141762
class Solution(object):
    def canVisitAllRooms(self, rooms):
        """
        :type rooms: List[List[int]]
        :rtype: bool
        """
        seen = [False] * len(rooms)
        seen[0] = True
        stack = [0]
        while stack:
            roomIdx = stack.pop()
            for key in rooms[roomIdx]:
                if not seen[key]:
                    seen[key] = True
                    stack.append(key)
        return all(seen)
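# A minimal sanity check (hypothetical driver; LeetCode supplies its own):
# the first case is fully reachable, the second never yields a key to room 2.
if __name__ == "__main__":
    assert Solution().canVisitAllRooms([[1], [2], [3], []])
    assert not Solution().canVisitAllRooms([[1, 3], [3, 0, 1], [2], [0]])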
movie_recommender/src/als_recommender.py
KevinLiao159/ApiForDataScience
320
141773
import os
import argparse
import time
import gc

# spark imports
from pyspark.sql import SparkSession, Row
from pyspark.sql.functions import col, lower
from pyspark.ml.evaluation import RegressionEvaluator
from pyspark.ml.recommendation import ALS


class AlsRecommender:
    """
    This is a collaborative filtering recommender with Alternating Least
    Square Matrix Factorization, which is implemented by Spark
    """
    def __init__(self, spark_session, path_movies, path_ratings):
        self.spark = spark_session
        self.sc = spark_session.sparkContext
        self.moviesDF = self._load_file(path_movies) \
            .select(['movieId', 'title'])
        self.ratingsDF = self._load_file(path_ratings) \
            .select(['userId', 'movieId', 'rating'])
        self.model = ALS(
            userCol='userId',
            itemCol='movieId',
            ratingCol='rating',
            coldStartStrategy="drop")

    def _load_file(self, filepath):
        """
        load csv file into memory as spark DF
        """
        return self.spark.read.load(filepath, format='csv',
                                    header=True, inferSchema=True)

    def tune_model(self, maxIter, regParams, ranks, split_ratio=(6, 2, 2)):
        """
        Hyperparameter tuning for ALS model

        Parameters
        ----------
        maxIter: int, max number of learning iterations

        regParams: list of float, regularization parameter

        ranks: list of float, number of latent factors

        split_ratio: tuple, (train, validation, test)
        """
        # split data
        train, val, test = self.ratingsDF.randomSplit(split_ratio)
        # holdout tuning
        self.model = tune_ALS(self.model, train, val,
                              maxIter, regParams, ranks)
        # test model
        predictions = self.model.transform(test)
        evaluator = RegressionEvaluator(metricName="rmse",
                                        labelCol="rating",
                                        predictionCol="prediction")
        rmse = evaluator.evaluate(predictions)
        print('The out-of-sample RMSE of the best tuned model is:', rmse)
        # clean up
        del train, val, test, predictions, evaluator
        gc.collect()

    def set_model_params(self, maxIter, regParam, rank):
        """
        set model params for pyspark.ml.recommendation.ALS

        Parameters
        ----------
        maxIter: int, max number of learning iterations

        regParam: float, regularization parameter

        rank: int, number of latent factors
        """
        self.model = self.model \
            .setMaxIter(maxIter) \
            .setRank(rank) \
            .setRegParam(regParam)

    def _regex_matching(self, fav_movie):
        """
        return the closest matches via SQL regex.
        If no match found, return None

        Parameters
        ----------
        fav_movie: str, name of user input movie

        Return
        ------
        list of indices of the matching movies
        """
        print('You have input movie:', fav_movie)
        matchesDF = self.moviesDF \
            .filter(
                lower(
                    col('title')
                ).like('%{}%'.format(fav_movie.lower()))
            ) \
            .select('movieId', 'title')
        if not len(matchesDF.take(1)):
            print('Oops! No match is found')
            return None
        movieIds = matchesDF.rdd.map(lambda r: r[0]).collect()
        titles = matchesDF.rdd.map(lambda r: r[1]).collect()
        print('Found possible matches in our database: '
              '{0}\n'.format([x for x in titles]))
        return movieIds

    def _append_ratings(self, userId, movieIds):
        """
        append a user's movie ratings to ratingsDF

        Parameters
        ----------
        userId: int, userId of a user

        movieIds: list of int, movieIds of user's favorite movies
        """
        # create new user rdd
        user_rdd = self.sc.parallelize(
            [(userId, movieId, 5.0) for movieId in movieIds])
        # transform to user rows
        user_rows = user_rdd.map(
            lambda x: Row(
                userId=int(x[0]),
                movieId=int(x[1]),
                rating=float(x[2])
            )
        )
        # transform rows to spark DF
        userDF = self.spark.createDataFrame(user_rows) \
            .select(self.ratingsDF.columns)
        # append to ratingsDF
        self.ratingsDF = self.ratingsDF.union(userDF)

    def _create_inference_data(self, userId, movieIds):
        """
        create inference data for a user: all movies except the ones
        that were already rated
        """
        # filter movies
        other_movieIds = self.moviesDF \
            .filter(~col('movieId').isin(movieIds)) \
            .select(['movieId']) \
            .rdd.map(lambda r: r[0]) \
            .collect()
        # create inference rdd
        inferenceRDD = self.sc.parallelize(
            [(userId, movieId) for movieId in other_movieIds]
        ).map(
            lambda x: Row(
                userId=int(x[0]),
                movieId=int(x[1]),
            )
        )
        # transform to inference DF
        inferenceDF = self.spark.createDataFrame(inferenceRDD) \
            .select(['userId', 'movieId'])
        return inferenceDF

    def _inference(self, model, fav_movie, n_recommendations):
        """
        return top n movie recommendations based on user's input movie

        Parameters
        ----------
        model: spark ALS model

        fav_movie: str, name of user input movie

        n_recommendations: int, top n recommendations

        Return
        ------
        list of top n similar movie recommendations
        """
        # create a userId
        userId = self.ratingsDF.agg({"userId": "max"}).collect()[0][0] + 1
        # get movieIds of favorite movies
        movieIds = self._regex_matching(fav_movie)
        # append new user with his/her ratings into data
        self._append_ratings(userId, movieIds)
        # matrix factorization
        model = model.fit(self.ratingsDF)
        # get data for inferencing
        inferenceDF = self._create_inference_data(userId, movieIds)
        # make inference
        return model.transform(inferenceDF) \
            .select(['movieId', 'prediction']) \
            .orderBy('prediction', ascending=False) \
            .rdd.map(lambda r: (r[0], r[1])) \
            .take(n_recommendations)

    def make_recommendations(self, fav_movie, n_recommendations):
        """
        make top n movie recommendations

        Parameters
        ----------
        fav_movie: str, name of user input movie

        n_recommendations: int, top n recommendations
        """
        # make inference and get raw recommendations
        print('Recommendation system start to make inference ...')
        t0 = time.time()
        raw_recommends = \
            self._inference(self.model, fav_movie, n_recommendations)
        movieIds = [r[0] for r in raw_recommends]
        scores = [r[1] for r in raw_recommends]
        print('It took my system {:.2f}s to make inference\n'
              .format(time.time() - t0))
        # get movie titles
        movie_titles = self.moviesDF \
            .filter(col('movieId').isin(movieIds)) \
            .select('title') \
            .rdd.map(lambda r: r[0]) \
            .collect()
        # print recommendations
        print('Recommendations for {}:'.format(fav_movie))
        for i in range(len(movie_titles)):
            print('{0}: {1}, with rating '
                  'of {2}'.format(i + 1, movie_titles[i], scores[i]))


class Dataset:
    """
    data object that makes loading raw files easier
    """
    def __init__(self, spark_session, filepath):
        """
        spark dataset constructor
        """
        self.spark = spark_session
        self.sc = spark_session.sparkContext
        self.filepath = filepath
        # build spark data object
        self.RDD = self.load_file_as_RDD(self.filepath)
        self.DF = self.load_file_as_DF(self.filepath)

    def load_file_as_RDD(self, filepath):
        ratings_RDD = self.sc.textFile(filepath)
        header = ratings_RDD.take(1)[0]
        return ratings_RDD \
            .filter(lambda line: line != header) \
            .map(lambda line: line.split(",")) \
            .map(lambda tokens: (int(tokens[0]), int(tokens[1]), float(tokens[2])))  # noqa

    def load_file_as_DF(self, filepath):
        ratings_RDD = self.load_file_as_RDD(filepath)
        ratingsRDD = ratings_RDD.map(lambda tokens: Row(
            userId=int(tokens[0]), movieId=int(tokens[1]), rating=float(tokens[2])))  # noqa
        return self.spark.createDataFrame(ratingsRDD)


def tune_ALS(model, train_data, validation_data, maxIter, regParams, ranks):
    """
    grid search function to select the best model based on RMSE of
    validation data

    Parameters
    ----------
    model: spark ML model, ALS

    train_data: spark DF with columns ['userId', 'movieId', 'rating']

    validation_data: spark DF with columns ['userId', 'movieId', 'rating']

    maxIter: int, max number of learning iterations

    regParams: list of float, one dimension of hyper-param tuning grid

    ranks: list of float, one dimension of hyper-param tuning grid

    Return
    ------
    The best fitted ALS model with lowest RMSE score on validation data
    """
    # initial
    min_error = float('inf')
    best_rank = -1
    best_regularization = 0
    best_model = None
    for rank in ranks:
        for reg in regParams:
            # get ALS model
            als = model.setMaxIter(maxIter).setRank(rank).setRegParam(reg)
            # train ALS model; keep the fitted model in its own variable so
            # the ALS estimator `model` is not overwritten between iterations
            fitted_model = als.fit(train_data)
            # evaluate the model by computing the RMSE on the validation data
            predictions = fitted_model.transform(validation_data)
            evaluator = RegressionEvaluator(metricName="rmse",
                                            labelCol="rating",
                                            predictionCol="prediction")
            rmse = evaluator.evaluate(predictions)
            print('{} latent factors and regularization = {}: '
                  'validation RMSE is {}'.format(rank, reg, rmse))
            if rmse < min_error:
                min_error = rmse
                best_rank = rank
                best_regularization = reg
                best_model = fitted_model
    print('\nThe best model has {} latent factors and '
          'regularization = {}'.format(best_rank, best_regularization))
    return best_model


def parse_args():
    parser = argparse.ArgumentParser(
        prog="Movie Recommender",
        description="Run ALS Movie Recommender")
    parser.add_argument('--path', nargs='?', default='../data/MovieLens',
                        help='input data path')
    parser.add_argument('--movies_filename', nargs='?', default='movies.csv',
                        help='provide movies filename')
    parser.add_argument('--ratings_filename', nargs='?', default='ratings.csv',
                        help='provide ratings filename')
    parser.add_argument('--movie_name', nargs='?', default='',
                        help='provide your favorite movie name')
    parser.add_argument('--top_n', type=int, default=10,
                        help='top n movie recommendations')
    return parser.parse_args()


if __name__ == '__main__':
    # get args
    args = parse_args()
    data_path = args.path
    movies_filename = args.movies_filename
    ratings_filename = args.ratings_filename
    movie_name = args.movie_name
    top_n = args.top_n
    # initial spark
    spark = SparkSession \
        .builder \
        .appName("movie recommender") \
        .getOrCreate()
    # initial recommender system
    recommender = AlsRecommender(
        spark,
        os.path.join(data_path, movies_filename),
        os.path.join(data_path, ratings_filename))
    # set params
    recommender.set_model_params(10, 0.05, 20)
    # make recommendations
    recommender.make_recommendations(movie_name, top_n)
    # stop
    spark.stop()
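# Hypothetical invocation, using only the argparse flags defined above
# (paths and the movie name are placeholders):
#
#   python als_recommender.py --path ../data/MovieLens \
#       --movies_filename movies.csv --ratings_filename ratings.csv \
#       --movie_name "Iron Man" --top_n 10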
impala/compat.py
wzhou-code/impyla
661
141776
# Copyright 2015 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# pylint: disable=unused-import,wrong-import-position

from __future__ import absolute_import

import six

if six.PY3:
    def lzip(*x):
        return list(zip(*x))

    from decimal import Decimal
elif six.PY2:
    lzip = zip

    try:
        from cdecimal import Decimal
    except ImportError:
        from decimal import Decimal  # noqa

try:
    _xrange = xrange
except NameError:
    _xrange = range  # python3 compatibility
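# Example of the shim above: `lzip` always materializes a list of tuples,
# matching Python 2's eager zip() behavior on both interpreters.
#
#   >>> lzip([1, 2], ['a', 'b'])
#   [(1, 'a'), (2, 'b')]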
Validation/EcalHits/python/ecalBarrelSimHitsValidation_cfi.py
ckamtsikis/cmssw
852
141777
import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer

ecalBarrelSimHitsValidation = DQMEDAnalyzer("EcalBarrelSimHitsValidation",
    moduleLabelG4 = cms.string('g4SimHits'),
    verbose = cms.untracked.bool(False),
    ValidationCollection = cms.string('EcalValidInfo'),
    EBHitsCollection = cms.string('EcalHitsEB')
)
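# A typical way to customize this module in a job configuration (a sketch;
# the derived name is a placeholder), using the standard clone() idiom of
# cms parameter sets:
#
#   ecalBarrelSimHitsValidationVerbose = ecalBarrelSimHitsValidation.clone(
#       verbose = cms.untracked.bool(True)
#   )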
examples/training/nli/training_nli.py
ccolas/sentence-transformers
7,566
141799
""" The system trains BERT (or any other transformer model like RoBERTa, DistilBERT etc.) on the SNLI + MultiNLI (AllNLI) dataset with softmax loss function. At every 1000 training steps, the model is evaluated on the STS benchmark dataset Usage: python training_nli.py OR python training_nli.py pretrained_transformer_model_name """ from torch.utils.data import DataLoader import math from sentence_transformers import models, losses from sentence_transformers import LoggingHandler, SentenceTransformer, util, InputExample from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator import logging from datetime import datetime import sys import os import gzip import csv #### Just some code to print debug information to stdout logging.basicConfig(format='%(asctime)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S', level=logging.INFO, handlers=[LoggingHandler()]) #### /print debug information to stdout #Check if dataset exsist. If not, download and extract it nli_dataset_path = 'data/AllNLI.tsv.gz' sts_dataset_path = 'data/stsbenchmark.tsv.gz' if not os.path.exists(nli_dataset_path): util.http_get('https://sbert.net/datasets/AllNLI.tsv.gz', nli_dataset_path) if not os.path.exists(sts_dataset_path): util.http_get('https://sbert.net/datasets/stsbenchmark.tsv.gz', sts_dataset_path) #You can specify any huggingface/transformers pre-trained model here, for example, bert-base-uncased, roberta-base, xlm-roberta-base model_name = sys.argv[1] if len(sys.argv) > 1 else 'bert-base-uncased' # Read the dataset train_batch_size = 16 model_save_path = 'output/training_nli_'+model_name.replace("/", "-")+'-'+datetime.now().strftime("%Y-%m-%d_%H-%M-%S") # Use Huggingface/transformers model (like BERT, RoBERTa, XLNet, XLM-R) for mapping tokens to embeddings word_embedding_model = models.Transformer(model_name) # Apply mean pooling to get one fixed sized sentence vector pooling_model = models.Pooling(word_embedding_model.get_word_embedding_dimension(), pooling_mode_mean_tokens=True, pooling_mode_cls_token=False, pooling_mode_max_tokens=False) model = SentenceTransformer(modules=[word_embedding_model, pooling_model]) # Read the AllNLI.tsv.gz file and create the training dataset logging.info("Read AllNLI train dataset") label2int = {"contradiction": 0, "entailment": 1, "neutral": 2} train_samples = [] with gzip.open(nli_dataset_path, 'rt', encoding='utf8') as fIn: reader = csv.DictReader(fIn, delimiter='\t', quoting=csv.QUOTE_NONE) for row in reader: if row['split'] == 'train': label_id = label2int[row['label']] train_samples.append(InputExample(texts=[row['sentence1'], row['sentence2']], label=label_id)) train_dataloader = DataLoader(train_samples, shuffle=True, batch_size=train_batch_size) train_loss = losses.SoftmaxLoss(model=model, sentence_embedding_dimension=model.get_sentence_embedding_dimension(), num_labels=len(label2int)) #Read STSbenchmark dataset and use it as development set logging.info("Read STSbenchmark dev dataset") dev_samples = [] with gzip.open(sts_dataset_path, 'rt', encoding='utf8') as fIn: reader = csv.DictReader(fIn, delimiter='\t', quoting=csv.QUOTE_NONE) for row in reader: if row['split'] == 'dev': score = float(row['score']) / 5.0 #Normalize score to range 0 ... 
1 dev_samples.append(InputExample(texts=[row['sentence1'], row['sentence2']], label=score)) dev_evaluator = EmbeddingSimilarityEvaluator.from_input_examples(dev_samples, batch_size=train_batch_size, name='sts-dev') # Configure the training num_epochs = 1 warmup_steps = math.ceil(len(train_dataloader) * num_epochs * 0.1) #10% of train data for warm-up logging.info("Warmup-steps: {}".format(warmup_steps)) # Train the model model.fit(train_objectives=[(train_dataloader, train_loss)], evaluator=dev_evaluator, epochs=num_epochs, evaluation_steps=1000, warmup_steps=warmup_steps, output_path=model_save_path ) ############################################################################## # # Load the stored model and evaluate its performance on STS benchmark dataset # ############################################################################## test_samples = [] with gzip.open(sts_dataset_path, 'rt', encoding='utf8') as fIn: reader = csv.DictReader(fIn, delimiter='\t', quoting=csv.QUOTE_NONE) for row in reader: if row['split'] == 'test': score = float(row['score']) / 5.0 #Normalize score to range 0 ... 1 test_samples.append(InputExample(texts=[row['sentence1'], row['sentence2']], label=score)) model = SentenceTransformer(model_save_path) test_evaluator = EmbeddingSimilarityEvaluator.from_input_examples(test_samples, batch_size=train_batch_size, name='sts-test') test_evaluator(model, output_path=model_save_path)
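# Once trained, the stored model can be reused for plain sentence embedding
# (a sketch; the two sentences are placeholders):
#
#   model = SentenceTransformer(model_save_path)
#   emb = model.encode(["A man is eating food.",
#                       "A man is eating a piece of bread."])
#   print(util.pytorch_cos_sim(emb[0], emb[1]))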
Tools/sqfvalidator/sqf/database.py
Rowantrek/A3-Antistasi
161
141804
<gh_stars>100-1000
# This file is generated automatically by `build_database.py`. Change it there.
from sqf.expressions import BinaryExpression, UnaryExpression, NullExpression
from sqf.types import Keyword, Type, Nothing, Anything, String, Code, Array, Number, Boolean, Namespace, \
    Object, Config, Script, Control, Group, Display, Side, Task, Location, NetObject, DiaryReport, TeamMember
from sqf.interpreter_types import WhileType, \
    ForType, SwitchType, IfType, TryType, WithType

EXPRESSIONS = [
    BinaryExpression(Array, Keyword('#'), Number, Anything),
    BinaryExpression(Number, Keyword('!='), Number, Boolean),
    BinaryExpression(String, Keyword('!='), String, Boolean),
    BinaryExpression(Object, Keyword('!='), Object, Boolean),
    BinaryExpression(Group, Keyword('!='), Group, Boolean),
    BinaryExpression(Side, Keyword('!='), Side, Boolean),
    BinaryExpression(String, Keyword('!='), String, Boolean),
    BinaryExpression(Config, Keyword('!='), Config, Boolean),
    BinaryExpression(Display, Keyword('!='), Display, Boolean),
    BinaryExpression(Control, Keyword('!='), Control, Boolean),
    BinaryExpression(TeamMember, Keyword('!='), TeamMember, Boolean),
    BinaryExpression(NetObject, Keyword('!='), NetObject, Boolean),
    BinaryExpression(Task, Keyword('!='), Task, Boolean),
    BinaryExpression(Location, Keyword('!='), Location, Boolean),
    BinaryExpression(Number, Keyword('%'), Number, Number),
    BinaryExpression(Boolean, Keyword('&&'), Boolean, Boolean),
    BinaryExpression(Boolean, Keyword('&&'), Code, Boolean),
    BinaryExpression(Number, Keyword('*'), Number, Number),
    BinaryExpression(Number, Keyword('+'), Number, Number),
    BinaryExpression(String, Keyword('+'), String, String),
    BinaryExpression(Array, Keyword('+'), Array, Array),
    BinaryExpression(Number, Keyword('-'), Number, Number),
    BinaryExpression(Array, Keyword('-'), Array, Array),
    BinaryExpression(Number, Keyword('/'), Number, Number),
    BinaryExpression(Config, Keyword('/'), String, Config),
    BinaryExpression(SwitchType, Keyword(':'), Code, Nothing),
    BinaryExpression(Number, Keyword('<'), Number, Boolean),
    BinaryExpression(Number, Keyword('<='), Number, Boolean),
    BinaryExpression(Number, Keyword('=='), Number, Boolean),
    BinaryExpression(String, Keyword('=='), String, Boolean),
    BinaryExpression(Object, Keyword('=='), Object, Boolean),
    BinaryExpression(Group, Keyword('=='), Group, Boolean),
    BinaryExpression(Side, Keyword('=='), Side, Boolean),
    BinaryExpression(String, Keyword('=='), String, Boolean),
    BinaryExpression(Config, Keyword('=='), Config, Boolean),
    BinaryExpression(Display, Keyword('=='), Display, Boolean),
    BinaryExpression(Control, Keyword('=='), Control, Boolean),
    BinaryExpression(TeamMember, Keyword('=='), TeamMember, Boolean),
    BinaryExpression(NetObject, Keyword('=='), NetObject, Boolean),
    BinaryExpression(Task, Keyword('=='), Task, Boolean),
    BinaryExpression(Location, Keyword('=='), Location, Boolean),
    BinaryExpression(Number, Keyword('>'), Number, Boolean),
    BinaryExpression(Number, Keyword('>='), Number, Boolean),
    BinaryExpression(Config, Keyword('>>'), String, Config),
    BinaryExpression(Number, Keyword('^'), Number, Number),
    BinaryExpression(Boolean, Keyword('||'), Boolean, Boolean),
    BinaryExpression(Boolean, Keyword('||'), Code, Boolean),
    UnaryExpression(Keyword('!'), Boolean, Boolean),
    UnaryExpression(Keyword('+'), Number, Number),
    UnaryExpression(Keyword('+'), Array, Array),
    UnaryExpression(Keyword('-'), Number, Number),
    UnaryExpression(Keyword('abs'), Number, Number),
    UnaryExpression(Keyword('acos'), Number, Number),
    UnaryExpression(Keyword('actionids'), Object, Array),
    UnaryExpression(Keyword('actionkeys'), String, Array),
    UnaryExpression(Keyword('actionkeysimages'), String, String),
    UnaryExpression(Keyword('actionkeysimages'), Array, String),
    UnaryExpression(Keyword('actionkeysnames'), String, String),
    UnaryExpression(Keyword('actionkeysnames'), Array, String),
    UnaryExpression(Keyword('actionkeysnamesarray'), String, Array),
    UnaryExpression(Keyword('actionkeysnamesarray'), Array, Array),
    UnaryExpression(Keyword('actionname'), String, String),
    UnaryExpression(Keyword('activateaddons'), Array, Nothing),
    UnaryExpression(Keyword('activatekey'), String, Nothing),
    UnaryExpression(Keyword('add3denconnection'), Array, Nothing),
    UnaryExpression(Keyword('add3deneventhandler'), Array, Number),
    UnaryExpression(Keyword('addcamshake'), Array, Nothing),
    UnaryExpression(Keyword('addforcegeneratorrtd'), Array, Number),
    UnaryExpression(Keyword('additempool'), Array, Nothing),
    UnaryExpression(Keyword('addmagazinepool'), Array, Nothing),
    UnaryExpression(Keyword('addmissioneventhandler'), Array, Number),
    UnaryExpression(Keyword('addmusiceventhandler'), Array, Number),
    UnaryExpression(Keyword('addswitchableunit'), Object, Nothing),
    UnaryExpression(Keyword('addtoremainscollector'), Array, Nothing),
    UnaryExpression(Keyword('addweaponpool'), Array, Nothing),
    UnaryExpression(Keyword('admin'), Number, Number),
    UnaryExpression(Keyword('agent'), TeamMember, Object),
    UnaryExpression(Keyword('agltoasl'), Array, Array),
    UnaryExpression(Keyword('aimpos'), Object, Array),
    UnaryExpression(Keyword('airdensityrtd'), Number, Number),
    UnaryExpression(Keyword('airplanethrottle'), Object, Number),
    UnaryExpression(Keyword('airportside'), Object, Nothing),
    UnaryExpression(Keyword('airportside'), Number, Nothing),
    UnaryExpression(Keyword('aisfinishheal'), Array, Nothing),
    UnaryExpression(Keyword('alive'), Object, Boolean),
    UnaryExpression(Keyword('allcontrols'), Display, Array),
    UnaryExpression(Keyword('allmissionobjects'), String, Array),
    UnaryExpression(Keyword('allsimpleobjects'), Array, Array),
    UnaryExpression(Keyword('allturrets'), Array, Array),
    UnaryExpression(Keyword('allturrets'), Object, Array),
    UnaryExpression(Keyword('allvariables'), Control, Array),
    UnaryExpression(Keyword('allvariables'), TeamMember, Array),
    UnaryExpression(Keyword('allvariables'), Namespace, Array),
    UnaryExpression(Keyword('allvariables'), Object, Array),
    UnaryExpression(Keyword('allvariables'), Group, Array),
    UnaryExpression(Keyword('allvariables'), Task, Array),
    UnaryExpression(Keyword('allvariables'), Location, Array),
    UnaryExpression(Keyword('animationnames'), Object, Array),
    UnaryExpression(Keyword('animationstate'), Object, String),
    UnaryExpression(Keyword('asin'), Number, Number),
    UnaryExpression(Keyword('asltoagl'), Array, Array),
    UnaryExpression(Keyword('asltoatl'), Array, Array),
    UnaryExpression(Keyword('assert'), Boolean, Boolean),
    UnaryExpression(Keyword('assignedcargo'), Object, Array),
    UnaryExpression(Keyword('assignedcommander'), Object, Object),
    UnaryExpression(Keyword('assigneddriver'), Object, Object),
    UnaryExpression(Keyword('assignedgunner'), Object, Object),
    UnaryExpression(Keyword('assigneditems'), Object, Array),
    UnaryExpression(Keyword('assignedtarget'), Object, Object),
    UnaryExpression(Keyword('assignedteam'), Object, String),
    UnaryExpression(Keyword('assignedvehicle'), Object, Object),
    UnaryExpression(Keyword('assignedvehiclerole'), Object, Array),
    UnaryExpression(Keyword('atan'), Number, Number),
    UnaryExpression(Keyword('atg'), Number, Number),
    UnaryExpression(Keyword('atltoasl'), Array, Array),
    UnaryExpression(Keyword('attachedobject'), Location, Object),
    UnaryExpression(Keyword('attachedobjects'), Object, Array),
    UnaryExpression(Keyword('attachedto'), Object, Object),
    UnaryExpression(Keyword('attackenabled'), Object, Boolean),
    UnaryExpression(Keyword('attackenabled'), Group, Boolean),
    UnaryExpression(Keyword('backpack'), Object, String),
    UnaryExpression(Keyword('backpackcargo'), Object, Array),
    UnaryExpression(Keyword('backpackcontainer'), Object, Object),
    UnaryExpression(Keyword('backpackitems'), Object, Array),
    UnaryExpression(Keyword('backpackmagazines'), Object, Array),
    UnaryExpression(Keyword('behaviour'), Object, String),
    UnaryExpression(Keyword('binocular'), Object, String),
    UnaryExpression(Keyword('boundingbox'), Object, Array),
    UnaryExpression(Keyword('boundingboxreal'), Object, Array),
    UnaryExpression(Keyword('boundingcenter'), Object, Array),
    UnaryExpression(Keyword('breakout'), String, Nothing),
    UnaryExpression(Keyword('breakto'), String, Nothing),
    UnaryExpression(Keyword('buldozer_enableroaddiag'), Boolean, Nothing),
    UnaryExpression(Keyword('buldozer_loadnewroads'), String, Boolean),
    UnaryExpression(Keyword('buttonaction'), Control, String),
    UnaryExpression(Keyword('buttonaction'), Number, String),
    UnaryExpression(Keyword('buttonsetaction'), Array, Nothing),
    UnaryExpression(Keyword('calculateplayervisibilitybyfriendly'), Boolean, Nothing),
    UnaryExpression(Keyword('call'), Code, Anything),
    UnaryExpression(Keyword('camcommitted'), Object, Boolean),
    UnaryExpression(Keyword('camdestroy'), Object, Nothing),
    UnaryExpression(Keyword('cameraeffectenablehud'), Boolean, Nothing),
    UnaryExpression(Keyword('camerainterest'), Object, Number),
    UnaryExpression(Keyword('campreloaded'), Object, Boolean),
    UnaryExpression(Keyword('camtarget'), Object, Object),
    UnaryExpression(Keyword('camusenvg'), Boolean, Nothing),
    UnaryExpression(Keyword('cancelsimpletaskdestination'), Task, Nothing),
    UnaryExpression(Keyword('canfire'), Object, Boolean),
    UnaryExpression(Keyword('canmove'), Object, Boolean),
    UnaryExpression(Keyword('canstand'), Object, Boolean),
    UnaryExpression(Keyword('cantriggerdynamicsimulation'), Object, Boolean),
    UnaryExpression(Keyword('canunloadincombat'), Object, Boolean),
    UnaryExpression(Keyword('captive'), Object, Boolean),
    UnaryExpression(Keyword('captivenum'), Object, Number),
    UnaryExpression(Keyword('case'), Type, SwitchType),
    UnaryExpression(Keyword('cbchecked'), Control, Boolean),
    UnaryExpression(Keyword('ceil'), Number, Number),
    UnaryExpression(Keyword('channelenabled'), Number, Array),
    UnaryExpression(Keyword('checkaifeature'), String, Boolean),
    UnaryExpression(Keyword('classname'), Location, String),
    UnaryExpression(Keyword('clear3deninventory'), Array, Nothing),
    UnaryExpression(Keyword('clearallitemsfrombackpack'), Object, Nothing),
    UnaryExpression(Keyword('clearbackpackcargo'), Object, Nothing),
    UnaryExpression(Keyword('clearbackpackcargoglobal'), Object, Nothing),
    UnaryExpression(Keyword('cleargroupicons'), Group, Nothing),
    UnaryExpression(Keyword('clearitemcargo'), Object, Nothing),
    UnaryExpression(Keyword('clearitemcargoglobal'), Object, Nothing),
    UnaryExpression(Keyword('clearmagazinecargo'), Object, Nothing),
    UnaryExpression(Keyword('clearmagazinecargoglobal'), Object, Nothing),
    UnaryExpression(Keyword('clearoverlay'), Control, Nothing),
    UnaryExpression(Keyword('clearweaponcargo'), Object, Nothing),
    UnaryExpression(Keyword('clearweaponcargoglobal'), Object, Nothing),
    UnaryExpression(Keyword('closedialog'), Number, Nothing),
    UnaryExpression(Keyword('closeoverlay'), Control, Nothing),
    UnaryExpression(Keyword('collapseobjecttree'), Control, Nothing),
    UnaryExpression(Keyword('collect3denhistory'), Code, Nothing),
    UnaryExpression(Keyword('collectivertd'), Object, Number),
    UnaryExpression(Keyword('combatmode'), Object, String),
    UnaryExpression(Keyword('combatmode'), Group, String),
    UnaryExpression(Keyword('commander'), Object, Object),
    UnaryExpression(Keyword('commandgetout'), Object, Nothing),
    UnaryExpression(Keyword('commandgetout'), Array, Nothing),
    UnaryExpression(Keyword('commandstop'), Object, Nothing),
    UnaryExpression(Keyword('commandstop'), Array, Nothing),
    UnaryExpression(Keyword('comment'), String, Nothing),
    UnaryExpression(Keyword('commitoverlay'), Control, Nothing),
    UnaryExpression(Keyword('compile'), String, Code),
    UnaryExpression(Keyword('compilefinal'), String, Code),
    UnaryExpression(Keyword('completedfsm'), Number, Boolean),
    UnaryExpression(Keyword('composetext'), Array, String),
    UnaryExpression(Keyword('confighierarchy'), Config, Array),
    UnaryExpression(Keyword('configname'), Config, String),
    UnaryExpression(Keyword('configproperties'), Array, Array),
    UnaryExpression(Keyword('configsourceaddonlist'), Config, Array),
    UnaryExpression(Keyword('configsourcemod'), Config, String),
    UnaryExpression(Keyword('configsourcemodlist'), Config, Array),
    UnaryExpression(Keyword('copytoclipboard'), String, Nothing),
    UnaryExpression(Keyword('cos'), Number, Number),
    UnaryExpression(Keyword('count'), Array, Number),
    UnaryExpression(Keyword('count'), String, Number),
    UnaryExpression(Keyword('count'), Config, Number),
    UnaryExpression(Keyword('create3dencomposition'), Array, Array),
    UnaryExpression(Keyword('create3denentity'), Array, Anything),
    UnaryExpression(Keyword('createagent'), Array, Object),
    UnaryExpression(Keyword('createcenter'), Side, Side),
    UnaryExpression(Keyword('createdialog'), String, Boolean),
    UnaryExpression(Keyword('creatediarylink'), Array, String),
    UnaryExpression(Keyword('creategeardialog'), Array, Nothing),
    UnaryExpression(Keyword('creategroup'), Side, Group),
    UnaryExpression(Keyword('creategroup'), Array, Group),
    UnaryExpression(Keyword('createguardedpoint'), Array, Nothing),
    UnaryExpression(Keyword('createlocation'), Array, Location),
    UnaryExpression(Keyword('createmarker'), Array, String),
    UnaryExpression(Keyword('createmarkerlocal'), Array, String),
    UnaryExpression(Keyword('createmine'), Array, Object),
    UnaryExpression(Keyword('createsimpleobject'), Array, Object),
    UnaryExpression(Keyword('createsoundsource'), Array, Object),
    UnaryExpression(Keyword('createteam'), Array, TeamMember),
    UnaryExpression(Keyword('createtrigger'), Array, Object),
    UnaryExpression(Keyword('createvehicle'), Array, Object),
    UnaryExpression(Keyword('createvehiclecrew'), Object, Nothing),
    UnaryExpression(Keyword('crew'), Object, Array),
    UnaryExpression(Keyword('ctaddheader'), Control, Array),
    UnaryExpression(Keyword('ctaddrow'), Control, Array),
    UnaryExpression(Keyword('ctclear'), Control, Nothing),
    UnaryExpression(Keyword('ctcursel'), Control, Number),
    UnaryExpression(Keyword('ctheadercount'), Control, Number),
    UnaryExpression(Keyword('ctrlactivate'), Control, Nothing),
    UnaryExpression(Keyword('ctrlangle'), Control, Array),
    UnaryExpression(Keyword('ctrlautoscrolldelay'), Control, Number),
    UnaryExpression(Keyword('ctrlautoscrollrewind'), Control, Boolean),
    UnaryExpression(Keyword('ctrlautoscrollspeed'), Control, Number),
    UnaryExpression(Keyword('ctrlchecked'), Control, Boolean),
    UnaryExpression(Keyword('ctrlclassname'), Control, String),
    UnaryExpression(Keyword('ctrlcommitted'), Control, Boolean),
    UnaryExpression(Keyword('ctrldelete'), Control, Boolean),
    UnaryExpression(Keyword('ctrlenable'), Array, Nothing),
    UnaryExpression(Keyword('ctrlenabled'), Control, Boolean),
    UnaryExpression(Keyword('ctrlenabled'), Number, Boolean),
    UnaryExpression(Keyword('ctrlfade'), Control, Number),
    UnaryExpression(Keyword('ctrlhtmlloaded'), Control, Boolean),
    UnaryExpression(Keyword('ctrlidc'), Control, Number),
    UnaryExpression(Keyword('ctrlidd'), Display, Number),
    UnaryExpression(Keyword('ctrlmapanimclear'), Control, Nothing),
    UnaryExpression(Keyword('ctrlmapanimcommit'), Control, Nothing),
    UnaryExpression(Keyword('ctrlmapanimdone'), Control, Boolean),
    UnaryExpression(Keyword('ctrlmapmouseover'), Control, Array),
    UnaryExpression(Keyword('ctrlmapscale'), Control, Number),
    UnaryExpression(Keyword('ctrlmodel'), Control, String),
    UnaryExpression(Keyword('ctrlmodeldirandup'), Control, Array),
    UnaryExpression(Keyword('ctrlmodelscale'), Control, Number),
    UnaryExpression(Keyword('ctrlparent'), Control, Display),
    UnaryExpression(Keyword('ctrlparentcontrolsgroup'), Control, Control),
    UnaryExpression(Keyword('ctrlposition'), Control, Array),
    UnaryExpression(Keyword('ctrlscale'), Control, Number),
    UnaryExpression(Keyword('ctrlsetfocus'), Control, Nothing),
    UnaryExpression(Keyword('ctrlsettext'), Array, Nothing),
    UnaryExpression(Keyword('ctrlshow'), Array, Nothing),
    UnaryExpression(Keyword('ctrlshown'), Control, Boolean),
    UnaryExpression(Keyword('ctrltext'), Control, String),
    UnaryExpression(Keyword('ctrltext'), Number, String),
    UnaryExpression(Keyword('ctrltextheight'), Control, Number),
    UnaryExpression(Keyword('ctrltextsecondary'), Control, String),
    UnaryExpression(Keyword('ctrltextwidth'), Control, Number),
    UnaryExpression(Keyword('ctrltype'), Control, Number),
    UnaryExpression(Keyword('ctrlvisible'), Number, Boolean),
    UnaryExpression(Keyword('ctrowcount'), Control, Number),
    UnaryExpression(Keyword('curatoraddons'), Object, Array),
    UnaryExpression(Keyword('curatorcameraarea'), Object, Array),
    UnaryExpression(Keyword('curatorcameraareaceiling'), Object, Number),
    UnaryExpression(Keyword('curatoreditableobjects'), Object, Array),
    UnaryExpression(Keyword('curatoreditingarea'), Object, Array),
    UnaryExpression(Keyword('curatoreditingareatype'), Object, Boolean),
    UnaryExpression(Keyword('curatorpoints'), Object, Number),
    UnaryExpression(Keyword('curatorregisteredobjects'), Object, Array),
    UnaryExpression(Keyword('curatorwaypointcost'), Object, Number),
    UnaryExpression(Keyword('currentcommand'), Object, String),
    UnaryExpression(Keyword('currentmagazine'), Object, String),
    UnaryExpression(Keyword('currentmagazinedetail'), Object, Array),
    UnaryExpression(Keyword('currentmuzzle'), Object, String),
    UnaryExpression(Keyword('currenttask'), Object, Task),
    UnaryExpression(Keyword('currenttasks'), TeamMember, Array),
    UnaryExpression(Keyword('currentthrowable'), Object, Array),
    UnaryExpression(Keyword('currentvisionmode'), Object, Number),
    UnaryExpression(Keyword('currentwaypoint'), Group, Number),
    UnaryExpression(Keyword('currentweapon'), Object, String),
    UnaryExpression(Keyword('currentweaponmode'), Object, String),
    UnaryExpression(Keyword('currentzeroing'), Object, Number),
    UnaryExpression(Keyword('cutobj'), Array, Nothing),
    UnaryExpression(Keyword('cutrsc'), Array, Nothing),
    UnaryExpression(Keyword('cuttext'), Array, Nothing),
    UnaryExpression(Keyword('damage'), Object, Number),
    UnaryExpression(Keyword('datetonumber'), Array, Number),
    UnaryExpression(Keyword('deactivatekey'), String, Nothing),
    UnaryExpression(Keyword('debriefingtext'), String, Number),
    UnaryExpression(Keyword('debuglog'), Type, Nothing),
    UnaryExpression(Keyword('default'), Code, Nothing),
    UnaryExpression(Keyword('deg'), Number, Number),
    UnaryExpression(Keyword('delete3denentities'), Array, Nothing),
    UnaryExpression(Keyword('deletecenter'), Side, Nothing),
    UnaryExpression(Keyword('deletecollection'), Object, Nothing),
    UnaryExpression(Keyword('deletegroup'), Group, Nothing),
    UnaryExpression(Keyword('deleteidentity'), String, Boolean),
    UnaryExpression(Keyword('deletelocation'), Location, Nothing),
    UnaryExpression(Keyword('deletemarker'), String, Nothing),
    UnaryExpression(Keyword('deletemarkerlocal'), String, Nothing),
    UnaryExpression(Keyword('deletesite'), Object, Nothing),
    UnaryExpression(Keyword('deletestatus'), String, Boolean),
    UnaryExpression(Keyword('deleteteam'), TeamMember, Nothing),
    UnaryExpression(Keyword('deletevehicle'), Object, Nothing),
    UnaryExpression(Keyword('deletewaypoint'), Array, Nothing),
    UnaryExpression(Keyword('detach'), Object, Nothing),
    UnaryExpression(Keyword('detectedmines'), Side, Array),
    UnaryExpression(Keyword('diag_captureframe'), Number, Nothing),
    UnaryExpression(Keyword('diag_captureframetofile'), Number, Nothing),
    UnaryExpression(Keyword('diag_captureslowframe'), Array, Nothing),
    UnaryExpression(Keyword('diag_codeperformance'), Array, Array),
    UnaryExpression(Keyword('diag_dynamicsimulationend'), String, Nothing),
    UnaryExpression(Keyword('diag_lightnewload'), String, Nothing),
    UnaryExpression(Keyword('diag_log'), Type, Nothing),
    UnaryExpression(Keyword('diag_logslowframe'), Array, Nothing),
    UnaryExpression(Keyword('diag_setlightnew'), String, Nothing),
    UnaryExpression(Keyword('didjipowner'), Object, Boolean),
    UnaryExpression(Keyword('difficultyenabled'), String, Boolean),
    UnaryExpression(Keyword('difficultyoption'), String, Number),
    UnaryExpression(Keyword('direction'), Object, Number),
    UnaryExpression(Keyword('direction'), Location, Number),
    UnaryExpression(Keyword('disablemapindicators'), Array, Nothing),
    UnaryExpression(Keyword('disableremotesensors'), Boolean, Nothing),
    UnaryExpression(Keyword('disableuserinput'), Boolean, Nothing),
    UnaryExpression(Keyword('displayparent'), Display, Display),
    UnaryExpression(Keyword('dissolveteam'), String, Nothing),
    UnaryExpression(Keyword('do3denaction'), String, Nothing),
    UnaryExpression(Keyword('dogetout'), Object, Nothing),
    UnaryExpression(Keyword('dogetout'), Array, Nothing),
    UnaryExpression(Keyword('dostop'), Object, Nothing),
    UnaryExpression(Keyword('dostop'), Array, Nothing),
    UnaryExpression(Keyword('drawicon3d'), Array, Nothing),
    UnaryExpression(Keyword('drawline3d'), Array, Nothing),
    UnaryExpression(Keyword('driver'), Object, Object),
    UnaryExpression(Keyword('drop'), Array, Nothing),
    UnaryExpression(Keyword('dynamicsimulationdistance'), String, Number),
    UnaryExpression(Keyword('dynamicsimulationdistancecoef'), String, Number),
    UnaryExpression(Keyword('dynamicsimulationenabled'), Object, Boolean),
    UnaryExpression(Keyword('dynamicsimulationenabled'), Group, Boolean),
    UnaryExpression(Keyword('echo'), String, Nothing),
    UnaryExpression(Keyword('edit3denmissionattributes'), String, Nothing),
    UnaryExpression(Keyword('effectivecommander'), Object, Object),
    UnaryExpression(Keyword('enableaudiofeature'), Array, Boolean),
    UnaryExpression(Keyword('enablecamshake'), Boolean, Nothing),
    UnaryExpression(Keyword('enablecaustics'), Boolean, Nothing),
    UnaryExpression(Keyword('enabledebriefingstats'), Array, Nothing),
    UnaryExpression(Keyword('enablediaglegend'), Boolean, Nothing),
    UnaryExpression(Keyword('enabledynamicsimulationsystem'), Boolean, Nothing),
    UnaryExpression(Keyword('enableengineartillery'), Boolean, Nothing),
    UnaryExpression(Keyword('enableenvironment'), Boolean, Nothing),
    UnaryExpression(Keyword('enableenvironment'), Array, Nothing),
    UnaryExpression(Keyword('enableradio'), Boolean, Nothing),
    UnaryExpression(Keyword('enablesatnormalondetail'), Boolean, Nothing),
    UnaryExpression(Keyword('enablesaving'), Boolean, Nothing),
    UnaryExpression(Keyword('enablesaving'), Array, Nothing),
    UnaryExpression(Keyword('enablesentences'), Boolean, Nothing),
    UnaryExpression(Keyword('enablestressdamage'), Boolean, Nothing),
    UnaryExpression(Keyword('enableteamswitch'), Boolean, Nothing),
    UnaryExpression(Keyword('enabletraffic'), Boolean, Nothing),
    UnaryExpression(Keyword('enableweapondisassembly'), Boolean, Nothing),
    UnaryExpression(Keyword('endmission'), String, Nothing),
    UnaryExpression(Keyword('enginesisonrtd'), Object, Array),
    UnaryExpression(Keyword('enginespowerrtd'), Object, Array),
    UnaryExpression(Keyword('enginesrpmrtd'), Object, Array),
    UnaryExpression(Keyword('enginestorquertd'), Object, Array),
    UnaryExpression(Keyword('entities'), String, Array),
    UnaryExpression(Keyword('entities'), Array, Array),
    UnaryExpression(Keyword('estimatedtimeleft'), Number, Nothing),
    UnaryExpression(Keyword('everybackpack'), Object, Array),
    UnaryExpression(Keyword('everycontainer'), Object, Array),
    UnaryExpression(Keyword('execfsm'), String, Number),
    UnaryExpression(Keyword('execvm'), String, Script),
    UnaryExpression(Keyword('exp'), Number, Number),
    UnaryExpression(Keyword('expecteddestination'), Object, Array),
    UnaryExpression(Keyword('exportjipmessages'), String, Nothing),
    UnaryExpression(Keyword('eyedirection'), Object, Array),
    UnaryExpression(Keyword('eyepos'), Object, Array),
    UnaryExpression(Keyword('face'), Object, String),
    UnaryExpression(Keyword('faction'), Object, String),
    UnaryExpression(Keyword('failmission'), String, Nothing),
    UnaryExpression(Keyword('fillweaponsfrompool'), Object, Nothing),
    UnaryExpression(Keyword('finddisplay'), Number, Display),
    UnaryExpression(Keyword('finite'), Number, Boolean),
    UnaryExpression(Keyword('firstbackpack'), Object, Object),
    UnaryExpression(Keyword('flag'), Object, Object),
    UnaryExpression(Keyword('flaganimationphase'), Object, Number),
    UnaryExpression(Keyword('flagowner'), Object, Object),
    UnaryExpression(Keyword('flagside'), Object, Side),
    UnaryExpression(Keyword('flagtexture'), Object, String),
    UnaryExpression(Keyword('fleeing'), Object, Boolean),
    UnaryExpression(Keyword('floor'), Number, Number),
    UnaryExpression(Keyword('for'), String, ForType),
    UnaryExpression(Keyword('for'), Array, ForType),
    UnaryExpression(Keyword('forceatpositionrtd'), Array, Array),
    UnaryExpression(Keyword('forcegeneratorrtd'), Number, Array),
    UnaryExpression(Keyword('forcemap'), Boolean, Nothing),
    UnaryExpression(Keyword('forcerespawn'), Object, Nothing),
    UnaryExpression(Keyword('format'), Array, String),
    UnaryExpression(Keyword('formation'), Object, String),
    UnaryExpression(Keyword('formation'), Group, String),
    UnaryExpression(Keyword('formation'), TeamMember, String),
    UnaryExpression(Keyword('formationdirection'), Object, Number),
    UnaryExpression(Keyword('formationleader'), Object, Object),
    UnaryExpression(Keyword('formationmembers'), Object, Array),
    UnaryExpression(Keyword('formationposition'), Object, Array),
    UnaryExpression(Keyword('formationtask'), Object, String),
    UnaryExpression(Keyword('formattext'), Array, String),
    UnaryExpression(Keyword('formleader'), Object, Object),
    UnaryExpression(Keyword('fromeditor'), TeamMember, Boolean),
    UnaryExpression(Keyword('fuel'), Object, Number),
    UnaryExpression(Keyword('fullcrew'), Object, Array),
    UnaryExpression(Keyword('fullcrew'), Array, Array),
    UnaryExpression(Keyword('gearidcammocount'), Number, Number),
    UnaryExpression(Keyword('gearslotammocount'), Control, Number),
    UnaryExpression(Keyword('gearslotdata'), Control, String),
    UnaryExpression(Keyword('get3denactionstate'), String, Number),
    UnaryExpression(Keyword('get3denconnections'), Type, Array),
    UnaryExpression(Keyword('get3denentity'), Number, Anything),
    UnaryExpression(Keyword('get3denentityid'), Type, Number),
    UnaryExpression(Keyword('get3dengrid'), String, Nothing),
    UnaryExpression(Keyword('get3denlayerentities'), Number, Array),
    UnaryExpression(Keyword('get3denselected'), String, Array),
    UnaryExpression(Keyword('getaimingcoef'), Object, Number),
    UnaryExpression(Keyword('getallenvsoundcontrollers'), Array, Array),
    UnaryExpression(Keyword('getallhitpointsdamage'), Object, Array),
    UnaryExpression(Keyword('getallownedmines'), Object, Array),
    UnaryExpression(Keyword('getallsoundcontrollers'), Object, Array),
    UnaryExpression(Keyword('getammocargo'), Object, Number),
    UnaryExpression(Keyword('getanimaimprecision'), Object, Number),
    UnaryExpression(Keyword('getanimspeedcoef'), Object, Number),
    UnaryExpression(Keyword('getarray'), Config, Array),
    UnaryExpression(Keyword('getartilleryammo'), Array, Array),
    UnaryExpression(Keyword('getassignedcuratorlogic'), Object, Object),
    UnaryExpression(Keyword('getassignedcuratorunit'), Object, Object),
    UnaryExpression(Keyword('getbackpackcargo'), Object, Array),
    UnaryExpression(Keyword('getbleedingremaining'), Object, Number),
    UnaryExpression(Keyword('getburningvalue'), Object, Number),
    UnaryExpression(Keyword('getcameraviewdirection'), Object, Array),
    UnaryExpression(Keyword('getcenterofmass'), Object, Array),
    UnaryExpression(Keyword('getconnecteduav'), Object, Object),
    UnaryExpression(Keyword('getcontainermaxload'), String, Number),
    UnaryExpression(Keyword('getcustomaimcoef'), Object, Number),
    UnaryExpression(Keyword('getcustomsoundcontroller'), Array, Number),
    UnaryExpression(Keyword('getcustomsoundcontrollercount'), Object, Number),
    UnaryExpression(Keyword('getdammage'), Object, Number),
    UnaryExpression(Keyword('getdescription'), Object, Array),
    UnaryExpression(Keyword('getdir'), Object, Number),
    UnaryExpression(Keyword('getdirvisual'), Object, Number),
    UnaryExpression(Keyword('getdlcassetsusagebyname'), String, Array),
    UnaryExpression(Keyword('getdlcs'), Number, Array),
    UnaryExpression(Keyword('getdlcusagetime'), Number, Number),
    UnaryExpression(Keyword('geteditorcamera'), Control, Object),
    UnaryExpression(Keyword('geteditormode'), Control, String),
    UnaryExpression(Keyword('getenginetargetrpmrtd'), Object, Array),
    UnaryExpression(Keyword('getfatigue'), Object, Number),
    UnaryExpression(Keyword('getfieldmanualstartpage'), Display, Array),
    UnaryExpression(Keyword('getforcedflagtexture'), Object, String),
    UnaryExpression(Keyword('getfuelcargo'), Object, Number),
    UnaryExpression(Keyword('getgroupiconparams'), Group, Array),
    UnaryExpression(Keyword('getgroupicons'), Group, Array),
    UnaryExpression(Keyword('getitemcargo'), Object, Array),
    UnaryExpression(Keyword('getmagazinecargo'), Object, Array),
    UnaryExpression(Keyword('getmarkercolor'), String, String),
    UnaryExpression(Keyword('getmarkerpos'), String, Array),
    UnaryExpression(Keyword('getmarkersize'), String, Array),
    UnaryExpression(Keyword('getmarkertype'), String, String),
    UnaryExpression(Keyword('getmass'), Object, Number),
    UnaryExpression(Keyword('getmissionconfig'), String, Config),
    UnaryExpression(Keyword('getmissionconfigvalue'), String, Anything),
    UnaryExpression(Keyword('getmissionconfigvalue'), Array, Anything),
    UnaryExpression(Keyword('getmissionlayerentities'), String, Array),
    UnaryExpression(Keyword('getmissionlayerentities'), Number, Array),
    UnaryExpression(Keyword('getmodelinfo'), Object, Array),
    UnaryExpression(Keyword('getnumber'), Config, Number),
    UnaryExpression(Keyword('getobjectdlc'), Object, Number),
    UnaryExpression(Keyword('getobjectmaterials'), Object, Array),
    UnaryExpression(Keyword('getobjecttextures'), Object, Array),
    UnaryExpression(Keyword('getobjecttype'), Object, Number),
    UnaryExpression(Keyword('getoxygenremaining'), Object, Number),
    UnaryExpression(Keyword('getpersonuseddlcs'), Object, Array),
    UnaryExpression(Keyword('getpilotcameradirection'), Object, Array),
    UnaryExpression(Keyword('getpilotcameraposition'), Object, Array),
    UnaryExpression(Keyword('getpilotcamerarotation'), Object, Array),
    UnaryExpression(Keyword('getpilotcameratarget'), Object, Array),
    UnaryExpression(Keyword('getplatenumber'), Object, String),
    UnaryExpression(Keyword('getplayerchannel'), Object, Number),
    UnaryExpression(Keyword('getplayerscores'), Object, Array),
    UnaryExpression(Keyword('getplayeruid'), Object, String),
    UnaryExpression(Keyword('getpos'), Object, Array),
    UnaryExpression(Keyword('getpos'), Location, Array),
    UnaryExpression(Keyword('getposasl'), Object, Array),
    UnaryExpression(Keyword('getposaslvisual'), Object, Array),
    UnaryExpression(Keyword('getposaslw'), Object, Array),
    UnaryExpression(Keyword('getposatl'), Object, Array),
    UnaryExpression(Keyword('getposatlvisual'), Object, Array),
    UnaryExpression(Keyword('getposvisual'), Object, Array),
    UnaryExpression(Keyword('getposworld'), Object, Array),
    UnaryExpression(Keyword('getpylonmagazines'), Object, Array),
    UnaryExpression(Keyword('getrepaircargo'), Object, Number),
    UnaryExpression(Keyword('getrotorbrakertd'), Object, Number),
    UnaryExpression(Keyword('getshotparents'), Object, Array),
    UnaryExpression(Keyword('getslingload'), Object, Object),
    UnaryExpression(Keyword('getstamina'), Object, Number),
    UnaryExpression(Keyword('getstatvalue'), String, Number),
    UnaryExpression(Keyword('getsuppression'), Object, Number),
    UnaryExpression(Keyword('getterrainheightasl'), Array, Number),
    UnaryExpression(Keyword('gettext'), Config, String),
    UnaryExpression(Keyword('gettrimoffsetrtd'), Object, Array),
    UnaryExpression(Keyword('getunitloadout'), Object, Array),
    UnaryExpression(Keyword('getunitloadout'), Array, Array),
    UnaryExpression(Keyword('getunitloadout'), String, Array),
    UnaryExpression(Keyword('getunitloadout'), Config, Array),
    UnaryExpression(Keyword('getusermfdtext'), Object, Array),
    UnaryExpression(Keyword('getusermfdvalue'), Object, Array),
    UnaryExpression(Keyword('getvehiclecargo'), Object, Array),
    UnaryExpression(Keyword('getweaponcargo'), Object, Array),
    UnaryExpression(Keyword('getweaponsway'), Object, Number),
    UnaryExpression(Keyword('getwingsorientationrtd'), Object, Number),
    UnaryExpression(Keyword('getwingspositionrtd'), Object, Number),
    UnaryExpression(Keyword('getwppos'), Array, Array),
    UnaryExpression(Keyword('goggles'), Object, String),
    UnaryExpression(Keyword('goto'), String, Nothing),
    UnaryExpression(Keyword('group'), Object, Group),
    UnaryExpression(Keyword('groupfromnetid'), String, Group),
    UnaryExpression(Keyword('groupid'), Group, String),
    UnaryExpression(Keyword('groupowner'), Group, Number),
    UnaryExpression(Keyword('groupselectedunits'), Object, Array),
    UnaryExpression(Keyword('gunner'), Object, Object),
    UnaryExpression(Keyword('handgunitems'), Object, Array),
    UnaryExpression(Keyword('handgunmagazine'), Object, Array),
    UnaryExpression(Keyword('handgunweapon'), Object, String),
    UnaryExpression(Keyword('handshit'), Object, Number),
    UnaryExpression(Keyword('haspilotcamera'), Object, Boolean),
    UnaryExpression(Keyword('hcallgroups'), Object, Array),
    UnaryExpression(Keyword('hcleader'), Group, Object),
    UnaryExpression(Keyword('hcremoveallgroups'), Object, Nothing),
    UnaryExpression(Keyword('hcselected'), Object, Array),
    UnaryExpression(Keyword('hcshowbar'), Boolean, Nothing),
    UnaryExpression(Keyword('headgear'), Object, String),
    UnaryExpression(Keyword('hidebody'), Object, Nothing),
    UnaryExpression(Keyword('hideobject'), Object, Nothing),
    UnaryExpression(Keyword('hideobjectglobal'), Object, Nothing),
    UnaryExpression(Keyword('hint'), String, Nothing),
    UnaryExpression(Keyword('hint'), String, Nothing),
    UnaryExpression(Keyword('hintc'), String, Nothing),
    UnaryExpression(Keyword('hintcadet'), String, Nothing),
    UnaryExpression(Keyword('hintcadet'), String, Nothing),
    UnaryExpression(Keyword('hintsilent'), String, Nothing),
    UnaryExpression(Keyword('hintsilent'), String, Nothing),
    UnaryExpression(Keyword('hmd'), Object, String),
    UnaryExpression(Keyword('hostmission'), Array, Nothing),
    UnaryExpression(Keyword('if'), Boolean, IfType),
    UnaryExpression(Keyword('image'), String, String),
    UnaryExpression(Keyword('importallgroups'), Control, Nothing),
    UnaryExpression(Keyword('importance'), Location, Number),
    UnaryExpression(Keyword('incapacitatedstate'), Object, String),
    UnaryExpression(Keyword('inflamed'), Object, Boolean),
    UnaryExpression(Keyword('infopanel'), String, Array),
    UnaryExpression(Keyword('infopanels'), Object, Array),
    UnaryExpression(Keyword('infopanels'), Array, Array),
    UnaryExpression(Keyword('ingameuiseteventhandler'), Array, Nothing),
    UnaryExpression(Keyword('inheritsfrom'), Config, Config),
    UnaryExpression(Keyword('inputaction'), String, Number),
    UnaryExpression(Keyword('isabletobreathe'), Object, Boolean),
    UnaryExpression(Keyword('isagent'), TeamMember, Boolean),
    UnaryExpression(Keyword('isaimprecisionenabled'), Object, Boolean),
    UnaryExpression(Keyword('isarray'), Config, Boolean),
    UnaryExpression(Keyword('isautohoveron'), Object, Boolean),
    UnaryExpression(Keyword('isautonomous'), Object, Boolean),
    UnaryExpression(Keyword('isautostartupenabledrtd'), Object, Array),
    UnaryExpression(Keyword('isautotrimonrtd'), Object, Boolean),
    UnaryExpression(Keyword('isbleeding'), Object, Boolean),
    UnaryExpression(Keyword('isburning'), Object, Boolean),
    UnaryExpression(Keyword('isclass'), Config, Boolean),
    UnaryExpression(Keyword('iscollisionlighton'), Object, Boolean),
    UnaryExpression(Keyword('iscopilotenabled'), Object, Boolean),
    UnaryExpression(Keyword('isdamageallowed'), Object, Boolean),
    UnaryExpression(Keyword('isdlcavailable'), Number, Boolean),
    UnaryExpression(Keyword('isengineon'), Object, Boolean),
    UnaryExpression(Keyword('isforcedwalk'), Object, Boolean),
    UnaryExpression(Keyword('isformationleader'), Object, Boolean),
    UnaryExpression(Keyword('isgroupdeletedwhenempty'), Group, Boolean),
    UnaryExpression(Keyword('ishidden'), Object, Boolean),
    UnaryExpression(Keyword('isinremainscollector'), Object, Boolean),
    UnaryExpression(Keyword('iskeyactive'), String, Boolean),
    UnaryExpression(Keyword('islaseron'), Object, Boolean),
    UnaryExpression(Keyword('islighton'), Object, Boolean),
    UnaryExpression(Keyword('islocalized'), String, Boolean),
    UnaryExpression(Keyword('ismanualfire'), Object, Boolean),
    UnaryExpression(Keyword('ismarkedforcollection'), Object, Boolean),
    UnaryExpression(Keyword('isnil'), Code, Boolean),
    UnaryExpression(Keyword('isnil'), String, Boolean),
    UnaryExpression(Keyword('isnull'), Object, Boolean),
    UnaryExpression(Keyword('isnull'), Group, Boolean),
    UnaryExpression(Keyword('isnull'), Script, Boolean),
    UnaryExpression(Keyword('isnull'), Config, Boolean),
    UnaryExpression(Keyword('isnull'), Display, Boolean),
    UnaryExpression(Keyword('isnull'), Control, Boolean),
    UnaryExpression(Keyword('isnull'), NetObject, Boolean),
    UnaryExpression(Keyword('isnull'), Task, Boolean),
    UnaryExpression(Keyword('isnull'), Location, Boolean),
    UnaryExpression(Keyword('isnumber'), Config, Boolean),
    UnaryExpression(Keyword('isobjecthidden'), Object, Boolean),
    UnaryExpression(Keyword('isobjectrtd'), Object, Boolean),
    UnaryExpression(Keyword('isonroad'), Object, Boolean),
    UnaryExpression(Keyword('isonroad'), Array, Boolean),
    UnaryExpression(Keyword('isplayer'), Object, Boolean),
    UnaryExpression(Keyword('isrealtime'), Control, Boolean),
    UnaryExpression(Keyword('isshowing3dicons'), Control, Boolean),
    UnaryExpression(Keyword('issimpleobject'), Object, Boolean),
    UnaryExpression(Keyword('issprintallowed'), Object, Boolean),
    UnaryExpression(Keyword('isstaminaenabled'), Object, Boolean),
    UnaryExpression(Keyword('istext'), Config, Boolean),
    UnaryExpression(Keyword('istouchingground'), Object, Boolean),
    UnaryExpression(Keyword('isturnedout'), Object, Boolean),
    UnaryExpression(Keyword('isuavconnected'), Object, Boolean),
    UnaryExpression(Keyword('isvehiclecargo'), Object, Object),
    UnaryExpression(Keyword('isvehicleradaron'), Object, Boolean),
    UnaryExpression(Keyword('iswalking'), Object, Boolean),
    UnaryExpression(Keyword('isweapondeployed'), Object, Boolean),
    UnaryExpression(Keyword('isweaponrested'), Object, Boolean),
    UnaryExpression(Keyword('itemcargo'), Object, Array),
    UnaryExpression(Keyword('items'), Object, Array),
    UnaryExpression(Keyword('itemswithmagazines'), Object, Array),
    UnaryExpression(Keyword('keyimage'), Number, String),
    UnaryExpression(Keyword('keyname'), Number, String),
    UnaryExpression(Keyword('landresult'), Object, String),
    UnaryExpression(Keyword('lasertarget'), Object, Object),
    UnaryExpression(Keyword('lbadd'), Array, Number),
    UnaryExpression(Keyword('lbclear'), Control, Nothing),
    UnaryExpression(Keyword('lbclear'), Number, Nothing),
    UnaryExpression(Keyword('lbcolor'), Array, Array),
    UnaryExpression(Keyword('lbcolorright'), Array, Array),
    UnaryExpression(Keyword('lbcursel'), Control, Number),
    UnaryExpression(Keyword('lbcursel'), Number, Number),
    UnaryExpression(Keyword('lbdata'), Array, String),
    UnaryExpression(Keyword('lbdelete'), Array, Nothing),
    UnaryExpression(Keyword('lbpicture'), Array, String),
    UnaryExpression(Keyword('lbpictureright'), Array, String),
    UnaryExpression(Keyword('lbselection'), Control, Array),
    UnaryExpression(Keyword('lbsetcolor'), Array, Nothing),
    UnaryExpression(Keyword('lbsetcolorright'), Array, Nothing),
    UnaryExpression(Keyword('lbsetcursel'), Array, Nothing),
    UnaryExpression(Keyword('lbsetdata'), Array, Nothing),
    UnaryExpression(Keyword('lbsetpicture'), Array, Nothing),
    UnaryExpression(Keyword('lbsetpicturecolor'), Array, Nothing),
    UnaryExpression(Keyword('lbsetpicturecolordisabled'), Array, Nothing),
    UnaryExpression(Keyword('lbsetpicturecolorselected'), Array, Nothing),
    UnaryExpression(Keyword('lbsetpictureright'), Array, Nothing),
    UnaryExpression(Keyword('lbsetselectcolor'), Array, Nothing),
    UnaryExpression(Keyword('lbsetselectcolorright'), Array, Nothing),
    UnaryExpression(Keyword('lbsettext'), Array, String),
    UnaryExpression(Keyword('lbsettooltip'), Array, Nothing),
    UnaryExpression(Keyword('lbsetvalue'), Array, Nothing),
    UnaryExpression(Keyword('lbsize'), Control, Number),
    UnaryExpression(Keyword('lbsize'), Number, Number),
    UnaryExpression(Keyword('lbsort'), Control, Nothing),
    UnaryExpression(Keyword('lbsort'), Array, Nothing),
    UnaryExpression(Keyword('lbsort'), Number, Nothing),
    UnaryExpression(Keyword('lbsortbyvalue'), Control, Nothing),
    UnaryExpression(Keyword('lbsortbyvalue'), Number, Nothing),
    UnaryExpression(Keyword('lbtext'), Array, String),
    UnaryExpression(Keyword('lbtextright'), Array, String),
    UnaryExpression(Keyword('lbvalue'), Array, Number),
    UnaryExpression(Keyword('leader'), Object, Object),
    UnaryExpression(Keyword('leader'), Group, Object),
    UnaryExpression(Keyword('leader'), TeamMember, TeamMember),
    UnaryExpression(Keyword('leaderboarddeinit'), String, Boolean),
    UnaryExpression(Keyword('leaderboardgetrows'), String, Array),
    UnaryExpression(Keyword('leaderboardinit'), String, Boolean),
    UnaryExpression(Keyword('leaderboardrequestrowsfriends'), String, Boolean),
    UnaryExpression(Keyword('leaderboardrequestrowsglobal'), Array, Boolean),
    UnaryExpression(Keyword('leaderboardrequestrowsglobalarounduser'), Array, Boolean),
    UnaryExpression(Keyword('leaderboardsrequestuploadscore'), Array, Boolean),
    UnaryExpression(Keyword('leaderboardsrequestuploadscorekeepbest'), Array, Boolean),
    UnaryExpression(Keyword('leaderboardstate'), String, Number),
    UnaryExpression(Keyword('lifestate'), Object, String),
    UnaryExpression(Keyword('lightdetachobject'), Object, Nothing),
    UnaryExpression(Keyword('lightison'), Object, String),
    UnaryExpression(Keyword('linearconversion'), Array, Number),
    UnaryExpression(Keyword('lineintersects'), Array, Boolean),
    UnaryExpression(Keyword('lineintersectsobjs'), Array, Array),
    UnaryExpression(Keyword('lineintersectssurfaces'), Array, Array),
    UnaryExpression(Keyword('lineintersectswith'), Array, Array),
    UnaryExpression(Keyword('list'), Object, Array),
    UnaryExpression(Keyword('listremotetargets'), Side, Array),
    UnaryExpression(Keyword('listvehiclesensors'), Object, Array),
    UnaryExpression(Keyword('ln'), Number, Number),
    UnaryExpression(Keyword('lnbaddarray'), Array, Number),
    UnaryExpression(Keyword('lnbaddcolumn'), Array, Number),
    UnaryExpression(Keyword('lnbaddrow'), Array, Number),
    UnaryExpression(Keyword('lnbclear'), Control, Nothing),
    UnaryExpression(Keyword('lnbclear'), Number, Nothing),
    UnaryExpression(Keyword('lnbcolor'), Array, Array),
    UnaryExpression(Keyword('lnbcolorright'), Array, Array),
    UnaryExpression(Keyword('lnbcurselrow'), Control, Number),
    UnaryExpression(Keyword('lnbcurselrow'), Number, Number),
    UnaryExpression(Keyword('lnbdata'), Array, String),
    UnaryExpression(Keyword('lnbdeletecolumn'), Array, Nothing),
    UnaryExpression(Keyword('lnbdeleterow'), Array, Nothing),
    UnaryExpression(Keyword('lnbgetcolumnsposition'), Control, Array),
    UnaryExpression(Keyword('lnbgetcolumnsposition'), Number, Array),
    UnaryExpression(Keyword('lnbpicture'), Array, String),
    UnaryExpression(Keyword('lnbpictureright'), Array, String),
UnaryExpression(Keyword('lnbsetcolor'), Array, Nothing), UnaryExpression(Keyword('lnbsetcolorright'), Array, Nothing), UnaryExpression(Keyword('lnbsetcolumnspos'), Array, Nothing), UnaryExpression(Keyword('lnbsetcurselrow'), Array, Nothing), UnaryExpression(Keyword('lnbsetdata'), Array, Nothing), UnaryExpression(Keyword('lnbsetpicture'), Array, Nothing), UnaryExpression(Keyword('lnbsetpicturecolor'), Array, Nothing), UnaryExpression(Keyword('lnbsetpicturecolorright'), Array, Nothing), UnaryExpression(Keyword('lnbsetpicturecolorselected'), Array, Nothing), UnaryExpression(Keyword('lnbsetpicturecolorselectedright'), Array, Nothing), UnaryExpression(Keyword('lnbsetpictureright'), Array, Nothing), UnaryExpression(Keyword('lnbsettext'), Array, Nothing), UnaryExpression(Keyword('lnbsettextright'), Array, Nothing), UnaryExpression(Keyword('lnbsetvalue'), Array, Nothing), UnaryExpression(Keyword('lnbsize'), Control, Array), UnaryExpression(Keyword('lnbsize'), Number, Array), UnaryExpression(Keyword('lnbsort'), Array, Nothing), UnaryExpression(Keyword('lnbsortbyvalue'), Array, Nothing), UnaryExpression(Keyword('lnbtext'), Array, String), UnaryExpression(Keyword('lnbtextright'), Array, String), UnaryExpression(Keyword('lnbvalue'), Array, Number), UnaryExpression(Keyword('load'), Object, Number), UnaryExpression(Keyword('loadabs'), Object, Number), UnaryExpression(Keyword('loadbackpack'), Object, Number), UnaryExpression(Keyword('loadfile'), String, String), UnaryExpression(Keyword('loaduniform'), Object, Number), UnaryExpression(Keyword('loadvest'), Object, Number), UnaryExpression(Keyword('local'), Object, Boolean), UnaryExpression(Keyword('local'), Group, Boolean), UnaryExpression(Keyword('localize'), String, String), UnaryExpression(Keyword('locationposition'), Location, Array), UnaryExpression(Keyword('locked'), Object, Number), UnaryExpression(Keyword('lockeddriver'), Object, Boolean), UnaryExpression(Keyword('lockidentity'), Object, Boolean), UnaryExpression(Keyword('log'), Number, Number), UnaryExpression(Keyword('lognetwork'), Array, Number), UnaryExpression(Keyword('lognetworkterminate'), Number, Nothing), UnaryExpression(Keyword('magazinecargo'), Object, Array), UnaryExpression(Keyword('magazines'), Object, Array), UnaryExpression(Keyword('magazinesallturrets'), Object, Array), UnaryExpression(Keyword('magazinesammo'), Object, Array), UnaryExpression(Keyword('magazinesammocargo'), Object, Array), UnaryExpression(Keyword('magazinesammofull'), Object, Array), UnaryExpression(Keyword('magazinesdetail'), Object, Array), UnaryExpression(Keyword('magazinesdetailbackpack'), Object, Array), UnaryExpression(Keyword('magazinesdetailuniform'), Object, Array), UnaryExpression(Keyword('magazinesdetailvest'), Object, Array), UnaryExpression(Keyword('mapanimadd'), Array, Nothing), UnaryExpression(Keyword('mapcenteroncamera'), Control, Array), UnaryExpression(Keyword('mapgridposition'), Object, String), UnaryExpression(Keyword('mapgridposition'), Array, String), UnaryExpression(Keyword('markeralpha'), String, Number), UnaryExpression(Keyword('markerbrush'), String, String), UnaryExpression(Keyword('markercolor'), String, String), UnaryExpression(Keyword('markerdir'), String, Number), UnaryExpression(Keyword('markerpos'), String, Array), UnaryExpression(Keyword('markershape'), String, String), UnaryExpression(Keyword('markersize'), String, Array), UnaryExpression(Keyword('markertext'), String, String), UnaryExpression(Keyword('markertype'), String, String), UnaryExpression(Keyword('members'), TeamMember, 
Array), UnaryExpression(Keyword('menuaction'), Array, String), UnaryExpression(Keyword('menuadd'), Array, Number), UnaryExpression(Keyword('menuchecked'), Array, Boolean), UnaryExpression(Keyword('menuclear'), Control, Nothing), UnaryExpression(Keyword('menuclear'), Number, Nothing), UnaryExpression(Keyword('menucollapse'), Array, Nothing), UnaryExpression(Keyword('menudata'), Array, String), UnaryExpression(Keyword('menudelete'), Array, Nothing), UnaryExpression(Keyword('menuenable'), Array, Nothing), UnaryExpression(Keyword('menuenabled'), Array, Boolean), UnaryExpression(Keyword('menuexpand'), Array, Nothing), UnaryExpression(Keyword('menuhover'), Control, Array), UnaryExpression(Keyword('menuhover'), Number, Array), UnaryExpression(Keyword('menupicture'), Array, String), UnaryExpression(Keyword('menusetaction'), Array, Nothing), UnaryExpression(Keyword('menusetcheck'), Array, Nothing), UnaryExpression(Keyword('menusetdata'), Array, Nothing), UnaryExpression(Keyword('menusetpicture'), Array, Nothing), UnaryExpression(Keyword('menusetvalue'), Array, Nothing), UnaryExpression(Keyword('menushortcut'), Array, Number), UnaryExpression(Keyword('menushortcuttext'), Array, String), UnaryExpression(Keyword('menusize'), Array, Number), UnaryExpression(Keyword('menusort'), Array, Nothing), UnaryExpression(Keyword('menutext'), Array, String), UnaryExpression(Keyword('menuurl'), Array, String), UnaryExpression(Keyword('menuvalue'), Array, Number), UnaryExpression(Keyword('mineactive'), Object, Boolean), UnaryExpression(Keyword('modparams'), Array, Array), UnaryExpression(Keyword('moonphase'), Array, Number), UnaryExpression(Keyword('morale'), Object, Number), UnaryExpression(Keyword('move3dencamera'), Array, Nothing), UnaryExpression(Keyword('moveout'), Object, Nothing), UnaryExpression(Keyword('movetime'), Object, Number), UnaryExpression(Keyword('movetocompleted'), Object, Boolean), UnaryExpression(Keyword('movetofailed'), Object, Boolean), UnaryExpression(Keyword('name'), Object, String), UnaryExpression(Keyword('name'), Location, String), UnaryExpression(Keyword('namesound'), Object, String), UnaryExpression(Keyword('nearestbuilding'), Object, Object), UnaryExpression(Keyword('nearestbuilding'), Array, Object), UnaryExpression(Keyword('nearestlocation'), Array, Location), UnaryExpression(Keyword('nearestlocations'), Array, Array), UnaryExpression(Keyword('nearestlocationwithdubbing'), Array, Location), UnaryExpression(Keyword('nearestobject'), Array, Object), UnaryExpression(Keyword('nearestobjects'), Array, Array), UnaryExpression(Keyword('nearestterrainobjects'), Array, Array), UnaryExpression(Keyword('needreload'), Object, Number), UnaryExpression(Keyword('netid'), Object, String), UnaryExpression(Keyword('netid'), Group, String), UnaryExpression(Keyword('nextmenuitemindex'), Control, Number), UnaryExpression(Keyword('not'), Boolean, Boolean), UnaryExpression(Keyword('numberofenginesrtd'), Object, Number), UnaryExpression(Keyword('numbertodate'), Array, Array), UnaryExpression(Keyword('objectcurators'), Object, Array), UnaryExpression(Keyword('objectfromnetid'), String, Object), UnaryExpression(Keyword('objectparent'), Object, Object), UnaryExpression(Keyword('onbriefinggroup'), String, Nothing), UnaryExpression(Keyword('onbriefingnotes'), String, Nothing), UnaryExpression(Keyword('onbriefingplan'), String, Nothing), UnaryExpression(Keyword('onbriefingteamswitch'), String, Nothing), UnaryExpression(Keyword('oncommandmodechanged'), Code, Nothing), 
UnaryExpression(Keyword('oncommandmodechanged'), String, Nothing), UnaryExpression(Keyword('oneachframe'), Code, Nothing), UnaryExpression(Keyword('oneachframe'), String, Nothing), UnaryExpression(Keyword('ongroupiconclick'), Code, Nothing), UnaryExpression(Keyword('ongroupiconclick'), String, Nothing), UnaryExpression(Keyword('ongroupiconoverenter'), Code, Nothing), UnaryExpression(Keyword('ongroupiconoverenter'), String, Nothing), UnaryExpression(Keyword('ongroupiconoverleave'), Code, Nothing), UnaryExpression(Keyword('ongroupiconoverleave'), String, Nothing), UnaryExpression(Keyword('onhcgroupselectionchanged'), Code, Nothing), UnaryExpression(Keyword('onhcgroupselectionchanged'), String, Nothing), UnaryExpression(Keyword('onmapsingleclick'), Code, Nothing), UnaryExpression(Keyword('onmapsingleclick'), String, Nothing), UnaryExpression(Keyword('onplayerconnected'), Code, Nothing), UnaryExpression(Keyword('onplayerconnected'), String, Nothing), UnaryExpression(Keyword('onplayerdisconnected'), Code, Nothing), UnaryExpression(Keyword('onplayerdisconnected'), String, Nothing), UnaryExpression(Keyword('onpreloadfinished'), Code, Nothing), UnaryExpression(Keyword('onpreloadfinished'), String, Nothing), UnaryExpression(Keyword('onpreloadstarted'), Code, Nothing), UnaryExpression(Keyword('onpreloadstarted'), String, Nothing), UnaryExpression(Keyword('onteamswitch'), Code, Nothing), UnaryExpression(Keyword('onteamswitch'), String, Nothing), UnaryExpression(Keyword('opendlcpage'), Number, Boolean), UnaryExpression(Keyword('openmap'), Array, Boolean), UnaryExpression(Keyword('openmap'), Boolean, Boolean), UnaryExpression(Keyword('opensteamapp'), Number, Boolean), UnaryExpression(Keyword('openyoutubevideo'), String, Boolean), UnaryExpression(Keyword('owner'), Object, Number), UnaryExpression(Keyword('param'), Array, Anything), UnaryExpression(Keyword('params'), Array, Boolean), UnaryExpression(Keyword('parsenumber'), String, Number), UnaryExpression(Keyword('parsenumber'), Boolean, Number), UnaryExpression(Keyword('parsesimplearray'), String, Array), UnaryExpression(Keyword('parsetext'), String, String), UnaryExpression(Keyword('pickweaponpool'), Object, Nothing), UnaryExpression(Keyword('pitch'), Object, String), UnaryExpression(Keyword('playableslotsnumber'), Side, Number), UnaryExpression(Keyword('playersnumber'), Side, Number), UnaryExpression(Keyword('playmission'), Array, Nothing), UnaryExpression(Keyword('playmusic'), String, Nothing), UnaryExpression(Keyword('playmusic'), Array, Nothing), UnaryExpression(Keyword('playscriptedmission'), Array, Nothing), UnaryExpression(Keyword('playsound'), String, Nothing), UnaryExpression(Keyword('playsound'), Array, Nothing), UnaryExpression(Keyword('playsound3d'), Array, Nothing), UnaryExpression(Keyword('position'), Object, Array), UnaryExpression(Keyword('position'), Location, Array), UnaryExpression(Keyword('positioncameratoworld'), Array, Array), UnaryExpression(Keyword('ppeffectcommitted'), String, Boolean), UnaryExpression(Keyword('ppeffectcommitted'), Number, Boolean), UnaryExpression(Keyword('ppeffectcreate'), Array, Anything), UnaryExpression(Keyword('ppeffectdestroy'), Number, Nothing), UnaryExpression(Keyword('ppeffectdestroy'), Array, Nothing), UnaryExpression(Keyword('ppeffectenabled'), Number, Boolean), UnaryExpression(Keyword('precision'), Object, Number), UnaryExpression(Keyword('preloadcamera'), Array, Boolean), UnaryExpression(Keyword('preloadsound'), String, Boolean), UnaryExpression(Keyword('preloadtitleobj'), Array, Boolean), 
UnaryExpression(Keyword('preloadtitlersc'), Array, Boolean), UnaryExpression(Keyword('preprocessfile'), String, String), UnaryExpression(Keyword('preprocessfilelinenumbers'), String, String), UnaryExpression(Keyword('primaryweapon'), Object, String), UnaryExpression(Keyword('primaryweaponitems'), Object, Array), UnaryExpression(Keyword('primaryweaponmagazine'), Object, Array), UnaryExpression(Keyword('priority'), Task, Number), UnaryExpression(Keyword('private'), String, Nothing), UnaryExpression(Keyword('private'), Array, Nothing), UnaryExpression(Keyword('processdiarylink'), String, Nothing), UnaryExpression(Keyword('progressloadingscreen'), Number, Nothing), UnaryExpression(Keyword('progressposition'), Control, Number), UnaryExpression(Keyword('publicvariable'), String, Nothing), UnaryExpression(Keyword('publicvariableserver'), String, Nothing), UnaryExpression(Keyword('putweaponpool'), Object, Nothing), UnaryExpression(Keyword('queryitemspool'), String, Number), UnaryExpression(Keyword('querymagazinepool'), String, Number), UnaryExpression(Keyword('queryweaponpool'), String, Number), UnaryExpression(Keyword('rad'), Number, Number), UnaryExpression(Keyword('radiochannelcreate'), Array, Number), UnaryExpression(Keyword('random'), Array, Number), UnaryExpression(Keyword('random'), Number, Number), UnaryExpression(Keyword('rank'), Object, String), UnaryExpression(Keyword('rankid'), Object, Number), UnaryExpression(Keyword('rating'), Object, Number), UnaryExpression(Keyword('rectangular'), Location, Boolean), UnaryExpression(Keyword('registeredtasks'), TeamMember, Array), UnaryExpression(Keyword('reload'), Object, Nothing), UnaryExpression(Keyword('reloadenabled'), Object, Boolean), UnaryExpression(Keyword('remoteexec'), Array, Anything), UnaryExpression(Keyword('remoteexeccall'), Array, Anything), UnaryExpression(Keyword('remove3denconnection'), Array, Nothing), UnaryExpression(Keyword('remove3deneventhandler'), Array, Nothing), UnaryExpression(Keyword('remove3denlayer'), Number, Boolean), UnaryExpression(Keyword('removeall3deneventhandlers'), String, Nothing), UnaryExpression(Keyword('removeallactions'), Object, Nothing), UnaryExpression(Keyword('removeallassigneditems'), Object, Nothing), UnaryExpression(Keyword('removeallcontainers'), Object, Nothing), UnaryExpression(Keyword('removeallcuratoraddons'), Object, Nothing), UnaryExpression(Keyword('removeallcuratorcameraareas'), Object, Nothing), UnaryExpression(Keyword('removeallcuratoreditingareas'), Object, Nothing), UnaryExpression(Keyword('removeallhandgunitems'), Object, Nothing), UnaryExpression(Keyword('removeallitems'), Object, Nothing), UnaryExpression(Keyword('removeallitemswithmagazines'), Object, Nothing), UnaryExpression(Keyword('removeallmissioneventhandlers'), String, Nothing), UnaryExpression(Keyword('removeallmusiceventhandlers'), String, Nothing), UnaryExpression(Keyword('removeallownedmines'), Object, Nothing), UnaryExpression(Keyword('removeallprimaryweaponitems'), Object, Nothing), UnaryExpression(Keyword('removeallweapons'), Object, Nothing), UnaryExpression(Keyword('removebackpack'), Object, Nothing), UnaryExpression(Keyword('removebackpackglobal'), Object, Nothing), UnaryExpression(Keyword('removefromremainscollector'), Array, Nothing), UnaryExpression(Keyword('removegoggles'), Object, Nothing), UnaryExpression(Keyword('removeheadgear'), Object, Nothing), UnaryExpression(Keyword('removemissioneventhandler'), Array, Nothing), UnaryExpression(Keyword('removemusiceventhandler'), Array, Nothing), 
UnaryExpression(Keyword('removeswitchableunit'), Object, Nothing), UnaryExpression(Keyword('removeuniform'), Object, Nothing), UnaryExpression(Keyword('removevest'), Object, Nothing), UnaryExpression(Keyword('requiredversion'), String, Boolean), UnaryExpression(Keyword('resetsubgroupdirection'), Object, Nothing), UnaryExpression(Keyword('resources'), TeamMember, Array), UnaryExpression(Keyword('restarteditorcamera'), Control, Nothing), UnaryExpression(Keyword('reverse'), Array, Nothing), UnaryExpression(Keyword('roadat'), Object, Object), UnaryExpression(Keyword('roadat'), Array, Object), UnaryExpression(Keyword('roadsconnectedto'), Object, Array), UnaryExpression(Keyword('roledescription'), Object, String), UnaryExpression(Keyword('ropeattachedobjects'), Object, Array), UnaryExpression(Keyword('ropeattachedto'), Object, Object), UnaryExpression(Keyword('ropeattachenabled'), Object, Boolean), UnaryExpression(Keyword('ropecreate'), Array, Object), UnaryExpression(Keyword('ropecut'), Array, Nothing), UnaryExpression(Keyword('ropedestroy'), Object, Nothing), UnaryExpression(Keyword('ropeendposition'), Object, Array), UnaryExpression(Keyword('ropelength'), Object, Number), UnaryExpression(Keyword('ropes'), Object, Array), UnaryExpression(Keyword('ropeunwind'), Array, Nothing), UnaryExpression(Keyword('ropeunwound'), Object, Boolean), UnaryExpression(Keyword('rotorsforcesrtd'), Object, Array), UnaryExpression(Keyword('rotorsrpmrtd'), Object, Array), UnaryExpression(Keyword('round'), Number, Number), UnaryExpression(Keyword('save3deninventory'), Array, Nothing), UnaryExpression(Keyword('saveoverlay'), Control, Nothing), UnaryExpression(Keyword('savevar'), String, Nothing), UnaryExpression(Keyword('scopename'), String, Nothing), UnaryExpression(Keyword('score'), Object, Number), UnaryExpression(Keyword('scoreside'), Side, Number), UnaryExpression(Keyword('screenshot'), String, Boolean), UnaryExpression(Keyword('screentoworld'), Array, Array), UnaryExpression(Keyword('scriptdone'), Script, Boolean), UnaryExpression(Keyword('scriptname'), String, Nothing), UnaryExpression(Keyword('scudstate'), Object, Number), UnaryExpression(Keyword('secondaryweapon'), Object, String), UnaryExpression(Keyword('secondaryweaponitems'), Object, Array), UnaryExpression(Keyword('secondaryweaponmagazine'), Object, Array), UnaryExpression(Keyword('selectbestplaces'), Array, Array), UnaryExpression(Keyword('selectededitorobjects'), Control, Nothing), UnaryExpression(Keyword('selectionnames'), Object, Array), UnaryExpression(Keyword('selectmax'), Array, Anything), UnaryExpression(Keyword('selectmin'), Array, Anything), UnaryExpression(Keyword('selectplayer'), Object, Nothing), UnaryExpression(Keyword('selectrandom'), Array, Anything), UnaryExpression(Keyword('selectrandomweighted'), Array, Anything), UnaryExpression(Keyword('sendaumessage'), Array, Nothing), UnaryExpression(Keyword('sendudpmessage'), Array, Boolean), UnaryExpression(Keyword('servercommand'), String, Boolean), UnaryExpression(Keyword('servercommandavailable'), String, Boolean), UnaryExpression(Keyword('servercommandexecutable'), String, Boolean), UnaryExpression(Keyword('set3denattributes'), Array, Boolean), UnaryExpression(Keyword('set3dengrid'), Array, Nothing), UnaryExpression(Keyword('set3deniconsvisible'), Array, Nothing), UnaryExpression(Keyword('set3denlinesvisible'), Array, Nothing), UnaryExpression(Keyword('set3denmissionattributes'), Array, Nothing), UnaryExpression(Keyword('set3denmodelsvisible'), Array, Nothing), 
UnaryExpression(Keyword('set3denselected'), Array, Nothing), UnaryExpression(Keyword('setacctime'), Number, Nothing), UnaryExpression(Keyword('setaperture'), Number, Nothing), UnaryExpression(Keyword('setaperturenew'), Array, Nothing), UnaryExpression(Keyword('setarmorypoints'), Number, Nothing), UnaryExpression(Keyword('setcamshakedefparams'), Array, Nothing), UnaryExpression(Keyword('setcamshakeparams'), Array, Nothing), UnaryExpression(Keyword('setcompassoscillation'), Array, Nothing), UnaryExpression(Keyword('setcurrentchannel'), Number, Boolean), UnaryExpression(Keyword('setcustommissiondata'), Array, Nothing), UnaryExpression(Keyword('setcustomsoundcontroller'), Array, Boolean), UnaryExpression(Keyword('setdate'), Array, Nothing), UnaryExpression(Keyword('setdefaultcamera'), Array, Nothing), UnaryExpression(Keyword('setdetailmapblendpars'), Array, Nothing), UnaryExpression(Keyword('setgroupiconsselectable'), Boolean, Nothing), UnaryExpression(Keyword('setgroupiconsvisible'), Array, Nothing), UnaryExpression(Keyword('sethorizonparallaxcoef'), Number, Nothing), UnaryExpression(Keyword('sethudmovementlevels'), Array, Nothing), UnaryExpression(Keyword('setinfopanel'), Array, Boolean), UnaryExpression(Keyword('setlocalwindparams'), Array, Nothing), UnaryExpression(Keyword('setmouseposition'), Array, Nothing), UnaryExpression(Keyword('setmusiceventhandler'), Array, Nothing), UnaryExpression(Keyword('setobjectviewdistance'), Number, Nothing), UnaryExpression(Keyword('setobjectviewdistance'), Array, Nothing), UnaryExpression(Keyword('setplayable'), Object, Nothing), UnaryExpression(Keyword('setplayerrespawntime'), Number, Nothing), UnaryExpression(Keyword('setshadowdistance'), Number, Nothing), UnaryExpression(Keyword('setsimulweatherlayers'), Number, Nothing), UnaryExpression(Keyword('setstaminascheme'), String, Nothing), UnaryExpression(Keyword('setstatvalue'), Array, Boolean), UnaryExpression(Keyword('setsystemofunits'), Number, Nothing), UnaryExpression(Keyword('setterraingrid'), Number, Nothing), UnaryExpression(Keyword('settimemultiplier'), Number, Nothing), UnaryExpression(Keyword('settrafficdensity'), Array, Nothing), UnaryExpression(Keyword('settrafficdistance'), Number, Nothing), UnaryExpression(Keyword('settrafficgap'), Array, Nothing), UnaryExpression(Keyword('settrafficspeed'), Array, Nothing), UnaryExpression(Keyword('setviewdistance'), Number, Nothing), UnaryExpression(Keyword('setwind'), Array, Nothing), UnaryExpression(Keyword('setwinddir'), Array, Nothing), UnaryExpression(Keyword('showchat'), Boolean, Nothing), UnaryExpression(Keyword('showcinemaborder'), Boolean, Nothing), UnaryExpression(Keyword('showcommandingmenu'), String, Nothing), UnaryExpression(Keyword('showcompass'), Boolean, Nothing), UnaryExpression(Keyword('showcuratorcompass'), Boolean, Nothing), UnaryExpression(Keyword('showgps'), Boolean, Nothing), UnaryExpression(Keyword('showhud'), Boolean, Nothing), UnaryExpression(Keyword('showhud'), Array, Nothing), UnaryExpression(Keyword('showmap'), Boolean, Nothing), UnaryExpression(Keyword('showpad'), Boolean, Nothing), UnaryExpression(Keyword('showradio'), Boolean, Nothing), UnaryExpression(Keyword('showscoretable'), Number, Nothing), UnaryExpression(Keyword('showsubtitles'), Boolean, Boolean), UnaryExpression(Keyword('showuavfeed'), Boolean, Nothing), UnaryExpression(Keyword('showwarrant'), Boolean, Nothing), UnaryExpression(Keyword('showwatch'), Boolean, Nothing), UnaryExpression(Keyword('showwaypoints'), Boolean, Nothing), UnaryExpression(Keyword('side'), 
Object, Side), UnaryExpression(Keyword('side'), Group, Side), UnaryExpression(Keyword('side'), Location, Side), UnaryExpression(Keyword('simpletasks'), Object, Array), UnaryExpression(Keyword('simulationenabled'), Object, Boolean), UnaryExpression(Keyword('simulclouddensity'), Array, Number), UnaryExpression(Keyword('simulcloudocclusion'), Array, Number), UnaryExpression(Keyword('simulinclouds'), Array, Boolean), UnaryExpression(Keyword('sin'), Number, Number), UnaryExpression(Keyword('size'), Location, Array), UnaryExpression(Keyword('sizeof'), String, Number), UnaryExpression(Keyword('skill'), Object, Number), UnaryExpression(Keyword('skiptime'), Number, Nothing), UnaryExpression(Keyword('sleep'), Number, Nothing), UnaryExpression(Keyword('sliderposition'), Control, Number), UnaryExpression(Keyword('sliderposition'), Number, Number), UnaryExpression(Keyword('sliderrange'), Control, Array), UnaryExpression(Keyword('sliderrange'), Number, Array), UnaryExpression(Keyword('slidersetposition'), Array, Nothing), UnaryExpression(Keyword('slidersetrange'), Array, Nothing), UnaryExpression(Keyword('slidersetspeed'), Array, Nothing), UnaryExpression(Keyword('sliderspeed'), Control, Array), UnaryExpression(Keyword('sliderspeed'), Number, Array), UnaryExpression(Keyword('soldiermagazines'), Object, Array), UnaryExpression(Keyword('someammo'), Object, Boolean), UnaryExpression(Keyword('speaker'), Object, String), UnaryExpression(Keyword('speed'), Object, Number), UnaryExpression(Keyword('speedmode'), Object, String), UnaryExpression(Keyword('speedmode'), Group, String), UnaryExpression(Keyword('sqrt'), Number, Number), UnaryExpression(Keyword('squadparams'), Object, Array), UnaryExpression(Keyword('stance'), Object, String), UnaryExpression(Keyword('startloadingscreen'), Array, Nothing), UnaryExpression(Keyword('stopenginertd'), Object, Nothing), UnaryExpression(Keyword('stopped'), Object, Boolean), UnaryExpression(Keyword('str'), Type, String), UnaryExpression(Keyword('supportinfo'), String, Array), UnaryExpression(Keyword('surfaceiswater'), Array, Boolean), UnaryExpression(Keyword('surfacenormal'), Array, Array), UnaryExpression(Keyword('surfacetype'), Array, String), UnaryExpression(Keyword('switch'), Type, SwitchType), UnaryExpression(Keyword('switchcamera'), Object, Nothing), UnaryExpression(Keyword('synchronizedobjects'), Object, Array), UnaryExpression(Keyword('synchronizedtriggers'), Array, Array), UnaryExpression(Keyword('synchronizedwaypoints'), Object, Array), UnaryExpression(Keyword('synchronizedwaypoints'), Array, Array), UnaryExpression(Keyword('systemchat'), String, Nothing), UnaryExpression(Keyword('tan'), Number, Number), UnaryExpression(Keyword('taskalwaysvisible'), Task, Boolean), UnaryExpression(Keyword('taskchildren'), Task, Array), UnaryExpression(Keyword('taskcompleted'), Task, Boolean), UnaryExpression(Keyword('taskcustomdata'), Task, Array), UnaryExpression(Keyword('taskdescription'), Task, Array), UnaryExpression(Keyword('taskdestination'), Task, Array), UnaryExpression(Keyword('taskhint'), Array, Nothing), UnaryExpression(Keyword('taskmarkeroffset'), Object, Array), UnaryExpression(Keyword('taskparent'), Task, Task), UnaryExpression(Keyword('taskresult'), Task, Array), UnaryExpression(Keyword('taskstate'), Task, String), UnaryExpression(Keyword('tasktype'), Task, String), UnaryExpression(Keyword('teammember'), Object, TeamMember), UnaryExpression(Keyword('teamname'), TeamMember, String), UnaryExpression(Keyword('teamtype'), TeamMember, String), 
UnaryExpression(Keyword('terminate'), Script, Nothing), UnaryExpression(Keyword('terrainintersect'), Array, Boolean), UnaryExpression(Keyword('terrainintersectasl'), Array, Boolean), UnaryExpression(Keyword('terrainintersectatasl'), Array, Array), UnaryExpression(Keyword('text'), String, String), UnaryExpression(Keyword('text'), Location, String), UnaryExpression(Keyword('textlog'), Type, Nothing), UnaryExpression(Keyword('textlogformat'), Array, Nothing), UnaryExpression(Keyword('tg'), Number, Number), UnaryExpression(Keyword('throw'), Type, Nothing), UnaryExpression(Keyword('titlecut'), Array, Nothing), UnaryExpression(Keyword('titlefadeout'), Number, Nothing), UnaryExpression(Keyword('titleobj'), Array, Nothing), UnaryExpression(Keyword('titlersc'), Array, Nothing), UnaryExpression(Keyword('titletext'), Array, Nothing), UnaryExpression(Keyword('toarray'), String, Array), UnaryExpression(Keyword('tofixed'), Number, Nothing), UnaryExpression(Keyword('tolower'), String, String), UnaryExpression(Keyword('tostring'), Array, String), UnaryExpression(Keyword('toupper'), String, String), UnaryExpression(Keyword('triggeractivated'), Object, Boolean), UnaryExpression(Keyword('triggeractivation'), Object, Array), UnaryExpression(Keyword('triggerarea'), Object, Array), UnaryExpression(Keyword('triggerattachedvehicle'), Object, Object), UnaryExpression(Keyword('triggerstatements'), Object, Array), UnaryExpression(Keyword('triggertext'), Object, String), UnaryExpression(Keyword('triggertimeout'), Object, Array), UnaryExpression(Keyword('triggertimeoutcurrent'), Object, Number), UnaryExpression(Keyword('triggertype'), Object, String), UnaryExpression(Keyword('try'), Code, TryType), UnaryExpression(Keyword('tvadd'), Array, Number), UnaryExpression(Keyword('tvclear'), Number, Nothing), UnaryExpression(Keyword('tvclear'), Control, Nothing), UnaryExpression(Keyword('tvcollapse'), Array, Nothing), UnaryExpression(Keyword('tvcollapseall'), Number, Nothing), UnaryExpression(Keyword('tvcollapseall'), Control, Nothing), UnaryExpression(Keyword('tvcount'), Array, Number), UnaryExpression(Keyword('tvcursel'), Number, Array), UnaryExpression(Keyword('tvcursel'), Control, Array), UnaryExpression(Keyword('tvdata'), Array, String), UnaryExpression(Keyword('tvdelete'), Array, Nothing), UnaryExpression(Keyword('tvexpand'), Array, Nothing), UnaryExpression(Keyword('tvexpandall'), Number, Nothing), UnaryExpression(Keyword('tvexpandall'), Control, Nothing), UnaryExpression(Keyword('tvpicture'), Array, String), UnaryExpression(Keyword('tvpictureright'), Array, String), UnaryExpression(Keyword('tvsetcursel'), Array, Nothing), UnaryExpression(Keyword('tvsetdata'), Array, Nothing), UnaryExpression(Keyword('tvsetpicture'), Array, Nothing), UnaryExpression(Keyword('tvsetpicturecolor'), Array, Nothing), UnaryExpression(Keyword('tvsetpictureright'), Array, Nothing), UnaryExpression(Keyword('tvsetpicturerightcolor'), Array, Nothing), UnaryExpression(Keyword('tvsettext'), Array, String), UnaryExpression(Keyword('tvsettooltip'), Array, Nothing), UnaryExpression(Keyword('tvsetvalue'), Array, Nothing), UnaryExpression(Keyword('tvsort'), Array, Nothing), UnaryExpression(Keyword('tvsortbyvalue'), Array, Nothing), UnaryExpression(Keyword('tvtext'), Array, String), UnaryExpression(Keyword('tvtooltip'), Array, String), UnaryExpression(Keyword('tvvalue'), Array, Number), UnaryExpression(Keyword('type'), Task, String), UnaryExpression(Keyword('type'), Location, String), UnaryExpression(Keyword('typename'), Type, String), 
UnaryExpression(Keyword('typeof'), Object, String), UnaryExpression(Keyword('uavcontrol'), Object, Array), UnaryExpression(Keyword('uisleep'), Number, Nothing), UnaryExpression(Keyword('unassigncurator'), Object, Nothing), UnaryExpression(Keyword('unassignteam'), Object, Nothing), UnaryExpression(Keyword('unassignvehicle'), Object, Nothing), UnaryExpression(Keyword('underwater'), Object, Boolean), UnaryExpression(Keyword('uniform'), Object, String), UnaryExpression(Keyword('uniformcontainer'), Object, Object), UnaryExpression(Keyword('uniformitems'), Object, Array), UnaryExpression(Keyword('uniformmagazines'), Object, Array), UnaryExpression(Keyword('unitaddons'), String, Array), UnaryExpression(Keyword('unitaimposition'), Object, Array), UnaryExpression(Keyword('unitaimpositionvisual'), Object, Array), UnaryExpression(Keyword('unitbackpack'), Object, Object), UnaryExpression(Keyword('unitisuav'), Object, Boolean), UnaryExpression(Keyword('unitpos'), Object, String), UnaryExpression(Keyword('unitready'), Object, Boolean), UnaryExpression(Keyword('unitready'), Array, Boolean), UnaryExpression(Keyword('unitrecoilcoefficient'), Object, Number), UnaryExpression(Keyword('units'), Group, Array), UnaryExpression(Keyword('units'), Object, Array), UnaryExpression(Keyword('unlockachievement'), String, Boolean), UnaryExpression(Keyword('updateobjecttree'), Control, Nothing), UnaryExpression(Keyword('useaiopermapobstructiontest'), Boolean, Nothing), UnaryExpression(Keyword('useaisteeringcomponent'), Boolean, Nothing), UnaryExpression(Keyword('vectordir'), Object, Array), UnaryExpression(Keyword('vectordirvisual'), Object, Array), UnaryExpression(Keyword('vectormagnitude'), Array, Number), UnaryExpression(Keyword('vectormagnitudesqr'), Array, Number), UnaryExpression(Keyword('vectornormalized'), Array, Array), UnaryExpression(Keyword('vectorup'), Object, Array), UnaryExpression(Keyword('vectorupvisual'), Object, Array), UnaryExpression(Keyword('vehicle'), Object, Object), UnaryExpression(Keyword('vehiclecargoenabled'), Object, Boolean), UnaryExpression(Keyword('vehiclereceiveremotetargets'), Object, Boolean), UnaryExpression(Keyword('vehiclereportownposition'), Object, Boolean), UnaryExpression(Keyword('vehiclereportremotetargets'), Object, Boolean), UnaryExpression(Keyword('vehiclevarname'), Object, String), UnaryExpression(Keyword('velocity'), Object, Array), UnaryExpression(Keyword('velocitymodelspace'), Object, Array), UnaryExpression(Keyword('verifysignature'), String, Boolean), UnaryExpression(Keyword('vest'), Object, String), UnaryExpression(Keyword('vestcontainer'), Object, Object), UnaryExpression(Keyword('vestitems'), Object, Array), UnaryExpression(Keyword('vestmagazines'), Object, Array), UnaryExpression(Keyword('visibleposition'), Object, Array), UnaryExpression(Keyword('visiblepositionasl'), Object, Array), UnaryExpression(Keyword('waituntil'), Code, Nothing), UnaryExpression(Keyword('waypointattachedobject'), Array, Object), UnaryExpression(Keyword('waypointattachedvehicle'), Array, Object), UnaryExpression(Keyword('waypointbehaviour'), Array, String), UnaryExpression(Keyword('waypointcombatmode'), Array, String), UnaryExpression(Keyword('waypointcompletionradius'), Array, Number), UnaryExpression(Keyword('waypointdescription'), Array, String), UnaryExpression(Keyword('waypointforcebehaviour'), Array, Boolean), UnaryExpression(Keyword('waypointformation'), Array, String), UnaryExpression(Keyword('waypointhouseposition'), Array, Number), UnaryExpression(Keyword('waypointloiterradius'), 
Array, Number), UnaryExpression(Keyword('waypointloitertype'), Array, String), UnaryExpression(Keyword('waypointname'), Array, String), UnaryExpression(Keyword('waypointposition'), Array, Array), UnaryExpression(Keyword('waypoints'), Object, Array), UnaryExpression(Keyword('waypoints'), Group, Array), UnaryExpression(Keyword('waypointscript'), Array, String), UnaryExpression(Keyword('waypointsenableduav'), Object, Boolean), UnaryExpression(Keyword('waypointshow'), Array, String), UnaryExpression(Keyword('waypointspeed'), Array, String), UnaryExpression(Keyword('waypointstatements'), Array, Array), UnaryExpression(Keyword('waypointtimeout'), Array, Array), UnaryExpression(Keyword('waypointtimeoutcurrent'), Group, Number), UnaryExpression(Keyword('waypointtype'), Array, String), UnaryExpression(Keyword('waypointvisible'), Array, Number), UnaryExpression(Keyword('weaponcargo'), Object, Array), UnaryExpression(Keyword('weaponinertia'), Object, Array), UnaryExpression(Keyword('weaponlowered'), Object, Boolean), UnaryExpression(Keyword('weapons'), Object, Array), UnaryExpression(Keyword('weaponsitems'), Object, Array), UnaryExpression(Keyword('weaponsitemscargo'), Object, Array), UnaryExpression(Keyword('weaponstate'), Object, Array), UnaryExpression(Keyword('weaponstate'), Array, Array), UnaryExpression(Keyword('weightrtd'), Object, Array), UnaryExpression(Keyword('wfsidetext'), Side, String), UnaryExpression(Keyword('wfsidetext'), Object, String), UnaryExpression(Keyword('wfsidetext'), Group, String), UnaryExpression(Keyword('while'), Code, WhileType), UnaryExpression(Keyword('wingsforcesrtd'), Object, Array), UnaryExpression(Keyword('with'), Namespace, WithType), UnaryExpression(Keyword('worldtoscreen'), Array, Array), BinaryExpression(Object, Keyword('action'), Array, Nothing), BinaryExpression(Object, Keyword('actionparams'), Number, Array), BinaryExpression(Number, Keyword('add3denlayer'), String, Number), BinaryExpression(Object, Keyword('addaction'), Array, Number), BinaryExpression(Object, Keyword('addbackpack'), String, Nothing), BinaryExpression(Object, Keyword('addbackpackcargo'), Array, Nothing), BinaryExpression(Object, Keyword('addbackpackcargoglobal'), Array, Nothing), BinaryExpression(Object, Keyword('addbackpackglobal'), String, Nothing), BinaryExpression(Object, Keyword('addcuratoraddons'), Array, Nothing), BinaryExpression(Object, Keyword('addcuratorcameraarea'), Array, Nothing), BinaryExpression(Object, Keyword('addcuratoreditableobjects'), Array, Nothing), BinaryExpression(Object, Keyword('addcuratoreditingarea'), Array, Nothing), BinaryExpression(Object, Keyword('addcuratorpoints'), Number, Nothing), BinaryExpression(Control, Keyword('addeditorobject'), Array, String), BinaryExpression(Object, Keyword('addeventhandler'), Array, Number), BinaryExpression(Object, Keyword('addforce'), Array, Nothing), BinaryExpression(Object, Keyword('addgoggles'), String, Nothing), BinaryExpression(Group, Keyword('addgroupicon'), Array, Number), BinaryExpression(Object, Keyword('addhandgunitem'), String, Nothing), BinaryExpression(Object, Keyword('addheadgear'), String, Nothing), BinaryExpression(Object, Keyword('additem'), String, Nothing), BinaryExpression(Object, Keyword('additemcargo'), Array, Nothing), BinaryExpression(Object, Keyword('additemcargoglobal'), Array, Nothing), BinaryExpression(Object, Keyword('additemtobackpack'), String, Nothing), BinaryExpression(Object, Keyword('additemtouniform'), String, Nothing), BinaryExpression(Object, Keyword('additemtovest'), String, Nothing), 
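# BinaryExpression(left_type, keyword, right_type, return_type): signature of an infix SQF command, mapping the operand types on each side of the keyword to the type of the value it returns.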
BinaryExpression(Object, Keyword('addlivestats'), Number, Nothing), BinaryExpression(Object, Keyword('addmagazine'), String, Nothing), BinaryExpression(Object, Keyword('addmagazine'), Array, Nothing), BinaryExpression(Object, Keyword('addmagazineammocargo'), Array, Nothing), BinaryExpression(Object, Keyword('addmagazinecargo'), Array, Nothing), BinaryExpression(Object, Keyword('addmagazinecargoglobal'), Array, Nothing), BinaryExpression(Object, Keyword('addmagazineglobal'), String, Nothing), BinaryExpression(Object, Keyword('addmagazines'), Array, Nothing), BinaryExpression(Object, Keyword('addmagazineturret'), Array, Nothing), BinaryExpression(Control, Keyword('addmenu'), Array, Number), BinaryExpression(Control, Keyword('addmenuitem'), Array, Number), BinaryExpression(Object, Keyword('addmpeventhandler'), Array, Number), BinaryExpression(Object, Keyword('addownedmine'), Object, Nothing), BinaryExpression(Object, Keyword('addplayerscores'), Array, Nothing), BinaryExpression(Object, Keyword('addprimaryweaponitem'), String, Nothing), BinaryExpression(String, Keyword('addpublicvariableeventhandler'), Code, Nothing), BinaryExpression(String, Keyword('addpublicvariableeventhandler'), Array, Nothing), BinaryExpression(Object, Keyword('addrating'), Number, Nothing), BinaryExpression(TeamMember, Keyword('addresources'), Array, Nothing), BinaryExpression(Object, Keyword('addscore'), Number, Nothing), BinaryExpression(Side, Keyword('addscoreside'), Number, Nothing), BinaryExpression(Object, Keyword('addsecondaryweaponitem'), String, Nothing), BinaryExpression(TeamMember, Keyword('addteammember'), TeamMember, Nothing), BinaryExpression(Object, Keyword('addtorque'), Array, Nothing), BinaryExpression(Object, Keyword('adduniform'), String, Nothing), BinaryExpression(Group, Keyword('addvehicle'), Object, Nothing), BinaryExpression(Object, Keyword('addvest'), String, Nothing), BinaryExpression(Group, Keyword('addwaypoint'), Array, Array), BinaryExpression(Object, Keyword('addweapon'), String, Nothing), BinaryExpression(Object, Keyword('addweaponcargo'), Array, Nothing), BinaryExpression(Object, Keyword('addweaponcargoglobal'), Array, Nothing), BinaryExpression(Object, Keyword('addweaponglobal'), String, Nothing), BinaryExpression(Object, Keyword('addweaponitem'), Array, Nothing), BinaryExpression(Object, Keyword('addweaponturret'), Array, Nothing), BinaryExpression(Object, Keyword('aimedattarget'), Array, Number), BinaryExpression(Control, Keyword('allow3dmode'), Boolean, Nothing), BinaryExpression(Object, Keyword('allowcrewinimmobile'), Boolean, Nothing), BinaryExpression(Object, Keyword('allowcuratorlogicignoreareas'), Boolean, Nothing), BinaryExpression(Object, Keyword('allowdamage'), Boolean, Nothing), BinaryExpression(Object, Keyword('allowdammage'), Boolean, Nothing), BinaryExpression(Control, Keyword('allowfileoperations'), Boolean, Nothing), BinaryExpression(Object, Keyword('allowfleeing'), Number, Nothing), BinaryExpression(Group, Keyword('allowfleeing'), Number, Nothing), BinaryExpression(Array, Keyword('allowgetin'), Boolean, Nothing), BinaryExpression(Object, Keyword('allowsprint'), Boolean, Nothing), BinaryExpression(Object, Keyword('ammo'), String, Number), BinaryExpression(Object, Keyword('ammoonpylon'), String, Number), BinaryExpression(Object, Keyword('ammoonpylon'), Number, Number), BinaryExpression(Boolean, Keyword('and'), Boolean, Boolean), BinaryExpression(Boolean, Keyword('and'), Code, Boolean), BinaryExpression(Object, Keyword('animate'), Array, Nothing), BinaryExpression(Object, 
Keyword('animatebay'), Array, Nothing), BinaryExpression(Object, Keyword('animatedoor'), Array, Nothing), BinaryExpression(Object, Keyword('animatepylon'), Array, Nothing), BinaryExpression(Object, Keyword('animatesource'), Array, Nothing), BinaryExpression(Object, Keyword('animationphase'), String, Number), BinaryExpression(Object, Keyword('animationsourcephase'), String, Number), BinaryExpression(Array, Keyword('append'), Array, Nothing), BinaryExpression(Array, Keyword('apply'), Code, Array), BinaryExpression(Array, Keyword('arrayintersect'), Array, Array), BinaryExpression(Object, Keyword('assignascargo'), Object, Nothing), BinaryExpression(Object, Keyword('assignascargoindex'), Array, Nothing), BinaryExpression(Object, Keyword('assignascommander'), Object, Nothing), BinaryExpression(Object, Keyword('assignasdriver'), Object, Nothing), BinaryExpression(Object, Keyword('assignasgunner'), Object, Nothing), BinaryExpression(Object, Keyword('assignasturret'), Array, Nothing), BinaryExpression(Object, Keyword('assigncurator'), Object, Nothing), BinaryExpression(Object, Keyword('assignitem'), String, Nothing), BinaryExpression(Object, Keyword('assignteam'), String, Nothing), BinaryExpression(Object, Keyword('assigntoairport'), Object, Nothing), BinaryExpression(Object, Keyword('assigntoairport'), Number, Nothing), BinaryExpression(Number, Keyword('atan2'), Number, Number), BinaryExpression(Location, Keyword('attachobject'), Object, Nothing), BinaryExpression(Object, Keyword('attachto'), Array, Nothing), BinaryExpression(Object, Keyword('backpackspacefor'), String, Array), BinaryExpression(Type, Keyword('breakout'), String, Anything), BinaryExpression(Object, Keyword('buildingexit'), Number, Array), BinaryExpression(Object, Keyword('buildingpos'), Number, Array), BinaryExpression(Control, Keyword('buttonsetaction'), String, Nothing), BinaryExpression(Type, Keyword('call'), Code, Anything), BinaryExpression(String, Keyword('callextension'), String, String), BinaryExpression(String, Keyword('callextension'), Array, Array), BinaryExpression(Object, Keyword('camcommand'), String, Nothing), BinaryExpression(Object, Keyword('camcommit'), Number, Nothing), BinaryExpression(Object, Keyword('camcommitprepared'), Number, Nothing), BinaryExpression(Object, Keyword('camconstuctionsetparams'), Array, Nothing), BinaryExpression(String, Keyword('camcreate'), Array, Object), BinaryExpression(Object, Keyword('cameraeffect'), Array, Nothing), BinaryExpression(Object, Keyword('campreload'), Number, Nothing), BinaryExpression(Object, Keyword('campreparebank'), Number, Nothing), BinaryExpression(Object, Keyword('campreparedir'), Number, Nothing), BinaryExpression(Object, Keyword('campreparedive'), Number, Nothing), BinaryExpression(Object, Keyword('campreparefocus'), Array, Nothing), BinaryExpression(Object, Keyword('campreparefov'), Number, Nothing), BinaryExpression(Object, Keyword('campreparefovrange'), Array, Nothing), BinaryExpression(Object, Keyword('campreparepos'), Array, Nothing), BinaryExpression(Object, Keyword('campreparerelpos'), Array, Nothing), BinaryExpression(Object, Keyword('campreparetarget'), Object, Nothing), BinaryExpression(Object, Keyword('campreparetarget'), Array, Nothing), BinaryExpression(Object, Keyword('camsetbank'), Number, Nothing), BinaryExpression(Object, Keyword('camsetdir'), Array, Nothing), BinaryExpression(Object, Keyword('camsetdive'), Number, Nothing), BinaryExpression(Object, Keyword('camsetfocus'), Array, Nothing), BinaryExpression(Object, Keyword('camsetfov'), Number, 
Nothing), BinaryExpression(Object, Keyword('camsetfovrange'), Array, Nothing), BinaryExpression(Object, Keyword('camsetpos'), Array, Nothing), BinaryExpression(Object, Keyword('camsetrelpos'), Array, Nothing), BinaryExpression(Object, Keyword('camsettarget'), Object, Nothing), BinaryExpression(Object, Keyword('camsettarget'), Array, Nothing), BinaryExpression(Object, Keyword('canadd'), String, Boolean), BinaryExpression(Object, Keyword('canadd'), Array, Boolean), BinaryExpression(Object, Keyword('canadditemtobackpack'), String, Boolean), BinaryExpression(Object, Keyword('canadditemtobackpack'), Array, Boolean), BinaryExpression(Object, Keyword('canadditemtouniform'), String, Boolean), BinaryExpression(Object, Keyword('canadditemtouniform'), Array, Boolean), BinaryExpression(Object, Keyword('canadditemtovest'), String, Boolean), BinaryExpression(Object, Keyword('canadditemtovest'), Array, Boolean), BinaryExpression(Object, Keyword('canslingload'), Object, Boolean), BinaryExpression(Object, Keyword('canvehiclecargo'), Object, Array), BinaryExpression(TryType, Keyword('catch'), Code, Anything), BinaryExpression(Control, Keyword('cbsetchecked'), Boolean, Nothing), BinaryExpression(Array, Keyword('checkvisibility'), Array, Number), BinaryExpression(Type, Keyword('clear3denattribute'), String, Nothing), BinaryExpression(Display, Keyword('closedisplay'), Number, Nothing), BinaryExpression(Object, Keyword('commandartilleryfire'), Array, Nothing), BinaryExpression(Array, Keyword('commandartilleryfire'), Array, Nothing), BinaryExpression(Object, Keyword('commandchat'), String, Nothing), BinaryExpression(Array, Keyword('commandchat'), String, Nothing), BinaryExpression(Object, Keyword('commandfire'), Object, Nothing), BinaryExpression(Array, Keyword('commandfire'), Object, Nothing), BinaryExpression(Object, Keyword('commandfollow'), Object, Nothing), BinaryExpression(Array, Keyword('commandfollow'), Object, Nothing), BinaryExpression(Object, Keyword('commandfsm'), Array, Nothing), BinaryExpression(Array, Keyword('commandfsm'), Array, Nothing), BinaryExpression(Object, Keyword('commandmove'), Array, Nothing), BinaryExpression(Array, Keyword('commandmove'), Array, Nothing), BinaryExpression(Object, Keyword('commandradio'), String, Nothing), BinaryExpression(Array, Keyword('commandradio'), String, Nothing), BinaryExpression(Object, Keyword('commandsuppressivefire'), Object, Nothing), BinaryExpression(Object, Keyword('commandsuppressivefire'), Array, Nothing), BinaryExpression(Array, Keyword('commandsuppressivefire'), Object, Nothing), BinaryExpression(Array, Keyword('commandsuppressivefire'), Array, Nothing), BinaryExpression(Object, Keyword('commandtarget'), Object, Nothing), BinaryExpression(Array, Keyword('commandtarget'), Object, Nothing), BinaryExpression(Object, Keyword('commandwatch'), Array, Nothing), BinaryExpression(Array, Keyword('commandwatch'), Array, Nothing), BinaryExpression(Object, Keyword('commandwatch'), Object, Nothing), BinaryExpression(Array, Keyword('commandwatch'), Object, Nothing), BinaryExpression(String, Keyword('configclasses'), Config, Array), BinaryExpression(Object, Keyword('confirmsensortarget'), Array, Nothing), BinaryExpression(Object, Keyword('connectterminaltouav'), Object, Boolean), BinaryExpression(Control, Keyword('controlsgroupctrl'), Number, Control), BinaryExpression(Group, Keyword('copywaypoints'), Group, Nothing), BinaryExpression(Code, Keyword('count'), Array, Number), BinaryExpression(Object, Keyword('countenemy'), Array, Number), BinaryExpression(Object, 
Keyword('countfriendly'), Array, Number), BinaryExpression(Side, Keyword('countside'), Array, Number), BinaryExpression(String, Keyword('counttype'), Array, Number), BinaryExpression(Object, Keyword('countunknown'), Array, Number), BinaryExpression(Group, Keyword('create3denentity'), Array, Anything), BinaryExpression(Object, Keyword('creatediaryrecord'), Array, DiaryReport), BinaryExpression(Object, Keyword('creatediarysubject'), Array, Number), BinaryExpression(Display, Keyword('createdisplay'), String, Display), BinaryExpression(Control, Keyword('createmenu'), Number, Nothing), BinaryExpression(Display, Keyword('createmissiondisplay'), String, Display), BinaryExpression(Display, Keyword('createmissiondisplay'), Array, Display), BinaryExpression(Display, Keyword('creatempcampaigndisplay'), String, Nothing), BinaryExpression(Object, Keyword('createsimpletask'), Array, Task), BinaryExpression(String, Keyword('createsite'), Array, Object), BinaryExpression(TeamMember, Keyword('createtask'), Array, Task), BinaryExpression(String, Keyword('createunit'), Array, Nothing), BinaryExpression(Group, Keyword('createunit'), Array, Object), BinaryExpression(String, Keyword('createvehicle'), Array, Object), BinaryExpression(String, Keyword('createvehiclelocal'), Array, Object), BinaryExpression(Control, Keyword('ctdata'), Number, String), BinaryExpression(Control, Keyword('ctfindheaderrows'), Number, Array), BinaryExpression(Control, Keyword('ctfindrowheader'), Number, Number), BinaryExpression(Control, Keyword('ctheadercontrols'), Number, Array), BinaryExpression(Control, Keyword('ctremoveheaders'), Array, Nothing), BinaryExpression(Control, Keyword('ctremoverows'), Array, Nothing), BinaryExpression(Control, Keyword('ctrladdeventhandler'), Array, Number), BinaryExpression(Control, Keyword('ctrlanimatemodel'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlanimationphasemodel'), String, Number), BinaryExpression(Control, Keyword('ctrlchecked'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlcommit'), Number, Nothing), BinaryExpression(Display, Keyword('ctrlcreate'), Array, Control), BinaryExpression(Control, Keyword('ctrlenable'), Boolean, Nothing), BinaryExpression(Control, Keyword('ctrlmapanimadd'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlmapcursor'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlmapscreentoworld'), Array, Array), BinaryExpression(Control, Keyword('ctrlmapworldtoscreen'), Array, Array), BinaryExpression(Control, Keyword('ctrlremovealleventhandlers'), String, Nothing), BinaryExpression(Control, Keyword('ctrlremoveeventhandler'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsetactivecolor'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsetangle'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsetautoscrolldelay'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetautoscrollrewind'), Boolean, Nothing), BinaryExpression(Control, Keyword('ctrlsetautoscrollspeed'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetbackgroundcolor'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsetchecked'), Boolean, Nothing), BinaryExpression(Control, Keyword('ctrlsetchecked'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsetdisabledcolor'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlseteventhandler'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsetfade'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetfont'), String, Nothing), 
BinaryExpression(Control, Keyword('ctrlsetfonth1'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfonth1b'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfonth2'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfonth2b'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfonth3'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfonth3b'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfonth4'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfonth4b'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfonth5'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfonth5b'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfonth6'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfonth6b'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontheight'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontheighth1'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontheighth2'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontheighth3'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontheighth4'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontheighth5'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontheighth6'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontheightsecondary'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontp'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontp'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontpb'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetfontsecondary'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetforegroundcolor'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsetmodel'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetmodeldirandup'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsetmodelscale'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetpixelprecision'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetpixelprecision'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsetposition'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsetscale'), Number, Nothing), BinaryExpression(Control, Keyword('ctrlsetstructuredtext'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsettext'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsettextcolor'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsettextcolorsecondary'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsettextsecondary'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsettooltip'), String, Nothing), BinaryExpression(Control, Keyword('ctrlsettooltipcolorbox'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsettooltipcolorshade'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlsettooltipcolortext'), Array, Nothing), BinaryExpression(Control, Keyword('ctrlshow'), Boolean, Nothing), BinaryExpression(Control, Keyword('ctrowcontrols'), Number, Array), BinaryExpression(Control, Keyword('ctsetcursel'), Number, Nothing), BinaryExpression(Control, Keyword('ctsetdata'), Array, Nothing), BinaryExpression(Control, Keyword('ctsetheadertemplate'), Config, Nothing), BinaryExpression(Control, Keyword('ctsetrowtemplate'), Config, Nothing), BinaryExpression(Control, Keyword('ctsetvalue'), Array, Nothing), BinaryExpression(Control, Keyword('ctvalue'), 
Number, Number), BinaryExpression(Object, Keyword('curatorcoef'), String, Number), BinaryExpression(Object, Keyword('currentmagazinedetailturret'), Array, String), BinaryExpression(Object, Keyword('currentmagazineturret'), Array, String), BinaryExpression(Object, Keyword('currentweaponturret'), Array, String), BinaryExpression(Object, Keyword('customchat'), Array, Nothing), BinaryExpression(Object, Keyword('customradio'), Array, Nothing), BinaryExpression(String, Keyword('cutfadeout'), Number, Number), BinaryExpression(Number, Keyword('cutfadeout'), Number, Nothing), BinaryExpression(String, Keyword('cutobj'), Array, Number), BinaryExpression(Number, Keyword('cutobj'), Array, Nothing), BinaryExpression(String, Keyword('cutrsc'), Array, Number), BinaryExpression(Number, Keyword('cutrsc'), Array, Nothing), BinaryExpression(String, Keyword('cuttext'), Array, Number), BinaryExpression(Number, Keyword('cuttext'), Array, Nothing), BinaryExpression(Number, Keyword('debugfsm'), Boolean, Nothing), BinaryExpression(Array, Keyword('deleteat'), Number, Anything), BinaryExpression(Control, Keyword('deleteeditorobject'), String, Anything), BinaryExpression(Group, Keyword('deletegroupwhenempty'), Boolean, Nothing), BinaryExpression(Array, Keyword('deleterange'), Array, Nothing), BinaryExpression(TeamMember, Keyword('deleteresources'), Array, Nothing), BinaryExpression(Object, Keyword('deletevehiclecrew'), Object, Nothing), BinaryExpression(Object, Keyword('diarysubjectexists'), String, Boolean), BinaryExpression(Object, Keyword('directsay'), String, Nothing), BinaryExpression(Object, Keyword('disableai'), String, Nothing), BinaryExpression(Object, Keyword('disablecollisionwith'), Object, Nothing), BinaryExpression(Object, Keyword('disableconversation'), Boolean, Nothing), BinaryExpression(Object, Keyword('disablenvgequipment'), Boolean, Nothing), BinaryExpression(Object, Keyword('disabletiequipment'), Boolean, Nothing), BinaryExpression(Object, Keyword('disableuavconnectability'), Array, Nothing), BinaryExpression(Display, Keyword('displayaddeventhandler'), Array, Number), BinaryExpression(Display, Keyword('displayctrl'), Number, Control), BinaryExpression(Display, Keyword('displayremovealleventhandlers'), String, Nothing), BinaryExpression(Display, Keyword('displayremoveeventhandler'), Array, Nothing), BinaryExpression(Display, Keyword('displayseteventhandler'), Array, Nothing), BinaryExpression(Object, Keyword('distance'), Object, Number), BinaryExpression(Object, Keyword('distance'), Array, Number), BinaryExpression(Array, Keyword('distance'), Object, Number), BinaryExpression(Array, Keyword('distance'), Array, Number), BinaryExpression(Location, Keyword('distance'), Location, Number), BinaryExpression(Location, Keyword('distance'), Array, Number), BinaryExpression(Array, Keyword('distance'), Location, Number), BinaryExpression(Object, Keyword('distance2d'), Object, Number), BinaryExpression(Object, Keyword('distance2d'), Array, Number), BinaryExpression(Array, Keyword('distance2d'), Object, Number), BinaryExpression(Array, Keyword('distance2d'), Array, Number), BinaryExpression(Object, Keyword('distancesqr'), Object, Number), BinaryExpression(Object, Keyword('distancesqr'), Array, Number), BinaryExpression(Array, Keyword('distancesqr'), Object, Number), BinaryExpression(Array, Keyword('distancesqr'), Array, Number), BinaryExpression(Location, Keyword('distancesqr'), Location, Number), BinaryExpression(Location, Keyword('distancesqr'), Array, Number), BinaryExpression(Array, Keyword('distancesqr'), 
Location, Number), BinaryExpression(WhileType, Keyword('do'), Code, Nothing), BinaryExpression(WithType, Keyword('do'), Code, Nothing), BinaryExpression(ForType, Keyword('do'), Code, Anything), BinaryExpression(SwitchType, Keyword('do'), Code, Anything), BinaryExpression(Object, Keyword('doartilleryfire'), Array, Nothing), BinaryExpression(Array, Keyword('doartilleryfire'), Array, Nothing), BinaryExpression(Object, Keyword('dofire'), Object, Nothing), BinaryExpression(Array, Keyword('dofire'), Object, Nothing), BinaryExpression(Object, Keyword('dofollow'), Object, Nothing), BinaryExpression(Array, Keyword('dofollow'), Object, Nothing), BinaryExpression(Object, Keyword('dofsm'), Array, Nothing), BinaryExpression(Array, Keyword('dofsm'), Array, Nothing), BinaryExpression(Object, Keyword('domove'), Array, Nothing), BinaryExpression(Array, Keyword('domove'), Array, Nothing), BinaryExpression(Object, Keyword('doorphase'), String, Number), BinaryExpression(Object, Keyword('dosuppressivefire'), Object, Nothing), BinaryExpression(Object, Keyword('dosuppressivefire'), Array, Nothing), BinaryExpression(Array, Keyword('dosuppressivefire'), Object, Nothing), BinaryExpression(Array, Keyword('dosuppressivefire'), Array, Nothing), BinaryExpression(Object, Keyword('dotarget'), Object, Nothing), BinaryExpression(Array, Keyword('dotarget'), Object, Nothing), BinaryExpression(Object, Keyword('dowatch'), Array, Nothing), BinaryExpression(Array, Keyword('dowatch'), Array, Nothing), BinaryExpression(Object, Keyword('dowatch'), Object, Nothing), BinaryExpression(Array, Keyword('dowatch'), Object, Nothing), BinaryExpression(Control, Keyword('drawarrow'), Array, Nothing), BinaryExpression(Control, Keyword('drawellipse'), Array, Nothing), BinaryExpression(Control, Keyword('drawicon'), Array, Nothing), BinaryExpression(Control, Keyword('drawline'), Array, Nothing), BinaryExpression(Control, Keyword('drawlink'), Array, Nothing), BinaryExpression(Control, Keyword('drawlocation'), Location, Nothing), BinaryExpression(Control, Keyword('drawpolygon'), Array, Nothing), BinaryExpression(Control, Keyword('drawrectangle'), Array, Nothing), BinaryExpression(Control, Keyword('drawtriangle'), Array, Nothing), BinaryExpression(Control, Keyword('editobject'), String, Anything), BinaryExpression(Control, Keyword('editorseteventhandler'), Array, Nothing), BinaryExpression(Code, Keyword('else'), Code, Array), BinaryExpression(Object, Keyword('emptypositions'), String, Number), BinaryExpression(Object, Keyword('enableai'), String, Nothing), BinaryExpression(String, Keyword('enableaifeature'), Boolean, Nothing), BinaryExpression(Object, Keyword('enableaimprecision'), Boolean, Nothing), BinaryExpression(Object, Keyword('enableattack'), Boolean, Nothing), BinaryExpression(Group, Keyword('enableattack'), Boolean, Nothing), BinaryExpression(Object, Keyword('enableautostartuprtd'), Boolean, Nothing), BinaryExpression(Object, Keyword('enableautotrimrtd'), Boolean, Boolean), BinaryExpression(Number, Keyword('enablechannel'), Boolean, Nothing), BinaryExpression(Number, Keyword('enablechannel'), Array, Nothing), BinaryExpression(Object, Keyword('enablecollisionwith'), Object, Nothing), BinaryExpression(Object, Keyword('enablecopilot'), Boolean, Nothing), BinaryExpression(Object, Keyword('enabledynamicsimulation'), Boolean, Nothing), BinaryExpression(Group, Keyword('enabledynamicsimulation'), Boolean, Nothing), BinaryExpression(Object, Keyword('enablefatigue'), Boolean, Nothing), BinaryExpression(Object, Keyword('enablegunlights'), String, 
Nothing), BinaryExpression(Group, Keyword('enablegunlights'), String, Nothing), BinaryExpression(Object, Keyword('enableinfopanelcomponent'), Array, Boolean), BinaryExpression(Array, Keyword('enableinfopanelcomponent'), Array, Boolean), BinaryExpression(Object, Keyword('enableirlasers'), Boolean, Nothing), BinaryExpression(Group, Keyword('enableirlasers'), Boolean, Nothing), BinaryExpression(Object, Keyword('enablemimics'), Boolean, Nothing), BinaryExpression(Object, Keyword('enablepersonturret'), Array, Nothing), BinaryExpression(Object, Keyword('enablereload'), Boolean, Nothing), BinaryExpression(Object, Keyword('enableropeattach'), Boolean, Nothing), BinaryExpression(Object, Keyword('enablesimulation'), Boolean, Nothing), BinaryExpression(Object, Keyword('enablesimulationglobal'), Boolean, Nothing), BinaryExpression(Object, Keyword('enablestamina'), Boolean, Nothing), BinaryExpression(Object, Keyword('enableuavconnectability'), Array, Nothing), BinaryExpression(Object, Keyword('enableuavwaypoints'), Boolean, Nothing), BinaryExpression(Object, Keyword('enablevehiclecargo'), Boolean, Nothing), BinaryExpression(Object, Keyword('enablevehiclesensor'), Array, Nothing), BinaryExpression(Object, Keyword('enableweapondisassembly'), Boolean, Nothing), BinaryExpression(Object, Keyword('engineon'), Boolean, Nothing), BinaryExpression(Control, Keyword('evalobjectargument'), Array, Anything), BinaryExpression(Type, Keyword('exec'), String, Nothing), BinaryExpression(Control, Keyword('execeditorscript'), Array, Anything), BinaryExpression(Type, Keyword('execfsm'), String, Number), BinaryExpression(Type, Keyword('execvm'), String, Script), BinaryExpression(IfType, Keyword('exitwith'), Code, Anything), BinaryExpression(Number, Keyword('fademusic'), Number, Nothing), BinaryExpression(Number, Keyword('faderadio'), Number, Nothing), BinaryExpression(Number, Keyword('fadesound'), Number, Nothing), BinaryExpression(Number, Keyword('fadespeech'), Number, Nothing), BinaryExpression(Array, Keyword('find'), Type, Number), BinaryExpression(String, Keyword('find'), String, Number), BinaryExpression(Object, Keyword('findcover'), Array, Object), BinaryExpression(Control, Keyword('findeditorobject'), Array, String), BinaryExpression(Control, Keyword('findeditorobject'), Type, String), BinaryExpression(Array, Keyword('findemptyposition'), Array, Array), BinaryExpression(Array, Keyword('findemptypositionready'), Array, Boolean), BinaryExpression(Array, Keyword('findif'), Code, Number), BinaryExpression(Object, Keyword('findnearestenemy'), Object, Object), BinaryExpression(Object, Keyword('findnearestenemy'), Array, Object), BinaryExpression(Object, Keyword('fire'), String, Nothing), BinaryExpression(Object, Keyword('fire'), Array, Nothing), BinaryExpression(Object, Keyword('fireattarget'), Array, Boolean), BinaryExpression(Object, Keyword('flyinheight'), Number, Nothing), BinaryExpression(Object, Keyword('flyinheightasl'), Array, Nothing), BinaryExpression(Object, Keyword('forceadduniform'), String, Nothing), BinaryExpression(Object, Keyword('forceflagtexture'), String, Nothing), BinaryExpression(Object, Keyword('forcefollowroad'), Boolean, Nothing), BinaryExpression(Object, Keyword('forcespeed'), Number, Nothing), BinaryExpression(Object, Keyword('forcewalk'), Boolean, Nothing), BinaryExpression(Object, Keyword('forceweaponfire'), Array, Nothing), BinaryExpression(Code, Keyword('foreach'), Array, Nothing), BinaryExpression(Code, Keyword('foreachmember'), TeamMember, Nothing), BinaryExpression(Code, 
Keyword('foreachmemberagent'), TeamMember, Nothing), BinaryExpression(Code, Keyword('foreachmemberteam'), TeamMember, Nothing), BinaryExpression(Object, Keyword('forgettarget'), Object, Nothing), BinaryExpression(Group, Keyword('forgettarget'), Object, Nothing), BinaryExpression(ForType, Keyword('from'), Number, ForType), BinaryExpression(Object, Keyword('get3denattribute'), String, Array), BinaryExpression(Group, Keyword('get3denattribute'), String, Array), BinaryExpression(Array, Keyword('get3denattribute'), String, Array), BinaryExpression(String, Keyword('get3denattribute'), String, Array), BinaryExpression(Number, Keyword('get3denattribute'), String, Array), BinaryExpression(String, Keyword('get3denmissionattribute'), String, Anything), BinaryExpression(Object, Keyword('getartilleryeta'), Array, Number), BinaryExpression(Object, Keyword('getcargoindex'), Object, Number), BinaryExpression(Object, Keyword('getcompatiblepylonmagazines'), String, Array), BinaryExpression(Object, Keyword('getcompatiblepylonmagazines'), Number, Array), BinaryExpression(String, Keyword('getcompatiblepylonmagazines'), String, Array), BinaryExpression(String, Keyword('getcompatiblepylonmagazines'), Number, Array), BinaryExpression(Object, Keyword('getdir'), Object, Number), BinaryExpression(Object, Keyword('getdir'), Array, Number), BinaryExpression(Array, Keyword('getdir'), Object, Number), BinaryExpression(Array, Keyword('getdir'), Array, Number), BinaryExpression(Control, Keyword('geteditorobjectscope'), String, String), BinaryExpression(Array, Keyword('getenvsoundcontroller'), String, Number), BinaryExpression(Side, Keyword('getfriend'), Side, Number), BinaryExpression(Number, Keyword('getfsmvariable'), String, Anything), BinaryExpression(Number, Keyword('getfsmvariable'), Array, Anything), BinaryExpression(Group, Keyword('getgroupicon'), Number, Array), BinaryExpression(Object, Keyword('gethidefrom'), Object, Array), BinaryExpression(Object, Keyword('gethit'), String, Number), BinaryExpression(Object, Keyword('gethitindex'), Number, Number), BinaryExpression(Object, Keyword('gethitpointdamage'), String, Number), BinaryExpression(Control, Keyword('getobjectargument'), Array, String), BinaryExpression(Control, Keyword('getobjectchildren'), String, Array), BinaryExpression(Control, Keyword('getobjectproxy'), String, Object), BinaryExpression(Object, Keyword('getpos'), Array, Array), BinaryExpression(Array, Keyword('getpos'), Array, Array), BinaryExpression(Object, Keyword('getreldir'), Object, Number), BinaryExpression(Object, Keyword('getreldir'), Array, Number), BinaryExpression(Object, Keyword('getrelpos'), Array, Array), BinaryExpression(Object, Keyword('getsoundcontroller'), String, Number), BinaryExpression(Object, Keyword('getsoundcontrollerresult'), Config, Number), BinaryExpression(Object, Keyword('getspeed'), String, Number), BinaryExpression(Object, Keyword('getunittrait'), String, Anything), BinaryExpression(Display, Keyword('getvariable'), String, Anything), BinaryExpression(Display, Keyword('getvariable'), Array, Anything), BinaryExpression(Control, Keyword('getvariable'), String, Anything), BinaryExpression(Control, Keyword('getvariable'), Array, Anything), BinaryExpression(Object, Keyword('getvariable'), String, Anything), BinaryExpression(Object, Keyword('getvariable'), Array, Anything), BinaryExpression(Group, Keyword('getvariable'), String, Anything), BinaryExpression(Group, Keyword('getvariable'), Array, Anything), BinaryExpression(Namespace, Keyword('getvariable'), String, Anything), 
BinaryExpression(Namespace, Keyword('getvariable'), Array, Anything), BinaryExpression(TeamMember, Keyword('getvariable'), String, Anything), BinaryExpression(TeamMember, Keyword('getvariable'), Array, Anything), BinaryExpression(Task, Keyword('getvariable'), String, Anything), BinaryExpression(Task, Keyword('getvariable'), Array, Anything), BinaryExpression(Location, Keyword('getvariable'), String, Anything), BinaryExpression(Location, Keyword('getvariable'), Array, Anything), BinaryExpression(Object, Keyword('glanceat'), Object, Nothing), BinaryExpression(Object, Keyword('glanceat'), Array, Nothing), BinaryExpression(Array, Keyword('glanceat'), Object, Nothing), BinaryExpression(Array, Keyword('glanceat'), Array, Nothing), BinaryExpression(Object, Keyword('globalchat'), String, Nothing), BinaryExpression(Object, Keyword('globalradio'), String, Nothing), BinaryExpression(Object, Keyword('groupchat'), String, Nothing), BinaryExpression(Object, Keyword('groupradio'), String, Nothing), BinaryExpression(Object, Keyword('groupselectunit'), Array, Nothing), BinaryExpression(Object, Keyword('hasweapon'), String, Boolean), BinaryExpression(Object, Keyword('hcgroupparams'), Group, Array), BinaryExpression(Object, Keyword('hcremovegroup'), Group, Nothing), BinaryExpression(Object, Keyword('hcselectgroup'), Array, Nothing), BinaryExpression(Object, Keyword('hcsetgroup'), Array, Nothing), BinaryExpression(Object, Keyword('hideobject'), Boolean, Nothing), BinaryExpression(Object, Keyword('hideobjectglobal'), Boolean, Nothing), BinaryExpression(Object, Keyword('hideselection'), Array, Nothing), BinaryExpression(String, Keyword('hintc'), String, Nothing), BinaryExpression(String, Keyword('hintc'), Array, Nothing), BinaryExpression(Control, Keyword('htmlload'), String, Nothing), BinaryExpression(Type, Keyword('in'), Array, Boolean), BinaryExpression(Object, Keyword('in'), Object, Boolean), BinaryExpression(Array, Keyword('in'), Location, Boolean), BinaryExpression(Object, Keyword('inarea'), Object, Boolean), BinaryExpression(Array, Keyword('inarea'), Object, Boolean), BinaryExpression(Object, Keyword('inarea'), String, Boolean), BinaryExpression(Array, Keyword('inarea'), String, Boolean), BinaryExpression(Object, Keyword('inarea'), Array, Boolean), BinaryExpression(Array, Keyword('inarea'), Array, Boolean), BinaryExpression(Object, Keyword('inarea'), Location, Boolean), BinaryExpression(Array, Keyword('inarea'), Location, Boolean), BinaryExpression(Array, Keyword('inareaarray'), Object, Array), BinaryExpression(Array, Keyword('inareaarray'), String, Array), BinaryExpression(Array, Keyword('inareaarray'), Array, Array), BinaryExpression(Array, Keyword('inareaarray'), Location, Array), BinaryExpression(Object, Keyword('inflame'), Boolean, Nothing), BinaryExpression(Object, Keyword('infopanelcomponentenabled'), Array, Boolean), BinaryExpression(Array, Keyword('infopanelcomponentenabled'), Array, Boolean), BinaryExpression(Object, Keyword('infopanelcomponents'), String, Array), BinaryExpression(Array, Keyword('infopanelcomponents'), String, Array), BinaryExpression(Array, Keyword('inpolygon'), Array, Boolean), BinaryExpression(Array, Keyword('inrangeofartillery'), Array, Boolean), BinaryExpression(Control, Keyword('inserteditorobject'), Array, String), BinaryExpression(Array, Keyword('intersect'), Array, Array), BinaryExpression(Type, Keyword('isequalto'), Type, Boolean), BinaryExpression(Type, Keyword('isequaltype'), Type, Boolean), 
BinaryExpression(Array, Keyword('isequaltypeall'), Type, Boolean), BinaryExpression(Type, Keyword('isequaltypeany'), Array, Boolean), BinaryExpression(Array, Keyword('isequaltypearray'), Array, Boolean), BinaryExpression(Type, Keyword('isequaltypeparams'), Array, Boolean), BinaryExpression(Object, Keyword('isflashlighton'), String, Boolean), BinaryExpression(Array, Keyword('isflatempty'), Array, Array), BinaryExpression(Object, Keyword('isirlaseron'), String, Boolean), BinaryExpression(String, Keyword('iskindof'), String, Boolean), BinaryExpression(String, Keyword('iskindof'), Array, Boolean), BinaryExpression(Object, Keyword('iskindof'), String, Boolean), BinaryExpression(Object, Keyword('issensortargetconfirmed'), Side, Boolean), BinaryExpression(Object, Keyword('isuavconnectable'), Array, Boolean), BinaryExpression(Object, Keyword('isuniformallowed'), String, Boolean), BinaryExpression(Object, Keyword('isvehiclesensorenabled'), String, Array), BinaryExpression(Array, Keyword('join'), Object, Nothing), BinaryExpression(Array, Keyword('join'), Group, Nothing), BinaryExpression(Object, Keyword('joinas'), Array, Nothing), BinaryExpression(Object, Keyword('joinassilent'), Array, Nothing), BinaryExpression(Array, Keyword('joinsilent'), Object, Nothing), BinaryExpression(Array, Keyword('joinsilent'), Group, Nothing), BinaryExpression(Array, Keyword('joinstring'), String, String), BinaryExpression(Object, Keyword('kbadddatabase'), String, Boolean), BinaryExpression(Object, Keyword('kbadddatabasetargets'), String, Boolean), BinaryExpression(Object, Keyword('kbaddtopic'), Array, Nothing), BinaryExpression(Object, Keyword('kbhastopic'), String, Boolean), BinaryExpression(Object, Keyword('kbreact'), Array, Nothing), BinaryExpression(Object, Keyword('kbremovetopic'), String, Nothing), BinaryExpression(Object, Keyword('kbtell'), Array, Nothing), BinaryExpression(Object, Keyword('kbwassaid'), Array, Boolean), BinaryExpression(Object, Keyword('knowsabout'), Object, Number), BinaryExpression(Group, Keyword('knowsabout'), Object, Number), BinaryExpression(Side, Keyword('knowsabout'), Object, Number), BinaryExpression(Object, Keyword('land'), String, Nothing), BinaryExpression(Object, Keyword('landat'), Object, Nothing), BinaryExpression(Object, Keyword('landat'), Number, Nothing), BinaryExpression(Control, Keyword('lbadd'), String, Number), BinaryExpression(Control, Keyword('lbcolor'), Number, Array), BinaryExpression(Control, Keyword('lbcolorright'), Number, Array), BinaryExpression(Control, Keyword('lbdata'), Number, String), BinaryExpression(Control, Keyword('lbdelete'), Number, Nothing), BinaryExpression(Control, Keyword('lbisselected'), Number, Boolean), BinaryExpression(Control, Keyword('lbpicture'), Number, String), BinaryExpression(Control, Keyword('lbpictureright'), Number, String), BinaryExpression(Control, Keyword('lbsetcolor'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetcolorright'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetcursel'), Number, Nothing), BinaryExpression(Control, Keyword('lbsetdata'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetpicture'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetpicturecolor'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetpicturecolordisabled'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetpicturecolorselected'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetpictureright'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetpicturerightcolor'), Array, Nothing), 
BinaryExpression(Control, Keyword('lbsetpicturerightcolordisabled'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetpicturerightcolorselected'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetselectcolor'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetselectcolorright'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetselected'), Array, Nothing), BinaryExpression(Control, Keyword('lbsettext'), Array, Nothing), BinaryExpression(Control, Keyword('lbsettextright'), Array, Nothing), BinaryExpression(Control, Keyword('lbsettooltip'), Array, Nothing), BinaryExpression(Control, Keyword('lbsetvalue'), Array, Nothing), BinaryExpression(Control, Keyword('lbtext'), Number, String), BinaryExpression(Control, Keyword('lbtextright'), Number, String), BinaryExpression(Control, Keyword('lbvalue'), Number, Number), BinaryExpression(Object, Keyword('leavevehicle'), Object, Nothing), BinaryExpression(Group, Keyword('leavevehicle'), Object, Nothing), BinaryExpression(Object, Keyword('lightattachobject'), Array, Nothing), BinaryExpression(Object, Keyword('limitspeed'), Number, Nothing), BinaryExpression(Object, Keyword('linkitem'), String, Nothing), BinaryExpression(Control, Keyword('listobjects'), String, Array), BinaryExpression(Control, Keyword('lnbaddcolumn'), Number, Number), BinaryExpression(Control, Keyword('lnbaddrow'), Array, Number), BinaryExpression(Control, Keyword('lnbcolor'), Array, Array), BinaryExpression(Control, Keyword('lnbcolorright'), Array, Array), BinaryExpression(Control, Keyword('lnbdata'), Array, String), BinaryExpression(Control, Keyword('lnbdeletecolumn'), Number, Nothing), BinaryExpression(Control, Keyword('lnbdeleterow'), Number, Nothing), BinaryExpression(Control, Keyword('lnbpicture'), Array, String), BinaryExpression(Control, Keyword('lnbpictureright'), Array, String), BinaryExpression(Control, Keyword('lnbsetcolor'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsetcolorright'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsetcolumnspos'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsetcurselrow'), Number, Nothing), BinaryExpression(Control, Keyword('lnbsetdata'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsetpicture'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsetpicturecolor'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsetpicturecolorright'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsetpicturecolorselected'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsetpicturecolorselectedright'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsetpictureright'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsettext'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsettextright'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsetvalue'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsort'), Array, Nothing), BinaryExpression(Control, Keyword('lnbsortbyvalue'), Array, Nothing), BinaryExpression(Control, Keyword('lnbtext'), Array, String), BinaryExpression(Control, Keyword('lnbtextright'), Array, String), BinaryExpression(Control, Keyword('lnbvalue'), Array, Number), BinaryExpression(Object, Keyword('loadidentity'), String, Boolean), BinaryExpression(Object, Keyword('loadmagazine'), Array, Nothing), BinaryExpression(Control, Keyword('loadoverlay'), Config, Nothing), BinaryExpression(Object, Keyword('loadstatus'), String, Boolean), BinaryExpression(Object, Keyword('lock'), Boolean, Nothing), BinaryExpression(Object, 
Keyword('lock'), Number, Nothing), BinaryExpression(Object, Keyword('lockcamerato'), Array, Nothing), BinaryExpression(Object, Keyword('lockcargo'), Array, Nothing), BinaryExpression(Object, Keyword('lockcargo'), Boolean, Nothing), BinaryExpression(Object, Keyword('lockdriver'), Boolean, Nothing), BinaryExpression(Object, Keyword('lockedcargo'), Number, Boolean), BinaryExpression(Object, Keyword('lockedturret'), Array, Boolean), BinaryExpression(Object, Keyword('lockturret'), Array, Nothing), BinaryExpression(Object, Keyword('lockwp'), Boolean, Nothing), BinaryExpression(Group, Keyword('lockwp'), Boolean, Nothing), BinaryExpression(Object, Keyword('lookat'), Object, Nothing), BinaryExpression(Object, Keyword('lookat'), Array, Nothing), BinaryExpression(Array, Keyword('lookat'), Object, Nothing), BinaryExpression(Array, Keyword('lookat'), Array, Nothing), BinaryExpression(Control, Keyword('lookatpos'), Array, Nothing), BinaryExpression(Object, Keyword('magazinesturret'), Array, Array), BinaryExpression(Object, Keyword('magazineturretammo'), Array, Number), BinaryExpression(Control, Keyword('mapcenteroncamera'), Boolean, Nothing), BinaryExpression(Number, Keyword('max'), Number, Number), BinaryExpression(Control, Keyword('menuaction'), Array, String), BinaryExpression(Control, Keyword('menuadd'), Array, Number), BinaryExpression(Control, Keyword('menuchecked'), Array, Boolean), BinaryExpression(Control, Keyword('menucollapse'), Array, Nothing), BinaryExpression(Control, Keyword('menudata'), Array, String), BinaryExpression(Control, Keyword('menudelete'), Array, Nothing), BinaryExpression(Control, Keyword('menuenable'), Array, Nothing), BinaryExpression(Control, Keyword('menuenabled'), Array, Boolean), BinaryExpression(Control, Keyword('menuexpand'), Array, Nothing), BinaryExpression(Control, Keyword('menupicture'), Array, String), BinaryExpression(Control, Keyword('menusetaction'), Array, Nothing), BinaryExpression(Control, Keyword('menusetcheck'), Array, Nothing), BinaryExpression(Control, Keyword('menusetdata'), Array, Nothing), BinaryExpression(Control, Keyword('menusetpicture'), Array, Nothing), BinaryExpression(Control, Keyword('menusetvalue'), Array, Nothing), BinaryExpression(Control, Keyword('menushortcut'), Array, Number), BinaryExpression(Control, Keyword('menushortcuttext'), Array, String), BinaryExpression(Control, Keyword('menusize'), Array, Number), BinaryExpression(Control, Keyword('menusort'), Array, Nothing), BinaryExpression(Control, Keyword('menutext'), Array, String), BinaryExpression(Control, Keyword('menuurl'), Array, String), BinaryExpression(Control, Keyword('menuvalue'), Array, Number), BinaryExpression(Number, Keyword('min'), Number, Number), BinaryExpression(Object, Keyword('minedetectedby'), Side, Boolean), BinaryExpression(Number, Keyword('mod'), Number, Number), BinaryExpression(Object, Keyword('modeltoworld'), Array, Array), BinaryExpression(Object, Keyword('modeltoworldvisual'), Array, Array), BinaryExpression(Object, Keyword('modeltoworldvisualworld'), Array, Array), BinaryExpression(Object, Keyword('modeltoworldworld'), Array, Array), BinaryExpression(Object, Keyword('move'), Array, Nothing), BinaryExpression(Group, Keyword('move'), Array, Nothing), BinaryExpression(Object, Keyword('moveinany'), Object, Boolean), BinaryExpression(Object, Keyword('moveincargo'), Object, Nothing), BinaryExpression(Object, Keyword('moveincargo'), Array, Nothing), BinaryExpression(Object, Keyword('moveincommander'), Object, Nothing), BinaryExpression(Object, 
Keyword('moveindriver'), Object, Nothing), BinaryExpression(Object, Keyword('moveingunner'), Object, Nothing), BinaryExpression(Object, Keyword('moveinturret'), Array, Nothing), BinaryExpression(Control, Keyword('moveobjecttoend'), String, Nothing), BinaryExpression(Object, Keyword('moveto'), Array, Nothing), BinaryExpression(Object, Keyword('nearentities'), Number, Array), BinaryExpression(Object, Keyword('nearentities'), Array, Array), BinaryExpression(Array, Keyword('nearentities'), Number, Array), BinaryExpression(Array, Keyword('nearentities'), Array, Array), BinaryExpression(Array, Keyword('nearestobject'), String, Object), BinaryExpression(Array, Keyword('nearestobject'), Number, Object), BinaryExpression(Object, Keyword('nearobjects'), Number, Array), BinaryExpression(Object, Keyword('nearobjects'), Array, Array), BinaryExpression(Array, Keyword('nearobjects'), Number, Array), BinaryExpression(Array, Keyword('nearobjects'), Array, Array), BinaryExpression(Object, Keyword('nearobjectsready'), Number, Boolean), BinaryExpression(Array, Keyword('nearobjectsready'), Number, Boolean), BinaryExpression(Object, Keyword('nearroads'), Number, Array), BinaryExpression(Array, Keyword('nearroads'), Number, Array), BinaryExpression(Object, Keyword('nearsupplies'), Number, Array), BinaryExpression(Object, Keyword('nearsupplies'), Array, Array), BinaryExpression(Array, Keyword('nearsupplies'), Number, Array), BinaryExpression(Array, Keyword('nearsupplies'), Array, Array), BinaryExpression(Object, Keyword('neartargets'), Number, Array), BinaryExpression(Control, Keyword('newoverlay'), Config, Nothing), BinaryExpression(Control, Keyword('nmenuitems'), String, Number), BinaryExpression(Control, Keyword('nmenuitems'), Number, Number), BinaryExpression(String, Keyword('objstatus'), String, Nothing), BinaryExpression(Control, Keyword('ondoubleclick'), String, Anything), BinaryExpression(Type, Keyword('onmapsingleclick'), Code, Nothing), BinaryExpression(Type, Keyword('onmapsingleclick'), String, Nothing), BinaryExpression(Control, Keyword('onshownewobject'), String, Anything), BinaryExpression(Boolean, Keyword('or'), Boolean, Boolean), BinaryExpression(Boolean, Keyword('or'), Code, Boolean), BinaryExpression(Array, Keyword('ordergetin'), Boolean, Nothing), BinaryExpression(Type, Keyword('param'), Array, Anything), BinaryExpression(Type, Keyword('params'), Array, Boolean), BinaryExpression(Object, Keyword('playaction'), String, Nothing), BinaryExpression(Object, Keyword('playactionnow'), String, Nothing), BinaryExpression(Object, Keyword('playgesture'), String, Nothing), BinaryExpression(Object, Keyword('playmove'), String, Nothing), BinaryExpression(Object, Keyword('playmovenow'), String, Nothing), BinaryExpression(Control, Keyword('posscreentoworld'), Array, Array), BinaryExpression(Control, Keyword('posworldtoscreen'), Array, Array), BinaryExpression(String, Keyword('ppeffectadjust'), Array, Nothing), BinaryExpression(Number, Keyword('ppeffectadjust'), Array, Nothing), BinaryExpression(String, Keyword('ppeffectcommit'), Number, Nothing), BinaryExpression(Number, Keyword('ppeffectcommit'), Number, Nothing), BinaryExpression(Array, Keyword('ppeffectcommit'), Number, Nothing), BinaryExpression(String, Keyword('ppeffectenable'), Boolean, Nothing), BinaryExpression(Array, Keyword('ppeffectenable'), Boolean, Nothing), BinaryExpression(Number, Keyword('ppeffectenable'), Boolean, Nothing), BinaryExpression(Number, Keyword('ppeffectforceinnvg'), Boolean, Nothing), BinaryExpression(Number, 
Keyword('preloadobject'), Object, Boolean), BinaryExpression(Number, Keyword('preloadobject'), String, Boolean), BinaryExpression(Control, Keyword('progresssetposition'), Number, Nothing), BinaryExpression(Number, Keyword('publicvariableclient'), String, Nothing), BinaryExpression(Array, Keyword('pushback'), Type, Number), BinaryExpression(Array, Keyword('pushbackunique'), Type, Number), BinaryExpression(Number, Keyword('radiochanneladd'), Array, Nothing), BinaryExpression(Number, Keyword('radiochannelremove'), Array, Nothing), BinaryExpression(Number, Keyword('radiochannelsetcallsign'), String, Nothing), BinaryExpression(Number, Keyword('radiochannelsetlabel'), String, Nothing), BinaryExpression(Number, Keyword('random'), Number, Number), BinaryExpression(Number, Keyword('random'), Array, Number), BinaryExpression(TeamMember, Keyword('registertask'), String, Boolean), BinaryExpression(Object, Keyword('remotecontrol'), Object, Nothing), BinaryExpression(Type, Keyword('remoteexec'), Array, Anything), BinaryExpression(Type, Keyword('remoteexeccall'), Array, Anything), BinaryExpression(Object, Keyword('removeaction'), Number, Nothing), BinaryExpression(Object, Keyword('removealleventhandlers'), String, Nothing), BinaryExpression(Object, Keyword('removeallmpeventhandlers'), String, Nothing), BinaryExpression(Object, Keyword('removecuratoraddons'), Array, Nothing), BinaryExpression(Object, Keyword('removecuratorcameraarea'), Number, Nothing), BinaryExpression(Object, Keyword('removecuratoreditableobjects'), Array, Nothing), BinaryExpression(Object, Keyword('removecuratoreditingarea'), Number, Nothing), BinaryExpression(Control, Keyword('removedrawicon'), Array, Nothing), BinaryExpression(Control, Keyword('removedrawlinks'), Array, Nothing), BinaryExpression(Object, Keyword('removeeventhandler'), Array, Nothing), BinaryExpression(Group, Keyword('removegroupicon'), Number, Nothing), BinaryExpression(Object, Keyword('removehandgunitem'), String, Nothing), BinaryExpression(Object, Keyword('removeitem'), String, Nothing), BinaryExpression(Object, Keyword('removeitemfrombackpack'), String, Nothing), BinaryExpression(Object, Keyword('removeitemfromuniform'), String, Nothing), BinaryExpression(Object, Keyword('removeitemfromvest'), String, Nothing), BinaryExpression(Object, Keyword('removeitems'), String, Nothing), BinaryExpression(Object, Keyword('removemagazine'), String, Nothing), BinaryExpression(Object, Keyword('removemagazine'), Array, Nothing), BinaryExpression(Object, Keyword('removemagazineglobal'), String, Nothing), BinaryExpression(Object, Keyword('removemagazines'), String, Nothing), BinaryExpression(Object, Keyword('removemagazinesturret'), Array, Nothing), BinaryExpression(Object, Keyword('removemagazineturret'), Array, Nothing), BinaryExpression(Control, Keyword('removemenuitem'), Number, Nothing), BinaryExpression(Control, Keyword('removemenuitem'), String, Nothing), BinaryExpression(Object, Keyword('removempeventhandler'), Array, Nothing), BinaryExpression(Object, Keyword('removeownedmine'), Object, Nothing), BinaryExpression(Object, Keyword('removeprimaryweaponitem'), String, Nothing), BinaryExpression(Object, Keyword('removesecondaryweaponitem'), String, Nothing), BinaryExpression(Object, Keyword('removesimpletask'), Task, Nothing), BinaryExpression(TeamMember, Keyword('removeteammember'), TeamMember, Nothing), BinaryExpression(Object, Keyword('removeweapon'), String, Nothing), BinaryExpression(Object, Keyword('removeweaponattachmentcargo'), Array, Nothing), BinaryExpression(Object, 
Keyword('removeweaponcargo'), Array, Nothing), BinaryExpression(Object, Keyword('removeweaponglobal'), String, Nothing), BinaryExpression(Object, Keyword('removeweaponturret'), Array, Nothing), BinaryExpression(Side, Keyword('reportremotetarget'), Array, Nothing), BinaryExpression(Array, Keyword('resize'), Number, Nothing), BinaryExpression(Object, Keyword('respawnvehicle'), Array, Nothing), BinaryExpression(Object, Keyword('reveal'), Object, Nothing), BinaryExpression(Group, Keyword('reveal'), Object, Nothing), BinaryExpression(Object, Keyword('reveal'), Array, Nothing), BinaryExpression(Group, Keyword('reveal'), Array, Nothing), BinaryExpression(Side, Keyword('revealmine'), Object, Nothing), BinaryExpression(Array, Keyword('ropeattachto'), Object, Nothing), BinaryExpression(Object, Keyword('ropedetach'), Object, Nothing), BinaryExpression(Object, Keyword('saveidentity'), String, Boolean), BinaryExpression(Object, Keyword('savestatus'), String, Boolean), BinaryExpression(Object, Keyword('say'), String, Nothing), BinaryExpression(Array, Keyword('say'), String, Nothing), BinaryExpression(Object, Keyword('say'), Array, Nothing), BinaryExpression(Array, Keyword('say'), Array, Nothing), BinaryExpression(Object, Keyword('say2d'), String, Nothing), BinaryExpression(Array, Keyword('say2d'), String, Nothing), BinaryExpression(Object, Keyword('say2d'), Array, Nothing), BinaryExpression(Array, Keyword('say2d'), Array, Nothing), BinaryExpression(Object, Keyword('say3d'), String, Nothing), BinaryExpression(Array, Keyword('say3d'), String, Nothing), BinaryExpression(Object, Keyword('say3d'), Array, Nothing), BinaryExpression(Array, Keyword('say3d'), Array, Nothing), BinaryExpression(Array, Keyword('select'), Number, Anything), BinaryExpression(Array, Keyword('select'), Boolean, Anything), BinaryExpression(Array, Keyword('select'), Array, Array), BinaryExpression(String, Keyword('select'), Array, String), BinaryExpression(Array, Keyword('select'), Code, Array), BinaryExpression(Config, Keyword('select'), Number, Config), BinaryExpression(Object, Keyword('selectdiarysubject'), String, Nothing), BinaryExpression(Control, Keyword('selecteditorobject'), String, Anything), BinaryExpression(Object, Keyword('selectionposition'), String, Array), BinaryExpression(Object, Keyword('selectionposition'), Array, Array), BinaryExpression(Group, Keyword('selectleader'), Object, Nothing), BinaryExpression(Array, Keyword('selectrandomweighted'), Array, Anything), BinaryExpression(Object, Keyword('selectweapon'), String, Nothing), BinaryExpression(Object, Keyword('selectweaponturret'), Array, Nothing), BinaryExpression(Object, Keyword('sendsimplecommand'), String, Nothing), BinaryExpression(TeamMember, Keyword('sendtask'), Array, Task), BinaryExpression(Task, Keyword('sendtaskresult'), Array, Nothing), BinaryExpression(String, Keyword('servercommand'), String, Boolean), BinaryExpression(Array, Keyword('set'), Array, Nothing), BinaryExpression(Type, Keyword('set3denattribute'), Array, Boolean), BinaryExpression(Type, Keyword('set3denlayer'), Number, Boolean), BinaryExpression(Array, Keyword('set3denlogictype'), String, Nothing), BinaryExpression(String, Keyword('set3denmissionattribute'), Array, Nothing), BinaryExpression(Array, Keyword('set3denobjecttype'), String, Nothing), BinaryExpression(Object, Keyword('setactualcollectivertd'), Number, Nothing), BinaryExpression(Object, Keyword('setairplanethrottle'), Number, Nothing), BinaryExpression(Object, Keyword('setairportside'), Side, Nothing), BinaryExpression(Number, 
Keyword('setairportside'), Side, Nothing), BinaryExpression(Object, Keyword('setammo'), Array, Nothing), BinaryExpression(Object, Keyword('setammocargo'), Number, Nothing), BinaryExpression(Object, Keyword('setammoonpylon'), Array, Nothing), BinaryExpression(Object, Keyword('setanimspeedcoef'), Number, Nothing), BinaryExpression(String, Keyword('setattributes'), Array, String), BinaryExpression(Object, Keyword('setautonomous'), Boolean, Nothing), BinaryExpression(Object, Keyword('setbehaviour'), String, Nothing), BinaryExpression(Group, Keyword('setbehaviour'), String, Nothing), BinaryExpression(Object, Keyword('setbleedingremaining'), Number, Nothing), BinaryExpression(Object, Keyword('setbrakesrtd'), Array, Nothing), BinaryExpression(Object, Keyword('setcamerainterest'), Number, Nothing), BinaryExpression(Boolean, Keyword('setcamuseti'), Number, Nothing), BinaryExpression(Object, Keyword('setcaptive'), Boolean, Nothing), BinaryExpression(Object, Keyword('setcaptive'), Number, Nothing), BinaryExpression(Object, Keyword('setcenterofmass'), Array, Nothing), BinaryExpression(Object, Keyword('setcollisionlight'), Boolean, Nothing), BinaryExpression(Object, Keyword('setcombatmode'), String, Nothing), BinaryExpression(Group, Keyword('setcombatmode'), String, Nothing), BinaryExpression(TeamMember, Keyword('setcombatmode'), String, Nothing), BinaryExpression(Object, Keyword('setconvoyseparation'), Number, Nothing), BinaryExpression(Object, Keyword('setcuratorcameraareaceiling'), Number, Nothing), BinaryExpression(Object, Keyword('setcuratorcoef'), Array, Nothing), BinaryExpression(Object, Keyword('setcuratoreditingareatype'), Boolean, Nothing), BinaryExpression(Object, Keyword('setcuratorwaypointcost'), Number, Nothing), BinaryExpression(Object, Keyword('setcurrenttask'), Task, Nothing), BinaryExpression(Group, Keyword('setcurrentwaypoint'), Array, Nothing), BinaryExpression(Object, Keyword('setcustomaimcoef'), Number, Nothing), BinaryExpression(Object, Keyword('setcustomweightrtd'), Number, Nothing), BinaryExpression(Object, Keyword('setdamage'), Number, Nothing), BinaryExpression(Object, Keyword('setdamage'), Array, Nothing), BinaryExpression(Object, Keyword('setdammage'), Number, Nothing), BinaryExpression(String, Keyword('setdebriefingtext'), Array, Nothing), BinaryExpression(Object, Keyword('setdestination'), Array, Nothing), BinaryExpression(Object, Keyword('setdir'), Number, Nothing), BinaryExpression(Location, Keyword('setdirection'), Number, Nothing), BinaryExpression(Control, Keyword('setdrawicon'), Array, Nothing), BinaryExpression(Object, Keyword('setdriveonpath'), Array, Nothing), BinaryExpression(Object, Keyword('setdropinterval'), Number, Nothing), BinaryExpression(String, Keyword('setdynamicsimulationdistance'), Number, Nothing), BinaryExpression(String, Keyword('setdynamicsimulationdistancecoef'), Number, Nothing), BinaryExpression(Control, Keyword('seteditormode'), String, Nothing), BinaryExpression(Control, Keyword('seteditorobjectscope'), Array, Nothing), BinaryExpression(Object, Keyword('seteffectcondition'), String, Nothing), BinaryExpression(Array, Keyword('seteffectcondition'), String, Nothing), BinaryExpression(Object, Keyword('setenginerpmrtd'), Array, Nothing), BinaryExpression(Object, Keyword('setface'), String, Nothing), BinaryExpression(Object, Keyword('setfaceanimation'), Number, Nothing), BinaryExpression(Object, Keyword('setfatigue'), Number, Nothing), BinaryExpression(Object, 
Keyword('setfeaturetype'), Number, Boolean), BinaryExpression(Object, Keyword('setflaganimationphase'), Number, Nothing), BinaryExpression(Object, Keyword('setflagowner'), Object, Nothing), BinaryExpression(Object, Keyword('setflagside'), Side, Nothing), BinaryExpression(Object, Keyword('setflagtexture'), String, Nothing), BinaryExpression(Number, Keyword('setfog'), Number, Nothing), BinaryExpression(Number, Keyword('setfog'), Array, Nothing), BinaryExpression(Number, Keyword('setforcegeneratorrtd'), Array, Nothing), BinaryExpression(Object, Keyword('setformation'), String, Nothing), BinaryExpression(Group, Keyword('setformation'), String, Nothing), BinaryExpression(TeamMember, Keyword('setformation'), String, Nothing), BinaryExpression(Object, Keyword('setformationtask'), String, Nothing), BinaryExpression(Object, Keyword('setformdir'), Number, Nothing), BinaryExpression(Group, Keyword('setformdir'), Number, Nothing), BinaryExpression(Side, Keyword('setfriend'), Array, Nothing), BinaryExpression(TeamMember, Keyword('setfromeditor'), Boolean, Nothing), BinaryExpression(Number, Keyword('setfsmvariable'), Array, Nothing), BinaryExpression(Object, Keyword('setfuel'), Number, Nothing), BinaryExpression(Object, Keyword('setfuelcargo'), Number, Nothing), BinaryExpression(Group, Keyword('setgroupicon'), Array, Nothing), BinaryExpression(Group, Keyword('setgroupiconparams'), Array, Nothing), BinaryExpression(Object, Keyword('setgroupid'), Array, Nothing), BinaryExpression(Group, Keyword('setgroupid'), Array, Nothing), BinaryExpression(Object, Keyword('setgroupidglobal'), Array, Nothing), BinaryExpression(Group, Keyword('setgroupidglobal'), Array, Nothing), BinaryExpression(Group, Keyword('setgroupowner'), Number, Boolean), BinaryExpression(Number, Keyword('setgusts'), Number, Nothing), BinaryExpression(Object, Keyword('sethidebehind'), Array, Nothing), BinaryExpression(Object, Keyword('sethit'), Array, Nothing), BinaryExpression(Object, Keyword('sethitindex'), Array, Nothing), BinaryExpression(Object, Keyword('sethitpointdamage'), Array, Nothing), BinaryExpression(Object, Keyword('setidentity'), String, Nothing), BinaryExpression(Location, Keyword('setimportance'), Number, Nothing), BinaryExpression(TeamMember, Keyword('setleader'), TeamMember, Nothing), BinaryExpression(Object, Keyword('setlightambient'), Array, Nothing), BinaryExpression(Object, Keyword('setlightattenuation'), Array, Nothing), BinaryExpression(Object, Keyword('setlightbrightness'), Number, Nothing), BinaryExpression(Object, Keyword('setlightcolor'), Array, Nothing), BinaryExpression(Object, Keyword('setlightdaylight'), Boolean, Nothing), BinaryExpression(Object, Keyword('setlightflaremaxdistance'), Number, Nothing), BinaryExpression(Object, Keyword('setlightflaresize'), Number, Nothing), BinaryExpression(Object, Keyword('setlightintensity'), Number, Nothing), BinaryExpression(Number, Keyword('setlightnings'), Number, Nothing), BinaryExpression(Object, Keyword('setlightuseflare'), Boolean, Nothing), BinaryExpression(Object, Keyword('setmagazineturretammo'), Array, Nothing), BinaryExpression(String, Keyword('setmarkeralpha'), Number, Nothing), BinaryExpression(String, Keyword('setmarkeralphalocal'), Number, Nothing), BinaryExpression(String, Keyword('setmarkerbrush'), String, Nothing), BinaryExpression(String, Keyword('setmarkerbrushlocal'), String, Nothing), BinaryExpression(String, Keyword('setmarkercolor'), String, Nothing), BinaryExpression(String, Keyword('setmarkercolorlocal'), String, Nothing), BinaryExpression(String, 
Keyword('setmarkerdir'), Number, Nothing), BinaryExpression(String, Keyword('setmarkerdirlocal'), Number, Nothing), BinaryExpression(String, Keyword('setmarkerpos'), Array, Nothing), BinaryExpression(String, Keyword('setmarkerposlocal'), Array, Nothing), BinaryExpression(String, Keyword('setmarkershape'), String, Nothing), BinaryExpression(String, Keyword('setmarkershapelocal'), String, Nothing), BinaryExpression(String, Keyword('setmarkersize'), Array, Nothing), BinaryExpression(String, Keyword('setmarkersizelocal'), Array, Nothing), BinaryExpression(String, Keyword('setmarkertext'), String, Nothing), BinaryExpression(String, Keyword('setmarkertextlocal'), String, Nothing), BinaryExpression(String, Keyword('setmarkertype'), String, Nothing), BinaryExpression(String, Keyword('setmarkertypelocal'), String, Nothing), BinaryExpression(Object, Keyword('setmass'), Number, Nothing), BinaryExpression(Object, Keyword('setmass'), Array, Nothing), BinaryExpression(Object, Keyword('setmimic'), String, Nothing), BinaryExpression(Object, Keyword('setmusiceffect'), String, Nothing), BinaryExpression(Array, Keyword('setmusiceffect'), String, Nothing), BinaryExpression(Object, Keyword('setname'), String, Nothing), BinaryExpression(Object, Keyword('setname'), Array, Nothing), BinaryExpression(Location, Keyword('setname'), String, Nothing), BinaryExpression(Object, Keyword('setnamesound'), String, Nothing), BinaryExpression(Control, Keyword('setobjectarguments'), Array, Anything), BinaryExpression(Object, Keyword('setobjectmaterial'), Array, Nothing), BinaryExpression(Object, Keyword('setobjectmaterialglobal'), Array, Nothing), BinaryExpression(Control, Keyword('setobjectproxy'), Array, Anything), BinaryExpression(Object, Keyword('setobjecttexture'), Array, Nothing), BinaryExpression(Object, Keyword('setobjecttextureglobal'), Array, Nothing), BinaryExpression(Number, Keyword('setovercast'), Number, Nothing), BinaryExpression(Object, Keyword('setowner'), Number, Boolean), BinaryExpression(Object, Keyword('setoxygenremaining'), Number, Nothing), BinaryExpression(Object, Keyword('setparticlecircle'), Array, Nothing), BinaryExpression(Object, Keyword('setparticleclass'), String, Nothing), BinaryExpression(Object, Keyword('setparticlefire'), Array, Nothing), BinaryExpression(Object, Keyword('setparticleparams'), Array, Nothing), BinaryExpression(Object, Keyword('setparticlerandom'), Array, Nothing), BinaryExpression(Object, Keyword('setpilotcameradirection'), Array, Nothing), BinaryExpression(Object, Keyword('setpilotcamerarotation'), Array, Nothing), BinaryExpression(Object, Keyword('setpilotcameratarget'), Object, Boolean), BinaryExpression(Object, Keyword('setpilotcameratarget'), Array, Boolean), BinaryExpression(Object, Keyword('setpilotlight'), Boolean, Nothing), BinaryExpression(String, Keyword('setpipeffect'), Array, Nothing), BinaryExpression(Object, Keyword('setpitch'), Number, Nothing), BinaryExpression(Object, Keyword('setplatenumber'), String, Nothing), BinaryExpression(Object, Keyword('setpos'), Array, Nothing), BinaryExpression(Object, Keyword('setposasl'), Array, Nothing), BinaryExpression(Object, Keyword('setposasl2'), Array, Nothing), BinaryExpression(Object, Keyword('setposaslw'), Array, Nothing), BinaryExpression(Object, Keyword('setposatl'), Array, Nothing), BinaryExpression(Location, Keyword('setposition'), Array, Nothing), BinaryExpression(Object, Keyword('setposworld'), Array, Nothing), BinaryExpression(Object, Keyword('setpylonloadout'), Array, Boolean), BinaryExpression(Object, 
Keyword('setpylonspriority'), Array, Nothing), BinaryExpression(Number, Keyword('setradiomsg'), String, Nothing), BinaryExpression(Number, Keyword('setrain'), Number, Nothing), BinaryExpression(Number, Keyword('setrainbow'), Number, Nothing), BinaryExpression(Object, Keyword('setrandomlip'), Boolean, Nothing), BinaryExpression(Object, Keyword('setrank'), String, Nothing), BinaryExpression(Location, Keyword('setrectangular'), Boolean, Nothing), BinaryExpression(Object, Keyword('setrepaircargo'), Number, Nothing), BinaryExpression(Object, Keyword('setrotorbrakertd'), Number, Nothing), BinaryExpression(Object, Keyword('setshotparents'), Array, Nothing), BinaryExpression(Location, Keyword('setside'), Side, Nothing), BinaryExpression(Task, Keyword('setsimpletaskalwaysvisible'), Boolean, Nothing), BinaryExpression(Task, Keyword('setsimpletaskcustomdata'), Array, Nothing), BinaryExpression(Task, Keyword('setsimpletaskdescription'), Array, Nothing), BinaryExpression(Task, Keyword('setsimpletaskdestination'), Array, Nothing), BinaryExpression(Task, Keyword('setsimpletasktarget'), Array, Nothing), BinaryExpression(Task, Keyword('setsimpletasktype'), String, Nothing), BinaryExpression(Location, Keyword('setsize'), Array, Nothing), BinaryExpression(Object, Keyword('setskill'), Array, Nothing), BinaryExpression(Object, Keyword('setskill'), Number, Nothing), BinaryExpression(Object, Keyword('setslingload'), Object, Boolean), BinaryExpression(Object, Keyword('setsoundeffect'), Array, Nothing), BinaryExpression(Array, Keyword('setsoundeffect'), Array, Nothing), BinaryExpression(Object, Keyword('setspeaker'), String, Nothing), BinaryExpression(Location, Keyword('setspeech'), String, Nothing), BinaryExpression(Object, Keyword('setspeedmode'), String, Nothing), BinaryExpression(Group, Keyword('setspeedmode'), String, Nothing), BinaryExpression(Object, Keyword('setstamina'), Number, Nothing), BinaryExpression(Object, Keyword('setsuppression'), Number, Nothing), BinaryExpression(Object, Keyword('settargetage'), String, Nothing), BinaryExpression(Object, Keyword('settaskmarkeroffset'), Array, Nothing), BinaryExpression(Task, Keyword('settaskresult'), Array, Nothing), BinaryExpression(Task, Keyword('settaskstate'), String, Nothing), BinaryExpression(Location, Keyword('settext'), String, Nothing), BinaryExpression(Object, Keyword('settitleeffect'), Array, Nothing), BinaryExpression(Array, Keyword('settitleeffect'), Array, Nothing), BinaryExpression(Object, Keyword('settriggeractivation'), Array, Nothing), BinaryExpression(Object, Keyword('settriggerarea'), Array, Nothing), BinaryExpression(Object, Keyword('settriggerstatements'), Array, Nothing), BinaryExpression(Object, Keyword('settriggertext'), String, Nothing), BinaryExpression(Object, Keyword('settriggertimeout'), Array, Nothing), BinaryExpression(Object, Keyword('settriggertype'), String, Nothing), BinaryExpression(Location, Keyword('settype'), String, Nothing), BinaryExpression(Object, Keyword('setunconscious'), Boolean, Nothing), BinaryExpression(Object, Keyword('setunitability'), Number, Nothing), BinaryExpression(Object, Keyword('setunitloadout'), Array, Nothing), BinaryExpression(Object, Keyword('setunitloadout'), String, Nothing), BinaryExpression(Object, Keyword('setunitloadout'), Config, Nothing), BinaryExpression(Object, Keyword('setunitpos'), String, Nothing), BinaryExpression(Object, Keyword('setunitposweak'), String, Nothing), BinaryExpression(Object, Keyword('setunitrank'), String, Nothing), BinaryExpression(Object, 
Keyword('setunitrecoilcoefficient'), Number, Nothing), BinaryExpression(Object, Keyword('setunittrait'), Array, Nothing), BinaryExpression(Object, Keyword('setunloadincombat'), Array, Nothing), BinaryExpression(Object, Keyword('setuseractiontext'), Array, Nothing), BinaryExpression(Object, Keyword('setusermfdtext'), Array, Nothing), BinaryExpression(Object, Keyword('setusermfdvalue'), Array, Nothing), BinaryExpression(Display, Keyword('setvariable'), Array, Nothing), BinaryExpression(Control, Keyword('setvariable'), Array, Nothing), BinaryExpression(Object, Keyword('setvariable'), Array, Nothing), BinaryExpression(Group, Keyword('setvariable'), Array, Nothing), BinaryExpression(Namespace, Keyword('setvariable'), Array, Nothing), BinaryExpression(TeamMember, Keyword('setvariable'), Array, Nothing), BinaryExpression(Task, Keyword('setvariable'), Array, Nothing), BinaryExpression(Location, Keyword('setvariable'), Array, Nothing), BinaryExpression(Object, Keyword('setvectordir'), Array, Nothing), BinaryExpression(Object, Keyword('setvectordirandup'), Array, Nothing), BinaryExpression(Object, Keyword('setvectorup'), Array, Nothing), BinaryExpression(Object, Keyword('setvehicleammo'), Number, Nothing), BinaryExpression(Object, Keyword('setvehicleammodef'), Number, Nothing), BinaryExpression(Object, Keyword('setvehiclearmor'), Number, Nothing), BinaryExpression(Object, Keyword('setvehiclecargo'), Object, Boolean), BinaryExpression(Object, Keyword('setvehicleid'), Number, Nothing), BinaryExpression(Object, Keyword('setvehiclelock'), String, Nothing), BinaryExpression(Object, Keyword('setvehicleposition'), Array, Boolean), BinaryExpression(Object, Keyword('setvehicleradar'), Number, Nothing), BinaryExpression(Object, Keyword('setvehiclereceiveremotetargets'), Boolean, Nothing), BinaryExpression(Object, Keyword('setvehiclereportownposition'), Boolean, Nothing), BinaryExpression(Object, Keyword('setvehiclereportremotetargets'), Boolean, Nothing), BinaryExpression(Object, Keyword('setvehicletipars'), Array, Nothing), BinaryExpression(Object, Keyword('setvehiclevarname'), String, Nothing), BinaryExpression(Object, Keyword('setvelocity'), Array, Nothing), BinaryExpression(Object, Keyword('setvelocitymodelspace'), Array, Nothing), BinaryExpression(Object, Keyword('setvelocitytransformation'), Array, Nothing), BinaryExpression(Control, Keyword('setvisibleiftreecollapsed'), Array, Nothing), BinaryExpression(Object, Keyword('setwantedrpmrtd'), Array, Nothing), BinaryExpression(Number, Keyword('setwaves'), Number, Nothing), BinaryExpression(Array, Keyword('setwaypointbehaviour'), String, Nothing), BinaryExpression(Array, Keyword('setwaypointcombatmode'), String, Nothing), BinaryExpression(Array, Keyword('setwaypointcompletionradius'), Number, Nothing), BinaryExpression(Array, Keyword('setwaypointdescription'), String, Nothing), BinaryExpression(Array, Keyword('setwaypointforcebehaviour'), Boolean, Nothing), BinaryExpression(Array, Keyword('setwaypointformation'), String, Nothing), BinaryExpression(Array, Keyword('setwaypointhouseposition'), Number, Nothing), BinaryExpression(Array, Keyword('setwaypointloiterradius'), Number, Nothing), BinaryExpression(Array, Keyword('setwaypointloitertype'), String, Nothing), BinaryExpression(Array, Keyword('setwaypointname'), String, Nothing), BinaryExpression(Array, Keyword('setwaypointposition'), Array, Nothing), BinaryExpression(Array, Keyword('setwaypointscript'), String, Nothing), BinaryExpression(Array, Keyword('setwaypointspeed'), String, Nothing), 
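# These 'setwaypoint*' entries take Array on the left because SQF addresses a
# waypoint as a [group, index] pair, e.g.
#     [_grp, 1] setWaypointSpeed "FULL"
# which matches BinaryExpression(Array, Keyword('setwaypointspeed'), String, Nothing).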
BinaryExpression(Array, Keyword('setwaypointstatements'), Array, Nothing), BinaryExpression(Array, Keyword('setwaypointtimeout'), Array, Nothing), BinaryExpression(Array, Keyword('setwaypointtype'), String, Nothing), BinaryExpression(Array, Keyword('setwaypointvisible'), Boolean, Nothing), BinaryExpression(Object, Keyword('setweaponreloadingtime'), Array, Boolean), BinaryExpression(Number, Keyword('setwinddir'), Number, Nothing), BinaryExpression(Number, Keyword('setwindforce'), Number, Nothing), BinaryExpression(Number, Keyword('setwindstr'), Number, Nothing), BinaryExpression(Object, Keyword('setwingforcescalertd'), Array, Nothing), BinaryExpression(Array, Keyword('setwppos'), Array, Nothing), BinaryExpression(Control, Keyword('show3dicons'), Boolean, Nothing), BinaryExpression(Control, Keyword('showlegend'), Boolean, Nothing), BinaryExpression(Control, Keyword('showneweditorobject'), Array, Anything), BinaryExpression(Array, Keyword('showwaypoint'), String, Nothing), BinaryExpression(Object, Keyword('sidechat'), String, Nothing), BinaryExpression(Array, Keyword('sidechat'), String, Nothing), BinaryExpression(Object, Keyword('sideradio'), String, Nothing), BinaryExpression(Array, Keyword('sideradio'), String, Nothing), BinaryExpression(Object, Keyword('skill'), String, Number), BinaryExpression(Object, Keyword('skillfinal'), String, Number), BinaryExpression(Control, Keyword('slidersetposition'), Number, Nothing), BinaryExpression(Control, Keyword('slidersetrange'), Array, Nothing), BinaryExpression(Control, Keyword('slidersetspeed'), Array, Nothing), BinaryExpression(Array, Keyword('sort'), Boolean, Nothing), BinaryExpression(Type, Keyword('spawn'), Code, Script), BinaryExpression(String, Keyword('splitstring'), String, Array), BinaryExpression(ForType, Keyword('step'), Number, ForType), BinaryExpression(Object, Keyword('stop'), Boolean, Nothing), BinaryExpression(Object, Keyword('suppressfor'), Number, Nothing), BinaryExpression(Object, Keyword('swimindepth'), Number, Nothing), BinaryExpression(Object, Keyword('switchaction'), String, Nothing), BinaryExpression(Object, Keyword('switchcamera'), String, Nothing), BinaryExpression(Object, Keyword('switchgesture'), String, Nothing), BinaryExpression(Object, Keyword('switchlight'), String, Nothing), BinaryExpression(Object, Keyword('switchmove'), String, Nothing), BinaryExpression(Object, Keyword('synchronizeobjectsadd'), Array, Nothing), BinaryExpression(Object, Keyword('synchronizeobjectsremove'), Array, Nothing), BinaryExpression(Object, Keyword('synchronizetrigger'), Array, Nothing), BinaryExpression(Array, Keyword('synchronizewaypoint'), Array, Nothing), BinaryExpression(Object, Keyword('synchronizewaypoint'), Array, Nothing), BinaryExpression(Object, Keyword('targetknowledge'), Object, Array), BinaryExpression(Object, Keyword('targets'), Array, Array), BinaryExpression(Array, Keyword('targetsaggregate'), Array, Array), BinaryExpression(Object, Keyword('targetsquery'), Array, Array), BinaryExpression(IfType, Keyword('then'), Code, Anything), BinaryExpression(IfType, Keyword('then'), Array, Anything), BinaryExpression(IfType, Keyword('throw'), Type, Nothing), BinaryExpression(ForType, Keyword('to'), Number, ForType), BinaryExpression(Number, Keyword('tofixed'), Number, String), BinaryExpression(Object, Keyword('triggerattachobject'), Number, Nothing), BinaryExpression(Object, Keyword('triggerattachvehicle'), Array, Nothing), BinaryExpression(Object, Keyword('triggerdynamicsimulation'), Boolean, Nothing), BinaryExpression(Type, 
Keyword('try'), Code, TryType), BinaryExpression(Object, Keyword('turretlocal'), Array, Boolean), BinaryExpression(Object, Keyword('turretowner'), Array, Number), BinaryExpression(Object, Keyword('turretunit'), Array, Object), BinaryExpression(Control, Keyword('tvadd'), Array, Number), BinaryExpression(Control, Keyword('tvcollapse'), Array, Nothing), BinaryExpression(Control, Keyword('tvcount'), Array, Number), BinaryExpression(Control, Keyword('tvdata'), Array, String), BinaryExpression(Control, Keyword('tvdelete'), Array, Nothing), BinaryExpression(Control, Keyword('tvexpand'), Array, Nothing), BinaryExpression(Control, Keyword('tvpicture'), Array, String), BinaryExpression(Control, Keyword('tvpictureright'), Array, String), BinaryExpression(Control, Keyword('tvsetcolor'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetcursel'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetdata'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetpicture'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetpicturecolor'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetpicturecolordisabled'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetpicturecolorselected'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetpictureright'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetpicturerightcolor'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetpicturerightcolordisabled'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetpicturerightcolorselected'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetselectcolor'), Array, Nothing), BinaryExpression(Control, Keyword('tvsettext'), Array, Nothing), BinaryExpression(Control, Keyword('tvsettooltip'), Array, Nothing), BinaryExpression(Control, Keyword('tvsetvalue'), Array, Nothing), BinaryExpression(Control, Keyword('tvsort'), Array, Nothing), BinaryExpression(Control, Keyword('tvsortbyvalue'), Array, Nothing), BinaryExpression(Control, Keyword('tvtext'), Array, String), BinaryExpression(Control, Keyword('tvtooltip'), Array, String), BinaryExpression(Control, Keyword('tvvalue'), Array, Number), BinaryExpression(Object, Keyword('unassignitem'), String, Nothing), BinaryExpression(Group, Keyword('unitsbelowheight'), Number, Array), BinaryExpression(Array, Keyword('unitsbelowheight'), Number, Array), BinaryExpression(Object, Keyword('unlinkitem'), String, Nothing), BinaryExpression(TeamMember, Keyword('unregistertask'), String, Boolean), BinaryExpression(Control, Keyword('updatedrawicon'), Array, Nothing), BinaryExpression(Control, Keyword('updatemenuitem'), Array, Nothing), BinaryExpression(Object, Keyword('useaudiotimeformoves'), Boolean, Nothing), BinaryExpression(Array, Keyword('vectoradd'), Array, Array), BinaryExpression(Array, Keyword('vectorcos'), Array, Number), BinaryExpression(Array, Keyword('vectorcrossproduct'), Array, Array), BinaryExpression(Array, Keyword('vectordiff'), Array, Array), BinaryExpression(Array, Keyword('vectordistance'), Array, Number), BinaryExpression(Array, Keyword('vectordistancesqr'), Array, Number), BinaryExpression(Array, Keyword('vectordotproduct'), Array, Number), BinaryExpression(Array, Keyword('vectorfromto'), Array, Array), BinaryExpression(Object, Keyword('vectormodeltoworld'), Array, Array), BinaryExpression(Object, Keyword('vectormodeltoworldvisual'), Array, Array), BinaryExpression(Array, Keyword('vectormultiply'), Number, Array), BinaryExpression(Object, Keyword('vectorworldtomodel'), Array, Array), BinaryExpression(Object, 
Keyword('vectorworldtomodelvisual'), Array, Array), BinaryExpression(Object, Keyword('vehiclechat'), String, Nothing), BinaryExpression(Object, Keyword('vehicleradio'), String, Nothing), BinaryExpression(Array, Keyword('waypointattachobject'), Object, Nothing), BinaryExpression(Array, Keyword('waypointattachobject'), Number, Nothing), BinaryExpression(Array, Keyword('waypointattachvehicle'), Object, Nothing), BinaryExpression(Object, Keyword('weaponaccessories'), String, Array), BinaryExpression(Object, Keyword('weaponaccessoriescargo'), Array, Array), BinaryExpression(Object, Keyword('weapondirection'), String, Array), BinaryExpression(Object, Keyword('weaponsturret'), Array, Array), BinaryExpression(Object, Keyword('worldtomodel'), Array, Array), BinaryExpression(Object, Keyword('worldtomodelvisual'), Array, Array), NullExpression(Keyword('acctime'), Number), NullExpression(Keyword('activatedaddons'), Array), NullExpression(Keyword('agents'), Array), NullExpression(Keyword('airdensitycurvertd'), Array), NullExpression(Keyword('all3denentities'), Array), NullExpression(Keyword('allairports'), Array), NullExpression(Keyword('allcurators'), Array), NullExpression(Keyword('allcutlayers'), Array), NullExpression(Keyword('alldead'), Array), NullExpression(Keyword('alldeadmen'), Array), NullExpression(Keyword('alldisplays'), Array), NullExpression(Keyword('allgroups'), Array), NullExpression(Keyword('allmapmarkers'), Array), NullExpression(Keyword('allmines'), Array), NullExpression(Keyword('allplayers'), Array), NullExpression(Keyword('allsites'), Array), NullExpression(Keyword('allunits'), Array), NullExpression(Keyword('allunitsuav'), Array), NullExpression(Keyword('armorypoints'), Number), NullExpression(Keyword('benchmark'), Number), NullExpression(Keyword('blufor'), Side), NullExpression(Keyword('briefingname'), String), NullExpression(Keyword('buldozer_isenabledroaddiag'), Boolean), NullExpression(Keyword('buldozer_reloadopermap'), Nothing), NullExpression(Keyword('cadetmode'), Boolean), NullExpression(Keyword('cameraon'), Object), NullExpression(Keyword('cameraview'), String), NullExpression(Keyword('campaignconfigfile'), Config), NullExpression(Keyword('cansuspend'), Boolean), NullExpression(Keyword('cheatsenabled'), Boolean), NullExpression(Keyword('civilian'), Side), NullExpression(Keyword('clearforcesrtd'), Nothing), NullExpression(Keyword('clearitempool'), Nothing), NullExpression(Keyword('clearmagazinepool'), Nothing), NullExpression(Keyword('clearradio'), Nothing), NullExpression(Keyword('clearweaponpool'), Nothing), NullExpression(Keyword('clientowner'), Number), NullExpression(Keyword('commandingmenu'), String), NullExpression(Keyword('configfile'), Config), NullExpression(Keyword('confignull'), Config), NullExpression(Keyword('controlnull'), Control), NullExpression(Keyword('copyfromclipboard'), String), NullExpression(Keyword('curatorcamera'), Object), NullExpression(Keyword('curatormouseover'), Object), NullExpression(Keyword('curatorselected'), Array), NullExpression(Keyword('current3denoperation'), String), NullExpression(Keyword('currentchannel'), Number), NullExpression(Keyword('currentnamespace'), Namespace, action=lambda i: 'missionNamespace'), NullExpression(Keyword('cursorobject'), Object), NullExpression(Keyword('cursortarget'), Object), NullExpression(Keyword('date'), Array), NullExpression(Keyword('daytime'), Number), NullExpression(Keyword('diag_activemissionfsms'), Array), NullExpression(Keyword('diag_activescripts'), Array), 
NullExpression(Keyword('diag_activesqfscripts'), Array), NullExpression(Keyword('diag_activesqsscripts'), Array), NullExpression(Keyword('diag_fps'), Number), NullExpression(Keyword('diag_fpsmin'), Number), NullExpression(Keyword('diag_frameno'), Number), NullExpression(Keyword('diag_ticktime'), Number), NullExpression(Keyword('dialog'), Boolean), NullExpression(Keyword('didjip'), Boolean), NullExpression(Keyword('difficulty'), Number), NullExpression(Keyword('difficultyenabledrtd'), Boolean), NullExpression(Keyword('disabledebriefingstats'), Nothing), NullExpression(Keyword('disableserialization'), Nothing), NullExpression(Keyword('displaynull'), Display), NullExpression(Keyword('distributionregion'), Number), NullExpression(Keyword('dynamicsimulationsystemenabled'), Boolean), NullExpression(Keyword('east'), Side), NullExpression(Keyword('enableenddialog'), Nothing), NullExpression(Keyword('endl'), String), NullExpression(Keyword('endloadingscreen'), Nothing), NullExpression(Keyword('environmentenabled'), Array), NullExpression(Keyword('estimatedendservertime'), Number), NullExpression(Keyword('exit'), Nothing), NullExpression(Keyword('false'), Boolean), NullExpression(Keyword('finishmissioninit'), Nothing), NullExpression(Keyword('fog'), Number), NullExpression(Keyword('fogforecast'), Number), NullExpression(Keyword('fogparams'), Array), NullExpression(Keyword('forcedmap'), Array), NullExpression(Keyword('forceend'), Nothing), NullExpression(Keyword('forceweatherchange'), Nothing), NullExpression(Keyword('freelook'), Boolean), NullExpression(Keyword('get3dencamera'), Object), NullExpression(Keyword('get3deniconsvisible'), Array), NullExpression(Keyword('get3denlinesvisible'), Array), NullExpression(Keyword('get3denmouseover'), Array), NullExpression(Keyword('getartillerycomputersettings'), Array), NullExpression(Keyword('getcalculateplayervisibilitybyfriendly'), Boolean), NullExpression(Keyword('getclientstate'), String), NullExpression(Keyword('getclientstatenumber'), Number), NullExpression(Keyword('getcursorobjectparams'), Array), NullExpression(Keyword('getdlcassetsusage'), Array), NullExpression(Keyword('getelevationoffset'), Number), NullExpression(Keyword('getmissiondlcs'), Array), NullExpression(Keyword('getmissionlayers'), Array), NullExpression(Keyword('getmouseposition'), Array), NullExpression(Keyword('getmusicplayedtime'), Number), NullExpression(Keyword('getobjectviewdistance'), Array), NullExpression(Keyword('getremotesensorsdisabled'), Boolean), NullExpression(Keyword('getresolution'), Array), NullExpression(Keyword('getshadowdistance'), Number), NullExpression(Keyword('getterraingrid'), Number), NullExpression(Keyword('gettotaldlcusagetime'), Number), NullExpression(Keyword('groupiconselectable'), Boolean), NullExpression(Keyword('groupiconsvisible'), Array), NullExpression(Keyword('grpnull'), Group), NullExpression(Keyword('gusts'), Number), NullExpression(Keyword('halt'), Nothing), NullExpression(Keyword('hasinterface'), Boolean), NullExpression(Keyword('hcshownbar'), Boolean), NullExpression(Keyword('hudmovementlevels'), Array), NullExpression(Keyword('humidity'), Number), NullExpression(Keyword('independent'), Side), NullExpression(Keyword('initambientlife'), Nothing), NullExpression(Keyword('is3den'), Boolean), NullExpression(Keyword('is3denmultiplayer'), Boolean), NullExpression(Keyword('isautotest'), Boolean), NullExpression(Keyword('isdedicated'), Boolean), NullExpression(Keyword('isfilepatchingenabled'), Boolean), 
NullExpression(Keyword('isinstructorfigureenabled'), Boolean), NullExpression(Keyword('ismultiplayer'), Boolean), NullExpression(Keyword('ismultiplayersolo'), Boolean), NullExpression(Keyword('ispipenabled'), Boolean), NullExpression(Keyword('isremoteexecuted'), Boolean), NullExpression(Keyword('isremoteexecutedjip'), Boolean), NullExpression(Keyword('isserver'), Boolean), NullExpression(Keyword('issteammission'), Boolean), NullExpression(Keyword('isstreamfriendlyuienabled'), Boolean), NullExpression(Keyword('isstressdamageenabled'), Boolean), NullExpression(Keyword('istuthintsenabled'), Boolean), NullExpression(Keyword('isuicontext'), Boolean), NullExpression(Keyword('language'), String), NullExpression(Keyword('librarycredits'), Array), NullExpression(Keyword('librarydisclaimers'), Array), NullExpression(Keyword('lightnings'), Number), NullExpression(Keyword('linebreak'), String), NullExpression(Keyword('loadgame'), Nothing), NullExpression(Keyword('locationnull'), Location), NullExpression(Keyword('logentities'), Nothing), NullExpression(Keyword('mapanimclear'), Nothing), NullExpression(Keyword('mapanimcommit'), Nothing), NullExpression(Keyword('mapanimdone'), Boolean), NullExpression(Keyword('markasfinishedonsteam'), Boolean), NullExpression(Keyword('missionconfigfile'), Config), NullExpression(Keyword('missiondifficulty'), Number), NullExpression(Keyword('missionname'), String), NullExpression(Keyword('missionnamespace'), Namespace, action=lambda i: 'missionNamespace'), NullExpression(Keyword('missionstart'), Array), NullExpression(Keyword('missionversion'), Number), NullExpression(Keyword('moonintensity'), Number), NullExpression(Keyword('musicvolume'), Number), NullExpression(Keyword('netobjnull'), NetObject), NullExpression(Keyword('nextweatherchange'), Number), NullExpression(Keyword('nil'), Anything), NullExpression(Keyword('objnull'), Object), NullExpression(Keyword('opencuratorinterface'), Nothing), NullExpression(Keyword('opfor'), Side), NullExpression(Keyword('overcast'), Number), NullExpression(Keyword('overcastforecast'), Number), NullExpression(Keyword('parsingnamespace'), Namespace, action=lambda i: 'missionNamespace'), NullExpression(Keyword('particlesquality'), Number), NullExpression(Keyword('pi'), Number), NullExpression(Keyword('pixelgrid'), Number), NullExpression(Keyword('pixelgridbase'), Number), NullExpression(Keyword('pixelgridnouiscale'), Number), NullExpression(Keyword('pixelh'), Number), NullExpression(Keyword('pixelw'), Number), NullExpression(Keyword('playableunits'), Array), NullExpression(Keyword('player'), Object), NullExpression(Keyword('playerrespawntime'), Number), NullExpression(Keyword('playerside'), Side), NullExpression(Keyword('productversion'), Array), NullExpression(Keyword('profilename'), String), NullExpression(Keyword('profilenamespace'), Namespace, action=lambda i: 'missionNamespace'), NullExpression(Keyword('profilenamesteam'), String), NullExpression(Keyword('radiovolume'), Number), NullExpression(Keyword('rain'), Number), NullExpression(Keyword('rainbow'), Number), NullExpression(Keyword('remoteexecutedowner'), Number), NullExpression(Keyword('resetcamshake'), Nothing), NullExpression(Keyword('resistance'), Side), NullExpression(Keyword('reversedmousey'), Boolean), NullExpression(Keyword('runinitscript'), Nothing), NullExpression(Keyword('safezoneh'), Number), NullExpression(Keyword('safezonew'), Number), NullExpression(Keyword('safezonewabs'), Number), NullExpression(Keyword('safezonex'), Number), NullExpression(Keyword('safezonexabs'), 
Number), NullExpression(Keyword('safezoney'), Number), NullExpression(Keyword('savegame'), Nothing), NullExpression(Keyword('savejoysticks'), Nothing), NullExpression(Keyword('saveprofilenamespace'), Nothing), NullExpression(Keyword('savingenabled'), Boolean), NullExpression(Keyword('scriptnull'), Script), NullExpression(Keyword('selectnoplayer'), Nothing), NullExpression(Keyword('servername'), String), NullExpression(Keyword('servertime'), Number), NullExpression(Keyword('shownartillerycomputer'), Boolean), NullExpression(Keyword('shownchat'), Boolean), NullExpression(Keyword('showncompass'), Boolean), NullExpression(Keyword('showncuratorcompass'), Boolean), NullExpression(Keyword('showngps'), Boolean), NullExpression(Keyword('shownhud'), Array), NullExpression(Keyword('shownmap'), Boolean), NullExpression(Keyword('shownpad'), Boolean), NullExpression(Keyword('shownradio'), Boolean), NullExpression(Keyword('shownscoretable'), Number), NullExpression(Keyword('shownuavfeed'), Boolean), NullExpression(Keyword('shownwarrant'), Boolean), NullExpression(Keyword('shownwatch'), Boolean), NullExpression(Keyword('sideambientlife'), Side), NullExpression(Keyword('sideempty'), Side), NullExpression(Keyword('sideenemy'), Side), NullExpression(Keyword('sidefriendly'), Side), NullExpression(Keyword('sidelogic'), Side), NullExpression(Keyword('sideunknown'), Side), NullExpression(Keyword('simulweathersync'), Nothing), NullExpression(Keyword('slingloadassistantshown'), Boolean), NullExpression(Keyword('soundvolume'), Number), NullExpression(Keyword('sunormoon'), Number), NullExpression(Keyword('switchableunits'), Array), NullExpression(Keyword('systemofunits'), Number), NullExpression(Keyword('tasknull'), Task), NullExpression(Keyword('teammembernull'), TeamMember), NullExpression(Keyword('teams'), Array), NullExpression(Keyword('teamswitch'), Nothing), NullExpression(Keyword('teamswitchenabled'), Boolean), NullExpression(Keyword('time'), Number), NullExpression(Keyword('timemultiplier'), Number), NullExpression(Keyword('true'), Boolean), NullExpression(Keyword('uinamespace'), Namespace, action=lambda i: 'missionNamespace'), NullExpression(Keyword('userinputdisabled'), Boolean), NullExpression(Keyword('vehicles'), Array), NullExpression(Keyword('viewdistance'), Number), NullExpression(Keyword('visiblecompass'), Boolean), NullExpression(Keyword('visiblegps'), Boolean), NullExpression(Keyword('visiblemap'), Boolean), NullExpression(Keyword('visiblescoretable'), Boolean), NullExpression(Keyword('visiblewatch'), Boolean), NullExpression(Keyword('waves'), Number), NullExpression(Keyword('west'), Side), NullExpression(Keyword('wind'), Array), NullExpression(Keyword('winddir'), Number), NullExpression(Keyword('windrtd'), Array), NullExpression(Keyword('windstr'), Number), NullExpression(Keyword('worldname'), String), NullExpression(Keyword('worldsize'), Number) ]
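# A minimal, self-contained sketch (not part of this table) of how a typed
# signature list like the one above can drive return-type inference for an
# SQF analyzer. The tiny stand-in class and table below are illustrative
# assumptions, not this library's actual API.
class TinySignature:
    def __init__(self, lhs, keyword, rhs, returns):
        self.lhs, self.keyword, self.rhs, self.returns = lhs, keyword, rhs, returns

TINY_TABLE = [
    TinySignature('Object', 'setvelocity', 'Array', 'Nothing'),
    TinySignature('Object', 'skill', 'String', 'Number'),
]

def infer_return_type(lhs, keyword, rhs):
    # Linear scan for a signature matching the operand types; a real
    # implementation would index the table by keyword first.
    for sig in TINY_TABLE:
        if sig.keyword == keyword.lower() and sig.lhs == lhs and sig.rhs == rhs:
            return sig.returns
    return 'Anything'  # unknown combination

assert infer_return_type('Object', 'skill', 'String') == 'Number'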
blog/tests.py
regulusweb/wagtail_blog
274
141810
import doctest import json from django.core.management import call_command from django.contrib.auth.models import User from django.test import TestCase from django_comments_xtd.models import XtdComment from wagtail.core.models import Page import responses from .models import ( BlogPage, BlogTag, BlogPageTag, BlogIndexPage, BlogCategory, BlogCategoryBlogPage, ) from .management.commands.wordpress_to_wagtail import Command from . import wp_xml_parser from .wordpress_import import WordpressImport def load_tests(loader, tests, ignore): tests.addTests(doctest.DocTestSuite(wp_xml_parser)) return tests from django.urls import reverse from django.contrib.auth.models import Group class BlogTests(TestCase): def setUp(self): home = Page.objects.get(slug="home") self.user = User.objects.create_user("test", "<EMAIL>", "pass") self.xml_path = "example_export.xml" self.blog_index = home.add_child( instance=BlogIndexPage( title="Blog Index", slug="blog", search_description="x", owner=self.user ) ) def test_index(self): url = self.blog_index.url res = self.client.get(url) self.assertEqual(res.status_code, 200) blog_page = self.blog_index.add_child( instance=BlogPage( title="Blog Page", slug="blog_page1", search_description="x", owner=self.user, ) ) url = blog_page.url res = self.client.get(url) self.assertContains(res, "Blog Page") def test_author(self): # make super to access admin self.user.is_superuser = True self.user.save() self.assertTrue(self.client.login(username="test", password="<PASSWORD>")) # make an is_staff admin staff_user = User.objects.create_user("mr.staff", "<EMAIL>", "pass") staff_user.is_staff = True staff_user.save() # make some groups bloggers = "Bloggers" Group.objects.create(name=bloggers) others = "Others" Group.objects.create(name=others) # make a non-admin Blogger author author_user = User.objects.create_user("mr.author", "<EMAIL>", "<PASSWORD>") author_user.groups.add(Group.objects.get(name=bloggers)) author_user.save() # make a blog page blog_page = self.blog_index.add_child( instance=BlogPage( title="Blog Page", slug="blog_page1", search_description="x", owner=self.user, ) ) with self.settings( BLOG_LIMIT_AUTHOR_CHOICES_GROUP=None, BLOG_LIMIT_AUTHOR_CHOICES_ADMIN=False ): response = self.client.get( reverse("wagtailadmin_pages:edit", args=(blog_page.id,)), follow=True ) self.assertEqual(response.status_code, 200) self.assertContains(response, "mr.staff") self.assertNotContains(response, "mr.author") with self.settings( BLOG_LIMIT_AUTHOR_CHOICES_GROUP=bloggers, BLOG_LIMIT_AUTHOR_CHOICES_ADMIN=False, ): response = self.client.get( reverse("wagtailadmin_pages:edit", args=(blog_page.id,)), follow=True ) self.assertEqual(response.status_code, 200) self.assertNotContains(response, "mr.staff") self.assertContains(response, "mr.author") with self.settings( BLOG_LIMIT_AUTHOR_CHOICES_GROUP=bloggers, BLOG_LIMIT_AUTHOR_CHOICES_ADMIN=True, ): response = self.client.get( reverse("wagtailadmin_pages:edit", args=(blog_page.id,)), follow=True ) self.assertEqual(response.status_code, 200) self.assertContains(response, "mr.staff") self.assertContains(response, "mr.author") with self.settings( BLOG_LIMIT_AUTHOR_CHOICES_GROUP=[bloggers, others], BLOG_LIMIT_AUTHOR_CHOICES_ADMIN=False, ): response = self.client.get( reverse("wagtailadmin_pages:edit", args=(blog_page.id,)), follow=True ) self.assertEqual(response.status_code, 200) self.assertNotContains(response, "mr.staff") self.assertContains(response, "mr.author") with self.settings( BLOG_LIMIT_AUTHOR_CHOICES_GROUP=[bloggers, others], 
BLOG_LIMIT_AUTHOR_CHOICES_ADMIN=True, ): response = self.client.get( reverse("wagtailadmin_pages:edit", args=(blog_page.id,)), follow=True ) self.assertEqual(response.status_code, 200) self.assertContains(response, "mr.staff") self.assertContains(response, "mr.author") def test_latest_entries_feed(self): self.blog_index.add_child( instance=BlogPage( title="Blog Page", slug="blog_page1", search_description="x", owner=self.user, ) ) res = self.client.get( "{0}{1}/rss/".format(self.blog_index.url, self.blog_index.slug) ) self.assertContains(res, "Blog Page") self.assertContains(res, "<rss") self.assertContains(res, 'version="2.0"') self.assertContains(res, "</rss>") def test_latest_entries_feed_atom(self): self.blog_index.add_child( instance=BlogPage( title="Blog Page", slug="blog_page1", search_description="x", owner=self.user, ) ) res = self.client.get( "{0}{1}/atom/".format(self.blog_index.url, self.blog_index.slug) ) self.assertContains(res, "Blog Page") self.assertContains(res, "<feed") self.assertContains(res, 'xmlns="http://' 'www.w3.org/2005/Atom"') self.assertContains(res, "</feed>") def test_import_url(self): """ Tests migrate_wordpress command - the command should do the following: 1. create BlogPage objects from a given BlogIndex 2. create category and tag objects as BlogCategory, BlogTag, BlogPageBlogCategory and BlogPageTag objects The test imports from test-data.json which includes one wordpress blog post with 11 tags and 2 categories """ command = Command() command.username = None command.password = <PASSWORD> command.should_import_comments = True command.url = "just_testing" with open("test-data.json") as test_json: posts = json.load(test_json) command.create_blog_pages(posts, self.blog_index) self.assertEquals(Page.objects.all().count(), 4) self.assertEquals(BlogPage.objects.all().count(), 1) page = BlogPage.objects.get() self.assertEqual(page.title, "My wordpress title") self.assertInHTML("<strong>Bold here</strong>", page.body) self.assertEqual(page.categories.count(), 2) self.assertEqual(page.tags.count(), 11) self.assertEqual(page.owner.id, 2) self.assertEqual(BlogCategory.objects.all().count(), 2) self.assertEqual(BlogTag.objects.all().count(), 11) self.assertEqual(BlogCategoryBlogPage.objects.all().count(), 2) self.assertEqual(BlogPageTag.objects.all().count(), 11) parent_category = BlogCategory.objects.get(slug="writing-wisdom") child_category = BlogCategory.objects.get(slug="swoon-reads") self.assertTrue(child_category.parent is not None) self.assertEqual(child_category.parent, parent_category) self.assertEqual(child_category.slug, "swoon-reads") self.assertEqual(parent_category.slug, "writing-wisdom") comments = XtdComment.objects.all() self.assertEqual(comments.count(), 2) parent_comment = XtdComment.objects.get(level=0) child_comment = XtdComment.objects.get(level=1) self.assertEqual(parent_comment.id, child_comment.parent_id) def test_import_xml(self): """ Tests migrate_wordpress command - the command should do the following: 1. create BlogPage objects from a given BlogIndex 2. 
create category and tag objects as BlogCategory, BlogTag, BlogPageBlogCategory and BlogPageTag objects The test imports from example_export.xml which includes a wordpress blog """ command = Command() command.handle(xml=self.xml_path, blog_index="blog") self.assertEquals(Page.objects.all().count(), 6) self.assertEquals(BlogPage.objects.all().count(), 3) page = BlogPage.objects.filter( slug="10-things-super-successful-people-do-during-lunch" ).get() self.assertEqual( page.title, "10 Things Super Successful People Do During Lunch" ) self.assertEqual( page.body, "<p>Before you spend another lunch scarfing down food at your desk with your eyes glued to your computer screen, here's some food for thought.</p>", ) self.assertEqual(page.categories.count(), 2) self.assertEqual(page.tags.count(), 1) self.assertEqual(page.owner.id, 2) self.assertEqual(BlogCategory.objects.all().count(), 2) self.assertEqual(BlogTag.objects.all().count(), 1) self.assertEqual(BlogCategoryBlogPage.objects.all().count(), 2) self.assertEqual(BlogPageTag.objects.all().count(), 1) parent_category = BlogCategory.objects.get(slug="marketing-2") child_category = BlogCategory.objects.get(slug="cheat-sheets") self.assertTrue(child_category.parent is not None) self.assertEqual(child_category.parent, parent_category) self.assertEqual(child_category.slug, "cheat-sheets") self.assertEqual(parent_category.slug, "marketing-2") # Assert that <p> tags were added to the post that didn't contain them page = BlogPage.objects.filter( slug="asa-releases-2013-economic-analysis-of-staffing-industry-trends" ).get() self.assertEqual( page.body, '<p>The American Staffing Association has released its 2013 economic analysis,"Navigating the 1% Economy." Written by ASA chief operating officer <NAME>, CSP, the report takes an in-depth look at recent staffing employment trends and what these suggest about the current economic environment and future labor market conditions.</p>', ) def test_import_xml_comments(self): """ Comment data in XML should be inserted and threaded correctly """ call_command( "wordpress_to_wagtail", "blog", xml=self.xml_path, import_comments=True ) comments = XtdComment.objects.all() self.assertEqual(comments.count(), 2) parent_comment = XtdComment.objects.get(level=0) child_comment = XtdComment.objects.get(level=1) self.assertEqual(parent_comment.id, child_comment.parent_id) def test_unique_category_slug(self): """ Ensure unique slugs are generated without erroring """ BlogCategory.objects.create(name="one") BlogCategory.objects.create(name="one#") BlogCategory.objects.create(name="one!") class BlogAPIImportTests(TestCase): @responses.activate def test_import(self): url = "https://public-api.wordpress.com/wp/v2/sites/davidmburke.com" with open("test_v2_resp.json") as json_file: data = json.load(json_file) responses.add( responses.GET, url + "/posts?per_page=50&_embed=1", json=data, status=404, headers={"X-WP-TotalPages": "1"}, ) home = Page.objects.get(slug="home") self.user = User.objects.create_user("test", "<EMAIL>", "<PASSWORD>") blog_index = home.add_child( instance=BlogIndexPage( title="Blog Index", slug="blog", search_description="x", owner=self.user ) ) importer = WordpressImport(url, create_users=True) importer.convert_images = True importer.get_posts() posts = BlogPage.objects.all() self.assertEqual(len(posts), 1) self.assertEqual(posts[0].blog_categories.all().count(), 2) self.assertEqual(posts[0].tags.all().count(), 2)
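# Hedged usage sketch (not part of the test suite): the XML import exercised
# by test_import_xml can also be driven from code with Django's call_command,
# exactly as test_import_xml_comments does; the export path and index slug
# below are illustrative.
from django.core.management import call_command

call_command(
    "wordpress_to_wagtail",   # the management command under test above
    "blog",                   # slug of an existing BlogIndexPage
    xml="example_export.xml",
    import_comments=True,
)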
tests/test_model_to_dict.py
boba-and-beer/vectorhub
385
141811
<filename>tests/test_model_to_dict.py<gh_stars>100-1000 from vectorhub.auto_encoder import * def test_get_model_definitions(): assert isinstance(get_model_definitions(json_fn=None), list) assert isinstance(get_model_definitions(json_fn=None)[0], dict) assert len(get_model_definitions(json_fn=None)) > 0
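# Hedged usage sketch: list the bundled auto-encoder definitions; the exact
# keys inside each definition dict depend on the vectorhub release.
defs = get_model_definitions(json_fn=None)
print(len(defs), "model definitions loaded")
print(sorted(defs[0].keys()))  # field names vary by version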
tests/ut/python/dataset/test_exceptions.py
PowerOlive/mindspore
3,200
141814
<reponame>PowerOlive/mindspore<gh_stars>1000+ # Copyright 2019 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import pytest import mindspore.dataset as ds import mindspore.dataset.vision.c_transforms as vision from mindspore import log as logger DATA_DIR = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"] SCHEMA_DIR = "../data/dataset/test_tf_file_3_images/datasetSchema.json" def test_exception_01(): """ Test single exception with invalid input """ logger.info("test_exception_01") data = ds.TFRecordDataset(DATA_DIR, columns_list=["image"]) with pytest.raises(TypeError) as info: data.map(operations=vision.Resize(100, 100), input_columns=["image"]) assert "Argument interpolation with value 100 is not of type [<enum 'Inter'>]" in str(info.value) def test_exception_02(): """ Test exceptions with invalid input, and test valid input """ logger.info("test_exception_02") num_samples = -1 with pytest.raises(ValueError) as info: ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], num_samples=num_samples) assert 'num_samples exceeds the boundary between 0 and 9223372036854775807(INT64_MAX)' in str(info.value) num_samples = 1 data = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], num_samples=num_samples) data = data.map(operations=vision.Decode(), input_columns=["image"]) data = data.map(operations=vision.Resize((100, 100)), input_columns=["image"]) # Confirm 1 sample in dataset assert sum([1 for _ in data]) == 1 num_iters = 0 for _ in data.create_dict_iterator(num_epochs=1): num_iters += 1 assert num_iters == 1 if __name__ == '__main__': test_exception_01() test_exception_02()
comtypes/test/find_memleak.py
vasily-v-ryabov/comtypes
212
141830
<reponame>vasily-v-ryabov/comtypes import unittest, gc from ctypes import * from ctypes.wintypes import * ################################################################ class PROCESS_MEMORY_COUNTERS(Structure): _fields_ = [("cb", DWORD), ("PageFaultCount", DWORD), ("PeakWorkingSetSize", c_size_t), ("WorkingSetSize", c_size_t), ("QuotaPeakPagedPoolUsage", c_size_t), ("QuotaPagedPoolUsage", c_size_t), ("QuotaPeakNonPagedPoolUsage", c_size_t), ("QuotaNonPagedPoolUsage", c_size_t), ("PagefileUsage", c_size_t), ("PeakPagefileUsage", c_size_t)] def __init__(self): self.cb = sizeof(self) def dump(self): for n, _ in self._fields_[2:]: print n, getattr(self, n)/1e6 try: windll.psapi.GetProcessMemoryInfo.argtypes = (HANDLE, POINTER(PROCESS_MEMORY_COUNTERS), DWORD) except WindowsError: # cannot search for memory leaks on Windows CE def find_memleak(func, loops=None): return 0 else: def wss(): # Return the working set size (memory used by process) pmi = PROCESS_MEMORY_COUNTERS() if not windll.psapi.GetProcessMemoryInfo(-1, byref(pmi), sizeof(pmi)): raise WinError() return pmi.WorkingSetSize LOOPS = 10, 1000 def find_memleak(func, loops=LOOPS): # call 'func' several times, so that memory consumption # stabilizes: for j in xrange(loops[0]): for k in xrange(loops[1]): func() gc.collect(); gc.collect(); gc.collect() bytes = wss() # call 'func' several more times, recording the difference in # memory consumption before and after the calls, and return the # memory consumption difference. for j in xrange(loops[0]): for k in xrange(loops[1]): func() gc.collect(); gc.collect(); gc.collect() # return the increase in process size result = wss() - bytes # Sometimes the process size did decrease, we do not report leaks # in this case: return max(result, 0)
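# Hedged usage sketch (Python 2, like the module above; Windows-only because
# it relies on the psapi path): 'churn' is a hypothetical callable under
# test, and a persistently positive result suggests it leaks memory.
def churn():
    d = {}
    for i in xrange(100):
        d[i] = str(i)

leaked = find_memleak(churn)
print "leaked bytes after batches:", leaked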
mayan/apps/checkouts/tests/test_links.py
bonitobonita24/Mayan-EDMS
343
141845
<filename>mayan/apps/checkouts/tests/test_links.py from mayan.apps.documents.permissions import permission_document_file_new from mayan.apps.documents.tests.base import GenericDocumentViewTestCase from mayan.apps.sources.links import link_document_file_upload from mayan.apps.sources.tests.mixins.base_mixins import SourceTestMixin from ..links import link_check_out_document, link_check_out_info from ..permissions import ( permission_document_check_out, permission_document_check_out_detail_view ) from .mixins import DocumentCheckoutTestMixin class CheckoutLinksTestCase( DocumentCheckoutTestMixin, GenericDocumentViewTestCase ): auto_upload_test_document = False def setUp(self): super().setUp() self._create_test_document_stub() def _resolve_document_check_out_link(self): self.add_test_view(test_object=self.test_document) context = self.get_test_view() context['user'] = self._test_case_user return link_check_out_document.resolve(context=context) def _resolve_document_check_out_info_link(self): self.add_test_view(test_object=self.test_document) context = self.get_test_view() context['user'] = self._test_case_user return link_check_out_info.resolve(context=context) def test_document_check_out_link_no_permission(self): resolved_link = self._resolve_document_check_out_link() self.assertEqual(resolved_link, None) def test_document_check_out_link_with_access(self): self.grant_access( obj=self.test_document, permission=permission_document_check_out ) resolved_link = self._resolve_document_check_out_link() self.assertNotEqual(resolved_link, None) def test_document_check_out_info_link_no_permission(self): resolved_link = self._resolve_document_check_out_info_link() self.assertEqual(resolved_link, None) def test_document_check_out_info_link_with_access(self): self.grant_access( obj=self.test_document, permission=permission_document_check_out_detail_view ) resolved_link = self._resolve_document_check_out_info_link() self.assertNotEqual(resolved_link, None) class DocumentFileListViewTestCase( DocumentCheckoutTestMixin, SourceTestMixin, GenericDocumentViewTestCase ): auto_upload_test_document = False def setUp(self): super().setUp() self._create_test_document_stub() def _get_document_new_file_link(self): self.grant_access( obj=self.test_document, permission=permission_document_file_new ) self.grant_access( obj=self.test_source, permission=permission_document_file_new ) self.add_test_view(test_object=self.test_document) context = self.get_test_view() return link_document_file_upload.resolve(context=context) def test_document_file_new_not_blocked(self): resolved_link = self._get_document_new_file_link() self.assertNotEqual(resolved_link, None) def test_document_file_new_blocked_different_user(self): self._silence_logger(name='mayan.apps.sources.links') self._create_test_user() self._check_out_test_document(user=self.test_user) resolved_link = self._get_document_new_file_link() self.assertEqual(resolved_link, None) def test_document_file_new_blocked_same_user(self): self._check_out_test_document() resolved_link = self._get_document_new_file_link() self.assertNotEqual(resolved_link, None)
lib/PyAMF-0.7.2/pyamf/tests/remoting/__init__.py
MiCHiLU/google_appengine_sdk
790
141857
# Copyright (c) The PyAMF Project. # See LICENSE.txt for details. """ Remoting tests. @since: 0.1.0 """
imperative/python/megengine/optimizer/optimizer.py
Olalaye/MegEngine
5,168
141869
# -*- coding: utf-8 -*- # MegEngine is Licensed under the Apache License, Version 2.0 (the "License") # # Copyright (c) 2014-2021 Megvii Inc. All rights reserved. # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. import copy from abc import ABCMeta, abstractmethod from collections.abc import Iterable from typing import Dict from typing import Iterable as Iter from typing import Union import numpy as np from ..core._imperative_rt.core2 import pop_scope, push_scope, set_option from ..core.tensor.utils import set_convert_inputs from ..tensor import Parameter, Tensor from ..utils.deprecation import deprecated class _RequiredParameter: def __repr__(self): return "<required parameter>" required = _RequiredParameter() class Optimizer(metaclass=ABCMeta): r"""Base class for all optimizers. Args: params: specifies what Tensors should be optimized. defaults: a dict of default parameters of Optimizer, like learning rate or momentum. """ def __init__( # pylint: disable=too-many-branches self, params: Union[Iter[Parameter], dict], defaults: dict, ): self._state = dict() self._defaults = defaults self._disable_type_convert = False if isinstance(params, (Parameter, dict)): params = [params] else: if not isinstance(params, Iterable): raise TypeError( "params argument given to the optimizer should be " "Parameter or dict, or Iterable of them" ) self.param_groups = [] # type: list param_groups = list(params) if len(param_groups) == 0: raise ValueError("optimizer got an empty parameter list") param_type = type(param_groups[0]) for param in param_groups: if not isinstance(param, param_type): raise TypeError( "types of params argument given to the optimizer should be the same" ) if not isinstance(param_groups[0], dict): param_groups = [{"params": param_groups}] for group in param_groups: self.add_param_group(group) for group in self.param_groups: self._create_state(group) def add_param_group(self, param_group: dict): r"""Add a param group to ``param_groups`` of the :class:`~megengine.optim.optimizer.Optimizer`. This can be useful when fine-tuning a pre-trained network, as frozen layers can be made trainable and added to the :class:`~megengine.optim.optimizer.Optimizer` as training progresses. Args: param_group: specifies what tensors should be optimized along with group.
""" assert isinstance(param_group, dict), "param group must be a dict" if isinstance(param_group["params"], Parameter): param_group["params"] = [param_group["params"]] else: param_group["params"] = list(param_group["params"]) for param in param_group["params"]: if not isinstance(param, Parameter): raise TypeError( "optimizer can only optimize Parameters, but one of the params is " + str(type(param)) ) param._reset(Tensor(param.numpy(), no_cache=True)) for name, default in self._defaults.items(): if default is required and name not in param_group: raise ValueError( "parameter group didn't specify a value of " "required optimization parameter " + name ) param_group.setdefault(name, default) param_set = set() for group in self.param_groups: param_set.update(set(map(id, group["params"]))) assert param_set.isdisjoint( set(map(id, param_group["params"])) ), "some parameters appear in more than one parameter group" self.param_groups.append(param_group) def _add_state(self, param, state_name, initializer=None): if initializer is None: initializer = np.zeros(param.shape, dtype=np.float32) state_dict = self._state.setdefault(param, {}) assert state_name not in state_dict state = Tensor(initializer, no_cache=True) state_dict[state_name] = state @abstractmethod def _create_state(self, param_group): pass @abstractmethod def _updates(self, param_group): pass def _get_params(self): params = [] for group in self.param_groups: for param in group["params"]: params.append(param) return params def step(self): r"""Performs a single optimization step.""" # set the globle state `_enable_convert_inputs` to `False` to disable # the `convert_inputs` for param updates set_option("record_computing_path", 0) if self._disable_type_convert: backup = set_convert_inputs(False) for group in self.param_groups: if isinstance(group["params"], set): raise TypeError( "optimized parameters need to be organized in ordered collections, " "but the ordering of parameters in sets will change between runs. " "Please use a list instead." ) push_scope("step") self._updates(group) pop_scope("step") if self._disable_type_convert: # restore the globle state `_enable_convert_inputs` set_convert_inputs(backup) set_option("record_computing_path", 1) return self @deprecated(version="1.0", reason="use clear_grad instead") def zero_grad(self): for param_group in self.param_groups: for param in param_group["params"]: if param.grad is not None: param.grad.reset_zero() def clear_grad(self): r"""Set the grad attribute to None for all parameters.""" for param_group in self.param_groups: push_scope("clear_grad") for param in param_group["params"]: param.grad = None pop_scope("clear_grad") def state_dict(self, keep_var=False) -> Dict: r"""Export the optimizer state. Return: optimizer state. Can be loaded by :meth:`load_state_dict`. """ param_groups = [] state = dict() param2id = dict() cur_id = 0 for group in self.param_groups: for param in group["params"]: if param not in param2id: param2id[param] = cur_id cur_id += 1 for param, st in self._state.items(): _st = copy.copy(st) if not keep_var: for k, v in st.items(): _st[k] = v.numpy() state[param2id[param]] = _st for group in self.param_groups: param_group = {k: v for k, v in group.items() if k != "params"} param_group["params"] = [param2id[param] for param in group["params"]] param_groups.append(param_group) return {"param_groups": param_groups, "state": state} def load_state_dict(self, state: dict): r"""Loads the optimizer state. Args: state: optimizer state. 
Should be an object returned from a call to :meth:`state_dict`. """ if len(self.param_groups) != len(state["param_groups"]): raise ValueError( "loaded state dict has a different number of parameter groups" ) for group_new, group_saved in zip(self.param_groups, state["param_groups"]): if len(group_new["params"]) != len(group_saved["params"]): raise ValueError( "loaded state dict contains a parameter group that " "doesn't match the size of optimizer's group" ) for param_new, param_saved in zip( group_new["params"], group_saved["params"] ): p = param_new self._state[p] = state["state"][param_saved].copy() for k, v in self._state[p].items(): if isinstance(v, Tensor): self._state[p][k] = v.detach() else: self._state[p][k] = Tensor(v) if set(group_new.keys()) != set(group_saved.keys()): raise ValueError( "loaded state dict contains a parameter group that " "doesn't match the keys of optimizer's group" ) for key in group_new.keys(): if key != "params": group_new[key] = group_saved[key] if len(self._state.keys()) != len(state["state"].keys()): raise ValueError( "loaded state dict contains a state that doesn't match " "the size of optimizer's state" ) def backward(self, loss): raise NotImplementedError("use autodiff.GradManager instead") def bcast_param(self): raise NotImplementedError("use distributed.bcast_list_ instead")
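# A minimal sketch of a concrete optimizer built on the base class above;
# this is an assumption-laden illustration, not this package's real SGD.
# Vanilla SGD keeps no per-parameter state, so _create_state is a no-op,
# and _updates applies param <- param - lr * grad, reusing the _reset
# pattern already seen in add_param_group.
class MiniSGD(Optimizer):
    def __init__(self, params, lr):
        super().__init__(params, defaults={"lr": lr})

    def _create_state(self, param_group):
        pass  # stateless update rule

    def _updates(self, param_group):
        lr = param_group["lr"]
        for param in param_group["params"]:
            if param.grad is None:
                continue
            param._reset(param - lr * param.grad)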
getpaid/post_forms.py
prog32/django-getpaid
220
141913
from django import forms class PaymentHiddenInputsPostForm(forms.Form): def __init__(self, fields, *args, **kwargs): super().__init__(*args, **kwargs) for key in fields: self.fields[key] = forms.CharField( initial=fields[key], widget=forms.HiddenInput )
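# Hedged usage sketch: the field names and values below are illustrative.
# Each dict entry becomes one <input type="hidden">, which suits an
# auto-submitting POST form that redirects the buyer to a payment gateway.
form = PaymentHiddenInputsPostForm(
    fields={"order_id": "1234", "amount": "9.99", "currency": "EUR"}
)
html = form.as_p()  # renders hidden inputs only, no visible widgets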
code/custom_dataloaders.py
aosokin/biogans
105
141919
<reponame>aosokin/biogans import os from torchvision.datasets.folder import default_loader, find_classes, make_dataset, IMG_EXTENSIONS import torchvision.transforms as transforms import torch.utils.data as data import numpy as np import torch import random from tqdm import tqdm from utils import parallel_process def read_image_for_pytorch(image_file_name): img = default_loader(image_file_name) # PIL image mode: 1, L, P, I, F, RGB, YCbCr, RGBA, CMYK if img.mode == 'YCbCr': nchannel = 3 else: nchannel = len(img.mode) # convert to numpy array img = np.array(img.getdata()).reshape(img.size[1], img.size[0], nchannel) # permute dimensions img = np.transpose(img, (2, 0, 1)).copy() return img def read_all_images(root, num_workers=4): classes, class_to_idx = find_classes(root) dataset = make_dataset(root, class_to_idx) if len(dataset) == 0: raise (RuntimeError("Found 0 images in subfolders of: " + root + "\n" + "Supported image extensions are: " + ",".join(IMG_EXTENSIONS))) num_images = len(dataset) paths = [dataset[i_image][0] for i_image in range(num_images)] print("Reading {0} images with {1} workers".format(num_images, num_workers)) if num_workers > 1: images = parallel_process(paths, read_image_for_pytorch, n_jobs=num_workers) else: images = [] for p in tqdm(paths): images.append(read_image_for_pytorch(p)) image_cache = {} for i, image in enumerate(images): path, target = dataset[i] image_cache[path] = image return image_cache class ImageFolderWithCache(data.Dataset): def __init__(self, data_path, image_cache, do_random_flips=False, normalization=transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))): classes, class_to_idx = find_classes(data_path) imgs = make_dataset(data_path, class_to_idx) if len(imgs) == 0: raise(RuntimeError("Found 0 images in subfolders of: " + data_path + "\n" "Supported image extensions are: " + ",".join(IMG_EXTENSIONS))) self.root = data_path self.imgs = imgs self.classes = classes self.class_to_idx = class_to_idx self.normalization = normalization self.do_random_flips = do_random_flips self.image_cache = image_cache def read_image_with_cache(self, image_file_name): if image_file_name not in self.image_cache: self.image_cache[image_file_name] = read_image_for_pytorch(image_file_name) return self.image_cache[image_file_name] def __getitem__(self, index): path, target = self.imgs[index] img = self.read_image_with_cache(path) # pytorch does not have reverse indexing, so I'm using numpy for that if self.do_random_flips: if random.random() < 0.5: img = img[:, ::-1, :] if random.random() < 0.5: img = img[:, :, ::-1] img = img.copy() # convert to torch tensor manually (torchvision.transforms.ToTensor is buggy) img = torch.from_numpy(img.astype(np.float32)) / 255.0 assert(img.size(0) == 3) if self.normalization is not None: img_norm = [] for i_c in range(img.size(0)): img_norm.append(self.normalization(img[i_c].unsqueeze(0))) img = torch.cat(img_norm, 0).contiguous() return img, target def __len__(self): return len(self.imgs) ################################## class CompositeImageFolder(data.Dataset): """ Like ImageFolder, but creates a multi channel image with n_classes channels. The red channel is always the first channel in the image.
""" def __init__(self, data_path, nn_dict, image_cache, do_random_flips=False, normalization=None): classes, class_to_idx = find_classes(data_path) imgs = make_dataset(data_path, class_to_idx) if len(imgs) == 0: raise (RuntimeError("Found 0 images in subfolders of: " + data_path + "\n" "Supported image extensions are: " + ",".join(IMG_EXTENSIONS))) self.data_path = data_path self.imgs = imgs self.classes = classes self.class_to_idx = class_to_idx self.normalization = normalization self.do_random_flips = do_random_flips self.nn_dict = nn_dict self.image_cache = image_cache def read_image_with_cache(self, image_file_name): if image_file_name not in self.image_cache: self.image_cache[image_file_name] = read_image_for_pytorch(image_file_name) return self.image_cache[image_file_name] def __getitem__(self, index): path, target = self.imgs[index] _img = self.read_image_with_cache(path) image_size = (_img.shape[1], _img.shape[2]) total_channels = len(self.classes) + 1 img = np.zeros((total_channels, image_size[0], image_size[1])) # We use the red to create a binary mask binary_mask = _img[0, :, :] > 0 # We move the green of the current image into the right channel index img[target + 1, :, :] = _img[1, :, :] * binary_mask img[0, :, :] = _img[0, :, :] * binary_mask # Load the channels into the image. for c in self.classes: c_idx = self.class_to_idx[c] if c_idx == target: continue file_name = os.path.basename(path) nn_file = np.random.choice(self.nn_dict[file_name][c]) _nn_img = self.read_image_with_cache(os.path.join(self.data_path, c, nn_file)) # Move the green of that image into the correct channel of the image. img[c_idx + 1, :, :] = _nn_img[1, :, :] * binary_mask # pytorch does not have reverse indexing, so I;m using numpy for that if self.do_random_flips: if random.random() < 0.5: img = img[:, ::-1, :] if random.random() < 0.5: img = img[:, :, ::-1] img = img.copy() # convert to torch tensor manually (torchvision.transforms.ToTensor is buggy) img = torch.from_numpy(img.astype(np.float32)) / 255.0 assert(img.size(0) == total_channels) if self.normalization is not None: img_norm = [] for i_c in range(img.size(0)): img_norm.append(self.normalization(img[i_c].unsqueeze(0))) img = torch.cat(img_norm, 0).contiguous() return img, target def __len__(self): return len(self.imgs)
HLTriggerOffline/Higgs/python/HiggsValidation_cff.py
ckamtsikis/cmssw
852
141935
import FWCore.ParameterSet.Config as cms from HLTriggerOffline.Higgs.hltHiggsValidator_cfi import * HiggsValidationSequence = cms.Sequence( hltHiggsValidator ) #HLTHiggsVal_FastSim = cms.Sequence( # recoHiggsValidationHLTFastSim_seq + # hltHiggsValidator # )
research/syntaxnet/examples/dragnn/tutorial_2.py
zcdzcdzcd/models
3,326
141939
<reponame>zcdzcdzcd/models """Second example: separate tagger and parser.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os.path import tensorflow as tf from google.protobuf import text_format from dragnn.protos import spec_pb2 from dragnn.python import graph_builder from dragnn.python import lexicon from dragnn.python import spec_builder from dragnn.python import visualization from syntaxnet import sentence_pb2 import dragnn.python.load_dragnn_cc_impl import syntaxnet.load_parser_ops data_dir = os.path.join( os.path.dirname(os.path.abspath(__file__)), 'tutorial_data') lexicon_dir = '/tmp/tutorial/lexicon' training_sentence = os.path.join(data_dir, 'sentence.prototext') if not os.path.isdir(lexicon_dir): os.makedirs(lexicon_dir) def main(argv): del argv # unused # Constructs lexical resources for SyntaxNet in the given resource path, from # the training data. lexicon.build_lexicon( lexicon_dir, training_sentence, training_corpus_format='sentence-prototext') # Construct the ComponentSpec for tagging. This is a simple left-to-right RNN # sequence tagger. tagger = spec_builder.ComponentSpecBuilder('tagger') tagger.set_network_unit(name='FeedForwardNetwork', hidden_layer_sizes='256') tagger.set_transition_system(name='tagger') tagger.add_fixed_feature(name='words', fml='input.word', embedding_dim=64) tagger.add_rnn_link(embedding_dim=-1) tagger.fill_from_resources(lexicon_dir) # Construct the ComponentSpec for parsing. parser = spec_builder.ComponentSpecBuilder('parser') parser.set_network_unit( name='FeedForwardNetwork', hidden_layer_sizes='256', layer_norm_hidden='True') parser.set_transition_system(name='arc-standard') parser.add_token_link( source=tagger, fml='input.focus stack.focus stack(1).focus', embedding_dim=32, source_layer='logits') # Recurrent connection for the arc-standard parser. For both tokens on the # stack, we connect to the last time step to either SHIFT or REDUCE that # token. This allows the parser to build up compositional representations of # phrases. parser.add_link( source=parser, # recurrent connection name='rnn-stack', # unique identifier fml='stack.focus stack(1).focus', # look for both stack tokens source_translator='shift-reduce-step', # maps token indices -> step embedding_dim=32) # project down to 32 dims parser.fill_from_resources(lexicon_dir) master_spec = spec_pb2.MasterSpec() master_spec.component.extend([tagger.spec, parser.spec]) hyperparam_config = spec_pb2.GridPoint() # Build the TensorFlow graph. graph = tf.Graph() with graph.as_default(): builder = graph_builder.MasterBuilder(master_spec, hyperparam_config) target = spec_pb2.TrainTarget() target.name = 'all' target.unroll_using_oracle.extend([True, True]) dry_run = builder.add_training_from_config(target, trace_only=True) # Read in serialized protos from training data. sentence = sentence_pb2.Sentence() text_format.Merge(open(training_sentence).read(), sentence) training_set = [sentence.SerializeToString()] with tf.Session(graph=graph) as sess: # Make sure to re-initialize all underlying state. sess.run(tf.initialize_all_variables()) traces = sess.run( dry_run['traces'], feed_dict={dry_run['input_batch']: training_set}) with open('dragnn_tutorial_2.html', 'w') as f: f.write( visualization.trace_html( traces[0], height='400px', master_spec=master_spec).encode('utf-8')) if __name__ == '__main__': tf.app.run()
model/tensor_pool.py
Shimingyi/MotioNet
368
141947
import random import torch from torch.autograd import Variable class TensorPool(): def __init__(self, pool_size): self.pool_size = pool_size if self.pool_size > 0: self.num_imgs = 0 self.images = [] def query(self, tensors): if self.pool_size == 0: return tensors return_tensors = [] for tensor in tensors.data: tensor = torch.unsqueeze(tensor, 0) if self.num_imgs < self.pool_size: self.num_imgs = self.num_imgs + 1 self.images.append(tensor) return_tensors.append(tensor) else: p = random.uniform(0, 1) if p > 0.5: random_id = random.randint(0, self.pool_size-1) tmp = self.images[random_id].clone() self.images[random_id] = tensor return_tensors.append(tmp) else: return_tensors.append(tensor) return_images = Variable(torch.cat(return_tensors, 0)) return return_images
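# Hedged usage sketch: replaying a history of generated samples, as in the
# image-pool trick for GAN training; the shapes below are illustrative.
pool = TensorPool(pool_size=50)
fakes = Variable(torch.randn(8, 3, 64, 64))  # e.g. generator outputs
mixed = pool.query(fakes)  # current samples until the pool fills, then a mix with history
# 'mixed' has the same shape as 'fakes' and can be fed to a discriminator.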
tests/library/python/for.py
chrisseaton/katahdin
115
141962
x = 0 for n in range(10): x = x + 1 assert x == 10
sdks/python/apache_beam/dataframe/frame_base_test.py
hengfengli/beam
5,279
141973
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import pandas as pd from apache_beam.dataframe import expressions from apache_beam.dataframe import frame_base from apache_beam.dataframe import frames class FrameBaseTest(unittest.TestCase): def test_elementwise_func(self): a = pd.Series([1, 2, 3]) b = pd.Series([100, 200, 300]) empty_proxy = a[:0] x = frames.DeferredSeries(expressions.PlaceholderExpression(empty_proxy)) y = frames.DeferredSeries(expressions.PlaceholderExpression(empty_proxy)) sub = frame_base._elementwise_function(lambda x, y: x - y) session = expressions.Session({x._expr: a, y._expr: b}) self.assertTrue(sub(x, y)._expr.evaluate_at(session).equals(a - b)) self.assertTrue(sub(x, 1)._expr.evaluate_at(session).equals(a - 1)) self.assertTrue(sub(1, x)._expr.evaluate_at(session).equals(1 - a)) self.assertTrue(sub(x, b)._expr.evaluate_at(session).equals(a - b)) self.assertTrue(sub(a, y)._expr.evaluate_at(session).equals(a - b)) def test_elementwise_func_kwarg(self): a = pd.Series([1, 2, 3]) b = pd.Series([100, 200, 300]) empty_proxy = a[:0] x = frames.DeferredSeries(expressions.PlaceholderExpression(empty_proxy)) y = frames.DeferredSeries(expressions.PlaceholderExpression(empty_proxy)) sub = frame_base._elementwise_function(lambda x, y=1: x - y) session = expressions.Session({x._expr: a, y._expr: b}) self.assertTrue(sub(x, y=y)._expr.evaluate_at(session).equals(a - b)) self.assertTrue(sub(x)._expr.evaluate_at(session).equals(a - 1)) self.assertTrue(sub(1, y=x)._expr.evaluate_at(session).equals(1 - a)) self.assertTrue(sub(x, y=b)._expr.evaluate_at(session).equals(a - b)) self.assertTrue(sub(a, y=y)._expr.evaluate_at(session).equals(a - b)) self.assertTrue(sub(x, y)._expr.evaluate_at(session).equals(a - b)) def test_maybe_inplace(self): @frame_base.maybe_inplace def add_one(frame): return frame + 1 frames.DeferredSeries.add_one = add_one original_expr = expressions.PlaceholderExpression(pd.Series([1, 2, 3])) x = frames.DeferredSeries(original_expr) x.add_one() self.assertIs(x._expr, original_expr) x.add_one(inplace=False) self.assertIs(x._expr, original_expr) x.add_one(inplace=True) self.assertIsNot(x._expr, original_expr) def test_args_to_kwargs(self): class Base(object): def func(self, a=1, b=2, c=3): pass class Proxy(object): @frame_base.args_to_kwargs(Base) def func(self, **kwargs): return kwargs proxy = Proxy() # pylint: disable=too-many-function-args self.assertEqual(proxy.func(), {}) self.assertEqual(proxy.func(100), {'a': 100}) self.assertEqual(proxy.func(2, 4, 6), {'a': 2, 'b': 4, 'c': 6}) self.assertEqual(proxy.func(2, c=6), {'a': 2, 'c': 6}) self.assertEqual(proxy.func(c=6, a=2), {'a': 2, 'c': 6}) def test_args_to_kwargs_populates_defaults(self): class Base(object): def func(self, a=1, b=2, c=3): pass class Proxy(object): 
@frame_base.args_to_kwargs(Base) @frame_base.populate_defaults(Base) def func(self, a, c=1000, **kwargs): return dict(kwargs, a=a, c=c) proxy = Proxy() # pylint: disable=too-many-function-args self.assertEqual(proxy.func(), {'a': 1, 'c': 1000}) self.assertEqual(proxy.func(100), {'a': 100, 'c': 1000}) self.assertEqual(proxy.func(2, 4, 6), {'a': 2, 'b': 4, 'c': 6}) self.assertEqual(proxy.func(2, c=6), {'a': 2, 'c': 6}) self.assertEqual(proxy.func(c=6, a=2), {'a': 2, 'c': 6}) self.assertEqual(proxy.func(c=6), {'a': 1, 'c': 6}) if __name__ == '__main__': unittest.main()
Models/Layers.py
GingerNg/SDNet
112
141997
<gh_stars>100-1000 # Copyright (c) Microsoft Corporation. # Licensed under the MIT license. import os import random import torch import torch.nn as nn import torch.nn.functional as F from torch.autograd import Variable import torch.nn.init as init from torch.nn.parameter import Parameter from torch.nn.utils.rnn import pad_packed_sequence as unpack from torch.nn.utils.rnn import pack_padded_sequence as pack def set_dropout_prob(p): global dropout_p dropout_p = p def set_seq_dropout(option): # option = True or False global do_seq_dropout do_seq_dropout = option def seq_dropout(x, p=0, training=False): """ x: batch * len * input_size """ if training == False or p == 0: return x dropout_mask = Variable(1.0 / (1-p) * torch.bernoulli((1-p) * (x.data.new(x.size(0), x.size(2)).zero_() + 1)), requires_grad=False) return dropout_mask.unsqueeze(1).expand_as(x) * x def dropout(x, p=0, training=False): """ x: (batch * len * input_size) or (any other shape) """ if do_seq_dropout and len(x.size()) == 3: # if x is (batch * len * input_size) return seq_dropout(x, p=p, training=training) else: return F.dropout(x, p=p, training=training) class CNN(nn.Module): def __init__(self, input_size, window_size, output_size): super(CNN, self).__init__() if window_size % 2 != 1: raise Exception("window size must be an odd number") padding_size = int((window_size - 1) / 2) self._output_size = output_size self.cnn = nn.Conv2d(1, output_size, (window_size, input_size), padding = (padding_size, 0), bias = False) init.xavier_uniform(self.cnn.weight) @property def output_size(self): return self._output_size ''' (item, subitem) can be (word, characters), or (sentence, words) x: num_items x max_subitem_size x input_size x_mask: num_items x max_subitem_size (not used but put here to align with RNN format) return num_items x max_subitem_size x output_size ''' def forward(self, x, x_mask): ''' x_unsqueeze: num_items x 1 x max_subitem_size x input_size x_conv: num_items x output_size x max_subitem_size x_output: num_items x max_subitem_size x output_size ''' x = F.dropout(x, p = dropout_p, training = self.training) x_unsqueeze = x.unsqueeze(1) x_conv = F.tanh(self.cnn(x_unsqueeze)).squeeze(3) x_output = torch.transpose(x_conv, 1, 2) return x_output class MaxPooling(nn.Module): def __init__(self): super(MaxPooling, self).__init__() self.MIN = -1e6 ''' (item, subitem) can be (word, characters), or (sentence, words) x: num_items x max_subitem_size x input_size x_mask: num_items x max_subitem_size return num_items x input_size ''' def forward(self, x, x_mask): ''' x_output: num_items x input_size x 1 --> num_items x input_size ''' empty_mask = x_mask.eq(0).unsqueeze(2).expand_as(x) x_now = x.clone() x_now.data.masked_fill_(empty_mask.data, self.MIN) x_output = x_now.max(1)[0] x_output.data.masked_fill_(x_output.data.eq(self.MIN), 0) return x_output class AveragePooling(nn.Module): def __init__(self): super(AveragePooling, self).__init__() ''' (item, subitem) can be (word, characters), or (sentence, words) x: num_items x max_subitem_size x input_size x_mask: num_items x max_subitem_size return num_items x input_size ''' def forward(self, x, x_mask): ''' x_output: num_items x input_size x 1 --> num_items x input_size ''' x_now = x.clone() empty_mask = x_mask.eq(0).unsqueeze(2).expand_as(x_now) x_now.data.masked_fill_(empty_mask.data, 0) x_sum = torch.sum(x_now, 1); # x_sum: num_items x input_size x_num = torch.sum(x_mask.eq(1).float(), 1).unsqueeze(1).expand_as(x_sum); # x_num: num_items x input_size x_num = torch.clamp(x_num, min = 1) 
return x_sum / x_num; class StackedBRNN(nn.Module): def __init__(self, input_size, hidden_size, num_layers, rnn_type = nn.LSTM, concat_layers = False, bidirectional = True, add_feat=0): super(StackedBRNN, self).__init__() self.bidir_coef = 2 if bidirectional else 1 self.num_layers = num_layers self.concat_layers = concat_layers self.hidden_size = hidden_size self.rnns = nn.ModuleList() for i in range(num_layers): in_size = input_size if i == 0 else (self.bidir_coef * hidden_size + add_feat if i== 1 else self.bidir_coef * hidden_size) rnn = rnn_type(in_size, hidden_size, num_layers = 1, bidirectional = bidirectional, batch_first = True) self.rnns.append(rnn) @property def output_size(self): if self.concat_layers: return self.num_layers * self.bidir_coef * self.hidden_size else: return self.bidir_coef * self.hidden_size """ Multi-layer bi-RNN Arguments: x (Float Tensor): a Float Tensor of size (batch * wordnum * input_dim). x_mask (Byte Tensor): a Byte Tensor of mask for the input tensor (batch * wordnum). x_additional (Byte Tensor): a Byte Tensor of mask for the additional input tensor (batch * wordnum * additional_dim). x_out (Float Tensor): a Float Tensor of size (batch * wordnum * output_size). """ def forward(self, x, x_mask, return_list=False, x_additional = None): hiddens = [x] for i in range(self.num_layers): rnn_input = hiddens[-1] if i == 1 and x_additional is not None: rnn_input = torch.cat((rnn_input, x_additional), 2) if dropout_p > 0: rnn_input = dropout(rnn_input, p=dropout_p, training = self.training) rnn_output = self.rnns[i](rnn_input)[0] hiddens.append(rnn_output) if self.concat_layers: output = torch.cat(hiddens[1:], 2) else: output = hiddens[-1] if return_list: return output, hiddens[1:] else: return output class AttentionScore(nn.Module): """ correlation_func = 1, sij = x1^Tx2 correlation_func = 2, sij = (Wx1)D(Wx2) correlation_func = 3, sij = Relu(Wx1)DRelu(Wx2) correlation_func = 4, sij = x1^TWx2 correlation_func = 5, sij = Relu(Wx1)DRelu(Wx2) """ def __init__(self, input_size, hidden_size, correlation_func = 1, do_similarity = False): super(AttentionScore, self).__init__() self.correlation_func = correlation_func self.hidden_size = hidden_size if correlation_func == 2 or correlation_func == 3: self.linear = nn.Linear(input_size, hidden_size, bias = False) if do_similarity: self.diagonal = Parameter(torch.ones(1, 1, 1) / (hidden_size ** 0.5), requires_grad = False) else: self.diagonal = Parameter(torch.ones(1, 1, hidden_size), requires_grad = True) if correlation_func == 4: self.linear = nn.Linear(input_size, input_size, bias=False) if correlation_func == 5: self.linear = nn.Linear(input_size, hidden_size, bias = False) def forward(self, x1, x2): ''' Input: x1: batch x word_num1 x dim x2: batch x word_num2 x dim Output: scores: batch x word_num1 x word_num2 ''' x1 = dropout(x1, p = dropout_p, training = self.training) x2 = dropout(x2, p = dropout_p, training = self.training) x1_rep = x1 x2_rep = x2 batch = x1_rep.size(0) word_num1 = x1_rep.size(1) word_num2 = x2_rep.size(1) dim = x1_rep.size(2) if self.correlation_func == 2 or self.correlation_func == 3: x1_rep = self.linear(x1_rep.contiguous().view(-1, dim)).view(batch, word_num1, self.hidden_size) # Wx1 x2_rep = self.linear(x2_rep.contiguous().view(-1, dim)).view(batch, word_num2, self.hidden_size) # Wx2 if self.correlation_func == 3: x1_rep = F.relu(x1_rep) x2_rep = F.relu(x2_rep) x1_rep = x1_rep * self.diagonal.expand_as(x1_rep) # x1_rep is (Wx1)D or Relu(Wx1)D # x1_rep: batch x word_num1 x dim (corr=1) or 
hidden_size (corr=2,3) if self.correlation_func == 4: x2_rep = self.linear(x2_rep.contiguous().view(-1, dim)).view(batch, word_num2, dim) # Wx2 if self.correlation_func == 5: x1_rep = self.linear(x1_rep.contiguous().view(-1, dim)).view(batch, word_num1, self.hidden_size) # Wx1 x2_rep = self.linear(x2_rep.contiguous().view(-1, dim)).view(batch, word_num2, self.hidden_size) # Wx2 x1_rep = F.relu(x1_rep) x2_rep = F.relu(x2_rep) scores = x1_rep.bmm(x2_rep.transpose(1, 2)) return scores class Attention(nn.Module): def __init__(self, input_size, hidden_size, correlation_func = 1, do_similarity = False): super(Attention, self).__init__() self.scoring = AttentionScore(input_size, hidden_size, correlation_func, do_similarity) def forward(self, x1, x2, x2_mask, x3 = None, drop_diagonal=False): ''' For each word in x1, get its attended linear combination of x3 (if none, x2), using scores calculated between x1 and x2. Input: x1: batch x word_num1 x dim x2: batch x word_num2 x dim x2_mask: batch x word_num2 x3 (if not None) : batch x word_num2 x dim_3 Output: attended: batch x word_num1 x dim_3 ''' batch = x1.size(0) word_num1 = x1.size(1) word_num2 = x2.size(1) if x3 is None: x3 = x2 scores = self.scoring(x1, x2) # scores: batch x word_num1 x word_num2 empty_mask = x2_mask.eq(0).unsqueeze(1).expand_as(scores) scores.data.masked_fill_(empty_mask.data, -float('inf')) if drop_diagonal: assert(scores.size(1) == scores.size(2)) diag_mask = torch.diag(scores.data.new(scores.size(1)).zero_() + 1).byte().unsqueeze(0).expand_as(scores) scores.data.masked_fill_(diag_mask, -float('inf')) # softmax alpha_flat = F.softmax(scores.view(-1, x2.size(1)), dim = 1) alpha = alpha_flat.view(-1, x1.size(1), x2.size(1)) # alpha: batch x word_num1 x word_num2 attended = alpha.bmm(x3) # attended: batch x word_num1 x dim_3 return attended def RNN_from_opt(input_size_, hidden_size_, num_layers=1, concat_rnn=False, add_feat=0, bidirectional=True, rnn_type=nn.LSTM): new_rnn = StackedBRNN( input_size=input_size_, hidden_size=hidden_size_, num_layers=num_layers, rnn_type=rnn_type, concat_layers=concat_rnn, bidirectional=bidirectional, add_feat=add_feat ) output_size = hidden_size_ if bidirectional: output_size *= 2 if concat_rnn: output_size *= num_layers return new_rnn, output_size # For summarizing a set of vectors into a single vector class LinearSelfAttn(nn.Module): """Self attention over a sequence: * o_i = softmax(Wx_i) for x_i in X. 
""" def __init__(self, input_size): super(LinearSelfAttn, self).__init__() self.linear = nn.Linear(input_size, 1) def forward(self, x, x_mask): """ x = batch * len * hdim x_mask = batch * len """ empty_mask = x_mask.eq(0).expand_as(x_mask) x = dropout(x, p=dropout_p, training=self.training) x_flat = x.contiguous().view(-1, x.size(-1)) scores = self.linear(x_flat).view(x.size(0), x.size(1)) scores.data.masked_fill_(empty_mask.data, -float('inf')) alpha = F.softmax(scores, dim = 1) return alpha def generate_mask(new_data, dropout_p=0.0): new_data = (1-dropout_p) * (new_data.zero_() + 1) for i in range(new_data.size(0)): one = random.randint(0, new_data.size(1) - 1) new_data[i][one] = 1 mask = Variable(1.0/(1 - dropout_p) * torch.bernoulli(new_data), requires_grad=False) return mask # Get positional scores and scores for 'yes', 'no', 'unknown' cases class GetFinalScores(nn.Module): def __init__(self, x_size, h_size): super(GetFinalScores, self).__init__() self.noanswer_linear = nn.Linear(h_size, x_size) self.noanswer_w = nn.Linear(x_size, 1, bias=True) self.no_linear = nn.Linear(h_size, x_size) self.no_w = nn.Linear(x_size, 1, bias=True) self.yes_linear = nn.Linear(h_size, x_size) self.yes_w = nn.Linear(x_size, 1, bias=True) self.attn = BilinearSeqAttn(x_size, h_size) self.attn2 = BilinearSeqAttn(x_size, h_size) self.rnn = nn.GRUCell(x_size, h_size) def forward(self, x, h0, x_mask): """ x = batch * len * x_size h0 = batch * h_size x_mask = batch * len """ score_s = self.attn(x, h0, x_mask) # score_s = batch * len ptr_net_in = torch.bmm(F.softmax(score_s, dim = 1).unsqueeze(1), x).squeeze(1) ptr_net_in = dropout(ptr_net_in, p=dropout_p, training=self.training) h0 = dropout(h0, p=dropout_p, training=self.training) h1 = self.rnn(ptr_net_in, h0) # h1 same size as h0 score_e = self.attn2(x, h1, x_mask) # score_e = batch * len score_no = self.get_single_score(x, h0, x_mask, self.no_linear, self.no_w) score_yes = self.get_single_score(x, h0, x_mask, self.yes_linear, self.yes_w) score_noanswer = self.get_single_score(x, h0, x_mask, self.noanswer_linear, self.noanswer_w) return score_s, score_e, score_no, score_yes, score_noanswer def get_single_score(self, x, h, x_mask, linear, w): Wh = linear(h) #batch * x_size xWh = x.bmm(Wh.unsqueeze(2)).squeeze(2) #batch * len empty_mask = x_mask.eq(0).expand_as(x_mask) xWh.data.masked_fill_(empty_mask.data, -float('inf')) attn_x = torch.bmm(F.softmax(xWh, dim = 1).unsqueeze(1), x) # batch * 1 * x_size single_score = w(attn_x).squeeze(2) # batch * 1 return single_score # For attending the span in document from the query class BilinearSeqAttn(nn.Module): """A bilinear attention layer over a sequence X w.r.t y: * o_i = x_i'Wy for x_i in X. 
""" def __init__(self, x_size, y_size, identity=False): super(BilinearSeqAttn, self).__init__() if not identity: self.linear = nn.Linear(y_size, x_size) else: self.linear = None def forward(self, x, y, x_mask): """ x = batch * len * h1 y = batch * h2 x_mask = batch * len """ empty_mask = x_mask.eq(0).expand_as(x_mask) x = dropout(x, p=dropout_p, training=self.training) y = dropout(y, p=dropout_p, training=self.training) Wy = self.linear(y) if self.linear is not None else y # batch * h1 xWy = x.bmm(Wy.unsqueeze(2)).squeeze(2) # batch * len xWy.data.masked_fill_(empty_mask.data, -float('inf')) return xWy # History-of-Word Multi-layer inter-attention class DeepAttention(nn.Module): def __init__(self, opt, abstr_list_cnt, deep_att_hidden_size_per_abstr, correlation_func=1, word_hidden_size=None): super(DeepAttention, self).__init__() word_hidden_size = opt['embedding_dim'] if word_hidden_size is None else word_hidden_size abstr_hidden_size = opt['hidden_size'] * 2 att_size = abstr_hidden_size * abstr_list_cnt + word_hidden_size self.int_attn_list = nn.ModuleList() for i in range(abstr_list_cnt+1): self.int_attn_list.append(Attention(att_size, deep_att_hidden_size_per_abstr, correlation_func = correlation_func)) rnn_input_size = abstr_hidden_size * abstr_list_cnt * 2 + (opt['highlvl_hidden_size'] * 2) self.rnn_input_size = rnn_input_size self.rnn, self.output_size = RNN_from_opt(rnn_input_size, opt['highlvl_hidden_size'], num_layers=1) self.opt = opt def forward(self, x1_word, x1_abstr, x2_word, x2_abstr, x1_mask, x2_mask, return_bef_rnn=False): """ x1_word, x2_word, x1_abstr, x2_abstr are list of 3D tensors. 3D tensor: batch_size * length * hidden_size """ x1_att = torch.cat(x1_word + x1_abstr, 2) x2_att = torch.cat(x2_word + x2_abstr[:-1], 2) x1 = torch.cat(x1_abstr, 2) x2_list = x2_abstr for i in range(len(x2_list)): attn_hiddens = self.int_attn_list[i](x1_att, x2_att, x2_mask, x3=x2_list[i]) x1 = torch.cat((x1, attn_hiddens), 2) x1_hiddens = self.rnn(x1, x1_mask) if return_bef_rnn: return x1_hiddens, x1 else: return x1_hiddens # bmm: batch matrix multiplication # unsqueeze: add singleton dimension # squeeze: remove singleton dimension def weighted_avg(x, weights): # used in lego_reader.py """ x = batch * len * d weights = batch * len """ return weights.unsqueeze(1).bmm(x).squeeze(1)
botorch/models/approximate_gp.py
saitcakmak/botorch
2,344
141998
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. r""" References .. [burt2020svgp] <NAME> and <NAME> and <NAME>, Convergence of Sparse Variational Inference in Gaussian Process Regression, Journal of Machine Learning Research, 2020, http://jmlr.org/papers/v21/19-1015.html. .. [chen2018dpp] <NAME> and <NAME> and <NAME>, Fast greedy MAP inference for determinantal point process to improve recommendation diversity, Proceedings of the 32nd International Conference on Neural Information Processing Systems, 2018, https://arxiv.org/abs/1709.05135. .. [hensman2013svgp] <NAME> and <NAME> and <NAME>, Gaussian Processes for Big Data, Proceedings of the 29th Conference on Uncertainty in Artificial Intelligence, 2013, https://arxiv.org/abs/1309.6835. """ from __future__ import annotations import copy from typing import Optional, Type, Union import torch from botorch.models.gpytorch import GPyTorchModel from botorch.models.transforms.input import InputTransform from botorch.models.transforms.outcome import OutcomeTransform from botorch.models.utils import validate_input_scaling from botorch.posteriors.gpytorch import GPyTorchPosterior from botorch.sampling import MCSampler from gpytorch.constraints import GreaterThan from gpytorch.distributions import MultivariateNormal from gpytorch.kernels import Kernel, MaternKernel, ScaleKernel from gpytorch.lazy import LazyTensor from gpytorch.likelihoods import ( GaussianLikelihood, Likelihood, MultitaskGaussianLikelihood, ) from gpytorch.means import ConstantMean, Mean from gpytorch.models import ApproximateGP from gpytorch.module import Module from gpytorch.priors import GammaPrior from gpytorch.utils.memoize import clear_cache_hook from gpytorch.variational import ( _VariationalDistribution, _VariationalStrategy, CholeskyVariationalDistribution, IndependentMultitaskVariationalStrategy, VariationalStrategy, ) from torch import Tensor MIN_INFERRED_NOISE_LEVEL = 1e-4 NEG_INF = -(torch.tensor(float("inf"))) class ApproximateGPyTorchModel(GPyTorchModel): def __init__( self, model: Optional[ApproximateGP] = None, likelihood: Optional[Likelihood] = None, num_outputs: int = 1, *args, **kwargs, ) -> None: r""" Botorch wrapper class for various (variational) approximate GP models in gpytorch. This can either include stochastic variational GPs (SVGPs) or variational implementations of weight space approximate GPs. Args: model: Instance of gpytorch.approximate GP models. If omitted, constructs a `_SingleTaskVariationalGP`. likelihood: Instance of a GPyYorch likelihood. If omitted, uses a either a `GaussianLikelihood` (if `num_outputs=1`) or a `MultitaskGaussianLikelihood`(if `num_outputs>1`). num_outputs: Number of outputs expected for the GP model. args: Optional positional arguments passed to the `_SingleTaskVariationalGP` constructor if no model is provided. kwargs: Optional keyword arguments passed to the `_SingleTaskVariationalGP` constructor if no model is provided. 
""" super().__init__() if model is None: model = _SingleTaskVariationalGP(num_outputs=num_outputs, *args, **kwargs) if likelihood is None: if num_outputs == 1: likelihood = GaussianLikelihood() else: likelihood = MultitaskGaussianLikelihood(num_tasks=num_outputs) self.model = model self.likelihood = likelihood self._desired_num_outputs = num_outputs @property def num_outputs(self): return self._desired_num_outputs def posterior( self, X, output_indices=None, observation_noise=False, *args, **kwargs ) -> GPyTorchPosterior: self.eval() # make sure model is in eval mode # input transforms are applied at `posterior` in `eval` mode, and at # `model.forward()` at the training time X = self.transform_inputs(X) # check for the multi-batch case for multi-outputs b/c this will throw # warnings X_ndim = X.ndim if self.num_outputs > 1 and X_ndim > 2: X = X.unsqueeze(-3).repeat(*[1] * (X_ndim - 2), self.num_outputs, 1, 1) dist = self.model(X) if observation_noise: dist = self.likelihood(dist, *args, **kwargs) posterior = GPyTorchPosterior(mvn=dist) if hasattr(self, "outcome_transform"): posterior = self.outcome_transform.untransform_posterior(posterior) return posterior def forward(self, X, *args, **kwargs) -> MultivariateNormal: if self.training: X = self.transform_inputs(X) return self.model(X) def fantasize(self, X, sampler=MCSampler, observation_noise=True, *args, **kwargs): raise NotImplementedError( "Fantasization of approximate GPs has not been implemented yet." ) class _SingleTaskVariationalGP(ApproximateGP): def __init__( self, train_X: Tensor, train_Y: Optional[Tensor] = None, num_outputs: int = 1, learn_inducing_points=True, covar_module: Optional[Kernel] = None, mean_module: Optional[Mean] = None, variational_distribution: Optional[_VariationalDistribution] = None, variational_strategy: Type[_VariationalStrategy] = VariationalStrategy, inducing_points: Optional[Union[Tensor, int]] = None, ) -> None: r""" Base class wrapper for a stochastic variational Gaussian Process (SVGP) model [hensman2013svgp]_. Uses pivoted cholesky initialization for the inducing points. Args: train_X: Training inputs (due to the ability of the SVGP to sub-sample this does not have to be all of the training inputs). train_Y: Training targets (optional). num_outputs: Number of output responses per input. covar_module: Kernel function. If omitted, uses a `MaternKernel`. mean_module: Mean of GP model. If omitted, uses a `ConstantMean`. variational_distribution: Type of variational distribution to use (default: CholeskyVariationalDistribution), the properties of the variational distribution will encourage scalability or ease of optimization. variational_strategy: Type of variational strategy to use (default: VariationalStrategy). The default setting uses "whitening" of the variational distribution to make training easier. inducing_points: The number or specific locations of the inducing points. """ # We use the model subclass wrapper to deal with input / outcome transforms. # The number of outputs will be correct here due to the check in # SingleTaskVariationalGP. 
input_batch_shape = train_X.shape[:-2] aug_batch_shape = copy.deepcopy(input_batch_shape) if num_outputs > 1: aug_batch_shape += torch.Size((num_outputs,)) self._aug_batch_shape = aug_batch_shape if mean_module is None: mean_module = ConstantMean(batch_shape=self._aug_batch_shape).to(train_X) if covar_module is None: covar_module = ScaleKernel( base_kernel=MaternKernel( nu=2.5, ard_num_dims=train_X.shape[-1], batch_shape=self._aug_batch_shape, lengthscale_prior=GammaPrior(3.0, 6.0), ), batch_shape=self._aug_batch_shape, outputscale_prior=GammaPrior(2.0, 0.15), ).to(train_X) self._subset_batch_dict = { "mean_module.constant": -2, "covar_module.raw_outputscale": -1, "covar_module.base_kernel.raw_lengthscale": -3, } # initialize inducing points with a pivoted cholesky init if they are not given if not isinstance(inducing_points, Tensor): if inducing_points is None: # number of inducing points is 25% the number of data points # as a heuristic inducing_points = int(0.25 * train_X.shape[-2]) inducing_points = _select_inducing_points( inputs=train_X, covar_module=covar_module, num_inducing=inducing_points, input_batch_shape=input_batch_shape, ) if variational_distribution is None: variational_distribution = CholeskyVariationalDistribution( num_inducing_points=inducing_points.shape[-2], batch_shape=self._aug_batch_shape, ) variational_strategy = variational_strategy( self, inducing_points=inducing_points, variational_distribution=variational_distribution, learn_inducing_locations=learn_inducing_points, ) # wrap variational models in independent multi-task variational strategy if num_outputs > 1: variational_strategy = IndependentMultitaskVariationalStrategy( base_variational_strategy=variational_strategy, num_tasks=num_outputs, task_dim=-1, ) super().__init__(variational_strategy=variational_strategy) self.mean_module = mean_module self.covar_module = covar_module def forward(self, X) -> MultivariateNormal: mean_x = self.mean_module(X) covar_x = self.covar_module(X) latent_dist = MultivariateNormal(mean_x, covar_x) return latent_dist class SingleTaskVariationalGP(ApproximateGPyTorchModel): r"""A single-task variational GP model following [hensman2013svgp]_ with pivoted cholesky initialization following [chen2018dpp]_ and [burt2020svgp]_. A single-task variational GP using relatively strong priors on the Kernel hyperparameters, which work best when covariates are normalized to the unit cube and outcomes are standardized (zero mean, unit variance). This model works in batch mode (each batch having its own hyperparameters). When the training observations include multiple outputs, this model will use batching to model outputs independently. However, batches of multi-output models are not supported at this time, if you need to use those, please use a ModelListGP. Use this model if you have a lot of data or if your responses are non-Gaussian. To train this model, you should use `gpytorch.mlls.VariationalELBO` and not the exact marginal log likelihood. 
    Example mll:
        mll = VariationalELBO(model.likelihood, model, num_data=train_X.shape[-2])
    """

    def __init__(
        self,
        train_X: Tensor,
        train_Y: Optional[Tensor] = None,
        likelihood: Optional[Likelihood] = None,
        num_outputs: int = 1,
        learn_inducing_points: bool = True,
        covar_module: Optional[Kernel] = None,
        mean_module: Optional[Mean] = None,
        variational_distribution: Optional[_VariationalDistribution] = None,
        variational_strategy: Type[_VariationalStrategy] = VariationalStrategy,
        inducing_points: Optional[Union[Tensor, int]] = None,
        outcome_transform: Optional[OutcomeTransform] = None,
        input_transform: Optional[InputTransform] = None,
    ) -> None:
        r"""
        A single-task stochastic variational Gaussian process model (SVGP) as
        described by [hensman2013svgp]_. We use pivoted Cholesky initialization
        [burt2020svgp]_ to initialize the inducing points of the model.

        Args:
            train_X: Training inputs (due to the ability of the SVGP to sub-sample
                this does not have to be all of the training inputs).
            train_Y: Training targets (optional).
            likelihood: Instance of a GPyTorch likelihood. If omitted, uses either
                a `GaussianLikelihood` (if `num_outputs=1`) or a
                `MultitaskGaussianLikelihood` (if `num_outputs>1`).
            num_outputs: Number of output responses per input (default: 1).
            learn_inducing_points: Whether to learn the inducing point locations
                (default: True).
            covar_module: Kernel function. If omitted, uses a `MaternKernel`.
            mean_module: Mean of GP model. If omitted, uses a `ConstantMean`.
            variational_distribution: Type of variational distribution to use
                (default: CholeskyVariationalDistribution), the properties of the
                variational distribution will encourage scalability or ease of
                optimization.
            variational_strategy: Type of variational strategy to use (default:
                VariationalStrategy). The default setting uses "whitening" of the
                variational distribution to make training easier.
            inducing_points: The number or specific locations of the inducing
                points.
""" with torch.no_grad(): transformed_X = self.transform_inputs( X=train_X, input_transform=input_transform ) if train_Y is not None: if outcome_transform is not None: train_Y, _ = outcome_transform(train_Y) self._validate_tensor_args(X=transformed_X, Y=train_Y) validate_input_scaling(train_X=transformed_X, train_Y=train_Y) if train_Y.shape[-1] != num_outputs: num_outputs = train_Y.shape[-1] self._num_outputs = num_outputs self._input_batch_shape = train_X.shape[:-2] aug_batch_shape = copy.deepcopy(self._input_batch_shape) if num_outputs > 1: aug_batch_shape += torch.Size([num_outputs]) self._aug_batch_shape = aug_batch_shape if likelihood is None: if num_outputs == 1: noise_prior = GammaPrior(1.1, 0.05) noise_prior_mode = (noise_prior.concentration - 1) / noise_prior.rate likelihood = GaussianLikelihood( noise_prior=noise_prior, batch_shape=self._aug_batch_shape, noise_constraint=GreaterThan( MIN_INFERRED_NOISE_LEVEL, transform=None, initial_value=noise_prior_mode, ), ) else: likelihood = MultitaskGaussianLikelihood(num_tasks=num_outputs) else: self._is_custom_likelihood = True model = _SingleTaskVariationalGP( train_X=transformed_X, train_Y=train_Y, num_outputs=num_outputs, learn_inducing_points=learn_inducing_points, covar_module=covar_module, mean_module=mean_module, variational_distribution=variational_distribution, variational_strategy=variational_strategy, inducing_points=inducing_points, ) super().__init__(model=model, likelihood=likelihood, num_outputs=num_outputs) if outcome_transform is not None: self.outcome_transform = outcome_transform if input_transform is not None: self.input_transform = input_transform # for model fitting utilities # TODO: make this a flag? self.model.train_inputs = [transformed_X] if train_Y is not None: self.model.train_targets = train_Y.squeeze(-1) self.to(train_X) def init_inducing_points( self, inputs: Tensor, ) -> Tensor: r""" Reinitialize the inducing point locations in-place with the current kernel applied to `inputs`. The variational distribution and variational strategy caches are reset. Args: inputs: (\*batch_shape, n, d)-dim input data tensor. Returns: (\*batch_shape, m, d)-dim tensor of selected inducing point locations. """ var_strat = self.model.variational_strategy clear_cache_hook(var_strat) if hasattr(var_strat, "base_variational_strategy"): var_strat = var_strat.base_variational_strategy clear_cache_hook(var_strat) with torch.no_grad(): num_inducing = var_strat.inducing_points.size(-2) inducing_points = _select_inducing_points( inputs=inputs, covar_module=self.model.covar_module, num_inducing=num_inducing, input_batch_shape=self._input_batch_shape, ) var_strat.inducing_points.copy_(inducing_points) var_strat.variational_params_initialized.fill_(0) return inducing_points def _select_inducing_points( inputs: Tensor, covar_module: Module, num_inducing: int, input_batch_shape: torch.Size, ) -> Tensor: r""" Utility function that evaluates a kernel at given inputs and selects inducing point locations based on the pivoted Cholesky heuristic. Args: inputs: A (*batch_shape, n, d)-dim input data tensor. covar_module: GPyTorch Module returning a LazyTensor kernel matrix. num_inducing: The maximun number (m) of inducing points (m <= n). input_batch_shape: The non-task-related batch shape. Returns: A (*batch_shape, m, d)-dim tensor of inducing point locations. 
""" train_train_kernel = covar_module(inputs).evaluate_kernel() # base case if train_train_kernel.ndimension() == 2: inducing_points = _pivoted_cholesky_init( train_inputs=inputs, kernel_matrix=train_train_kernel, max_length=num_inducing, ) # multi-task case elif train_train_kernel.ndimension() == 3 and len(input_batch_shape) == 0: input_element = inputs[0] if inputs.ndimension() == 3 else inputs kernel_element = train_train_kernel[0] inducing_points = _pivoted_cholesky_init( train_inputs=input_element, kernel_matrix=kernel_element, max_length=num_inducing, ) # batched input cases else: batched_inputs = ( inputs.expand(*input_batch_shape, -1, -1) if inputs.ndimension() == 2 else inputs ) reshaped_inputs = batched_inputs.flatten(end_dim=-3) inducing_points = [] for input_element in reshaped_inputs: # the extra kernel evals are a little wasteful but make it # easier to infer the task batch size kernel_element = covar_module(input_element).evaluate_kernel() # handle extra task batch dimension kernel_element = ( kernel_element[0] if kernel_element.ndimension() == 3 else kernel_element ) inducing_points.append( _pivoted_cholesky_init( train_inputs=input_element, kernel_matrix=kernel_element, max_length=num_inducing, ) ) inducing_points = torch.stack(inducing_points).view( *input_batch_shape, num_inducing, -1 ) return inducing_points def _pivoted_cholesky_init( train_inputs: Tensor, kernel_matrix: Union[Tensor, LazyTensor], max_length: int, epsilon: float = 1e-6, ) -> Tensor: r""" A pivoted cholesky initialization method for the inducing points, originally proposed in [burt2020svgp]_ with the algorithm itself coming from [chen2018dpp]_. Code is a PyTorch version from [chen2018dpp]_, copied from https://github.com/laming-chen/fast-map-dpp/blob/master/dpp.py. Args: train_inputs: training inputs (of shape n x d) kernel_matrix: kernel matrix on the training inputs max_length: number of inducing points to initialize epsilon: numerical jitter for stability. Returns: max_length x d tensor of the training inputs corresponding to the top max_length pivots of the training kernel matrix """ # this is numerically equivalent to iteratively performing a pivoted cholesky # while storing the diagonal pivots at each iteration # TODO: use gpytorch's pivoted cholesky instead once that gets an exposed list # TODO: ensure this works in batch mode, which it does not currently. item_size = kernel_matrix.shape[-2] cis = torch.zeros( (max_length, item_size), device=kernel_matrix.device, dtype=kernel_matrix.dtype ) di2s = kernel_matrix.diag() selected_items = [] selected_item = torch.argmax(di2s) selected_items.append(selected_item) while len(selected_items) < max_length: k = len(selected_items) - 1 ci_optimal = cis[:k, selected_item] di_optimal = torch.sqrt(di2s[selected_item]) elements = kernel_matrix[..., selected_item, :] eis = (elements - torch.matmul(ci_optimal, cis[:k, :])) / di_optimal cis[k, :] = eis di2s = di2s - eis.pow(2.0) di2s[selected_item] = NEG_INF selected_item = torch.argmax(di2s) if di2s[selected_item] < epsilon: break selected_items.append(selected_item) ind_points = train_inputs[torch.stack(selected_items)] return ind_points
django_quill/quill.py
LeeHanYeong/django-quill-editor
125
142013
import json __all__ = ( "QuillParseError", "Quill", ) class QuillParseError(Exception): def __init__(self, value): self.value = value def __str__(self): return "Failed to parse value(%s)" % self.value class Quill: def __init__(self, json_string): try: self.json_string = json_string json_data = json.loads(json_string) self.delta = json_data["delta"] self.html = json_data.get("html", "") except (json.JSONDecodeError, KeyError, TypeError): raise QuillParseError(json_string)
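# Illustrative usage sketch (not part of the original file): parsing the JSON
# payload the Quill editor stores, and the error raised on malformed input.
if __name__ == "__main__":
    payload = json.dumps({
        "delta": {"ops": [{"insert": "Hello, Quill!\n"}]},
        "html": "<p>Hello, Quill!</p>",
    })
    quill = Quill(payload)
    print(quill.delta, quill.html)
    try:
        Quill("not valid json")
    except QuillParseError as exc:
        print(exc)  # Failed to parse value(not valid json)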
midi_ddsp/modules/interpretable_conditioning.py
magenta/midi-ddsp
169
142018
<reponame>magenta/midi-ddsp # Copyright 2022 The MIDI-DDSP Authors. # # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # # http://www.apache.org/licenses/LICENSE-2.0 # # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Calculate note expression controls from synthesis parameters.""" import tensorflow as tf import ddsp import ddsp.training import math def extract_harm_controls(synth_params, log_scale=True, stop_gradient=False): """Get harmonic synth controls from the outputs of the processor group.""" f0 = synth_params['harmonic']['controls']['f0_hz'] amps = synth_params['harmonic']['controls']['amplitudes'] hd = synth_params['harmonic']['controls']['harmonic_distribution'] noise = synth_params['filtered_noise']['controls']['magnitudes'] if log_scale: amps = ddsp.core.amplitude_to_db(amps, use_tf=True) noise = ddsp.core.amplitude_to_db(noise, use_tf=True) if stop_gradient: amps = tf.stop_gradient(amps) hd = tf.stop_gradient(hd) noise = tf.stop_gradient(noise) return f0, amps, hd, noise def get_pitch_deviation(f0_midi, f0, mask_large_diff=True): # f0_midi: [batch_size,T,1] f0_midi_midi_scale = ddsp.core.hz_to_midi(f0_midi) f0_midi_scale = ddsp.core.hz_to_midi(f0) pitch_deviation = f0_midi_midi_scale - f0_midi_scale if mask_large_diff: pitch_deviation = tf.where(tf.greater(tf.abs(pitch_deviation), 2.0), 0.0, pitch_deviation) return pitch_deviation def get_amplitude(amplitude): # amplitude: [batch_size,T,1] return tf.convert_to_tensor(amplitude) def get_brightness(harmonic_distribution): # weighted average across harmonic_distribution # harmonic_distribution: [batch, nframe, nharmonics], # already scaled and normalized nharmonics = harmonic_distribution.shape[-1] num_bin = tf.cast(tf.linspace(1, nharmonics, num=nharmonics), tf.float32)[ tf.newaxis, tf.newaxis, :] return tf.reduce_mean(harmonic_distribution * num_bin, axis=-1, keepdims=True) def get_noise_level(noise_magnitudes): # noise_magnitudes: [n_batch, n_frames, n_frequencies] noise_amp = tf.reduce_mean(noise_magnitudes, axis=-1, keepdims=True) return noise_amp def get_normal_window(t, alpha=0.5): w = 0.5 * (1 - tf.math.cos(2 * math.pi * t / alpha)) mask = tf.cast(tf.logical_and(t > 0.5 * alpha, t < (1 - 0.5 * alpha)), tf.float32) w = tf.clip_by_value(w + mask, 0, 1) return w def get_vibrato_feature(pitch_deviation, note_mask, sampling_interval=0.004, min_note_length=50, vibrato_rate_min=3, vibrato_rate_max=9): batch_size = pitch_deviation.shape[0] total_length = pitch_deviation.shape[1] pitch_deviation_masked = note_mask * pitch_deviation pv_mean = ddsp.training.nn.pool_over_notes(pitch_deviation, note_mask, return_std=False) pitch_deviation_masked = note_mask * ( pitch_deviation_masked - pv_mean) # filter out DC component each_note_idx = tf.cumsum(note_mask, axis=1) * tf.cast(~(note_mask == 0), tf.float32) each_note_len = tf.reduce_sum(note_mask, axis=1, keepdims=True) each_note_time_ratio = each_note_idx / each_note_len window = get_normal_window(each_note_time_ratio) pitch_deviation_masked *= window # [batch_size*max_regions, n_frames] pitch_deviation_masked = tf.reshape( tf.transpose(pitch_deviation_masked, [0, 2, 1]), [-1, total_length]) T 
= sampling_interval N = pitch_deviation.shape[1] f = tf.linspace(0, int(1 / T), N) s_vibrato = tf.abs(tf.signal.rfft(pitch_deviation_masked)) s_vibrato = tf.math.divide_no_nan(s_vibrato, tf.reshape(each_note_len, [-1, 1])) vibrato_rate_idx = tf.argmax(tf.cast(s_vibrato, tf.float32), -1) vibrato_rate = tf.cast(tf.gather(f, vibrato_rate_idx), tf.float32) vibrato_extend = tf.gather_nd(s_vibrato, vibrato_rate_idx[:, tf.newaxis], batch_dims=1) # replace nan caused by rfft zeros with 0 vibrato_extend = tf.where(tf.math.is_nan(vibrato_extend), 0, vibrato_extend) # filter out vibrato between 3-9 hz vibrato_mask = tf.math.logical_and(vibrato_rate >= vibrato_rate_min, vibrato_rate <= vibrato_rate_max) # filter out vibrato extend > 0.012 # vibrato_mask = tf.math.logical_and(vibrato_mask, vibrato_extend > 0.012) # note length > 50 frames vibrato_mask = tf.math.logical_and(vibrato_mask, tf.reshape(each_note_len, [-1]) > min_note_length) # vibrato more than one cycle more_than_one_cycle_mask = vibrato_rate > tf.math.divide_no_nan( 1., tf.reshape(each_note_len, [-1]) * sampling_interval) vibrato_mask = tf.math.logical_and(vibrato_mask, more_than_one_cycle_mask) vibrato_mask = tf.cast(vibrato_mask, tf.float32) # construct output vibrato_extend = vibrato_mask * vibrato_extend vibrato_rate = vibrato_mask * vibrato_rate frame_wise_vibrato_rate = tf.reduce_sum( tf.reshape(vibrato_rate, [batch_size, 1, -1]) * note_mask, axis=-1, keepdims=True) frame_wise_vibrato_extend = tf.reduce_sum( tf.reshape(vibrato_extend, [batch_size, 1, -1]) * note_mask, axis=-1, keepdims=True) return frame_wise_vibrato_rate, frame_wise_vibrato_extend def get_amplitudes_max_pos(amplitudes, note_mask): note_mask_reverse = tf.cast(tf.logical_not(tf.cast(note_mask, tf.bool)), tf.float32) amplitudes_masked = note_mask * amplitudes + note_mask_reverse * -1000 # multiply -1000 here preventing argmax to the mask each_note_idx = tf.cumsum(note_mask, axis=1) * tf.cast(~(note_mask == 0), tf.float32) each_note_len = tf.reduce_max(each_note_idx, axis=1, keepdims=True) note_onset_index = tf.argmax(note_mask, axis=1) # index inside a note that achieves max amplitudes amplitudes_max_idx = tf.argmax(amplitudes_masked, axis=1) - note_onset_index amplitudes_max_pos = tf.math.divide_no_nan( tf.cast(amplitudes_max_idx[:, tf.newaxis, :], tf.float32), each_note_len) amplitudes_max_pos = tf.reduce_sum(amplitudes_max_pos * note_mask, axis=-1, keepdims=True) return amplitudes_max_pos def get_attack_level(noise_level, note_mask): each_note_idx = tf.cumsum(note_mask, axis=1) * tf.cast(~(note_mask == 0), tf.float32) attack_mask = tf.cast(tf.logical_and(each_note_idx > 0, each_note_idx <= 10), tf.float32) # pool over first 10 frames # [b, n, d] attack_notes_mean = ddsp.training.nn.get_note_moments(noise_level, attack_mask, return_std=False) # [b, t, n, d] attack_time_notes_mean = (attack_notes_mean[:, tf.newaxis, ...] 
* note_mask[..., tf.newaxis]) # [b, t, d] attack_level = tf.reduce_sum(attack_time_notes_mean, axis=2) return attack_level def get_interpretable_conditioning(f0_midi, f0, amplitude, harmonic_distribution, noise_magnitudes): """Calculate conditioning from synthesis needed for calculating note expression controls.""" pitch_deviation = get_pitch_deviation(f0_midi, f0) amplitude = get_amplitude(amplitude) brightness = get_brightness(harmonic_distribution) noise_level = get_noise_level(noise_magnitudes) conditioning = { 'pitch_deviation': pitch_deviation, 'amplitude': amplitude, 'brightness': brightness, 'noise_level': noise_level, } return conditioning def adsr_get_note_mask(q_pitch, max_regions=200, note_on_only=True): """Get a binary mask for each note from a monophonic instrument based on ADSR model. This function is modified from ddsp.training.nn.get_note_mask. In addition to the note boundary, it individually pool the first (A) and last few frames (D+R) of a note. Each transition of the value creates a new region. Returns the mask of each region. Args: q_pitch: A quantized value, such as pitch or velocity. Shape [batch, n_timesteps] or [batch, n_timesteps, 1]. max_regions: Maximum number of note regions to consider in the sequence. Also, the channel dimension of the output mask. Each value transition defines a new region, e.g. each note-on and note-off count as a separate region. note_on_only: Return a mask that is true only for regions where the pitch is greater than 0. Returns: A binary mask of each region [batch, n_timesteps, max_regions]. """ # Only batch and time dimensions. if len(q_pitch.shape) == 3: q_pitch = q_pitch[:, :, 0] # Get onset and offset points. edges = tf.abs(ddsp.core.diff(q_pitch, axis=1)) > 0 # Count endpoints as starts/ends of regions. edges = edges[:, :-1, ...] edges = tf.pad(edges, [[0, 0], [1, 0]], mode='constant', constant_values=True) edges = tf.pad(edges, [[0, 0], [0, 1]], mode='constant', constant_values=False) edges = tf.cast(edges, tf.int32) # Count up onset and offsets for each timestep. # Assumes each onset has a corresponding offset. # The -1 ensures that the 0th index is the first note. edge_idx = tf.cumsum(edges, axis=1) - 1 # Create masks of shape [batch, n_timesteps, max_regions]. 
note_mask = edge_idx[..., None] == tf.range(max_regions)[None, None, :] note_mask = tf.cast(note_mask, tf.float32) if note_on_only: # [batch, notes] note_pitches = ddsp.training.nn.get_note_moments(q_pitch, note_mask, return_std=False) # [batch, time, notes] note_on = tf.cast(note_pitches > 0.0, tf.float32)[:, None, :] # [batch, time, notes] note_mask *= note_on note_on_time_dim = tf.reduce_sum(note_mask, axis=-1) # note_on in time dimension # frame index for each note each_note_idx = tf.cumsum(note_mask, axis=1) * tf.cast(~(note_mask == 0), tf.float32) each_note_idx_reverse = tf.cumsum(note_mask, axis=1, reverse=True) * tf.cast( ~(note_mask == 0), tf.float32) each_note_len = tf.reduce_max(each_note_idx, axis=1, keepdims=True) * tf.cast( each_note_idx > 0, tf.float32) each_note_idx_reduce = tf.reduce_sum(each_note_idx, axis=-1) each_note_idx_reverse_reduce = tf.reduce_sum(each_note_idx_reverse, axis=-1) each_note_len_reduce = tf.reduce_sum(each_note_len, axis=-1) attack_mask = tf.math.logical_and(each_note_idx_reduce == 10, each_note_len_reduce >= 50) decay_mask = tf.math.logical_and(each_note_idx_reverse_reduce == 10, each_note_len_reduce >= 50) edges_adsr = edges + tf.cast(attack_mask, tf.int32) + tf.cast(decay_mask, tf.int32) edge_idx_adsr = tf.cumsum(edges_adsr, axis=1) - 1 # Create masks of shape [batch, n_timesteps, max_regions]. note_mask_adsr = edge_idx_adsr[..., None] == tf.range(max_regions)[None, None, :] note_mask_adsr = tf.cast(note_mask_adsr, tf.float32) if note_on_only: note_mask_adsr *= note_on_time_dim[..., tf.newaxis] return note_mask_adsr def get_conditioning_dict(conditioning, q_pitch, onsets, pool_type='note_pooling'): """Calculate note expression controls.""" # conditioning: dict of conditioning if pool_type == 'note_pooling': note_mask = ddsp.training.nn.get_note_mask_from_onset(q_pitch, onsets) elif pool_type == 'adsr_note_pooling': note_mask = adsr_get_note_mask(q_pitch) amp_mean, amp_std = ddsp.training.nn.pool_over_notes( conditioning['amplitude'], note_mask, return_std=True) brightness = ddsp.training.nn.pool_over_notes(conditioning['brightness'], note_mask, return_std=False) attack_level = get_attack_level(conditioning['noise_level'], note_mask) vibrato_rate, vibrato_extend = get_vibrato_feature( conditioning['pitch_deviation'], note_mask) amplitudes_max_pos = get_amplitudes_max_pos(conditioning['amplitude'], note_mask) # scale conditioning so that most value are in [0, 1] # TODO: (yusongwu) enable automatic scaling amp_mean = tf.where(tf.equal(amp_mean, 0.0), 0.0, amp_mean / 60 + 1.5) amp_std *= (2.5 / 60) vibrato_extend *= 10 brightness *= 5 attack_level = tf.where(tf.equal(attack_level, 0.0), 0.0, attack_level / 40 + 2.625) conditioning_dict = { 'volume': amp_mean, 'vol_fluc': amp_std, 'vibrato': vibrato_extend, 'brightness': brightness, 'attack': attack_level, # 'vibrato_rate': vibrato_rate, 'vol_peak_pos': amplitudes_max_pos, } return conditioning_dict
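# Illustrative sketch (not part of the original file): the Hann-style window
# from get_normal_window() de-emphasizes note boundaries before the vibrato
# FFT. With alpha=0.5 it ramps up over the first quarter of the note, holds at
# 1.0 through the middle, and ramps down over the last quarter.
def _demo_normal_window():
  t = tf.linspace(0.0, 1.0, 11)        # normalized position within a note
  w = get_normal_window(t, alpha=0.5)  # window values in [0, 1]
  return t, w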
plan2explore/tools/count_dataset.py
sarthak268/plan2explore
189
142056
# Copyright 2019 The Dreamer Authors. Copyright 2020 Plan2Explore Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import numpy as np import tensorflow as tf def count_dataset(directory): directory = os.path.expanduser(directory) if not tf.gfile.Exists(directory): message = "Data set directory '{}' does not exist." raise ValueError(message.format(directory)) pattern = os.path.join(directory, '*.npz') def func(): filenames = tf.gfile.Glob(pattern) episodes = len(filenames) episodes = np.array(episodes, dtype=np.int32) return episodes return tf.py_func(func, [], tf.int32)
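# Illustrative usage sketch (not part of the original file): count_dataset()
# returns a tf.py_func op (TF1 graph mode), so the episode count is only
# materialized when the op is run in a session. The directory is a placeholder.
def _demo_count(directory='~/logdir/episodes'):
  count_op = count_dataset(directory)
  with tf.Session() as sess:
    return sess.run(count_op)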
django/contrib/gis/db/backends/mysql/base.py
webjunkie/django
790
142074
from django.db.backends.mysql.base import * from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper from django.contrib.gis.db.backends.mysql.creation import MySQLCreation from django.contrib.gis.db.backends.mysql.introspection import MySQLIntrospection from django.contrib.gis.db.backends.mysql.operations import MySQLOperations class DatabaseWrapper(MySQLDatabaseWrapper): def __init__(self, *args, **kwargs): super(DatabaseWrapper, self).__init__(*args, **kwargs) self.creation = MySQLCreation(self) self.ops = MySQLOperations(self) self.introspection = MySQLIntrospection(self)
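# Illustrative usage note (not part of the original file): this wrapper is
# normally selected via the database settings rather than imported directly.
# On Django versions that use the dotted ENGINE setting, a hypothetical
# settings.py would reference the backend package like so:
#
#   DATABASES = {
#       'default': {
#           'ENGINE': 'django.contrib.gis.db.backends.mysql',
#           'NAME': 'geodb',
#       },
#   }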
action_cls/msr_dataset.py
HarmoniaLeo/meteornet
127
142111
'''
    Data provider for the MSR action dataset from <NAME>
'''
import os
import os.path
import json
import numpy as np
import sys
import pickle
import glob

class Dataset():
    def __init__(self, \
            root='/scr1/mengyuan/ICCV-data/MSR_processed', \
            num_points = 8192, \
            num_frames=2, skip_frames=1, \
            train=True):
        self.num_points = num_points
        self.num_frames = num_frames
        self.skip_frames = skip_frames # sample frames i, i+skip_frames, i+2*skip_frames, ...
        self.train = train
        self.root = root

        self.datapath = os.listdir(self.root)
        if train:
            self.datapath = [d for d in self.datapath if int(d.split('_')[1].split('s')[1]) <= 5]
        else:
            self.datapath = [d for d in self.datapath if int(d.split('_')[1].split('s')[1]) > 5]
        self.datapath = [d.split('.')[0] for d in self.datapath]

        self.data = []
        self.label = []
        self.index_map = []
        self.load_data()
        self.shuffle()

    def load_data(self):
        # takes about 5G memory to load
        for i,file in enumerate(self.datapath):
            result = np.load(os.path.join(self.root, file+'.npz'))
            self.data.append(result['point_clouds'])
            self.label.append(int(file.split('_')[0][1:])-1)
            nframes = result['point_clouds'].shape[0]
            for t in range(0, nframes-self.skip_frames*(self.num_frames-1), self.skip_frames):
                self.index_map.append((i,t))

    def shuffle(self):
        self.indices = np.arange(len(self.index_map))
        if self.train:
            np.random.shuffle(self.indices)

    def __getitem__(self, idx):
        id, t = self.index_map[self.indices[idx]]
        points = [self.data[id][t+i*self.skip_frames] for i in range(self.num_frames)]

        for i,p in enumerate(points):
            if p.shape[0] > self.num_points:
                index = np.random.choice(p.shape[0], size=self.num_points, replace=False)
            else:
                repeat, residue = self.num_points // p.shape[0], self.num_points % p.shape[0]
                index = np.random.choice(p.shape[0], size=residue, replace=False)
                index = np.concatenate([np.arange(p.shape[0]) for _ in range(repeat)] + [index], axis=0)
            points[i] = p[index, :]
        points = np.array(points)

        if self.train:
            # scale the points
            scales = np.random.uniform(0.9, 1.1, size=3)
            points = points * scales

        points = points / 300

        return points, self.label[id], id

    def __len__(self):
        return len(self.index_map)

if __name__ == '__main__':
    d = Dataset(num_points=8192)
    print(len(d))
    import time
    tic = time.time()
    for i in range(100):
        # __getitem__ returns a (points, label, video id) tuple
        points, label, vid = d[i]
        print(points.shape)
    print(time.time() - tic)
examples/gzipfilter.py
Datatope/mod_python
183
142117
#
# Usage:
# <Directory /where/ever>
#   PythonOutputFilter gzipfilter
#   SetOutputFilter gzipfilter
# </Directory>

from mod_python import apache

import os
import sys
import gzip
import cStringIO

def compress(s):
    sio = cStringIO.StringIO()
    f = gzip.GzipFile(mode='wb', fileobj=sio)
    f.write(s)
    f.close()
    return sio.getvalue()

def accepts_gzip(req):
    if req.headers_in.has_key('accept-encoding'):
        encodings = req.headers_in['accept-encoding']
        return (encodings.find("gzip") != -1)
    return 0

###
### main filter function
###

def outputfilter(filter):

    if (filter.req.main or
        not accepts_gzip(filter.req)):

        # Presence of filter.req.main tells us that
        # we are in a subrequest. We don't want to compress
        # the data more than once, so we pass_on() in
        # subrequests. We also pass_on() if the client
        # does not accept gzip encoding, of course.

        filter.pass_on()

    else:

        if not filter.req.sent_bodyct:

            # the above test allows us to set the encoding once
            # rather than every time the filter is invoked

            filter.req.headers_out['content-encoding'] = 'gzip'

        # loop through content, compressing

        s = filter.read()

        while s:
            s = compress(s)
            filter.write(s)
            s = filter.read()

        if s is None:

            # this means we received an EOS, so we pass it on
            # by closing the filter

            filter.close()
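# Illustrative sketch (not part of the original file): a quick sanity check of
# compress(). Each call emits a complete gzip member, and concatenated members
# still form a valid gzip stream, which is why compressing chunk-by-chunk in
# the filter loop above produces a decompressible response body.
def _demo_roundtrip():
    body = compress("hello ") + compress("world")
    return gzip.GzipFile(fileobj=cStringIO.StringIO(body)).read()  # "hello world"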
t5_closed_book_qa/t5_cbqa/postprocessors.py
deepneuralmachine/google-research
23,901
142127
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Lint as: python3
"""T5 CBQA postprocessors."""

import numpy as np
import tensorflow.compat.v1 as tf


def natural_questions(output, prefix="answer:", example=None, is_target=False):
  """Get answers from predictions and targets.

  The predictions will contain a single set of one or more answers. The
  example may contain multiple sets of answers from different annotators. We
  return an answer group for each annotation, even if it's empty.

  Args:
    output: str, target or prediction in text format.
    prefix: str, prefix expected before each answer.
    example: dict, input example.
    is_target: bool, whether the input was ground truth (True) or prediction
      (False).

  Returns:
    a list of answer tuples.
  """
  if is_target:
    answer_groups = []
    short_answers = np.split(
        example["short_answers/values"],
        example["short_answers/row_starts"][1:])
    yes_no_answers = example["yes_no_answers"]
    if len(short_answers) != len(yes_no_answers):
      raise ValueError(
          "Number of annotations not consistent: %d vs %d" %
          (len(short_answers), len(yes_no_answers)))
    for short_ans_grp, y_n_ans in zip(short_answers, yes_no_answers):
      # Annotators cannot provide both y/n and short answers. Note the explicit
      # len() check: numpy arrays with more than one element raise on bare
      # truthiness tests.
      if y_n_ans > -1 and len(short_ans_grp):
        raise ValueError(
            "Annotation cannot include both yes/no and short answers.")
      if y_n_ans == 0:
        answer_groups.append(("no",))
      elif y_n_ans == 1:
        answer_groups.append(("yes",))
      else:
        answer_groups.append(
            tuple(tf.compat.as_text(ans) for ans in short_ans_grp)
        )
  else:
    answer_groups = [
        tuple(s.strip() for s in output.split(prefix)[1:])
    ]

  return answer_groups
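# Illustrative usage sketch (not part of the original file): postprocessing a
# prediction string versus a fabricated target example. The feature values
# below are made-up stand-ins for real Natural Questions features.
def _demo_natural_questions():
  prediction = natural_questions("answer: july 4 1776 answer: 1776")
  example = {
      "short_answers/values": np.array([b"july 4 1776", b"1776"]),
      "short_answers/row_starts": np.array([0, 1]),
      "yes_no_answers": np.array([-1, -1]),
  }
  target = natural_questions("", example=example, is_target=True)
  # prediction == [("july 4 1776", "1776")]
  # target == [("july 4 1776",), ("1776",)]
  return prediction, target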
Beginner/Chef and Interactive Contests (CHFINTRO)/interactive.py
anishsingh42/CodeChef
127
142139
<gh_stars>100-1000 N, r = map(int, input().split()) for i in range(N): R = int(input()) if R>=r: print('Good boi') else: print('Bad boi')
cupcake2/io/SeqSplitter.py
ArthurDondi/cDNA_Cupcake
205
142143
#!/usr/bin/env python """Define Class `FastaSplitter` which splits a fasta file into smaller files each containing `reads_per_split` reads.""" import os import os.path as op import sys from Bio import SeqIO from pbtranscript.Utils import mkdir class FaFqSplitter(object): """ Splits a fasta/fastq file into smaller chunks with a given prefix. """ def __init__(self, input_fa_or_fq, reads_per_split, out_dir, out_format, is_fq): self.input_fa_or_fq = input_fa_or_fq self.is_fq = is_fq self.out_dir = out_dir self.reads_per_split = reads_per_split # Number of reads per split self.out_format = out_format self.out_fns = None mkdir(self.out_dir) def __str__(self): if self.out_fns is None or len(self.out_fns) == 0: return "{input_fasta} ".format(input_fasta=self.input_fa_or_fq) + \ "will be splitted into files each has " + \ "{n} reads.".format(n=self.reads_per_split) else: return "{input_fasta} has been splitted into ".\ format(input_fasta=self.input_fa_or_fq) + \ "{m} files each has {n} reads:\n".\ format(m=len(self.out_fns), n=self.reads_per_split) + ";".join(self.out_fns) def _out_fn(self, split_index): """Return name of the `split_index`-th splitted file.""" if split_index > 999: raise ValueError("Too many splitted files to generate: number " + "of splitted files exceed 1000.") name = self.out_format.format(split_index) return op.join(self.out_dir, name) def split(self, reads_in_first_split=None): """Split `input_fasta` into smaller files each containing `reads_per_split` reads. Return splitted fasta.""" split_index = 0 self.out_fns = [] writer = open(self._out_fn(split_index), 'w') self.out_fns.append(self._out_fn(split_index)) if reads_in_first_split is None: reads_in_first_split = self.reads_per_split io_format = 'fastq' if self.is_fq else 'fasta' reader = SeqIO.parse(open(self.input_fa_or_fq), io_format) for ridx, r in enumerate(reader): if ((split_index == 0 and ridx == reads_in_first_split) or (split_index > 0 and ridx % self.reads_per_split == 0)) \ and ridx != 0: split_index += 1 writer.close() writer = open(self._out_fn(split_index), 'w') self.out_fns.append(self._out_fn(split_index)) SeqIO.write(r, writer, io_format) writer.close() return list(self.out_fns) def rmOutFNs(self): """Remove splitted files.""" for f in self.out_fns: os.remove(f) self.out_fns = [] def splitFaFq(input_fa_or_fq, reads_per_split, out_dir, out_format, is_fq, reads_in_first_split=None): """ Split input_fasta into small fasta files each containing at most reads_per_split reads. All splitted fasta files will be placed under out_dir with out_prefix. Return paths to splitted files in a list. """ obj = FaFqSplitter(input_fa_or_fq=input_fa_or_fq, reads_per_split=reads_per_split, out_dir=out_dir, out_format=out_format, is_fq=is_fq) return obj.split(reads_in_first_split=reads_in_first_split) def get_args(): """Get arguments.""" import argparse parser = argparse.ArgumentParser( description="Split a fasta/fastq file into smaller chunks.") parser.add_argument("input_fa_or_fq", type=str, help="Input fasta/fastq to be splitted.") parser.add_argument("reads_per_split", type=int, help="Reads per split.") parser.add_argument("out_dir", type=str, help="Output directory.") parser.add_argument("out_format", type=str, help="Output files format. 
(ex: \"input_split.{0:03d}.fasta\"") parser.add_argument("--is_fq", default=False, action="store_true", help="Is fastq (default: False)") this_args = parser.parse_args() return this_args def main(): """Main function, split a fasta into smaller chunks.""" import logging from pbtranscript.__init__ import get_version log = logging.getLogger(__name__) args = get_args() from pbtranscript.Utils import setup_log setup_log(alog=log, level=logging.DEBUG) log.info("Running {f} v{v}.".format(f=op.basename(__file__), v=get_version())) splitFaFq(input_fa_or_fq=args.input_fa_or_fq, reads_per_split=args.reads_per_split, out_dir=args.out_dir, out_format=args.out_format, is_fq=args.is_fq) if __name__ == "__main__": sys.exit(main())
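# Illustrative usage sketch (not part of the original file): splitting a fasta
# file into chunks of 1000 reads. The input path and output directory are
# hypothetical placeholders.
def _demo_split():
    out_fns = splitFaFq(input_fa_or_fq="reads.fasta",
                        reads_per_split=1000,
                        out_dir="reads_split_out",
                        out_format="reads_split.{0:03d}.fasta",
                        is_fq=False)
    return out_fns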
models/dnn/dnn.py
Zhenyu-Lii/2021-Audio-Recognition
359
142168
<reponame>Zhenyu-Lii/2021-Audio-Recognition
import os
from typing import Optional
from abc import ABC, abstractmethod

import numpy as np
from keras.models import Sequential, model_from_json

from ..base import BaseModel
from utils import curve


class DNN(BaseModel, ABC):
    """
    Base class for all Keras-based deep learning models

    Args:
        n_classes (int): number of label classes
        lr (float): learning rate
    """
    def __init__(self, model: Sequential, trained: bool = False) -> None:
        super(DNN, self).__init__(model, trained)
        print(self.model.summary())

    def save(self, path: str, name: str) -> None:
        """
        Save the model

        Args:
            path (str): model directory
            name (str): model file name
        """
        h5_save_path = os.path.join(path, name + '.h5')
        self.model.save_weights(h5_save_path)

        save_json_path = os.path.join(path, name + '.json')
        with open(save_json_path, "w") as json_file:
            json_file.write(self.model.to_json())

    @classmethod
    def load(cls, path: str, name: str):
        """
        Load a model

        Args:
            path (str): model directory
            name (str): model file name
        """
        # load the model architecture from json
        model_json_path = os.path.abspath(os.path.join(path, name + '.json'))
        json_file = open(model_json_path, 'r')
        loaded_model_json = json_file.read()
        json_file.close()
        model = model_from_json(loaded_model_json)

        # load the weights
        model_path = os.path.abspath(os.path.join(path, name + '.h5'))
        model.load_weights(model_path)

        return cls(model, True)

    def train(
        self,
        x_train: np.ndarray,
        y_train: np.ndarray,
        x_val: Optional[np.ndarray] = None,
        y_val: Optional[np.ndarray] = None,
        batch_size: int = 32,
        n_epochs: int = 50
    ) -> None:
        """
        Train the model

        Args:
            x_train (np.ndarray): training set samples
            y_train (np.ndarray): training set labels
            x_val (np.ndarray, optional): validation set samples
            y_val (np.ndarray, optional): validation set labels
            batch_size (int): batch size
            n_epochs (int): number of epochs
        """
        if x_val is None or y_val is None:
            x_val, y_val = x_train, y_train
        x_train, x_val = self.reshape_input(x_train), self.reshape_input(x_val)

        history = self.model.fit(
            x_train, y_train,
            batch_size = batch_size,
            epochs = n_epochs,
            shuffle = True,  # shuffle the training data before each epoch
            validation_data = (x_val, y_val)
        )

        # loss and accuracy on the training set
        acc = history.history['acc']
        loss = history.history['loss']
        # loss and accuracy on the validation set
        val_acc = history.history['val_acc']
        val_loss = history.history['val_loss']
        curve(acc, val_acc, 'Accuracy', 'acc')
        curve(loss, val_loss, 'Loss', 'loss')

        self.trained = True

    def predict(self, samples: np.ndarray) -> np.ndarray:
        """
        Predict the emotion of audio samples

        Args:
            samples (np.ndarray): audio features to classify

        Returns:
            results (np.ndarray): prediction results
        """
        # no model has been trained or loaded yet
        if not self.trained:
            raise RuntimeError('There is no trained model.')

        samples = self.reshape_input(samples)
        return np.argmax(self.model.predict(samples), axis=1)

    @abstractmethod
    def reshape_input(self):
        pass
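# Illustrative sketch (not part of the original file): DNN is abstract, so a
# concrete subclass only needs to implement reshape_input(). A hypothetical
# recurrent model might add a timestep axis for Keras LSTM layers:
class _ExampleLSTM(DNN):
    def reshape_input(self, data: np.ndarray) -> np.ndarray:
        # (n_samples, n_features) -> (n_samples, 1, n_features)
        return np.reshape(data, (data.shape[0], 1, data.shape[1]))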
flask_rest_jsonapi/data_layers/filtering/alchemy.py
fossasia/flask-rest-jsonapi
1,757
142208
<gh_stars>1000+ # -*- coding: utf-8 -*- from sqlalchemy import and_, or_, not_ from flask import current_app from flask_rest_jsonapi.exceptions import InvalidFilters from flask_rest_jsonapi.schema import get_relationships, get_model_field def create_filters(model, filter_info, resource): """Apply filters from filters information to base query :param DeclarativeMeta model: the model of the node :param dict filter_info: current node filter information :param Resource resource: the resource """ filters = [] for filter_ in filter_info: filters.append(Node(model, filter_, resource, resource.schema).resolve()) return filters class Node(object): def __init__(self, model, filter_, resource, schema): self.model = model self.filter_ = filter_ self.resource = resource self.schema = schema def resolve(self): if 'or' not in self.filter_ and 'and' not in self.filter_ and 'not' not in self.filter_: value = self.value if isinstance(value, dict): value = Node(self.related_model, value, self.resource, self.related_schema).resolve() if '__' in self.filter_.get('name', ''): value = {self.filter_['name'].split('__')[1]: value} if isinstance(value, dict): return getattr(self.column, self.operator)(**value) else: return getattr(self.column, self.operator)(value) if 'or' in self.filter_: return or_(Node(self.model, filt, self.resource, self.schema).resolve() for filt in self.filter_['or']) if 'and' in self.filter_: return and_(Node(self.model, filt, self.resource, self.schema).resolve() for filt in self.filter_['and']) if 'not' in self.filter_: return not_(Node(self.model, self.filter_['not'], self.resource, self.schema).resolve()) @property def name(self): """Return the name of the node or raise a BadRequest exception :return str: the name of the field to filter on """ name = self.filter_.get('name') if name is None: raise InvalidFilters("Can't find name of a filter") if '__' in name: name = name.split('__')[0] if current_app.config.get('DASHERIZE_API') == True: name = name.replace('-', '_') if name not in self.schema._declared_fields: raise InvalidFilters("{} has no attribute {}".format(self.schema.__name__, name)) return name @property def op(self): """Return the operator of the node :return str: the operator to use in the filter """ try: return self.filter_['op'] except KeyError: raise InvalidFilters("Can't find op of a filter") @property def column(self): """Get the column object :param DeclarativeMeta model: the model :param str field: the field :return InstrumentedAttribute: the column to filter on """ field = self.name model_field = get_model_field(self.schema, field) try: return getattr(self.model, model_field) except AttributeError: raise InvalidFilters("{} has no attribute {}".format(self.model.__name__, model_field)) @property def operator(self): """Get the function operator from his name :return callable: a callable to make operation on a column """ operators = (self.op, self.op + '_', '__' + self.op + '__') for op in operators: if hasattr(self.column, op): return op raise InvalidFilters("{} has no operator {}".format(self.column.key, self.op)) @property def value(self): """Get the value to filter on :return: the value to filter on """ if self.filter_.get('field') is not None: try: result = getattr(self.model, self.filter_['field']) except AttributeError: raise InvalidFilters("{} has no attribute {}".format(self.model.__name__, self.filter_['field'])) else: return result else: if 'val' not in self.filter_: raise InvalidFilters("Can't find value or field in a filter") return self.filter_['val'] 
@property def related_model(self): """Get the related model of a relationship field :return DeclarativeMeta: the related model """ relationship_field = self.name if relationship_field not in get_relationships(self.schema).values(): raise InvalidFilters("{} has no relationship attribute {}".format(self.schema.__name__, relationship_field)) relationship_model_field = get_model_field(self.schema, relationship_field) return getattr(self.model, relationship_model_field).property.mapper.class_ @property def related_schema(self): """Get the related schema of a relationship field :return Schema: the related schema """ relationship_field = self.name if relationship_field not in get_relationships(self.schema).values(): raise InvalidFilters("{} has no relationship attribute {}".format(self.schema.__name__, relationship_field)) return self.schema._declared_fields[relationship_field].schema.__class__
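# A sketch of the plain-dict filter structures that create_filters/Node
# consume; in practice these come from the JSON:API "filter" query parameter.
# The field names below are hypothetical.
simple_filter = {'name': 'title', 'op': 'eq', 'val': 'Hello'}

# Boolean nodes nest sub-filters and map to sqlalchemy's and_/or_/not_:
combined_filter = {'and': [
    {'name': 'created_at', 'op': 'ge', 'val': '2017-01-01'},
    {'or': [{'name': 'status', 'op': 'eq', 'val': 'open'},
            {'not': {'name': 'status', 'op': 'eq', 'val': 'draft'}}]},
]}

# A relationship filter nests another filter dict as its value; "author" must
# be a relationship field on the schema:
relationship_filter = {'name': 'author', 'op': 'has',
                       'val': {'name': 'name', 'op': 'like', 'val': '%Doe%'}}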
code/sql/network.py
Dookas/Robust-Multitask-RL
106
142210
import math
import random

import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.autograd import Variable
from memory_replay import Transition

use_cuda = torch.cuda.is_available()
FloatTensor = torch.cuda.FloatTensor if use_cuda else torch.FloatTensor
LongTensor = torch.cuda.LongTensor if use_cuda else torch.LongTensor
ByteTensor = torch.cuda.ByteTensor if use_cuda else torch.ByteTensor
Tensor = FloatTensor


class DQN(nn.Module):
    """
    Deep neural network which represents an agent.
    """
    def __init__(self, num_actions):
        super(DQN, self).__init__()
        self.conv1 = nn.Conv2d(1, 5, kernel_size=2)
        self.bn1 = nn.BatchNorm2d(5)
        self.conv2 = nn.Conv2d(5, 10, kernel_size=3)
        self.bn2 = nn.BatchNorm2d(10)
        self.conv3 = nn.Conv2d(10, 10, kernel_size=3)
        self.bn3 = nn.BatchNorm2d(10)
        self.head = nn.Linear(200, num_actions)

    def forward(self, x):
        x = F.leaky_relu(self.bn1(self.conv1(x)))
        x = F.leaky_relu(self.bn2(self.conv2(x)))
        x = F.leaky_relu(self.bn3(self.conv3(x)))
        return self.head(x.view(x.size(0), -1))

# class DQN(nn.Module):
#     """
#     Deep neural network which represents an agent.
#     """
#     def __init__(self, num_actions):
#         super(DQN, self).__init__()
#         self.conv1 = nn.Conv2d(1, 10, kernel_size=2)
#         self.max_pool = nn.MaxPool2d((2,2))
#         self.bn1 = nn.BatchNorm2d(10)
#         self.conv2 = nn.Conv2d(10, 20, kernel_size=3)
#         self.bn2 = nn.BatchNorm2d(20)
#         self.linear = nn.Linear(80, 20)
#         # self.bn3 = nn.BatchNorm1d(50)
#         self.head = nn.Linear(20, num_actions)

#     def forward(self, x):
#         x = F.leaky_relu(self.max_pool(self.bn1(self.conv1(x))))
#         x = F.leaky_relu(self.bn2(self.conv2(x)))
#         x = F.leaky_relu(self.linear(x.view(x.size(0), -1)))
#         return self.head(x)


def select_action(state, model, num_actions,
                  EPS_START, EPS_END, EPS_DECAY, steps_done):
    """
    Selects whether the next action is chosen by our model or randomly
    """
    sample = random.random()
    eps_threshold = EPS_END + (EPS_START - EPS_END) * \
        math.exp(-1. * steps_done / EPS_DECAY)
    if sample > eps_threshold:
        return model(
            Variable(state, volatile=True).type(FloatTensor)).data.max(1)[1].view(1, 1)
    else:
        return LongTensor([[random.randrange(num_actions)]])


def optimize_model(model, optimizer, memory, BATCH_SIZE, GAMMA, BETA):
    if len(memory) < BATCH_SIZE:
        return
    transitions = memory.sample(BATCH_SIZE)
    # Transpose the batch (see http://stackoverflow.com/a/19343/3343043 for
    # detailed explanation).
    batch = Transition(*zip(*transitions))

    # Compute a mask of non-final states and concatenate the batch elements
    non_final_mask = ByteTensor(tuple(map(lambda s: s is not None,
                                          batch.next_state)))

    # We don't want to backprop through the expected action values and volatile
    # will save us on temporarily changing the model parameters'
    # requires_grad to False!
    non_final_next_states = Variable(torch.cat([s for s in batch.next_state
                                                if s is not None]),
                                     volatile=True)
    state_batch = Variable(torch.cat(batch.state))
    action_batch = Variable(torch.cat(batch.action))
    reward_batch = Variable(torch.cat(batch.reward))

    # Compute Q(s_t, a) - the model computes Q(s_t), then we select the
    # columns of actions taken
    state_action_values = model(state_batch).gather(1, action_batch)

    # Compute V(s_{t+1}) for all next states as a soft maximum of the
    # Q-values: log-sum-exp scaled by the temperature BETA.
    next_state_values = Variable(torch.zeros(BATCH_SIZE).type(Tensor))
    next_state_values[non_final_mask] = torch.log(
        torch.exp(BETA * model(non_final_next_states)).sum(1)) / BETA

    # Now, we don't want to mess up the loss with a volatile flag, so let's
    # clear it. After this, we'll just end up with a Variable that has
    # requires_grad=False
    next_state_values.volatile = False
    # Compute the expected Q values
    expected_state_action_values = (next_state_values * GAMMA) + reward_batch

    # Compute the MSE loss between predicted and expected Q values
    loss = F.mse_loss(state_action_values, expected_state_action_values)

    # Optimize the model
    optimizer.zero_grad()
    loss.backward()
    for param in model.parameters():
        param.grad.data.clamp_(-1, 1)
    optimizer.step()
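# A small numeric check of the epsilon-greedy schedule in select_action: the
# exploration threshold decays exponentially from EPS_START toward EPS_END.
# The constants below are illustrative, not taken from this repository.
import math

EPS_START, EPS_END, EPS_DECAY = 0.9, 0.05, 200
for steps_done in (0, 200, 1000):
    eps = EPS_END + (EPS_START - EPS_END) * math.exp(-1. * steps_done / EPS_DECAY)
    print(steps_done, round(eps, 3))  # 0 -> 0.9, 200 -> 0.363, 1000 -> 0.056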
observations/r/voteincome.py
hajime9652/observations
199
142218
<reponame>hajime9652/observations
# -*- coding: utf-8 -*-

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import csv
import numpy as np
import os
import sys

from observations.util import maybe_download_and_extract


def voteincome(path):
  """Sample Turnout and Demographic Data from the 2000 Current Population Survey

  This data set contains turnout and demographic data from a sample of
  respondents to the 2000 Current Population Survey (CPS). The states
  represented are South Carolina and Arkansas. The data represent only a
  sample and results from this example should not be used in publication.

  A data frame containing 7 variables ("state", "year", "vote", "income",
  "education", "age", "female") and 1500 observations.

  `state`
      a factor variable with levels equal to "AR" (Arkansas) and "SC"
      (South Carolina)

  `year`
      an integer vector

  `vote`
      an integer vector taking on values "1" (Voted) and "0" (Did Not Vote)

  `income`
      an integer vector ranging from "4" (Less than \\$5000) to "17"
      (Greater than \\$75000) denoting family income. See the CPS codebook
      for more information on variable coding

  `education`
      an integer vector ranging from "1" (Less than High School Education)
      to "4" (More than a College Education). See the CPS codebook for more
      information on variable coding

  `age`
      an integer vector ranging from "18" to "85"

  `female`
      an integer vector taking on values "1" (Female) and "0" (Male)

  Census Bureau Current Population Survey

  Args:
    path: str.
      Path to directory which either stores file or otherwise file will be
      downloaded and extracted there. Filename is `voteincome.csv`.

  Returns:
    Tuple of np.ndarray `x_train` with 1500 rows and 7 columns and
    dictionary `metadata` of column headers (feature names).
  """
  import pandas as pd
  path = os.path.expanduser(path)
  filename = 'voteincome.csv'
  if not os.path.exists(os.path.join(path, filename)):
    url = 'http://dustintran.com/data/r/Zelig/voteincome.csv'
    maybe_download_and_extract(path, url,
                               save_file_name='voteincome.csv',
                               resume=False)

  data = pd.read_csv(os.path.join(path, filename), index_col=0,
                     parse_dates=True)
  x_train = data.values
  metadata = {'columns': data.columns}
  return x_train, metadata
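# A minimal usage sketch; the directory below is hypothetical, and the first
# call downloads voteincome.csv there if it is not already present.
x_train, metadata = voteincome('~/data')
print(x_train.shape)              # (1500, 7)
print(list(metadata['columns']))  # ['state', 'year', 'vote', 'income', 'education', 'age', 'female']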
setup_mcedit2.py
elcarrion06/mcedit2
673
142228
<gh_stars>100-1000
"""
    setup-mcedit2
"""
from os import path
import sys

from setuptools import setup, find_packages

from Cython.Build import cythonize
import numpy

# Output annotated .html
import Cython.Compiler.Options
Cython.Compiler.Options.annotate = True

with open("version.txt") as f:
    version = f.read().strip()

install_requires = [
    "numpy",
]

include_dirs = [numpy.get_include()]

mcedit2_ext_modules = cythonize(
    [
        "src/mcedit2/rendering/blockmodels.pyx",
        "src/mcedit2/rendering/modelmesh.pyx",
    ],
)

for m in mcedit2_ext_modules:
    m.include_dirs = include_dirs

sys.path.append(path.join(path.dirname(__file__), "src"))

from mcedit2.util.gen_ui import compile_ui
compile_ui()

setup(name='mcedit2',
      version=version,
      description="Interactive 3D World Editor for Minecraft Levels",
      # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
      classifiers=[
          "Development Status :: 2 - Pre-Alpha",
          "Environment :: Win32 (MS Windows)",
          "Environment :: X11 Applications :: Qt",
          "Environment :: MacOS X",
          "Intended Audience :: End Users/Desktop",
          "Natural Language :: English",
          "Operating System :: OS Independent",
          "Programming Language :: Python :: 2.7",
          "Topic :: Utilities",
          "License :: OSI Approved :: MIT License",
      ],
      keywords='minecraft',
      author='<NAME>',
      author_email='<EMAIL>',
      url='https://github.com/mcedit/mcedit2',
      license='MIT License',
      packages=find_packages('src', include=["mcedit2*", ]),
      package_dir={'': 'src'},
      ext_modules=mcedit2_ext_modules,
      include_dirs=include_dirs,
      include_package_data=True,
      zip_safe=False,
      install_requires=install_requires,
      entry_points="""
      # -*- Entry points: -*-
      [console_scripts]
      mcedit2=mcedit2.main:main
      """,
      )
examples/widgets/line/lv_example_line_1.py
nickzhuang0613/lvgl
5,238
142229
<gh_stars>1000+ # Create an array for the points of the line line_points = [ {"x":5, "y":5}, {"x":70, "y":70}, {"x":120, "y":10}, {"x":180, "y":60}, {"x":240, "y":10}] # Create style style_line = lv.style_t() style_line.init() style_line.set_line_width(8) style_line.set_line_color(lv.palette_main(lv.PALETTE.BLUE)) style_line.set_line_rounded(True) # Create a line and apply the new style line1 = lv.line(lv.scr_act()) line1.set_points(line_points, 5) # Set the points line1.add_style(style_line, 0) line1.center()
homeassistant/components/numato/sensor.py
MrDelik/core
30,023
142259
<reponame>MrDelik/core """Sensor platform integration for ADC ports of Numato USB GPIO expanders.""" from __future__ import annotations import logging from numato_gpio import NumatoGpioError from homeassistant.components.sensor import SensorEntity from homeassistant.const import CONF_ID, CONF_NAME, CONF_SENSORS from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import ( CONF_DEVICES, CONF_DST_RANGE, CONF_DST_UNIT, CONF_PORTS, CONF_SRC_RANGE, DATA_API, DOMAIN, ) _LOGGER = logging.getLogger(__name__) ICON = "mdi:gauge" def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the configured Numato USB GPIO ADC sensor ports.""" if discovery_info is None: return api = hass.data[DOMAIN][DATA_API] sensors = [] devices = hass.data[DOMAIN][CONF_DEVICES] for device in [d for d in devices if CONF_SENSORS in d]: device_id = device[CONF_ID] ports = device[CONF_SENSORS][CONF_PORTS] for port, adc_def in ports.items(): try: api.setup_input(device_id, port) except NumatoGpioError as err: _LOGGER.error( "Failed to initialize sensor '%s' on Numato device %s port %s: %s", adc_def[CONF_NAME], device_id, port, err, ) continue sensors.append( NumatoGpioAdc( adc_def[CONF_NAME], device_id, port, adc_def[CONF_SRC_RANGE], adc_def[CONF_DST_RANGE], adc_def[CONF_DST_UNIT], api, ) ) add_entities(sensors, True) class NumatoGpioAdc(SensorEntity): """Represents an ADC port of a Numato USB GPIO expander.""" def __init__(self, name, device_id, port, src_range, dst_range, dst_unit, api): """Initialize the sensor.""" self._name = name self._device_id = device_id self._port = port self._src_range = src_range self._dst_range = dst_range self._state = None self._unit_of_measurement = dst_unit self._api = api @property def name(self): """Return the name of the sensor.""" return self._name @property def native_value(self): """Return the state of the sensor.""" return self._state @property def native_unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit_of_measurement @property def icon(self): """Return the icon to use in the frontend, if any.""" return ICON def update(self): """Get the latest data and updates the state.""" try: adc_val = self._api.read_adc_input(self._device_id, self._port) adc_val = self._clamp_to_source_range(adc_val) self._state = self._linear_scale_to_dest_range(adc_val) except NumatoGpioError as err: self._state = None _LOGGER.error( "Failed to update Numato device %s ADC-port %s: %s", self._device_id, self._port, err, ) def _clamp_to_source_range(self, val): # clamp to source range val = max(val, self._src_range[0]) val = min(val, self._src_range[1]) return val def _linear_scale_to_dest_range(self, val): # linear scale to dest range src_len = self._src_range[1] - self._src_range[0] adc_val_rel = val - self._src_range[0] ratio = float(adc_val_rel) / float(src_len) dst_len = self._dst_range[1] - self._dst_range[0] dest_val = self._dst_range[0] + ratio * dst_len return dest_val
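# A standalone sketch of the clamp + linear rescale done in update(): an ADC
# reading of 512 on a 0..1023 source range lands mid-way through a
# hypothetical 0..100 destination range.
src_range, dst_range = (0, 1023), (0.0, 100.0)

adc_val = max(min(512, src_range[1]), src_range[0])           # clamp to source range
ratio = float(adc_val - src_range[0]) / (src_range[1] - src_range[0])
value = dst_range[0] + ratio * (dst_range[1] - dst_range[0])  # linear scale
print(round(value, 2))  # 50.05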
nnutils/chamfer_python.py
NVlabs/UMR
184
142269
# -----------------------------------------------------------------------------
# Code adapted from:
# https://github.com/ThibaultGROUEIX/ChamferDistancePytorch/blob/719b0f1ca5ba370616cb837c03ab88d9a88173ff/chamfer_python.py
#
# MIT License
#
# Copyright (c) 2019 ThibaultGROUEIX
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
import torch


def pairwise_dist(x, y):
    xx, yy, zz = torch.mm(x, x.t()), torch.mm(y, y.t()), torch.mm(x, y.t())
    rx = xx.diag().unsqueeze(0).expand_as(xx)
    ry = yy.diag().unsqueeze(0).expand_as(yy)
    P = rx.t() + ry - 2 * zz
    return P


def NN_loss(x, y, dim=0):
    dist = pairwise_dist(x, y)
    values, indices = dist.min(dim=dim)
    return values.mean()


def distChamfer(a, b):
    """
    :param a: Pointclouds Batch x num_points x dim
    :param b: Pointclouds Batch x num_points x dim
    :return:
    -closest point on b of points from a
    -closest point on a of points from b
    -idx of closest point on b of points from a
    -idx of closest point on a of points from b
    Works for pointcloud of any dimension
    """
    x, y = a, b
    bs, num_points_x, points_dim = x.size()
    bs, num_points_y, points_dim = y.size()

    xx = torch.pow(x, 2).sum(2)
    yy = torch.pow(y, 2).sum(2)
    zz = torch.bmm(x, y.transpose(2, 1))
    rx = xx.unsqueeze(1).expand(bs, num_points_y, num_points_x)  # Diagonal elements xx
    ry = yy.unsqueeze(1).expand(bs, num_points_x, num_points_y)  # Diagonal elements yy
    P = rx.transpose(2, 1) + ry - 2 * zz
    return torch.min(P, 2)[0], torch.min(P, 1)[0], torch.min(P, 2)[1].int(), torch.min(P, 1)[1].int()
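# A quick sanity check of distChamfer on random point clouds; only PyTorch is
# required. Comparing a cloud with itself should give (near-)zero distances in
# both directions.
import torch

a = torch.rand(2, 100, 3)  # batch of 2 clouds, 100 points each, 3-D
d_ab, d_ba, idx_ab, idx_ba = distChamfer(a, a.clone())
print(d_ab.abs().max().item(), d_ba.abs().max().item())  # ~0 up to float error
print(idx_ab.shape, idx_ba.shape)                        # torch.Size([2, 100]) each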
tests/python/compiler/test_top_level4.py
ThomsonTan/nnvm
1,872
142287
import numpy as np import tvm from tvm.contrib import graph_runtime import topi import nnvm.symbol as sym import nnvm.compiler from nnvm.testing.config import ctx_list def helper(symbol, inputs, dtype, np_forward, np_backward=None, need_input=True, need_head_grads=True): ishapes = {} input_syms = [] np_inputs = {} for (name, shape, s) in inputs: ishapes.update({name: shape}) np_inputs.update({name: np.random.uniform(size=shape).astype(dtype)}) input_syms.append(s) for target, ctx in ctx_list(): graph, lib, _ = nnvm.compiler.build(symbol, target, ishapes) m = graph_runtime.create(graph, lib, ctx) m.run(**np_inputs) y_np = np_forward(**np_inputs) out = m.get_output(0, tvm.nd.empty(y_np.shape, dtype)) np.testing.assert_allclose(out.asnumpy(), y_np, atol=1e-5, rtol=1e-5) # backward if np_backward: graph._set_symbol_list_attr("grad_ys", symbol) graph._set_symbol_list_attr("grad_xs", input_syms) graph._set_symbol_list_attr("grad_ys_out_grad", sym.Variable("head_grads", shape=y_np.shape)) graph = graph.apply("Gradient") ishapes.update({"head_grads": y_np.shape}) graph, lib, _ = nnvm.compiler.build(graph, target, ishapes) m = graph_runtime.create(graph, lib, ctx) head_grads = np.random.uniform(size=y_np.shape).astype(dtype) y_np = np_backward(head_grads=head_grads, **np_inputs) b_inputs = {} if need_input: b_inputs.update(np_inputs) if need_head_grads: b_inputs.update({"head_grads":head_grads}) m.run(**b_inputs) for i in range(len(y_np)): out = m.get_output(i, tvm.nd.empty(y_np[i].shape, dtype)) np.testing.assert_allclose(out.asnumpy(), y_np[i], atol=1e-5, rtol=1e-5) def verify_transpose(dshape, axes): x = sym.Variable("x") if axes: y = sym.transpose(x, axes=axes) else: y = sym.transpose(x) y = y + 1 dtype = "float32" for target, ctx in ctx_list(): graph, lib, _ = nnvm.compiler.build(y, target, {"x": dshape}) m = graph_runtime.create(graph, lib, ctx) # set input data = tvm.nd.array(np.random.uniform(size=dshape).astype(dtype)) m.run(x=data) out_np = np.transpose(data.asnumpy(), axes=axes) + 1 out = m.get_output(0, tvm.nd.empty(out_np.shape)) np.testing.assert_allclose(out.asnumpy(), out_np, atol=1e-5, rtol=1e-5) def verify_reduce(dshape, fnp, fsym, **kwargs): x = sym.Variable("x") y = fsym(x + 1, **kwargs) dtype = "float32" for target, ctx in ctx_list(): graph, lib, _ = nnvm.compiler.build(y, target, {"x": dshape}) m = graph_runtime.create(graph, lib, ctx) # set input data = np.random.uniform(size=dshape).astype(dtype) out_np = fnp(data + 1, **kwargs) m.run(x=data) out = m.get_output(0, tvm.nd.empty(out_np.shape)) np.testing.assert_allclose(out.asnumpy(), out_np, atol=1e-5, rtol=1e-5) def test_tranpose(): verify_transpose((2, 3, 4), (0, 2, 1)) verify_transpose((2, 3, 4), None) def test_reduce(): verify_reduce((2, 3, 4), np.max, sym.max, axis=1, keepdims=True) verify_reduce((4, 4, 3), np.min, sym.min, keepdims=True) verify_reduce((4, 4, 3), np.sum, sym.sum, axis=(0, 2)) def verify_flip(ishape, axis): x = sym.Variable("x") y = sym.flip(x, axis=axis) + 1 dtype = "float32" x_np = np.random.uniform(size=ishape).astype(dtype) res = np.flip(x_np, axis) + 1 for target, ctx in ctx_list(): # set input graph, lib, _ = nnvm.compiler.build(y, target, {"x": ishape}) m = graph_runtime.create(graph, lib, ctx) m.run(x=x_np) out = m.get_output(0, tvm.nd.empty(res.shape)) np.testing.assert_allclose(out.asnumpy(), res, atol=1e-5, rtol=1e-5) def test_flip(): verify_flip((3, 4, 3), 1) verify_flip((3, 4, 3), 0) verify_flip((3, 4, 3), 2) verify_flip((3, 4, 3), -1) verify_flip((3, 4, 3), -3) verify_flip((3, 4, 3), -2) def 
verify_reshape(dshape, oshape): x = sym.Variable("x") y = sym.reshape(x, shape=oshape) y = y + 1 dtype = "float32" for target, ctx in ctx_list(): graph, lib, _ = nnvm.compiler.build(y, target, {"x": dshape}) m = graph_runtime.create(graph, lib, ctx) # set input data = tvm.nd.array(np.random.uniform(size=dshape).astype(dtype)) m.run(x=data) out_np = data.asnumpy().reshape(oshape) + 1 out = m.get_output(0, tvm.nd.empty(out_np.shape)) np.testing.assert_allclose(out.asnumpy(), out_np, atol=1e-5, rtol=1e-5) def test_reshape(): verify_reshape((2, 3, 4), (-1, 2, 1)) verify_reshape((2, 3, 4), (8, 3)) verify_reshape((4, 7), (2, 7, 2)) def test_clip(): x = sym.Variable("x") a_min=0.2 a_max=0.75 y = sym.clip(x, a_min=a_min, a_max=a_max) def forward(x): return np.clip(x, a_min=a_min, a_max=a_max) def backward(head_grads, x): mask1 = np.greater_equal(x, a_min).astype("float") mask2 = np.less_equal(x, a_max).astype("float") return [head_grads * mask1 * mask2] dtype = "float32" inputs = [('x', (3, 4, 5), x)] helper(y, inputs, dtype, forward, backward) def test_greater(): l = sym.Variable("l") r = sym.Variable("r") y = sym.greater(l, r) def forward(l, r): return np.greater(l, r).astype("float32") def backward(head_grads, l, r): return [np.zeros_like(l)] dtype = "float32" inputs = [('l', (3, 4, 5), l), ('r', (3, 4, 5), r)] helper(y, inputs, dtype, forward, backward, need_head_grads=False) def test_less(): l = sym.Variable("l") r = sym.Variable("r") y = sym.less(l, r) def forward(l, r): return np.less(l, r).astype("float32") def backward(head_grads, l, r): return [np.zeros_like(l)] dtype = "float32" inputs = [('l', (3, 4, 5), l), ('r', (3, 4, 5), r)] helper(y, inputs, dtype, forward, backward, need_head_grads=False) def test_reshape_like(): x = sym.Variable("x") y = sym.Variable("y") z = sym.reshape_like(x, y) def forward(x, y): return np.reshape(x, y.shape) def backward(head_grads, x, y): return [np.reshape(head_grads, x.shape), np.zeros_like(y)] dtype = "float32" inputs = [('x', (3, 4, 5), x), ('y', (5, 4, 3), y)] helper(z, inputs, dtype, forward, backward) def verify_expand_like(in_shape, out_shape, axis, exclude): x = sym.Variable("x") y = sym.Variable("y") z = sym.expand_like(x, y, axis=axis, exclude=exclude) def forward(x, y): odim = len(out_shape) real_axis = [i if i >= 0 else i + odim for i in axis] real_axis = sorted(real_axis) if exclude: real_axis = list(set(range(odim)) - set(real_axis)) for i in real_axis: x = np.expand_dims(x, i).astype(x.dtype) for i in real_axis: x = np.concatenate([x]*out_shape[i], axis=i).astype(x.dtype) return x def backward(head_grads, x, y): odim = len(out_shape) real_axis = [i if i >= 0 else i + odim for i in axis] real_axis = sorted(real_axis) if exclude: real_axis = list(set(range(odim)) - set(real_axis)) return [np.sum(head_grads, axis=tuple(real_axis)), np.zeros_like(y)] dtype = "float32" inputs = [('x', in_shape, x), ('y', out_shape, y)] helper(z, inputs, dtype, forward, backward, need_input=False) def test_expand_like(): verify_expand_like((3,), (3, 2), [1], False) verify_expand_like((2,), (2, 3), [1], False) verify_expand_like((3, 4), (3, 5, 4), [1], False) verify_expand_like((5, 7), (5, 6, 7, 8), [0, 2], True) def verify_elemwise_sum(num_args): s = [sym.Variable("input" + str(i)) for i in range(num_args)] y = sym.elemwise_sum(*s, num_args=num_args) def forward(**inputs): return np.sum(np.array(list(inputs.values())), axis=0) def backward(head_grads, **inputs): return [head_grads] * num_args dtype = "float32" inputs = [("input" + str(i), (3, 4, 5), s[i]) for i 
in range(num_args)] helper(y, inputs, dtype, forward, backward, need_input=False) def test_elemwise_sum(): verify_elemwise_sum(1) verify_elemwise_sum(5) verify_elemwise_sum(7) def test_block_grad(): x = sym.Variable("x") y = sym.block_grad(x) def forward(x): return x def backward(head_grads, x): return [np.zeros_like(head_grads)] dtype = "float32" inputs = [('x', (3, 4, 5), x)] helper(y, inputs, dtype, forward, backward, need_head_grads=False) def test_full(): shape = (3, 4, 5) value = 7 dtype = "float32" for target, ctx in ctx_list(): data = sym.Variable("data", dtype=dtype) # full_like s = sym.full_like(data=data, fill_value=value, name="s") graph, lib, _ = nnvm.compiler.build(s, target, {"data": shape}) m = graph_runtime.create(graph, lib, ctx) m.run(data=np.random.uniform(size=shape).astype(dtype)) out = m.get_output(0, tvm.nd.empty(shape, dtype=dtype)) np.testing.assert_allclose( out.asnumpy(), np.full(shape, fill_value=value, dtype=dtype), atol=1e-5, rtol=1e-5) # ones_like s = sym.ones_like(data=data, fill_value=value, name="s") graph, lib, _ = nnvm.compiler.build(s, target, {"data": shape}) m = graph_runtime.create(graph, lib, ctx) m.run(data=np.random.uniform(size=shape).astype(dtype)) out = m.get_output(0, tvm.nd.empty(shape, dtype=dtype)) np.testing.assert_allclose( out.asnumpy(), np.full(shape, fill_value=1, dtype=dtype), atol=1e-5, rtol=1e-5) # zeros_like s = sym.zeros_like(data=data, fill_value=value, name="s") graph, lib, _ = nnvm.compiler.build(s, target, {"data": shape}) m = graph_runtime.create(graph, lib, ctx) m.run(data=np.random.uniform(size=shape).astype(dtype)) out = m.get_output(0, tvm.nd.empty(shape, dtype=dtype)) np.testing.assert_allclose( out.asnumpy(), np.full(shape, fill_value=0, dtype=dtype), atol=1e-5, rtol=1e-5) # full s = sym.full(shape=shape, dtype=dtype, fill_value=value, name="s") graph, lib, _ = nnvm.compiler.build(s, target) m = graph_runtime.create(graph, lib, ctx) m.run() out = m.get_output(0, tvm.nd.empty(shape, dtype=dtype)) np.testing.assert_allclose( out.asnumpy(), np.full(shape, fill_value=value, dtype=dtype), atol=1e-5, rtol=1e-5) # ones s = sym.ones(shape=shape, dtype=dtype, name="s") graph, lib, _ = nnvm.compiler.build(s, target) m = graph_runtime.create(graph, lib, ctx) m.run() out = m.get_output(0, tvm.nd.empty(shape, dtype=dtype)) np.testing.assert_allclose( out.asnumpy(), np.full(shape, fill_value=1, dtype=dtype), atol=1e-5, rtol=1e-5) # zeros s = sym.zeros(shape=shape, dtype=dtype, name="s") graph, lib, _ = nnvm.compiler.build(s, target) m = graph_runtime.create(graph, lib, ctx) m.run() out = m.get_output(0, tvm.nd.empty(shape, dtype=dtype)) np.testing.assert_allclose( out.asnumpy(), np.full(shape, fill_value=0, dtype=dtype), atol=1e-5, rtol=1e-5) if __name__ == "__main__": test_reshape() test_reduce() test_tranpose() test_clip() test_greater() test_less() test_reshape_like() test_expand_like() test_elemwise_sum() test_block_grad() test_full() test_flip() print(nnvm.compiler.engine.dump())
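# A NumPy-only sketch of what the expand_like reference implementation above
# computes: broadcast x to out_shape by inserting the given axes and tiling
# along them.
import numpy as np

x = np.arange(3, dtype="float32")               # shape (3,)
out_shape, axis = (3, 2), [1]
y = np.expand_dims(x, 1)                        # shape (3, 1)
y = np.concatenate([y] * out_shape[1], axis=1)  # shape (3, 2)
print(y)  # [[0. 0.], [1. 1.], [2. 2.]]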
swagger_tester/swagger_tester.py
pwnieexpress/swagger-tester
109
142288
<reponame>pwnieexpress/swagger-tester # -*- coding: utf-8 -*- import json import logging import os import requests import six import time try: from urllib import urlencode except ImportError: # Python 3 from urllib.parse import urlencode import connexion from swagger_parser import SwaggerParser logging.basicConfig() logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) # The swagger path item object (as well as HTTP) allows for the following # HTTP methods (http://swagger.io/specification/#pathItemObject): _HTTP_METHODS = ['put', 'post', 'get', 'delete', 'options', 'head', 'patch'] def get_request_args(path, action, swagger_parser): """Get request args from an action and a path. Args: path: path of the action. action: action of the request(get, delete, post, put). swagger_parser: instance of SwaggerParser. Returns: A dict of args to transmit to bravado. """ request_args = {} if path in swagger_parser.paths.keys() and action in swagger_parser.paths[path].keys(): operation_spec = swagger_parser.paths[path][action] if 'parameters' in operation_spec.keys(): for param_name, param_spec in operation_spec['parameters'].items(): request_args[param_name] = swagger_parser.get_example_from_prop_spec(param_spec) return request_args def validate_definition(swagger_parser, valid_response, response): """Validate the definition of the response given the given specification and body. Args: swagger_parser: instance of swagger parser. body: valid body answer from spec. response: response of the request. """ # additionalProperties do not match any definition because the keys # vary. we can only check the type of the values if 'any_prop1' in valid_response and 'any_prop2' in valid_response: assert swagger_parser.validate_additional_properties(valid_response, response) return # No answer if response is None or response == '': assert valid_response == '' or valid_response is None return if valid_response == '' or valid_response is None: assert response is None or response == '' return # Validate output definition if isinstance(valid_response, list): # Return type is a list assert isinstance(response, list) if response: valid_response = valid_response[0] response = response[0] else: return # Not a dict and not a text if ((not isinstance(response, dict) or not isinstance(valid_response, dict)) and (not isinstance(response, (six.text_type, six.string_types)) or not isinstance(valid_response, (six.text_type, six.string_types)))): assert type(response) == type(valid_response) elif isinstance(response, dict) and isinstance(valid_response, dict): # Check if there is a definition that match body and response valid_definition = swagger_parser.get_dict_definition(valid_response, get_list=True) actual_definition = swagger_parser.get_dict_definition(response, get_list=True) assert len(set(valid_definition).intersection(actual_definition)) >= 1 def parse_parameters(url, action, path, request_args, swagger_parser): """Parse the swagger parameters to make a request. Replace var in url, make query dict, body and headers. Args: url: url of the request. action: HTTP action. path: path of the request. request_args: dict of args to send to the request. swagger_parser: instance of swagger parser. 
Returns: (url, body, query_params, headers, files) """ body = None query_params = {} files = {} headers = [('Content-Type', 'application/json')] if path in swagger_parser.paths.keys() and action in swagger_parser.paths[path].keys(): operation_spec = swagger_parser.paths[path][action] # Get body and path for parameter_name, parameter_spec in operation_spec['parameters'].items(): if parameter_spec['in'] == 'body': body = request_args[parameter_name] elif parameter_spec['in'] == 'path': url = url.replace('{{{0}}}'.format(parameter_name), str(request_args[parameter_name])) elif parameter_spec['in'] == 'query': if isinstance(request_args[parameter_name], list): query_params[parameter_name] = ','.join(request_args[parameter_name]) else: query_params[parameter_name] = str(request_args[parameter_name]) elif parameter_spec['in'] == 'formData': if body is None: body = {} if (isinstance(request_args[parameter_name], tuple) and hasattr(request_args[parameter_name][0], 'read')): files[parameter_name] = (request_args[parameter_name][1], request_args[parameter_name][0]) else: body[parameter_name] = request_args[parameter_name] # The first header is always content type, so just replace it so we don't squash custom headers headers[0] = ('Content-Type', 'multipart/form-data') elif parameter_spec['in'] == 'header': header_value = request_args.get(parameter_name) header_value = header_value or parameter_spec.get('default', '') headers += [(parameter_spec['name'], str(header_value))] return url, body, query_params, headers, files def get_url_body_from_request(action, path, request_args, swagger_parser): """Get the url and the body from an action, path, and request args. Args: action: HTTP action. path: path of the request. request_args: dict of args to send to the request. swagger_parser: instance of swagger parser. Returns: url, body, headers, files """ url, body, query_params, headers, files = parse_parameters(path, action, path, request_args, swagger_parser) url = '{0}?{1}'.format(url, urlencode(query_params)) if ('Content-Type', 'multipart/form-data') not in headers: try: if body: body = json.dumps(body) except TypeError as exc: logger.warning(u'Cannot decode body: {0}.'.format(repr(exc))) else: headers.remove(('Content-Type', 'multipart/form-data')) return url, body, headers, files def get_method_from_action(client, action): """Get a client method from an action. Args: client: flask client. aciton: action name. Returns: A flask client function. """ error_msg = "Action '{0}' is not recognized; needs to be one of {1}.".format(action, str(_HTTP_METHODS)) assert action in _HTTP_METHODS, error_msg return client.__getattribute__(action) def swagger_test(swagger_yaml_path=None, app_url=None, authorize_error=None, wait_time_between_tests=0, use_example=True, dry_run=False, extra_headers={}): """Test the given swagger api. Test with either a swagger.yaml path for a connexion app or with an API URL if you have a running API. Args: swagger_yaml_path: path of your YAML swagger file. app_url: URL of the swagger api. authorize_error: dict containing the error you don't want to raise. ex: { 'get': { '/pet/': ['404'] } } Will ignore 404 when getting a pet. wait_time_between_tests: an number that will be used as waiting time between tests [in seconds]. use_example: use example of your swagger file instead of generated data. 
dry_run: don't actually execute the test, only show what would be sent extra_headers: additional headers you may want to send for all operations Raises: ValueError: In case you specify neither a swagger.yaml path or an app URL. """ for _ in swagger_test_yield(swagger_yaml_path=swagger_yaml_path, app_url=app_url, authorize_error=authorize_error, wait_time_between_tests=wait_time_between_tests, use_example=use_example, dry_run=dry_run, extra_headers=extra_headers): pass def swagger_test_yield(swagger_yaml_path=None, app_url=None, authorize_error=None, wait_time_between_tests=0, use_example=True, dry_run=False, extra_headers={}): """Test the given swagger api. Yield the action and operation done for each test. Test with either a swagger.yaml path for a connexion app or with an API URL if you have a running API. Args: swagger_yaml_path: path of your YAML swagger file. app_url: URL of the swagger api. authorize_error: dict containing the error you don't want to raise. ex: { 'get': { '/pet/': ['404'] } } Will ignore 404 when getting a pet. wait_time_between_tests: an number that will be used as waiting time between tests [in seconds]. use_example: use example of your swagger file instead of generated data. dry_run: don't actually execute the test, only show what would be sent extra_headers: additional headers you may want to send for all operations Returns: Yield between each test: (action, operation) Raises: ValueError: In case you specify neither a swagger.yaml path or an app URL. """ if authorize_error is None: authorize_error = {} # Init test if swagger_yaml_path is not None and app_url is not None: app_client = requests swagger_parser = SwaggerParser(swagger_yaml_path, use_example=use_example) elif swagger_yaml_path is not None: specification_dir = os.path.dirname(os.path.realpath(swagger_yaml_path)) app = connexion.App(__name__, port=8080, debug=True, specification_dir=specification_dir) app.add_api(os.path.basename(swagger_yaml_path)) app_client = app.app.test_client() swagger_parser = SwaggerParser(swagger_yaml_path, use_example=use_example) elif app_url is not None: app_client = requests remote_swagger_def = requests.get(u'{0}/swagger.json'.format(app_url)).json() swagger_parser = SwaggerParser(swagger_dict=remote_swagger_def, use_example=use_example) else: raise ValueError('You must either specify a swagger.yaml path or an app url') print("Starting testrun against {0} or {1} using examples: " "{2}".format(swagger_yaml_path, app_url, use_example)) operation_sorted = {method: [] for method in _HTTP_METHODS} # Sort operation by action operations = swagger_parser.operation.copy() operations.update(swagger_parser.generated_operation) for operation, request in operations.items(): operation_sorted[request[1]].append((operation, request)) postponed = [] # For every operationId for action in _HTTP_METHODS: for operation in operation_sorted[action]: # Make request path = operation[1][0] action = operation[1][1] client_name = getattr(app_client, '__name__', 'FlaskClient') request_args = get_request_args(path, action, swagger_parser) url, body, headers, files = get_url_body_from_request(action, path, request_args, swagger_parser) logger.info(u'TESTING {0} {1}'.format(action.upper(), url)) # Add any extra headers specified by the user headers.extend([(key, value)for key, value in extra_headers.items()]) if swagger_yaml_path is not None and app_url is None: if dry_run: logger.info("\nWould send %s to %s with body %s and headers %s" % (action.upper(), url, body, headers)) continue response = 
get_method_from_action(app_client, action)(url, headers=headers, data=body)
            else:
                if app_url.endswith(swagger_parser.base_path):
                    base_url = app_url[:-len(swagger_parser.base_path)]
                else:
                    base_url = app_url
                full_path = u'{0}{1}'.format(base_url, url)
                if dry_run:
                    logger.info("\nWould send %s to %s with body %s and headers %s" %
                                (action.upper(), full_path, body, headers))
                    continue
                response = get_method_from_action(app_client, action)(full_path,
                                                                      headers=dict(headers),
                                                                      data=body,
                                                                      files=files)

            logger.info(u'Using {0}, got status code {1} for ********** {2} {3}'.format(
                client_name, response.status_code, action.upper(), url))

            # Check if authorize error
            if (action in authorize_error and path in authorize_error[action] and
                    response.status_code in authorize_error[action][path]):
                logger.info(u'Got expected authorized error on {0} with status {1}'.format(
                    url, response.status_code))
                yield (action, operation)
                continue

            if response.status_code != 404:
                # Get valid request and response body
                body_req = swagger_parser.get_send_request_correct_body(path, action)

                try:
                    response_spec = swagger_parser.get_request_data(path, action, body_req)
                except (TypeError, ValueError) as exc:
                    logger.warning(u'Error in the swagger file: {0}'.format(repr(exc)))
                    continue

                # Get response data
                if hasattr(response, 'content'):
                    response_text = response.content
                else:
                    response_text = response.data

                # Convert to str
                if hasattr(response_text, 'decode'):
                    response_text = response_text.decode('utf-8')

                # Get json
                try:
                    response_json = json.loads(response_text)
                except ValueError:
                    response_json = response_text

                if response.status_code in response_spec.keys():
                    validate_definition(swagger_parser, response_spec[response.status_code], response_json)
                elif 'default' in response_spec.keys():
                    validate_definition(swagger_parser, response_spec['default'], response_json)
                else:
                    raise AssertionError('Invalid status code {0}. Expected: {1}'.format(
                        response.status_code, response_spec.keys()))

                if wait_time_between_tests > 0:
                    time.sleep(wait_time_between_tests)

                yield (action, operation)
            else:
                # 404 => Postpone retry
                if {'action': action, 'operation': operation} in postponed:
                    # Already postponed => raise error
                    raise Exception(u'Invalid status code {0}'.format(response.status_code))

                operation_sorted[action].append(operation)
                postponed.append({'action': action, 'operation': operation})
                yield (action, operation)
                continue
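# A minimal usage sketch; the URL and the ignored error below are hypothetical.
# Point swagger_test at a running API (it will fetch <app_url>/swagger.json),
# or pass swagger_yaml_path for a local connexion app instead.
swagger_test(app_url='http://localhost:8080/v2',
             authorize_error={'get': {'/pet/{petId}': [404]}},
             wait_time_between_tests=0,
             dry_run=True)  # only log the requests that would be sent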
test/jpypetest/test_module.py
pitmanst/jpype
531
142292
# ***************************************************************************** # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # See NOTICE file for details. # # ***************************************************************************** # Tests for module functionality including failures that cannot # be triggered in normal operations import _jpype import jpype import unittest import subrun import common import unittest.mock as mock @subrun.TestCase(individual=True) class ModuleStartTestCase(unittest.TestCase): def testNoJObject(self): with self.assertRaises(RuntimeError): import jpype del _jpype.JObject jpype.startJVM() def testNoJInterface(self): with self.assertRaises(RuntimeError): import jpype del _jpype.JInterface jpype.startJVM() def testNoJArray(self): with self.assertRaises(RuntimeError): import jpype del _jpype.JArray jpype.startJVM() def testNoJException(self): with self.assertRaises(RuntimeError): import jpype del _jpype.JException jpype.startJVM() def testNoJClassPre(self): with self.assertRaises(RuntimeError): import jpype del _jpype._jclassPre jpype.startJVM() def testNoJClassPost(self): with self.assertRaises(RuntimeError): import jpype del _jpype._jclassPost jpype.startJVM() def testNoMethodAnnotations(self): with self.assertRaises(RuntimeError): import jpype del _jpype.getMethodAnnotations jpype.startJVM() def testNoMethodCode(self): with self.assertRaises(RuntimeError): import jpype del _jpype.getMethodCode jpype.startJVM() def testShutdown(self): import jpype jpype.startJVM(convertStrings=False) jpype.shutdownJVM() class ModuleTestCase(common.JPypeTestCase): def setUp(self): common.JPypeTestCase.setUp(self) def testIsPackage(self): self.assertTrue(_jpype.isPackage("java")) self.assertFalse(_jpype.isPackage("jva")) with self.assertRaises(TypeError): _jpype.isPackage(object()) def testGetClass(self): self.assertIsInstance(_jpype._getClass("java.lang.String"), _jpype._JClass) with self.assertRaises(TypeError): _jpype._getClass(object()) def testHasClass(self): self.assertTrue(_jpype._hasClass("java.lang.String")) with self.assertRaises(TypeError): _jpype._hasClass(object()) class JInitTestCase(common.JPypeTestCase): def setUp(self): common.JPypeTestCase.setUp(self) def testJInit(self): with mock.patch("_jpype.isStarted") as started: started.return_value = False self.assertEqual(len(jpype._jinit.JInitializers), 0) A = [] def func(): A.append(1) jpype.onJVMStart(func) self.assertEqual(len(A), 0) self.assertEqual(jpype._jinit.JInitializers[0], func) started.return_value = True jpype.onJVMStart(func) self.assertEqual(len(A), 1) jpype._jinit.runJVMInitializers() self.assertEqual(len(A), 2)
dedupe/predicates.py
fritshermans/dedupe
2,190
142293
<reponame>fritshermans/dedupe #!/usr/bin/python # -*- coding: utf-8 -*- import re import math import itertools import string import abc from doublemetaphone import doublemetaphone from dedupe.cpredicates import ngrams, initials import dedupe.tfidf as tfidf import dedupe.levenshtein as levenshtein from typing import Sequence, Callable, Any, Tuple, Set from dedupe._typing import RecordDict words = re.compile(r"[\w']+").findall integers = re.compile(r"\d+").findall start_word = re.compile(r"^([\w']+)").match two_start_words = re.compile(r"^([\w']+\s+[\w']+)").match start_integer = re.compile(r"^(\d+)").match alpha_numeric = re.compile(r"(?=\w*\d)[a-zA-Z\d]+").findall PUNCTABLE = str.maketrans("", "", string.punctuation) def strip_punc(s): return s.translate(PUNCTABLE) class Predicate(abc.ABC): def __iter__(self): yield self def __repr__(self): return "%s: %s" % (self.type, self.__name__) def __hash__(self): try: return self._cached_hash except AttributeError: h = self._cached_hash = hash(repr(self)) return h def __eq__(self, other): return repr(self) == repr(other) def __len__(self): return 1 @abc.abstractmethod def __call__(self, record, **kwargs) -> tuple: pass def __add__(self, other: 'Predicate') -> 'CompoundPredicate': if isinstance(other, CompoundPredicate): return CompoundPredicate((self,) + tuple(other)) elif isinstance(other, Predicate): return CompoundPredicate((self, other)) else: raise ValueError('Can only combine predicates') class SimplePredicate(Predicate): type = "SimplePredicate" def __init__(self, func: Callable[[Any], Tuple[str, ...]], field: str): self.func = func self.__name__ = "(%s, %s)" % (func.__name__, field) self.field = field def __call__(self, record: RecordDict, **kwargs) -> Tuple[str, ...]: column = record[self.field] if column: return self.func(column) else: return () class StringPredicate(SimplePredicate): def __call__(self, record: RecordDict, **kwargs): column = record[self.field] if column: return self.func(" ".join(strip_punc(column).split())) else: return () class ExistsPredicate(Predicate): type = "ExistsPredicate" def __init__(self, field): self.__name__ = "(Exists, %s)" % (field,) self.field = field @staticmethod def func(column): if column: return ('1',) else: return ('0',) def __call__(self, record, **kwargs): column = record[self.field] return self.func(column) class IndexPredicate(Predicate): def __init__(self, threshold, field): self.__name__ = '(%s, %s)' % (threshold, field) self.field = field self.threshold = threshold self.index = None def __getstate__(self): odict = self.__dict__.copy() odict['index'] = None return odict def __setstate__(self, d): self.__dict__.update(d) # backwards compatibility if not hasattr(self, 'index'): self.index = None def reset(self): ... 
def bust_cache(self): self._cache = {} class CanopyPredicate(object): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.canopy = {} self._cache = {} def freeze(self, records): self._cache = {record[self.field]: self(record) for record in records} self.canopy = {} self.index = None def reset(self): self._cache = {} self.canopy = {} self.index = None def __call__(self, record, **kwargs): block_key = None column = record[self.field] if column: if column in self._cache: return self._cache[column] doc = self.preprocess(column) try: doc_id = self.index._doc_to_id[doc] except AttributeError: raise AttributeError("Attempting to block with an index " "predicate without indexing records") if doc_id in self.canopy: block_key = self.canopy[doc_id] else: canopy_members = self.index.search(doc, self.threshold) for member in canopy_members: if member not in self.canopy: self.canopy[member] = doc_id if canopy_members: block_key = doc_id self.canopy[doc_id] = doc_id else: self.canopy[doc_id] = None if block_key is None: return [] else: return [str(block_key)] class SearchPredicate(object): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._cache = {} def freeze(self, records_1, records_2): self._cache = {(record[self.field], False): self(record, False) for record in records_1} self._cache.update({(record[self.field], True): self(record, True) for record in records_2}) self.index = None def reset(self): self._cache = {} self.index = None def __call__(self, record, target=False, **kwargs): column = record[self.field] if column: if (column, target) in self._cache: return self._cache[(column, target)] else: doc = self.preprocess(column) try: if target: centers = [self.index._doc_to_id[doc]] else: centers = self.index.search(doc, self.threshold) except AttributeError: raise AttributeError("Attempting to block with an index " "predicate without indexing records") result = [str(center) for center in centers] self._cache[(column, target)] = result return result else: return () class TfidfPredicate(IndexPredicate): def initIndex(self): self.reset() return tfidf.TfIdfIndex() class TfidfCanopyPredicate(CanopyPredicate, TfidfPredicate): pass class TfidfSearchPredicate(SearchPredicate, TfidfPredicate): pass class TfidfTextPredicate(object): def preprocess(self, doc): return tuple(words(doc)) class TfidfSetPredicate(object): def preprocess(self, doc): return doc class TfidfNGramPredicate(object): def preprocess(self, doc): return tuple(sorted(ngrams(" ".join(strip_punc(doc).split()), 2))) class TfidfTextSearchPredicate(TfidfTextPredicate, TfidfSearchPredicate): type = "TfidfTextSearchPredicate" class TfidfSetSearchPredicate(TfidfSetPredicate, TfidfSearchPredicate): type = "TfidfSetSearchPredicate" class TfidfNGramSearchPredicate(TfidfNGramPredicate, TfidfSearchPredicate): type = "TfidfNGramSearchPredicate" class TfidfTextCanopyPredicate(TfidfTextPredicate, TfidfCanopyPredicate): type = "TfidfTextCanopyPredicate" class TfidfSetCanopyPredicate(TfidfSetPredicate, TfidfCanopyPredicate): type = "TfidfSetCanopyPredicate" class TfidfNGramCanopyPredicate(TfidfNGramPredicate, TfidfCanopyPredicate): type = "TfidfNGramCanopyPredicate" class LevenshteinPredicate(IndexPredicate): def initIndex(self): self.reset() return levenshtein.LevenshteinIndex() def preprocess(self, doc): return " ".join(strip_punc(doc).split()) class LevenshteinCanopyPredicate(CanopyPredicate, LevenshteinPredicate): type = "LevenshteinCanopyPredicate" class LevenshteinSearchPredicate(SearchPredicate, 
LevenshteinPredicate): type = "LevenshteinSearchPredicate" class CompoundPredicate(tuple, Predicate): type = "CompoundPredicate" def __hash__(self): try: return self._cached_hash except AttributeError: h = self._cached_hash = hash(frozenset(self)) return h def __eq__(self, other): return frozenset(self) == frozenset(other) def __call__(self, record, **kwargs): predicate_keys = [predicate(record, **kwargs) for predicate in self] return [ u':'.join( # must escape : to avoid confusion with : join separator b.replace(u':', u'\\:') for b in block_key ) for block_key in itertools.product(*predicate_keys) ] def __add__(self, other: Predicate) -> 'CompoundPredicate': # type: ignore if isinstance(other, CompoundPredicate): return CompoundPredicate(tuple(self) + tuple(other)) elif isinstance(other, Predicate): return CompoundPredicate(tuple(self) + (other,)) else: raise ValueError('Can only combine predicates') def wholeFieldPredicate(field: Any) -> Tuple[str]: """return the whole field""" return (str(field), ) def tokenFieldPredicate(field): """returns the tokens""" return set(words(field)) def firstTokenPredicate(field: str) -> Sequence[str]: first_token = start_word(field) if first_token: return first_token.groups() else: return () def firstTwoTokensPredicate(field: str) -> Sequence[str]: first_two_tokens = two_start_words(field) if first_two_tokens: return first_two_tokens.groups() else: return () def commonIntegerPredicate(field: str) -> Set[str]: """return any integers""" return {str(int(i)) for i in integers(field)} def alphaNumericPredicate(field: str) -> Set[str]: return set(alpha_numeric(field)) def nearIntegersPredicate(field: str) -> Set[str]: """return any integers N, N+1, and N-1""" ints = integers(field) near_ints = set() for char in ints: num = int(char) near_ints.add(str(num - 1)) near_ints.add(str(num)) near_ints.add(str(num + 1)) return near_ints def hundredIntegerPredicate(field: str) -> Set[str]: return {str(int(i))[:-2] + '00' for i in integers(field)} def hundredIntegersOddPredicate(field: str) -> Set[str]: return {str(int(i))[:-2] + '0' + str(int(i) % 2) for i in integers(field)} def firstIntegerPredicate(field: str) -> Sequence[str]: first_token = start_integer(field) if first_token: return first_token.groups() else: return () def ngramsTokens(field: Sequence[Any], n: int) -> Set[str]: grams = set() n_tokens = len(field) for i in range(n_tokens): for j in range(i + n, min(n_tokens, i + n) + 1): grams.add(' '.join(str(tok) for tok in field[i:j])) return grams def commonTwoTokens(field: str) -> Set[str]: return ngramsTokens(field.split(), 2) def commonThreeTokens(field: str) -> Set[str]: return ngramsTokens(field.split(), 3) def fingerprint(field: str) -> Tuple[str]: return (u''.join(sorted(field.split())).strip(),) def oneGramFingerprint(field: str) -> Tuple[str]: return (u''.join(sorted(set(ngrams(field.replace(' ', ''), 1)))).strip(),) def twoGramFingerprint(field: str) -> Tuple[str, ...]: if len(field) > 1: return (u''.join(sorted(gram.strip() for gram in set(ngrams(field.replace(' ', ''), 2)))),) else: return () def commonFourGram(field: str) -> Set[str]: """return 4-grams""" return set(ngrams(field.replace(' ', ''), 4)) def commonSixGram(field: str) -> Set[str]: """return 6-grams""" return set(ngrams(field.replace(' ', ''), 6)) def sameThreeCharStartPredicate(field: str) -> Tuple[str]: """return first three characters""" return initials(field.replace(' ', ''), 3) def sameFiveCharStartPredicate(field: str) -> Tuple[str]: """return first five characters""" return 
initials(field.replace(' ', ''), 5) def sameSevenCharStartPredicate(field: str) -> Tuple[str]: """return first seven characters""" return initials(field.replace(' ', ''), 7) def suffixArray(field): n = len(field) - 4 if n > 0: for i in range(0, n): yield field[i:] def sortedAcronym(field: str) -> Tuple[str]: return (''.join(sorted(each[0] for each in field.split())),) def doubleMetaphone(field): return {metaphone for metaphone in doublemetaphone(field) if metaphone} def metaphoneToken(field): return {metaphone_token for metaphone_token in itertools.chain(*(doublemetaphone(token) for token in set(field.split()))) if metaphone_token} def wholeSetPredicate(field_set): return (str(field_set),) def commonSetElementPredicate(field_set): """return set as individual elements""" return tuple([str(each) for each in field_set]) def commonTwoElementsPredicate(field): sequence = sorted(field) return ngramsTokens(sequence, 2) def commonThreeElementsPredicate(field): sequence = sorted(field) return ngramsTokens(sequence, 3) def lastSetElementPredicate(field_set): return (str(max(field_set)), ) def firstSetElementPredicate(field_set): return (str(min(field_set)), ) def magnitudeOfCardinality(field_set): return orderOfMagnitude(len(field_set)) def latLongGridPredicate(field, digits=1): """ Given a lat / long pair, return the grid coordinates at the nearest base value. e.g., (42.3, -5.4) returns a grid at 0.1 degree resolution of 0.1 degrees of latitude ~ 7km, so this is effectively a 14km lat grid. This is imprecise for longitude, since 1 degree of longitude is 0km at the poles, and up to 111km at the equator. But it should be reasonably precise given some prior logical block (e.g., country). """ if any(field): return (str([round(dim, digits) for dim in field]),) else: return () def orderOfMagnitude(field): if field > 0: return (str(int(round(math.log10(field)))), ) else: return () def roundTo1(field): # thanks http://stackoverflow.com/questions/3410976/how-to-round-a-number-to-significant-figures-in-python abs_num = abs(field) order = int(math.floor(math.log10(abs_num))) rounded = round(abs_num, -order) return (str(int(math.copysign(rounded, field))),)
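# A short sketch of how the predicates above turn a record into block keys;
# assumes the dedupe package (including its compiled cpredicates module) is
# installed. The record below is made up.
record = {'name': 'Jean Luc Picard', 'address': '1701 Enterprise Way'}

name_pred = SimplePredicate(firstTokenPredicate, 'name')
addr_pred = SimplePredicate(commonIntegerPredicate, 'address')
print(name_pred(record))  # ('Jean',)
print(addr_pred(record))  # {'1701'}

# Adding predicates yields a CompoundPredicate whose keys are the escaped,
# colon-joined cross product of the parts:
compound = name_pred + addr_pred
print(compound(record))   # ['Jean:1701']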
src/thug/cli.py
rmoutie/thug-memes
234
142309
from enum import Enum from os import (getcwd, path as osp) import sys import click import cv2 from .conf import (get_config, print_config) from .detect.opencv import HaarCascadeDetector from .meme.basic import Meme from .meme.thug import ThugMeme MEME_RESULT_DIR = getcwd() CONTEXT = dict(help_option_names=['-h', '--help']) class Detector(Enum): OPEN_CV = 'opencv' DLIB = 'dlib' _common_decorators = [ click.version_option(None, '-v', '--version'), click.argument('fpath', type=click.Path(exists=True)), click.argument('txt1'), click.argument('txt2'), click.option( '--override', '-o', type=(str, str), multiple=True, help='Override any configuration option: <option_name> <new_value>.'), click.option( '--show-config', is_flag=True, help='Show the configuration and exit. Takes into account -o options.') ] def add_decorators(decorators): def _add_decorators(func): for decorator in reversed(decorators): func = decorator(func) return func return _add_decorators def _load_configuration(override, show_and_exit): conf = get_config(overrides=override) if show_and_exit: print_config(conf) sys.exit(0) return conf def _form_result_path(orig_path, result_dir, fname_extra=''): fname = osp.basename(orig_path) base, extension = osp.splitext(fname) fname = '{}{}{}'.format(base, fname_extra, extension) return osp.join(result_dir, fname) @click.command(context_settings=CONTEXT) @add_decorators(_common_decorators) def meme(fpath, txt1, txt2, override, show_config): """Generate a normal meme.""" conf = _load_configuration(override, show_config) res_path = _form_result_path( orig_path=osp.abspath(fpath), result_dir=MEME_RESULT_DIR, fname_extra=conf['meme']['meme_result_name_add']) meme = Meme(config=conf['meme'], img_path=fpath, txt1=txt1, txt2=txt2) meme.create(res_file=res_path) @click.command(context_settings=CONTEXT) @add_decorators(_common_decorators) @click.option( '--debug', is_flag=True, help='Show debug information (e.g. the detection results img)') @click.option( '--detector', type=click.Choice([Detector.OPEN_CV.value, Detector.DLIB.value]), default=Detector.OPEN_CV.value, help='Detector to use for finding faces and landmarks.') def thug_meme(fpath, txt1, txt2, override, show_config, debug, detector): """Generate an awesome thug meme.""" fpath = osp.abspath(fpath) conf = _load_configuration(override, show_config) res_path = _form_result_path( orig_path=fpath, result_dir=MEME_RESULT_DIR, fname_extra=conf['meme']['thug_result_name_add']) if detector == Detector.OPEN_CV.value: detector = HaarCascadeDetector(config=conf['detect']) elif detector == Detector.DLIB.value: from .detect.dlib import DlibDetector detector = DlibDetector(config=conf['detect']) thugs = detector.find_thug_landmarks( img_path=osp.abspath(fpath), show_result=debug) meme = ThugMeme( config=conf['meme'], thug_landmarks=thugs, img_path=fpath, txt1=txt1, txt2=txt2) meme.create(res_path) if debug: cv2.waitKey(0) cv2.destroyAllWindows()
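# --- Usage sketch (hypothetical paths and captions) ----------------------------
# The commands above are click entry points, so they can also be exercised
# programmatically with click's test runner; 'photo.jpg' is a placeholder and
# must exist for the click.Path(exists=True) check to pass.
def _run_meme_example():
    from click.testing import CliRunner
    runner = CliRunner()
    result = runner.invoke(meme, ['photo.jpg', 'TOP TEXT', 'BOTTOM TEXT'])
    return result.exit_code, result.output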
judge/detection_perf.py
sunset768541/ctw-baseline
333
142311
<filename>judge/detection_perf.py # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import codecs import json import matplotlib.pyplot as plt import numpy as np import os import plot_tools import settings import subprocess import sys from classification_perf import get_chartjs from jinja2 import Template def main(dt_file_path): makefile = os.path.join(settings.PRODUCTS_ROOT, 'makefile') with open(makefile, 'w') as f: f.write('all: {}\n'.format(settings.DETECTION_EXE)) f.write('{}: ../codalab/evalwrap.cpp ../cppapi/eval_tools.hpp\n'.format(settings.DETECTION_EXE)) f.write('\tg++ -std=c++11 -O2 $< -o $@') args = ['make', '-f', makefile] print(*args) p = subprocess.Popen(args) assert 0 == p.wait() with open(settings.TEST_DETECTION_GT) as f: gt = f.read() args = [settings.DETECTION_EXE, dt_file_path] print(*args) p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE) report_str = p.communicate(gt.encode('utf-8'))[0].decode('utf-8') assert 0 == p.wait() report = json.loads(report_str) assert 0 == report['error'], report['msg'] with codecs.open(settings.PROPOSAL_REPORT if proposal else settings.DETECTION_REPORT, 'w', 'utf-8') as f: json.dump(report, f, ensure_ascii=False, indent=2, sort_keys=True) html_explore(report) show(report) draw(report) def html_explore(report): jdata = [{ 'model_name': 'YOLO_v2', 'performance': { szname: { 'attributes': [ {'n': o['n'], 'recalls': {1: o['recall']}} for o in szattr['attributes'] ], } for szname, szattr in report['performance'].items() }, }] with open('explore_cls.template.html') as f: template = Template(f.read()) with codecs.open(settings.PROPOSAL_EXPLORE if proposal else settings.DETECTION_EXPLORE, 'w', 'utf-8') as f: f.write(template.render({ 'title': 'Explore detection performance', 'chartjs': get_chartjs(), 'performance_all': json.dumps(jdata, sort_keys=True), 'attributes': settings.ATTRIBUTES, })) def show(report): def percentage(x, digit=1): fmt = { 1: '{:4.1f}%', 2: '{:5.2f}%', } return fmt[digit].format(x * 100) with open(settings.STAT_FREQUENCY) as f: frequency = json.load(f) freq_order = [o['text'] for o in frequency] performance = report['performance'] for szname, stat in sorted(performance.items()): print(szname) for k in ('n', 'mAP', 'AP', 'mAP_micro'): x = stat[k] if isinstance(x, float): x = percentage(x) print('{:>4s}'.format(k), '=', x) for i, attr in zip(range(-1, len(settings.ATTRIBUTES)), ['__all__'] + settings.ATTRIBUTES): n = 0 rc = 0 for k, o in enumerate(performance[szname]['attributes']): if i == -1 or int(k) & 2 ** i: n += o['n'] rc += o['recall'] r = 0. if n == 0 else rc / n print('{:13s}'.format(attr), 'n', '=', '{:6d}'.format(n), ',', 'recall', '=', percentage(r)) for char in freq_order[:10]: print(char, percentage(performance[szname]['texts'].get(char, {'AP': 0.})['AP'])) print() def draw(report): def attr_recall(attr_perfs, attr_id): m = len(settings.ATTRIBUTES) n = rc = 0 for k, o in enumerate(attr_perfs): if attr_id == -1 or (attr_id < m and 0 != int(k) & 2 ** attr_id) or (m <= attr_id and 0 == int(k) & 2 ** (attr_id - m)): n += o['n'] rc += o['recall'] return 0. 
if n == 0 else rc / n data = [ [ { 'legend': szname, 'data': [attr_recall(report['performance'][szname]['attributes'], i) for i in range(-1, 2 * len(settings.ATTRIBUTES))], } ] for szname, _ in settings.SIZE_RANGES ] labels = ['all'] + settings.ATTRIBUTES + list(map('~{}'.format, settings.ATTRIBUTES)) with plt.style.context({ 'figure.subplot.left': .05, 'figure.subplot.right': .98, 'figure.subplot.top': .96, 'pdf.fonttype': 42, 'legend.loc': 'upper center', }): plt.figure(figsize=(12, 3)) plt.xlim((.3, .7 + len(labels))) plt.ylim((0., 1.)) plt.grid(which='major', axis='y', linestyle='dotted') plot_tools.draw_bar(data, labels, width=.18, legend_kwargs={'ncol': len(settings.SIZE_RANGES)}) plt.ylabel('recall') plt.savefig(os.path.join(settings.PLOTS_DIR, ('pro' if proposal else 'det') + '_recall_by_attr_size.pdf')) plt.close() with plt.style.context({ 'figure.subplot.left': .10, 'figure.subplot.right': .97, 'figure.subplot.bottom': .10, 'figure.subplot.top': .97, 'pdf.fonttype': 42, 'legend.loc': 'upper right', }): plt.figure(figsize=(5.5, 5.5)) plt.xlim((0., 1.)) plt.ylim((0., 1.)) plt.grid(which='major', axis='both', linestyle='dotted') for szname, stat in sorted(report['performance'].items()): y = [1.] + stat['AP_curve'] + [0.] * (stat['n'] - len(stat['AP_curve'])) x = np.linspace(0, 1, len(y)) plt.plot(x, y, label=szname) plt.legend() plt.xlabel('recall') plt.ylabel('precision') plt.savefig(os.path.join(settings.PLOTS_DIR, ('pro' if proposal else 'det') + '_AP_curve.pdf')) plt.close() with plt.style.context({ 'figure.subplot.left': .10, 'figure.subplot.right': .97, 'figure.subplot.bottom': .10, 'figure.subplot.top': .97, 'pdf.fonttype': 42, 'legend.loc': 'upper right', }): plt.figure(figsize=(5.5, 5.5)) plt.xlim((0., 1.)) plt.ylim((0., 1.)) plt.grid(which='major', axis='both', linestyle='dotted') for szname, stat in sorted(report['performance'].items()): if stat['mAP_curve']: x, y = zip(*stat['mAP_curve']) x = [0.] + list(x) + [x[-1]] y = [y[0]] + list(y) + [0.] else: x, y = [0., 1.], [0., 0.] plt.plot(x, y, label=szname) plt.legend() plt.xlabel('recall') plt.ylabel('precision') plt.savefig(os.path.join(settings.PLOTS_DIR, ('pro' if proposal else 'det') + '_mAP_curve.pdf')) plt.close() if __name__ == '__main__': proposal = 'proposal' in sys.argv[1:] main('../detection/products/proposals.jsonl' if proposal else '../detection/products/detections.jsonl')
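# Invocation note (assumed from the __main__ block above): `proposal` is a
# module-level flag read by main()/show()/draw(), so this script is meant to be
# run directly, e.g.:
#   python detection_perf.py            # evaluate detections
#   python detection_perf.py proposal   # evaluate proposal recall instead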
src/tools/grap_disassembler/disassembler.py
AirbusCyber/grap
171
142312
# !/usr/bin/env python # -*- coding: utf-8 -*- import re import sys import os import multiprocessing import multiprocessing.dummy import signal import io try: from capstone import Cs from capstone import CS_ARCH_X86 from capstone import CS_MODE_32 from capstone import CS_MODE_64 except ImportError: print("Warning: Capstone not found") class Instruction: def __init__(self, offset, va, address, mnemonic, op_str, size, bytes, cache_only): self.offset = offset self.va = va self.address = address self.mnemonic = mnemonic self.op_str = op_str self.size = size self.bytes = bytes self.ifrom = list() # VA of previous instructions self.ito = list() # VA of next instructions self.to_succ = None self.to_other = None self.cache_only = cache_only def add_ifrom(self, inst_offset): self.ifrom.append(inst_offset) def add_ito(self, inst_offset, from_pred=True): self.ito.append(inst_offset) if from_pred: if self.to_succ is None: self.to_succ = inst_offset else: print("Warning: Should only have one successor node.") else: if self.to_other is None: self.to_other = inst_offset else: print("Warning: Should only have one \"other\" node.") def __str__(self): ba = " ".join([hex(h)[2:].zfill(2) for h in self.bytes]) prev = ["0x%08X" % x for x in self.ifrom] next = ["0x%08X" % x for x in self.ito] m = "" if self.mnemonic in ['ret', 'retf', 'jmp', 'jmpf']: m += "0x%08X:\t\t\t%-30s\t%-10s\t%-40s\t%-35s%-35s\n" % ( self.va, ba, self.mnemonic, self.op_str, next, prev) m += "0x%08X ; --------------------------------------------------------------\n" % (self.va) m += "0x%08X " % (self.va) elif len(self.ifrom) >= 2: m += "0x%08X\n" % (self.va) m += "0x%08X loc_%08X:\n" % (self.va, self.va) m += "0x%08X:\t\t\t%-30s\t%-10s\t%-40s\t%-35s%-35s" % ( self.va, ba, self.mnemonic, self.op_str, next, prev) else: m += "0x%08X:\t\t\t%-30s\t%-10s\t%-40s\t%-35s%-35s" % ( self.va, ba, self.mnemonic, self.op_str, next, prev) return m class GenericDisassembler: def __init__(self, arch, mode): self.arch = arch self.mode = mode self.capstone = Cs(self.arch, self.mode) self.prologues = { # Triple backslash (\\\) are needed to escape bytes in the compiled regex CS_MODE_32: [ b"\x55\x89\xE5", # push ebp & mov ebp, esp b"\x55\x8B\xEC", # push ebp & mov ebp, esp b"\x55\x8b\x6c\x24", # push ebp & mov ebp, [esp+?] 
], CS_MODE_64: [ b"\x55\x48\x89\xE5", # push rbp & mov rbp, rsp ] }[mode] self.conditional_jmp_mnemonics = {'jz', 'je', 'jcxz', 'jecxz', 'jrcxz', 'jnz', 'jp', 'jpe', 'jnp', 'ja', 'jae', 'jb', 'jbe', 'jg', 'jge', 'jl', 'jle', 'js', 'jns', 'jo', 'jno', 'jecxz', 'loop', 'loopne', 'loope', 'jne'} self.x86_32_registers = {'eax', 'ebx', 'ecx', 'edx', 'esi', 'edi', 'esp', 'ebp'} self.max_instruction_size = 16 def linear_sweep_cache(self, data, offset, insts, bin_instance, verbose=False): section_offset_last = self.get_section_offset_last(bin_instance, offset) curr_offset = offset try: inst_va = self.get_va_from_offset(bin_instance, curr_offset) instructions = self.capstone.disasm_lite(data[offset:], inst_va) curr_offset = offset for (address, size, mnemonic, op_str) in instructions: inst = Instruction( offset=curr_offset, va=inst_va, address=address, mnemonic=mnemonic, op_str=op_str, size=size, bytes=data[curr_offset:curr_offset+size], cache_only=True, ) insts[curr_offset] = inst curr_offset += size inst_va += size if section_offset_last is not None and curr_offset > section_offset_last: break except Exception as e: print("WARNING:", repr(e)) return insts def _dis(self, data, offset, insts, bin_instance, iat_api=dict(), verbose=False, ifrom=None, from_pred=True, is_rva=False): ''' <insts> is a dict like {'offset': <Instruction>} ''' args_queue = [] args_queue.append((offset, ifrom, from_pred)) while args_queue != []: offset, ifrom, from_pred = args_queue.pop(0) if offset is None: continue inst = None if offset in insts: inst = insts[offset] if inst.cache_only: inst.cache_only = False else: if ifrom: inst.add_ifrom(ifrom.offset) insts[ifrom.offset].add_ito(inst.offset, from_pred) continue if inst is None: try: inst_va = self.get_va_from_offset(bin_instance, offset) (address, size, mnemonic, op_str) = next(self.capstone.disasm_lite(data[offset:offset+self.max_instruction_size], inst_va, count=1)) inst = Instruction( offset=offset, va=inst_va, address=address, mnemonic=mnemonic, op_str=op_str, size=size, bytes=data[offset:offset+size], cache_only=False, ) insts[inst.offset] = inst except Exception as e: if verbose: print("WARNING:", repr(e)) continue if ifrom: insts[inst.offset].add_ifrom(ifrom.offset) insts[ifrom.offset].add_ito(inst.offset, from_pred) # No child if inst.mnemonic in ['ret', 'retf']: pass # 1 remote child elif inst.mnemonic in ['jmp', 'jmpf']: if "word ptr [0x" in inst.op_str: iat_va = int(inst.op_str.split('[')[1].split(']')[0], 16) if iat_va in iat_api: inst.op_str = iat_api[iat_va].decode("utf-8") else: try: remote_offset = self.get_offset_from_va(bin_instance, int(inst.op_str, 16)) if remote_offset is not None: args_queue.insert(0, (remote_offset, insts[inst.offset], False)) except Exception as e: if verbose: print("WARNING:", repr(e)) pass # 2 children (next, then remote) - except call elif inst.mnemonic in self.conditional_jmp_mnemonics: next_offset = inst.offset + inst.size args_queue.insert(0, (next_offset, insts[inst.offset], True)) # Call to Imported API (in IAT) # dword ptr [0x........] or qword ptr [0x........] 
if "word ptr [0x" in inst.op_str: iat_va = int(inst.op_str.split('[')[1].split(']')[0], 16) if iat_va in iat_api: inst.op_str = iat_api[iat_va].decode("utf-8") elif inst.op_str in ['eax', 'ebx', 'ecx', 'edx', 'esi', 'edi', 'esp', 'ebp']: pass else: try: remote_offset = self.get_offset_from_va(bin_instance, int(inst.op_str, 16)) except Exception as e: if verbose: print("WARNING:", repr(e)) continue args_queue.insert(1, (remote_offset, insts[inst.offset], False)) # 2 children (next, then remote) - call elif inst.mnemonic in ['call']: next_offset = inst.offset + inst.size remote_offset = None args_queue.insert(0, (next_offset, insts[inst.offset], True)) # Call to Imported API (in IAT) # dword ptr [0x........] or qword ptr [0x........] if "word ptr [0x" in inst.op_str: iat_va = int(inst.op_str.split('[')[1].split(']')[0], 16) if iat_va in iat_api: inst.op_str = iat_api[iat_va].decode("utf-8") elif inst.op_str in self.x86_32_registers: pass else: try: remote_offset = self.get_offset_from_va(bin_instance, int(inst.op_str, 16)) except Exception as e: if verbose: print("WARNING:", repr(e)) pass if remote_offset: args_queue.insert(1, (remote_offset, insts[inst.offset], False)) # 1 child (next) - basic instruction else: next_offset = inst.offset + inst.size args_queue.insert(0, (next_offset, insts[inst.offset], True)) return insts def dis_prologues(self, data, bin_instance, iat_api, insts, verbose): prologues_re = "|".encode().join(self.prologues) compiled_re = re.compile(prologues_re) for m in compiled_re.finditer(data): function_offset = m.start() inst = insts.get(function_offset, None) if inst is None or inst.cache_only: insts = self._dis(data=data, offset=function_offset, iat_api=iat_api, bin_instance=bin_instance, insts=insts, verbose=verbose) return insts def dis(self, data, offset, iat_api, bin_instance, verbose=False): ''' data: raw binary of full PE va: va of the instruction located at <data[index]> iat_api: dict of imported API like {VA_IN_IAT: API_NAME} ''' insts = dict() insts = self.linear_sweep_cache(data=data, offset=offset, insts=insts, bin_instance=bin_instance, verbose=verbose) insts = self._dis(data=data, offset=offset, iat_api=iat_api, bin_instance=bin_instance, insts=insts, verbose=verbose) # Exploration of the exported functions self._dis_exported_funcs(bin_instance=bin_instance, insts=insts, data=data, verbose=verbose, iat_api=iat_api) # Search for unrecognized functions from their prolog function insts = self.dis_prologues(data=data, bin_instance=bin_instance, iat_api=iat_api, insts=insts, verbose=verbose) return insts def display(self, insts, offset_from=0): for offset, inst in sorted(insts.items()): if offset >= offset_from: print(inst) def export_to_dot(self, insts, oep_offset, displayable=True): ''' Export the intruction graph to DOT format ''' nodes = io.StringIO() edges = io.StringIO() dot = io.StringIO() header = "digraph G {\n" footer = "}" if displayable: for offset, inst in sorted(insts.items()): if not inst.cache_only: if inst.op_str == "": inst_str = "%s" % inst.mnemonic else: inst_str = "%s %s" % (inst.mnemonic, inst.op_str) if offset != oep_offset: nodes.write(('"%X" [label="%s", address="0x%X", inst="%s", ' 'style="", shape=box, fillcolor="white"]\n') % ( inst.va, "%016X: %s %s" % (inst.va, inst.mnemonic, inst.op_str), inst.va, inst_str )) else: nodes.write(('"%X" [label="%s", address="0x%X", inst="%s", ' 'style="", shape=box, fillcolor="white", root=true]\n') % ( inst.va, "%016X: %s %s" % (inst.va, inst.mnemonic, inst.op_str), inst.va, inst_str )) if 
inst.to_succ is not None:
                        edges.write(('"%X" -> "%X" [label=0, color=%s, child_number=1]\n') % (
                            inst.va,
                            insts[inst.to_succ].va,
                            "black"
                        ))
                    if inst.to_other is not None:
                        edges.write(('"%X" -> "%X" [label=1, color=%s, child_number=2]\n') % (
                            inst.va,
                            insts[inst.to_other].va,
                            "red"
                        ))
        else:
            for offset, inst in sorted(insts.items()):
                if not inst.cache_only:
                    if inst.op_str == "":
                        inst_str = "%s" % inst.mnemonic
                    else:
                        inst_str = "%s %s" % (inst.mnemonic, inst.op_str)

                    if offset != oep_offset:
                        nodes.write(('"%X" [inst="%s", address="0x%X"]\n') % (
                            inst.va,
                            inst_str,
                            inst.va
                        ))
                    else:
                        nodes.write(('"%X" [inst="%s", address="0x%X", root=true]\n') % (
                            inst.va,
                            inst_str,
                            inst.va
                        ))

                    if inst.to_succ is not None:
                        edges.write(('"%X" -> "%X" [child_number=1]\n') % (inst.va, insts[inst.to_succ].va))
                    if inst.to_other is not None:
                        edges.write(('"%X" -> "%X" [child_number=2]\n') % (inst.va, insts[inst.to_other].va))

        dot.write(header)
        dot.write(nodes.getvalue())
        dot.write(edges.getvalue())
        dot.write(footer)

        return dot.getvalue()


class PEDisassembler(GenericDisassembler):
    def get_offset_from_rva(self, pe, rva):
        remote_offset = pe.get_offset_from_rva(rva)
        return remote_offset

    def get_offset_from_va(self, pe, rva):
        return self.get_offset_from_rva(pe, rva - self.get_image_base_rva(pe))

    def get_rva_from_offset(self, pe, offset):
        return pe.get_rva_from_offset(offset)

    def get_va_from_offset(self, pe, offset):
        return self.get_rva_from_offset(pe, offset) + self.get_image_base_rva(pe)

    def get_image_base_rva(self, pe):
        return pe.OPTIONAL_HEADER.ImageBase

    def get_section_offset_last(self, pe, offset):
        for section in pe.sections:
            section_begin = section.PointerToRawData
            section_last = section.PointerToRawData + section.SizeOfRawData - 1
            if section_begin <= offset <= section_last:
                return section_last
        print("WARNING: Could not determine current section boundaries.")
        return None

    def _dis_exported_funcs(self, bin_instance, insts, data, verbose, iat_api=dict()):
        """
        Disassemble all the exported functions.

        Args:
            pe (PE) : PE Object
            insts (Dict) : Dictionary of instructions
        """
        # Export table
        try:
            export_table = bin_instance.DIRECTORY_ENTRY_EXPORT.symbols
        except Exception as e:
            if verbose:
                print("WARNING:", repr(e))
            export_table = None

        if export_table is not None:
            for exp in export_table:
                offset = bin_instance.get_offset_from_rva(exp.address)
                # insts is keyed by file offset, so look the entry up by offset
                inst = insts.get(offset, None)
                if verbose:
                    va = self.get_va_from_offset(bin_instance, offset)
                    print("Export:", hex(exp.address), hex(va), exp.name.decode("utf-8"), exp.ordinal)
                if inst is None or inst.cache_only:
                    insts = self._dis(data=data, offset=offset, iat_api=iat_api,
                                      bin_instance=bin_instance, insts=insts, verbose=verbose)


class ELFDisassembler(GenericDisassembler):
    def __init__(self, arch, mode, elf):
        GenericDisassembler.__init__(self, arch, mode)

        # We cache ELF properties for faster access
        self.image_base_rva = None
        for section in elf.iter_sections():
            self.image_base_rva = section['sh_addr'] - section['sh_offset']
            break

        self.n_segments = 0
        self.seg_offset_low = []
        self.seg_offset_high = []
        self.seg_rva_low = []
        self.seg_rva_high = []
        self.seg_pvaddr = []
        self.seg_poffset = []
        self.seg_pvaddr_minus_poffset = []

        for segment in elf.iter_segments():
            self.n_segments += 1
            self.seg_offset_low.append(segment['p_offset'])
            self.seg_offset_high.append(segment['p_offset'] + segment['p_filesz'])
            self.seg_rva_low.append(segment['p_vaddr'])
            self.seg_rva_high.append(segment['p_vaddr'] + segment['p_memsz'])
            self.seg_pvaddr_minus_poffset.append(segment['p_vaddr'] - segment['p_offset'])

    def get_offset_from_rva(self, elf, rva):
        for s in range(self.n_segments):
            if self.seg_rva_low[s] <= rva < self.seg_rva_high[s]:
                return rva - self.seg_pvaddr_minus_poffset[s]
        return None

    def get_offset_from_va(self, elf, va):
        return self.get_offset_from_rva(elf, va - self.get_image_base_rva(elf))

    def get_rva_from_offset(self, elf, offset):
        for s in range(self.n_segments):
            if self.seg_offset_low[s] <= offset < self.seg_offset_high[s]:
                return self.seg_pvaddr_minus_poffset[s] + offset
        return None

    def get_va_from_offset(self, elf, offset):
        return self.get_rva_from_offset(elf, offset) + self.image_base_rva

    def get_image_base_rva(self, elf):
        return self.image_base_rva

    def get_section_offset_last(self, elf, offset):
        for s in range(self.n_segments):
            segment_begin = self.seg_offset_low[s]
            segment_last = self.seg_offset_high[s] - 1
            if segment_begin <= offset <= segment_last:
                return segment_last
        print("WARNING: Could not determine current segment boundaries.")
        return None

    def _dis_exported_funcs(self, data, bin_instance, insts, verbose):
        """
        Disassemble all the exported functions.
Args: elf (ELFFile) : ELF Object insts (Dict) : Dictionary of instructions """ image_base = self.get_image_base_rva(bin_instance) if bin_instance.get_section_by_name('.dynsym') is not None: # Dynsym for sym in bin_instance.get_section_by_name('.dynsym').iter_symbols(): info = sym.entry # If the symbol is an exported function if info.st_info['type'] == 'STT_FUNC' and \ info.st_info['bind'] == 'STB_GLOBAL': # If this is a new non-empty function inst = insts.get(info.st_value, None) if info.st_value != 0 and (inst is None or inst.cache_only): offset = self.get_offset_from_rva( bin_instance, (info.st_value - image_base) ) if verbose: print('Func %s found at offset 0x%08X, RVA: 0x%08X' % ( sym.name, offset, info.st_value )) insts = self._dis(data=data, offset=offset, bin_instance=bin_instance, insts=insts, verbose=verbose) def dis(self, data, offset, iat_api, bin_instance, verbose=False): ''' data: raw binary of full elf va: va of the instruction located at <data[index]> iat_api: dict of imported API like {VA_IN_IAT: API_NAME} ''' insts = dict() insts = self.linear_sweep_cache(data=data, offset=offset, insts=insts, bin_instance=bin_instance, verbose=verbose) insts = self._dis(data=data, offset=offset, bin_instance=bin_instance, insts=insts, verbose=verbose) # Function 'start' jumps on function 'main' with a dynamic jump. 'main' address is given in argument # so we get that argument and we continue to disassemble ''' .text:0000000000XXX89F mov r8, offset fini ; fini .text:0000000000XXX8A6 mov rcx, offset init ; init .text:0000000000XXX8AD mov rdi, offset main ; main .text:0000000000XXX8B4 call ___libc_start_main Note that the 'mov' can also be 'lea': lea rdi, [rip + 0x3baa] lea rdi, [rip + 0x3b33] lea rdi, [rip - 0x1106] call cs:__libc_start_main_ptr ''' for offset, inst in sorted(insts.items()): # mov r8, offset fini ; fini i1 = inst # mov rcx, offset init ; init if len(i1.ito) != 1: continue i2 = insts[i1.ito[0]] # mov rdi, offset main ; main # mov rcx, offset init ; init if len(i2.ito) != 1: continue i3 = insts[i2.ito[0]] # call ___libc_start_main # mov rcx, offset init ; init if len(i3.ito) != 1: continue i4 = insts[i3.ito[0]] all_mov = i1.mnemonic == "mov" and i2.mnemonic == "mov" and i3.mnemonic == "mov" all_lea = i1.mnemonic == "lea" and i2.mnemonic == "lea" and i3.mnemonic == "lea" if (all_mov or all_lea) and i4.mnemonic == "call": pass else: continue try: if all_mov: rva_fini = int(i1.op_str.split(", 0x")[1], 16) rva_init = int(i2.op_str.split(", 0x")[1], 16) rva_main = int(i3.op_str.split(", 0x")[1], 16) else: # Then all_lea if "+" in i1.op_str: rva_fini = i2.address + int(i1.op_str.split("+ 0x")[1][:-1], 16) else: # "-" in i1.op_str: rva_fini = i2.address - int(i1.op_str.split("- 0x")[1][:-1], 16) if "+" in i2.op_str: rva_init = i3.address + int(i2.op_str.split("+ 0x")[1][:-1], 16) else: # "-" in i2.op_str: rva_init = i3.address - int(i2.op_str.split("- 0x")[1][:-1], 16) if "+" in i3.op_str: rva_main = i4.address + int(i3.op_str.split("+ 0x")[1][:-1], 16) else: # "-" in i3.op_str: rva_main = i4.address - int(i3.op_str.split("- 0x")[1][:-1], 16) insts = self._dis(data=data, offset=self.get_offset_from_rva(bin_instance, rva_fini), bin_instance=bin_instance, insts=insts, verbose=verbose) insts = self._dis(data=data, offset=self.get_offset_from_rva(bin_instance, rva_init), bin_instance=bin_instance, insts=insts, verbose=verbose) insts = self._dis(data=data, offset=self.get_offset_from_rva(bin_instance, rva_main), bin_instance=bin_instance, insts=insts, verbose=verbose) break except 
Exception as e: if verbose: print("WARNING:", repr(e)) continue # Exploration of the exported functions self._dis_exported_funcs(data=data, bin_instance=bin_instance, insts=insts, verbose=verbose) # Search for unrecognized functions from their prolog function insts = self.dis_prologues(data=data, bin_instance=bin_instance, iat_api=iat_api, insts=insts, verbose=verbose) return insts class RawDisassembler(GenericDisassembler): def get_offset_from_rva(self, raw, rva): return rva def get_offset_from_va(self, raw, rva): return rva - self.get_image_base_rva(raw) def get_rva_from_offset(self, raw, offset): return offset def get_va_from_offset(self, raw, offset): return self.get_rva_from_offset(raw, offset) + self.get_image_base_rva(raw) def get_image_base_rva(self, raw): return 0 def _dis_exported_funcs(self, bin_instance, insts, data, verbose, iat_api=dict()): return def write_to_file(path, data): try: f = open(path, "w") f.write(data) f.close() except: print("WARNING: Could not write data to", path) def disassemble_pe(pe_data = None, pe_path = None, dot_path = None, print_listing=False, readable=False, verbose=False): if pe_data is None and pe_path is None: print("ERROR: Missing PE path or data.") return None if pe_data is None: pe_data = open(pe_path, "rb").read() try: import pefile pe = pefile.PE(data=pe_data) except Exception as e: if verbose: print("WARNING:", repr(e)) print("ERROR: pefile could not parse PE.") return None arch = CS_ARCH_X86 is_32 = pe.FILE_HEADER.Characteristics & 0x0100 mode = CS_MODE_32 if is_32 else CS_MODE_64 oep_rva = pe.OPTIONAL_HEADER.AddressOfEntryPoint code_section = pe.get_section_by_rva(oep_rva) if code_section is None: print("ERROR: pefile could not find code section.") return None oep_offset = oep_rva - code_section.VirtualAddress + code_section.PointerToRawData iat_dict = dict() try: import_table = pe.DIRECTORY_ENTRY_IMPORT except Exception as e: if verbose: print("WARNING:", repr(e)) import_table = None if import_table is not None: for entry in import_table: for imp in entry.imports: if entry.dll is None: entry_str = "".encode("utf-8") else: entry_str = entry.dll if imp.name is None: imp_str = "".encode("utf-8") else: imp_str = imp.name iat_dict[imp.address] = entry_str + ".".encode("utf-8") + imp_str if verbose: for k in iat_dict: print("Import:", hex(k), iat_dict[k].decode("utf-8")) disass = PEDisassembler(arch=arch, mode=mode) insts = disass.dis(data=pe_data, offset=oep_offset, iat_api=iat_dict, bin_instance=pe, verbose=verbose) if dot_path is not None: dot = disass.export_to_dot(insts=insts, oep_offset=oep_offset, displayable=readable) write_to_file(dot_path, dot) if print_listing: disass.display(insts, offset_from=0) return True def disassemble_elf(elf_data = None, elf_path = None, dot_path = None, print_listing=False, readable=False, verbose=False): if elf_path is None: print("ERROR: Missing ELF path.") return None from elftools.elf.elffile import ELFFile if elf_data is None: elf_data = open(elf_path, "rb").read() elf = ELFFile(io.BytesIO(elf_data)) arch = CS_ARCH_X86 mode = CS_MODE_64 if elf.elfclass == 64 else CS_MODE_32 oep_rva = elf.header.e_entry def get_offset_from_rva(elf, offset): for section in elf.iter_sections(): try: if section['sh_addr'] <= oep_rva < section['sh_addr'] + section['sh_size']: return section['sh_offset'] + (oep_rva - section['sh_addr']) except Exception as e: if verbose: print("WARNING:", repr(e)) continue return None oep_offset = get_offset_from_rva(elf, oep_rva) if oep_offset is None: print("ERROR: Cannot retrieve entry 
point offset from RVA (0x%08X)." % (elf.header.e_entry))
        return None

    disass = ELFDisassembler(arch=arch, mode=mode, elf=elf)
    insts = disass.dis(data=elf_data, offset=oep_offset, iat_api={}, bin_instance=elf, verbose=verbose)

    if dot_path is not None:
        dot = disass.export_to_dot(insts=insts, oep_offset=oep_offset, displayable=readable)
        write_to_file(dot_path, dot)

    if print_listing:
        disass.display(insts, offset_from=0)

    return True


def disassemble_raw(raw_data=None, raw_path=None, dot_path=None, print_listing=False,
                    readable=False, raw_64=False, entrypoint=None, verbose=False):
    if raw_data is None and raw_path is None:
        print("ERROR: Missing raw binary path or data.")
        return None

    if raw_data is None:
        raw_data = open(raw_path, "rb").read()

    arch = CS_ARCH_X86
    mode = CS_MODE_64 if raw_64 else CS_MODE_32

    if entrypoint is not None:
        oep_offset = entrypoint
    else:
        oep_offset = 0

    iat_dict = dict()
    disass = RawDisassembler(arch=arch, mode=mode)
    insts = disass.dis(data=raw_data, offset=oep_offset, iat_api=iat_dict, bin_instance=None, verbose=verbose)

    if dot_path is not None:
        dot = disass.export_to_dot(insts=insts, oep_offset=oep_offset, displayable=readable)
        write_to_file(dot_path, dot)

    if print_listing:
        disass.display(insts, offset_from=0)

    return True


def disassemble_file(bin_data=None, dir_path=None, bin_path=None, dot_path=None,
                     print_listing=False, readable=False, raw=False, raw_64=False,
                     entrypoint=None, verbose=False, use_existing=False, skip_disassembly=False):
    return_path = dot_path
    if dir_path is not None:
        # Binary file comes from a directory being recursively traversed: return the dir path
        return_path = dir_path

    if skip_disassembly or (use_existing and os.path.exists(dot_path)):
        return return_path

    if verbose:
        print("Disassembling", bin_path)

    if bin_data is None:
        if bin_path is None:
            return None
        bin_data = open(bin_path, "rb").read()

    # Compare raw bytes for the magic numbers: decoding arbitrary binary data
    # as ASCII can raise UnicodeDecodeError
    if raw:
        if disassemble_raw(raw_data=bin_data, raw_path=bin_path, dot_path=dot_path,
                           print_listing=print_listing, readable=readable, raw_64=raw_64,
                           entrypoint=entrypoint, verbose=verbose):
            return return_path
    elif bin_data[0:2] == b"MZ":
        if disassemble_pe(pe_data=bin_data, pe_path=bin_path, dot_path=dot_path,
                          print_listing=print_listing, readable=readable, verbose=verbose):
            return return_path
    elif bin_data[0:4] == b"\x7fELF":
        if disassemble_elf(elf_data=bin_data, elf_path=bin_path, dot_path=dot_path,
                           print_listing=print_listing, readable=readable, verbose=verbose):
            return return_path
    else:
        if verbose:
            print(("WARNING: Test file " + bin_path + " does not seem to be a PE/ELF or dot file. 
Use raw option if raw file.")) return None def disas_worker(arg): return disassemble_file(bin_path=arg[0], dir_path=arg[1], dot_path=arg[2], print_listing=arg[3], readable=arg[4], verbose=arg[5], raw=arg[6], raw_64=arg[7], use_existing=arg[8], skip_disassembly=arg[9]) def timeout_worker(*arg): # One thread to process this file, with a timeout p = multiprocessing.dummy.Pool(1) res = p.apply_async(disas_worker, arg) try: out = res.get(timeout=arg[0][-1]) p.close() except multiprocessing.TimeoutError: print("WARNING: Disassembly timeout for", arg[0][0]) p.terminate() p.close() out = None return out def disassemble_files(path_dir_list, dot_path_suffix, dot_dir=None, multiprocess=True, n_processes=4, print_listing=False, readable=False, raw=False, raw_64=False, verbose=False, use_existing=False, timeout=0, skip_disassembly=False): dot_path_list = [] arg_list = [] if path_dir_list is not None and path_dir_list != []: if multiprocess: if isinstance(timeout, int) or (isinstance(timeout, str) and timeout.isdigit()): i = int(timeout) if i == 0: timeout_sec = 31536000 else: timeout_sec = i else: timeout_sec = 31536000 for path, dir_arg_path in path_dir_list: if dot_dir is None: dot_path = path + dot_path_suffix else: dot_path = os.path.join(dot_dir, os.path.basename(path) + dot_path_suffix) arg_list.append((path, dir_arg_path, dot_path, print_listing, readable, verbose, raw, raw_64, use_existing, skip_disassembly, timeout_sec)) original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN) # 8 tasks per child max: regenerate workers to free "lost" memory # TODO: make sure memory is freed when timeout is reached pool = multiprocessing.Pool(processes=min(n_processes, len(arg_list)), maxtasksperchild=8) signal.signal(signal.SIGINT, original_sigint_handler) ret = None try: res = pool.map_async(timeout_worker, arg_list, chunksize=1) # Need timeout (one year) so SIGINT is not ignored ret = res.get(timeout=31536000) except KeyboardInterrupt: pool.terminate() else: pool.close() if ret is not None: dot_path_list = [p for p in ret if p is not None] else: for path, dir_path in path_dir_list: r = disassemble_file(bin_path=path, dir_path=dir_path, dot_path=path+dot_path_suffix, print_listing=print_listing, readable=readable, raw=raw, raw_64=raw_64, verbose=verbose, use_existing=use_existing, skip_disassembly=skip_disassembly) if r is not None: dot_path_list.append(r) return dot_path_list if __name__ == "__main__": if len(sys.argv) == 2: sys.setrecursionlimit(1000000) bin_path = sys.argv[1] dot_path = bin_path + ".grapcfg" disassemble_file(bin_path=bin_path, dot_path=dot_path, verbose=True)
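# --- Usage sketch (the binary path is a placeholder) ---------------------------
# Mirrors the __main__ entry point above: disassemble one binary and export its
# control-flow graph in DOT format via disassemble_file().
def _single_file_example():
    dot_path = disassemble_file(bin_path='sample.exe',
                                dot_path='sample.exe.grapcfg',
                                readable=True,   # human-readable DOT node labels
                                verbose=True)
    if dot_path is not None:
        print('CFG written to', dot_path)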
.jenkins/pytorch/print_sccache_log.py
Stonepia/pytorch
206
142335
import sys

log_file_path = sys.argv[1]
with open(log_file_path) as f:
    lines = f.readlines()

for line in lines:
    # Ignore errors from CPU instruction set or symbol existence testing
    keywords = ['src.c', 'CheckSymbolExists.c']
    if all(keyword not in line for keyword in keywords):
        # lines keep their trailing '\n', so suppress print()'s own newline
        print(line, end='')
applications/ParticleMechanicsApplication/python_scripts/particle_json_output_process.py
lkusch/Kratos
778
142342
from __future__ import print_function, absolute_import, division #makes KratosMultiphysics backward compatible with python 2.6 and 2.7 # Importing the Kratos Library import KratosMultiphysics from KratosMultiphysics.json_utilities import read_external_json, write_external_json # Importing the base class from KratosMultiphysics.json_output_process import JsonOutputProcess def Factory(settings, Model): if(type(settings) != KratosMultiphysics.Parameters): raise Exception("Expected input shall be a Parameters object, encapsulating a json string") return ParticleJsonOutputProcess(Model, settings["Parameters"]) # All the processes python processes should be derived from "Process" class ParticleJsonOutputProcess(JsonOutputProcess): def ExecuteBeforeSolutionLoop(self): data = {} data["TIME"] = [] count = 0 # Material points values for mp in self.sub_model_part.Elements: compute = self.__check_flag(mp) if (compute == True): if (self.resultant_solution == False): data["PARTICLE_" + str(mp.Id)] = {} else: data["RESULTANT"] = {} for i in range(self.params["gauss_points_output_variables"].size()): out = self.params["gauss_points_output_variables"][i] variable_name = out.GetString() variable_type = KratosMultiphysics.KratosGlobals.GetVariableType(variable_name) if (variable_type == "Double" or variable_type == "Integer" or variable_type == "Component"): if (self.resultant_solution == False): data["PARTICLE_" + str(mp.Id)][variable_name] = [] else: if (count == 0): data["RESULTANT"][variable_name] = [] elif variable_type == "Array": if (KratosMultiphysics.KratosGlobals.GetVariableType(variable_name + "_X") == "Double"): if (self.resultant_solution == False): data["PARTICLE_" + str(mp.Id)][variable_name + "_X"] = [] data["PARTICLE_" + str(mp.Id)][variable_name + "_Y"] = [] data["PARTICLE_" + str(mp.Id)][variable_name + "_Z"] = [] else: if (count == 0): data["RESULTANT"][variable_name + "_X"] = [] data["RESULTANT"][variable_name + "_Y"] = [] data["RESULTANT"][variable_name + "_Z"] = [] else: if (self.resultant_solution == False): data["PARTICLE_" + str(mp.Id)][variable_name] = [] else: if (count == 0): data["RESULTANT"][variable_name] = [] elif variable_type == "Vector": if (self.resultant_solution == False): data["PARTICLE_" + str(mp.Id)][variable_name] = [] else: if (count == 0): data["RESULTANT"][variable_name] = [] count += 1 write_external_json(self.output_file_name, data) def ExecuteFinalizeSolutionStep(self): data = read_external_json(self.output_file_name) time = self.sub_model_part.ProcessInfo.GetValue(KratosMultiphysics.TIME) dt = self.sub_model_part.ProcessInfo.GetValue(KratosMultiphysics.DELTA_TIME) self.time_counter += dt if self.time_counter > self.frequency: self.time_counter = 0.0 data["TIME"].append(time) count = 0 # Material points values for mp in self.sub_model_part.Elements: compute = self.__check_flag(mp) if (compute == True): for i in range(self.params["gauss_points_output_variables"].size()): out = self.params["gauss_points_output_variables"][i] variable_name = out.GetString() variable = KratosMultiphysics.KratosGlobals.GetVariable(variable_name) variable_type = KratosMultiphysics.KratosGlobals.GetVariableType(variable_name) values_vector = mp.CalculateOnIntegrationPoints(variable, self.sub_model_part.ProcessInfo) value = values_vector[0] if (variable_type == "Double" or variable_type == "Integer" or variable_type == "Component"): if (self.resultant_solution == False): data["PARTICLE_" + str(mp.Id)][variable_name].append(value) else: if (count == 0): 
data["RESULTANT"][variable_name].append(value) else: data["RESULTANT"][variable_name][-1] += value elif variable_type == "Array": if (KratosMultiphysics.KratosGlobals.GetVariableType(variable_name + "_X") == "Double"): if (self.resultant_solution == False): data["PARTICLE_" + str(mp.Id)][variable_name + "_X"].append(value[0]) data["PARTICLE_" + str(mp.Id)][variable_name + "_Y"].append(value[1]) data["PARTICLE_" + str(mp.Id)][variable_name + "_Z"].append(value[2]) else: if (count == 0): data["RESULTANT"][variable_name + "_X"].append(value[0]) data["RESULTANT"][variable_name + "_Y"].append(value[1]) data["RESULTANT"][variable_name + "_Z"].append(value[2]) else: data["RESULTANT"][variable_name + "_X"][-1] += value[0] data["RESULTANT"][variable_name + "_Y"][-1] += value[1] data["RESULTANT"][variable_name + "_Z"][-1] += value[2] else: if (self.resultant_solution == False): list = self.__kratos_vector_to__python_list(value) data["PARTICLE_" + str(mp.Id)][variable_name ].append(list) else: aux = 0.0 for index in range(len(value)): aux += value[index] if (count == 0): data["RESULTANT"][variable_name ].append(aux) else: data["RESULTANT"][variable_name ][-1] += aux elif variable_type == "Vector": if (self.resultant_solution == False): list = self.__kratos_vector_to__python_list(value) data["PARTICLE_" + str(mp.Id)][variable_name].append(list) else: if (count == 0): list = self.__kratos_vector_to__python_list(value) data["RESULTANT"][variable_name][-1] += list count += 1 write_external_json(self.output_file_name, data) def __kratos_vector_to__python_list(self, value): list = [] for index in range(len(value)): list.append(value[index]) return list def __check_flag(self, component): if self.flag != None: if component.Is(self.flag) == False: return False return True
tests/nnapi/specs/V1_2/reduce_min_float_nnfw.mod.py
periannath/ONE
255
142354
<reponame>periannath/ONE model = Model() i1 = Input("input", "TENSOR_FLOAT32", "{1, 2, 2, 1}") axis = Parameter("axis", "TENSOR_INT32", "{1}", [2]) keepDims = False output = Output("output", "TENSOR_FLOAT32", "{1, 2, 1}") model = model.Operation("REDUCE_MIN", i1, axis, keepDims).To(output) # Example 1. Input in operand 0, input0 = {i1: # input 0 [2.0, 1.0, 3.0, 4.0]} output0 = {output: # output 0 [1.0, 3.0]} # Instantiate an example Example((input0, output0))
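# Walkthrough (informational): the input has shape {1, 2, 2, 1} and axis 2 is
# reduced with keepDims=False, so the minimum is taken over pairs along
# dimension 2: min(2.0, 1.0) = 1.0 and min(3.0, 4.0) = 3.0, which yields the
# {1, 2, 1} output [1.0, 3.0] declared above.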
coding_interviews/leetcode/medium/subrectangle_queries/subrectangle_queries.py
LeandroTk/Algorithms
205
142395
<filename>coding_interviews/leetcode/medium/subrectangle_queries/subrectangle_queries.py # https://leetcode.com/problems/subrectangle-queries class SubrectangleQueries: def __init__(self, rectangle): self.rectangle = rectangle def updateSubrectangle(self, row1, col1, row2, col2, newValue): for row in range(row1, row2 + 1): for col in range(col1, col2 + 1): self.rectangle[row][col] = newValue def getValue(self, row, col): return self.rectangle[row][col]
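# Usage sketch, following the example from the problem statement:
if __name__ == '__main__':
    rect = SubrectangleQueries([[1, 2, 1], [4, 3, 4], [3, 2, 1], [1, 1, 1]])
    print(rect.getValue(0, 2))              # -> 1
    rect.updateSubrectangle(0, 0, 3, 2, 5)  # fill the whole rectangle with 5
    print(rect.getValue(0, 2))              # -> 5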
examples/show_font_table.py
aathi2002/open-tamil
218
142415
# This code is released to public domain. # It is part of open-tamil project examples. # # Code to show fontbased encoding tables in Open-TAMIL # Ref: https://github.com/Ezhil-Language-Foundation/open-tamil/issues/216 import sys import unicodedata # requires installing fontTools # Ref: https://fonttools.readthedocs.io from fontTools import ttLib from pprint import pprint def show_font_table(fontname): f = ttLib.TTFont(fontname) cmap = f.getBestCmap() pprint(cmap) for k, v in cmap.items(): if v.startswith("uni"): v = chr(int(v[3:], 16)) try: sfx = unicodedata.name(v) except ValueError as e: sfx = None else: sfx = unicodedata.name(v[0]) print(k, "=>", v, "|", sfx) if len(sys.argv) < 2: print("usage: python3 show_font_table.py {TTF filename}") sys.exit(-1) else: show_font_table(sys.argv[1])
tests/constants.py
fraudnet/kibana-docker
256
142416
import os from subprocess import run, PIPE try: version = os.environ['ELASTIC_VERSION'] except KeyError: version = run('./bin/elastic-version', stdout=PIPE).stdout.decode().strip()
Chapter07/transfer_learning_imdb.py
arifmudi/Hands-On-Transfer-Learning-with-Python
756
142445
# -*- coding: utf-8 -*- """ Created on Thu Apr 12 13:33:45 2018 @author: tghosh """ import config from dataloader.loader import Loader from preprocessing.utils import Preprocess, remove_empty_docs from dataloader.embeddings import GloVe from model.cnn_document_model import DocumentModel, TrainingParameters from keras.callbacks import ModelCheckpoint, EarlyStopping import numpy as np import pandas as pd from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.svm import SVC train_params = TrainingParameters('imdb_transfer_tanh_activation', model_file_path = config.MODEL_DIR+ '/imdb/transfer_model_10.hdf5', model_hyper_parameters = config.MODEL_DIR+ '/imdb/transfer_model_10.json', model_train_parameters = config.MODEL_DIR+ '/imdb/transfer_model_10_meta.json', num_epochs=30, batch_size=128) #train_df = Loader.load_imdb_data(directory = 'train') train_df = pd.read_csv(config.IMDB_DATA_CSV + '/movie_reviews_train.csv', encoding='ISO-8859-1') print(train_df.shape) # build TFIDF features on train reviews tv = TfidfVectorizer(use_idf=True, min_df=0.00005, max_df=1.0, ngram_range=(1, 1), stop_words = 'english', sublinear_tf=True) tv_features = tv.fit_transform(train_df['review'].tolist()) #test_df = Loader.load_imdb_data(directory = 'test') test_df = pd.read_csv(config.IMDB_DATA_CSV + '/movie_reviews_test.csv', encoding='ISO-8859-1') print(test_df.shape) corpus = train_df['review'].tolist() target = train_df['sentiment'].tolist() corpus, target = remove_empty_docs(corpus, target) print(len(corpus)) preprocessor = Preprocess(corpus=corpus) corpus_to_seq = preprocessor.fit() #Take only 5% of data for training train_df = train_df.sample(frac=0.05, random_state = train_params.seed) corpus = train_df['review'].tolist() target = train_df['sentiment'].tolist() corpus_to_seq = preprocessor.transform(corpus) test_corpus = test_df['review'].tolist() test_target = test_df['sentiment'].tolist() test_corpus, test_target = remove_empty_docs(test_corpus, test_target) print(len(test_corpus)) test_corpus_to_seq = preprocessor.transform(test_corpus) x_train = np.array(corpus_to_seq) x_test = np.array(test_corpus_to_seq) y_train = np.array(target) y_test = np.array(test_target) print(x_train.shape, y_train.shape) glove=GloVe(50) initial_embeddings = glove.get_embedding(preprocessor.word_index) amazon_review_model = DocumentModel.load_model("C:/Users/tghosh/Work/Data Science/Transfer Learning/Chapter-7/models/amazonreviews/model_06.json") amazon_review_model.load_model_weights("C:/Users/tghosh/Work/Data Science/Transfer Learning/Chapter-7/models/amazonreviews/model_06.hdf5") learned_embeddings = amazon_review_model.get_classification_model().get_layer('imdb_embedding').get_weights()[0] glove.update_embeddings(preprocessor.word_index , np.array(learned_embeddings), amazon_review_model.word_index) initial_embeddings = glove.get_embedding(preprocessor.word_index) imdb_model = DocumentModel(vocab_size=preprocessor.get_vocab_size(), word_index = preprocessor.word_index, num_sentences=Preprocess.NUM_SENTENCES, embedding_weights=initial_embeddings, embedding_regularizer_l2 = 0.0, conv_activation = 'tanh', train_embedding = True, learn_word_conv = False, learn_sent_conv = False, hidden_dims=64, input_dropout=0.1, hidden_layer_kernel_regularizer=0.01, final_layer_kernel_regularizer=0.01) #transfer word & sentence conv filters for l_name in ['word_conv','sentence_conv','hidden_0', 'final']: imdb_model.get_classification_model()\ .get_layer(l_name).set_weights(weights=amazon_review_model 
.get_classification_model() .get_layer(l_name).get_weights()) from keras.optimizers import Adam adam = Adam(lr=0.002) imdb_model.get_classification_model()\ .compile(loss="binary_crossentropy", optimizer='rmsprop', metrics=["accuracy"]) checkpointer = ModelCheckpoint(filepath=train_params.model_file_path, verbose=1, save_best_only=True, save_weights_only=True) early_stop = EarlyStopping(patience=2) imdb_model.get_classification_model().fit(x_train, y_train, batch_size=train_params.batch_size, epochs=train_params.num_epochs, verbose=2,validation_split=0.01, callbacks=[checkpointer]) #imdb_model.load_model_weights(train_params.model_file_path) imdb_model.get_classification_model().evaluate( x_test, y_test, batch_size=train_params.batch_size*10, verbose=2) #imdb_model._save_model(train_params.model_hyper_parameters) #train_params.save() #learned_embeddings = imdb_model.get_classification_model().get_layer('imdb_embedding').get_weights()[0] #embd_change = {} #for word, i in preprocessor.word_index.items(): # embd_change[word] = np.linalg.norm(initial_embeddings[i]-learned_embeddings[i]) #embd_change = sorted(embd_change.items(), key=lambda x: x[1], reverse=True) #embd_change[0:100] #print(len(tv.get_feature_names())) #tv_train_features = tv.transform(corpus) #tv_test_features = tv.transform(test_corpus) # #clf = SVC(C=1,kernel='linear', random_state=1, gamma=0.01) #svm=clf.fit(tv_train_features, target) #preds_test = svm.predict(tv_test_features) # #from sklearn.metrics import classification_report,accuracy_score,confusion_matrix #print(classification_report(y_test, preds_test)) #print(confusion_matrix(y_test, preds_test))
aldjemy/wrapper.py
Piero-Palevsky-OH/aldjemy
255
142456
<reponame>Piero-Palevsky-OH/aldjemy class Wrapper: "Wrapper to disable commit in sqla" def __init__(self, obj): self.obj = obj def __getattr__(self, attr): if attr in ["commit", "rollback"]: return lambda *args, **kwargs: None obj = getattr(self.obj, attr) if attr not in ["cursor", "execute"]: return obj if attr == "cursor": return type(self)(obj) return self.wrapper(obj) def wrapper(self, obj): "Implement if you need to make your customized wrapper" return obj def __call__(self, *args, **kwargs): self.obj = self.obj(*args, **kwargs) return self
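# --- Usage sketch (the connection object below is a made-up stand-in) ----------
# Wrapper proxies any object while turning commit/rollback into no-ops,
# presumably so SQLAlchemy code cannot end the outer transaction; every other
# attribute passes through untouched.
class _FakeConnection:
    def commit(self):
        raise AssertionError('commit() should have been swallowed')

    def execute(self, sql):
        return 'executed: ' + sql

def _wrapper_example():
    conn = Wrapper(_FakeConnection())
    conn.commit()                       # no-op instead of a real commit
    return conn.execute('SELECT 1')     # -> 'executed: SELECT 1'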
env/Lib/site-packages/plotly/validators/scatter3d/_surfaceaxis.py
andresgreen-byte/Laboratorio-1--Inversion-de-Capital
11,750
142466
import _plotly_utils.basevalidators class SurfaceaxisValidator(_plotly_utils.basevalidators.EnumeratedValidator): def __init__(self, plotly_name="surfaceaxis", parent_name="scatter3d", **kwargs): super(SurfaceaxisValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, edit_type=kwargs.pop("edit_type", "calc"), values=kwargs.pop("values", [-1, 0, 1, 2]), **kwargs )
scripts/visualize_model.py
jonahwu/cner
823
142490
import spacy_streamlit import typer def main(models: str, default_text: str): models = [name.strip() for name in models.split(",")] spacy_streamlit.visualize(models, default_text, visualizers=["ner"]) if __name__ == "__main__": try: typer.run(main) except SystemExit: pass
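# Usage note (model name and text are placeholders): this is a streamlit app
# whose CLI arguments are parsed by typer, so it is presumably launched like:
#   streamlit run visualize_model.py -- "en_core_web_sm" "Apple is looking at buying a U.K. startup."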
libs/sqlobject/tests/test_expire.py
scambra/HTPC-Manager
422
142492
from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Expiring, syncing ######################################## class SyncTest(SQLObject): name = StringCol(length=50, alternateID=True, dbName='name_col') def test_expire(): setupClass(SyncTest) SyncTest(name='bob') SyncTest(name='tim') conn = SyncTest._connection b = SyncTest.byName('bob') conn.query("UPDATE sync_test SET name_col = 'robert' WHERE id = %i" % b.id) assert b.name == 'bob' b.expire() assert b.name == 'robert' conn.query("UPDATE sync_test SET name_col = 'bobby' WHERE id = %i" % b.id) b.sync() assert b.name == 'bobby'
jactorch/transforms/bbox/transforms.py
dapatil211/Jacinle
114
142501
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# File   : transforms.py
# Author : <NAME>
# Email  : <EMAIL>
# Date   : 03/03/2018
#
# This file is part of Jacinle.
# Distributed under terms of the MIT license.

import random

import torch
import torchvision.transforms as transforms

import jactorch.transforms.image as jac_transforms

from . import functional as F

__all__ = ["Compose", "Lambda", "ToTensor", "NormalizeBbox", "DenormalizeBbox", "Normalize",
           "Resize", "CenterCrop", "Pad", "RandomCrop", "RandomHorizontalFlip", "RandomVerticalFlip",
           "RandomResizedCrop", "LinearTransformation", "ColorJitter", "RandomRotation",
           "Grayscale", "RandomGrayscale", "PadMultipleOf"]


class Compose(transforms.Compose):
    def __call__(self, img, bbox):
        for t in self.transforms:
            img, bbox = t(img, bbox)
        return img, bbox


class Lambda(transforms.Lambda):
    def __call__(self, img, bbox):
        return self.lambd(img, bbox)


class ToTensor(transforms.ToTensor):
    def __call__(self, img, bbox):
        # TODO(Jiayuan Mao @ 07/23): check whether bboxes are out of the image.
        return super().__call__(img), torch.from_numpy(bbox)


class NormalizeBbox(object):
    def __call__(self, img, bbox):
        return F.normalize_bbox(img, bbox)


class DenormalizeBbox(object):
    def __call__(self, img, bbox):
        return F.denormalize_bbox(img, bbox)


class Normalize(transforms.Normalize):
    def __call__(self, img, bbox):
        return super().__call__(img), bbox


class Resize(transforms.Resize):
    # Assuming bbox coordinates are 0/1-normalized.
    def __call__(self, img, bbox):
        return super().__call__(img), bbox


class CenterCrop(transforms.CenterCrop):
    def __call__(self, img, bbox):
        return F.center_crop(img, bbox, self.size)


class Pad(transforms.Pad):
    def __call__(self, img, bbox):
        return F.pad(img, bbox, self.padding, self.fill)


class RandomCrop(transforms.RandomCrop):
    def __call__(self, img, bbox):
        if self.padding > 0:
            # F.pad returns the (img, bbox) pair; unpack both so the bbox
            # stays aligned with the padded image.
            img, bbox = F.pad(img, bbox, self.padding)

        i, j, h, w = self.get_params(img, self.size)
        return F.crop(img, bbox, i, j, h, w)


class RandomHorizontalFlip(transforms.RandomHorizontalFlip):
    def __call__(self, img, bbox):
        if random.random() < 0.5:
            return F.hflip(img, bbox)
        return img, bbox


class RandomVerticalFlip(transforms.RandomVerticalFlip):
    def __call__(self, img, bbox):
        if random.random() < 0.5:
            return F.vflip(img, bbox)
        return img, bbox


class RandomResizedCrop(transforms.RandomResizedCrop):
    def __call__(self, img, bbox):
        i, j, h, w = self.get_params(img, self.scale, self.ratio)
        return F.resized_crop(img, bbox, i, j, h, w, self.size, self.interpolation)


class Grayscale(transforms.Grayscale):
    def __call__(self, img, bbox):
        return super().__call__(img), bbox


class RandomGrayscale(transforms.RandomGrayscale):
    def __call__(self, img, bbox):
        return super().__call__(img), bbox


class LinearTransformation(transforms.LinearTransformation):
    def __call__(self, tensor, bbox):
        return super().__call__(tensor), bbox


class ColorJitter(transforms.ColorJitter):
    def __call__(self, img, bbox):
        return super().__call__(img), bbox


class RandomRotation(transforms.RandomRotation):
    def __call__(self, img, bbox):
        assert self.degrees[0] == self.degrees[1] == 0
        angle = self.get_params(self.degrees)
        return F.rotate(img, bbox, angle, self.resample, self.expand, self.center)


class PadMultipleOf(jac_transforms.PadMultipleOf):
    def __call__(self, img, coor):
        return F.pad_multiple_of(img, coor, self.multiple)
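# --- Usage sketch (shapes and values are illustrative) --------------------------
# Each transform consumes and returns the (image, bbox) pair, so boxes stay
# aligned with the transformed image; bbox is assumed to be a numpy array of
# normalized box coordinates, since ToTensor calls torch.from_numpy on it.
def _transform_example(pil_image, bbox_array):
    transform = Compose([
        RandomHorizontalFlip(),  # flips the image and mirrors the boxes together
        ToTensor(),              # image -> tensor, bbox ndarray -> tensor
    ])
    return transform(pil_image, bbox_array)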
src/layers.py
AlexHeffner/DropEdge
380
142510
<filename>src/layers.py import math import torch from torch.nn.parameter import Parameter from torch.nn.modules.module import Module from torch import nn import torch.nn.functional as F class GraphConvolutionBS(Module): """ GCN Layer with BN, Self-loop and Res connection. """ def __init__(self, in_features, out_features, activation=lambda x: x, withbn=True, withloop=True, bias=True, res=False): """ Initial function. :param in_features: the input feature dimension. :param out_features: the output feature dimension. :param activation: the activation function. :param withbn: using batch normalization. :param withloop: using self feature modeling. :param bias: enable bias. :param res: enable res connections. """ super(GraphConvolutionBS, self).__init__() self.in_features = in_features self.out_features = out_features self.sigma = activation self.res = res # Parameter setting. self.weight = Parameter(torch.FloatTensor(in_features, out_features)) # Is this the best practice or not? if withloop: self.self_weight = Parameter(torch.FloatTensor(in_features, out_features)) else: self.register_parameter("self_weight", None) if withbn: self.bn = torch.nn.BatchNorm1d(out_features) else: self.register_parameter("bn", None) if bias: self.bias = Parameter(torch.FloatTensor(out_features)) else: self.register_parameter('bias', None) self.reset_parameters() def reset_parameters(self): stdv = 1. / math.sqrt(self.weight.size(1)) self.weight.data.uniform_(-stdv, stdv) if self.self_weight is not None: stdv = 1. / math.sqrt(self.self_weight.size(1)) self.self_weight.data.uniform_(-stdv, stdv) if self.bias is not None: self.bias.data.uniform_(-stdv, stdv) def forward(self, input, adj): support = torch.mm(input, self.weight) output = torch.spmm(adj, support) # Self-loop if self.self_weight is not None: output = output + torch.mm(input, self.self_weight) if self.bias is not None: output = output + self.bias # BN if self.bn is not None: output = self.bn(output) # Res if self.res: return self.sigma(output) + input else: return self.sigma(output) def __repr__(self): return self.__class__.__name__ + ' (' \ + str(self.in_features) + ' -> ' \ + str(self.out_features) + ')' class GraphBaseBlock(Module): """ The base block for Multi-layer GCN / ResGCN / Dense GCN """ def __init__(self, in_features, out_features, nbaselayer, withbn=True, withloop=True, activation=F.relu, dropout=True, aggrmethod="concat", dense=False): """ The base block for constructing DeepGCN model. :param in_features: the input feature dimension. :param out_features: the hidden feature dimension. :param nbaselayer: the number of layers in the base block. :param withbn: using batch normalization in graph convolution. :param withloop: using self feature modeling in graph convolution. :param activation: the activation function, default is ReLu. :param dropout: the dropout ratio. :param aggrmethod: the aggregation function for baseblock, can be "concat" and "add". For "resgcn", the default is "add", for others the default is "concat". 
:param dense: enable dense connection """ super(GraphBaseBlock, self).__init__() self.in_features = in_features self.hiddendim = out_features self.nhiddenlayer = nbaselayer self.activation = activation self.aggrmethod = aggrmethod self.dense = dense self.dropout = dropout self.withbn = withbn self.withloop = withloop self.hiddenlayers = nn.ModuleList() self.__makehidden() if self.aggrmethod == "concat" and dense == False: self.out_features = in_features + out_features elif self.aggrmethod == "concat" and dense == True: self.out_features = in_features + out_features * nbaselayer elif self.aggrmethod == "add": if in_features != self.hiddendim: raise RuntimeError("The dimension of in_features and hiddendim should be matched in add model.") self.out_features = out_features elif self.aggrmethod == "nores": self.out_features = out_features else: raise NotImplementedError("The aggregation method only support 'concat','add' and 'nores'.") def __makehidden(self): # for i in xrange(self.nhiddenlayer): for i in range(self.nhiddenlayer): if i == 0: layer = GraphConvolutionBS(self.in_features, self.hiddendim, self.activation, self.withbn, self.withloop) else: layer = GraphConvolutionBS(self.hiddendim, self.hiddendim, self.activation, self.withbn, self.withloop) self.hiddenlayers.append(layer) def _doconcat(self, x, subx): if x is None: return subx if self.aggrmethod == "concat": return torch.cat((x, subx), 1) elif self.aggrmethod == "add": return x + subx elif self.aggrmethod == "nores": return x def forward(self, input, adj): x = input denseout = None # Here out is the result in all levels. for gc in self.hiddenlayers: denseout = self._doconcat(denseout, x) x = gc(x, adj) x = F.dropout(x, self.dropout, training=self.training) if not self.dense: return self._doconcat(x, input) return self._doconcat(x, denseout) def get_outdim(self): return self.out_features def __repr__(self): return "%s %s (%d - [%d:%d] > %d)" % (self.__class__.__name__, self.aggrmethod, self.in_features, self.hiddendim, self.nhiddenlayer, self.out_features) class MultiLayerGCNBlock(Module): """ Muti-Layer GCN with same hidden dimension. """ def __init__(self, in_features, out_features, nbaselayer, withbn=True, withloop=True, activation=F.relu, dropout=True, aggrmethod=None, dense=None): """ The multiple layer GCN block. :param in_features: the input feature dimension. :param out_features: the hidden feature dimension. :param nbaselayer: the number of layers in the base block. :param withbn: using batch normalization in graph convolution. :param withloop: using self feature modeling in graph convolution. :param activation: the activation function, default is ReLu. :param dropout: the dropout ratio. :param aggrmethod: not applied. :param dense: not applied. """ super(MultiLayerGCNBlock, self).__init__() self.model = GraphBaseBlock(in_features=in_features, out_features=out_features, nbaselayer=nbaselayer, withbn=withbn, withloop=withloop, activation=activation, dropout=dropout, dense=False, aggrmethod="nores") def forward(self, input, adj): return self.model.forward(input, adj) def get_outdim(self): return self.model.get_outdim() def __repr__(self): return "%s %s (%d - [%d:%d] > %d)" % (self.__class__.__name__, self.aggrmethod, self.model.in_features, self.model.hiddendim, self.model.nhiddenlayer, self.model.out_features) class ResGCNBlock(Module): """ The multiple layer GCN with residual connection block. 
""" def __init__(self, in_features, out_features, nbaselayer, withbn=True, withloop=True, activation=F.relu, dropout=True, aggrmethod=None, dense=None): """ The multiple layer GCN with residual connection block. :param in_features: the input feature dimension. :param out_features: the hidden feature dimension. :param nbaselayer: the number of layers in the base block. :param withbn: using batch normalization in graph convolution. :param withloop: using self feature modeling in graph convolution. :param activation: the activation function, default is ReLu. :param dropout: the dropout ratio. :param aggrmethod: not applied. :param dense: not applied. """ super(ResGCNBlock, self).__init__() self.model = GraphBaseBlock(in_features=in_features, out_features=out_features, nbaselayer=nbaselayer, withbn=withbn, withloop=withloop, activation=activation, dropout=dropout, dense=False, aggrmethod="add") def forward(self, input, adj): return self.model.forward(input, adj) def get_outdim(self): return self.model.get_outdim() def __repr__(self): return "%s %s (%d - [%d:%d] > %d)" % (self.__class__.__name__, self.aggrmethod, self.model.in_features, self.model.hiddendim, self.model.nhiddenlayer, self.model.out_features) class DenseGCNBlock(Module): """ The multiple layer GCN with dense connection block. """ def __init__(self, in_features, out_features, nbaselayer, withbn=True, withloop=True, activation=F.relu, dropout=True, aggrmethod="concat", dense=True): """ The multiple layer GCN with dense connection block. :param in_features: the input feature dimension. :param out_features: the hidden feature dimension. :param nbaselayer: the number of layers in the base block. :param withbn: using batch normalization in graph convolution. :param withloop: using self feature modeling in graph convolution. :param activation: the activation function, default is ReLu. :param dropout: the dropout ratio. :param aggrmethod: the aggregation function for the output. For denseblock, default is "concat". :param dense: default is True, cannot be changed. """ super(DenseGCNBlock, self).__init__() self.model = GraphBaseBlock(in_features=in_features, out_features=out_features, nbaselayer=nbaselayer, withbn=withbn, withloop=withloop, activation=activation, dropout=dropout, dense=True, aggrmethod=aggrmethod) def forward(self, input, adj): return self.model.forward(input, adj) def get_outdim(self): return self.model.get_outdim() def __repr__(self): return "%s %s (%d - [%d:%d] > %d)" % (self.__class__.__name__, self.aggrmethod, self.model.in_features, self.model.hiddendim, self.model.nhiddenlayer, self.model.out_features) class InecptionGCNBlock(Module): """ The multiple layer GCN with inception connection block. """ def __init__(self, in_features, out_features, nbaselayer, withbn=True, withloop=True, activation=F.relu, dropout=True, aggrmethod="concat", dense=False): """ The multiple layer GCN with inception connection block. :param in_features: the input feature dimension. :param out_features: the hidden feature dimension. :param nbaselayer: the number of layers in the base block. :param withbn: using batch normalization in graph convolution. :param withloop: using self feature modeling in graph convolution. :param activation: the activation function, default is ReLu. :param dropout: the dropout ratio. :param aggrmethod: the aggregation function for baseblock, can be "concat" and "add". For "resgcn", the default is "add", for others the default is "concat". :param dense: not applied. The default is False, cannot be changed. 
""" super(InecptionGCNBlock, self).__init__() self.in_features = in_features self.out_features = out_features self.hiddendim = out_features self.nbaselayer = nbaselayer self.activation = activation self.aggrmethod = aggrmethod self.dropout = dropout self.withbn = withbn self.withloop = withloop self.midlayers = nn.ModuleList() self.__makehidden() if self.aggrmethod == "concat": self.out_features = in_features + out_features * nbaselayer elif self.aggrmethod == "add": if in_features != self.hiddendim: raise RuntimeError("The dimension of in_features and hiddendim should be matched in 'add' model.") self.out_features = out_features else: raise NotImplementedError("The aggregation method only support 'concat', 'add'.") def __makehidden(self): # for j in xrange(self.nhiddenlayer): for j in range(self.nbaselayer): reslayer = nn.ModuleList() # for i in xrange(j + 1): for i in range(j + 1): if i == 0: layer = GraphConvolutionBS(self.in_features, self.hiddendim, self.activation, self.withbn, self.withloop) else: layer = GraphConvolutionBS(self.hiddendim, self.hiddendim, self.activation, self.withbn, self.withloop) reslayer.append(layer) self.midlayers.append(reslayer) def forward(self, input, adj): x = input for reslayer in self.midlayers: subx = input for gc in reslayer: subx = gc(subx, adj) subx = F.dropout(subx, self.dropout, training=self.training) x = self._doconcat(x, subx) return x def get_outdim(self): return self.out_features def _doconcat(self, x, subx): if self.aggrmethod == "concat": return torch.cat((x, subx), 1) elif self.aggrmethod == "add": return x + subx def __repr__(self): return "%s %s (%d - [%d:%d] > %d)" % (self.__class__.__name__, self.aggrmethod, self.in_features, self.hiddendim, self.nbaselayer, self.out_features) class Dense(Module): """ Simple Dense layer, Do not consider adj. """ def __init__(self, in_features, out_features, activation=lambda x: x, bias=True, res=False): super(Dense, self).__init__() self.in_features = in_features self.out_features = out_features self.sigma = activation self.weight = Parameter(torch.FloatTensor(in_features, out_features)) self.res = res self.bn = nn.BatchNorm1d(out_features) if bias: self.bias = Parameter(torch.FloatTensor(out_features)) else: self.register_parameter('bias', None) self.reset_parameters() def reset_parameters(self): stdv = 1. / math.sqrt(self.weight.size(1)) self.weight.data.uniform_(-stdv, stdv) if self.bias is not None: self.bias.data.uniform_(-stdv, stdv) def forward(self, input, adj): output = torch.mm(input, self.weight) if self.bias is not None: output = output + self.bias output = self.bn(output) return self.sigma(output) def __repr__(self): return self.__class__.__name__ + ' (' \ + str(self.in_features) + ' -> ' \ + str(self.out_features) + ')'
cookbook/mesher_prismmesh_vardens.py
XuesongDing/fatiando
179
142526
<reponame>XuesongDing/fatiando
"""
Meshing: Make a 3D prism mesh with depth-varying density
"""
from fatiando import mesher
from fatiando.vis import myv

shape = (10, 20, 10)
nz, ny, nx = shape
mesh = mesher.PrismMesh((0, 100, 0, 200, 0, 50), shape)


def fill(i):
    # Map the linear prism index to its layer (z) index, so density
    # increases with depth. Integer division keeps the layer index whole.
    k = i // (nx * ny)
    return k


mesh.addprop('density', [fill(i) for i in range(mesh.size)])

myv.figure()
myv.prisms(mesh, prop='density')
myv.axes(myv.outline(), fmt='%.0f')
myv.show()
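
# --- Worked example (added for illustration; not part of the original script) ---
# Sanity check of the index-to-layer mapping used by fill() above. With the
# (nz, ny, nx) = (10, 20, 10) shape, each horizontal layer holds
# nx * ny = 200 consecutive prism indices.
assert 0 // (10 * 20) == 0      # first prism is in the top layer
assert 199 // (10 * 20) == 0    # last prism of the top layer
assert 200 // (10 * 20) == 1    # first prism of the second layer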
koalixcrm/crm/documents/pdf_export.py
Cataldir/koalixcrm
290
142529
<gh_stars>100-1000 # -*- coding: utf-8 -*- import os from subprocess import check_output from subprocess import STDOUT from django.conf import settings from django.core import serializers from lxml import etree import koalixcrm.crm.documents.sales_document import koalixcrm.djangoUserExtension.models class PDFExport: @staticmethod def find_element_in_xml(xml_string, find_pattern, find_value): parser = etree.XMLParser(encoding='utf-8', remove_blank_text=True) root_element = etree.fromstring(xml_string.encode('utf-8'), parser=parser) found_element = root_element.findall(find_pattern) if found_element is None: return 0 else: for element in found_element: if element.text == find_value: return 1 return 0 @staticmethod def append_element_to_pattern(xml_string, find_pattern, name_of_element, value_of_element, **kwargs): attributes = kwargs.get('attributes', None) parser = etree.XMLParser(encoding='utf-8', remove_blank_text=True) root_element = etree.fromstring(xml_string.encode('utf-8'), parser=parser) found_element = root_element.find(find_pattern) new_element = etree.SubElement(found_element, name_of_element, attrib=attributes) new_element.text = value_of_element.__str__() return (etree.tostring(root_element, encoding='UTF-8', xml_declaration=True, pretty_print=True)).decode('utf-8') @staticmethod def merge_xml(xml_string_1, xml_string_2): parser = etree.XMLParser(encoding='utf-8', remove_blank_text=True) root_element_1 = etree.fromstring(xml_string_1.encode('utf-8'), parser=parser) root_element_2 = etree.fromstring(xml_string_2.encode('utf-8'), parser=parser) for child in root_element_2: root_element_1.append(child) return (etree.tostring(root_element_1, encoding='UTF-8', xml_declaration=True, pretty_print=True)).decode('utf-8') @staticmethod def write_xml(objects_to_serialize): xml = serializers.serialize("xml", objects_to_serialize, indent=3) return xml @staticmethod def write_xml_file(xml, file_path): f = open(file_path, "wb+") f.truncate() f.write(xml.encode('utf-8')) f.close() @staticmethod def perform_xsl_transformation(file_with_serialized_xml, xsl_file, fop_config_file, file_output_pdf): check_output([settings.FOP_EXECUTABLE, '-c', fop_config_file.path_full, '-xml', os.path.join(settings.PDF_OUTPUT_ROOT, file_with_serialized_xml), '-xsl', xsl_file.path_full, '-pdf', file_output_pdf], stderr=STDOUT) @staticmethod def create_pdf(object_to_create_pdf, template_set, printed_by, *args, **kwargs): # define the files which are involved in pdf creation process fop_config_file = object_to_create_pdf.get_fop_config_file(template_set) xsl_file = object_to_create_pdf.get_xsl_file(template_set) file_with_serialized_xml = os.path.join(settings.PDF_OUTPUT_ROOT, (str(type(object_to_create_pdf).__name__) + "_" + str(object_to_create_pdf.id) + ".xml")) file_output_pdf = os.path.join(settings.PDF_OUTPUT_ROOT, (str(type(object_to_create_pdf).__name__) + "_" + str(object_to_create_pdf.id) + ".pdf")) # list the sub-objects which have to be serialized xml_string = object_to_create_pdf.serialize_to_xml(*args, **kwargs) objects_to_serialize = list(koalixcrm.djangoUserExtension.models.DocumentTemplate.objects.filter(id=template_set.id)) xml_string_temp = PDFExport.write_xml(objects_to_serialize) xml_string = PDFExport.merge_xml(xml_string, xml_string_temp) objects_to_serialize = koalixcrm.djangoUserExtension.models.UserExtension.objects_to_serialize(object_to_create_pdf, printed_by) xml_string_temp = PDFExport.write_xml(objects_to_serialize) xml_string = PDFExport.merge_xml(xml_string, xml_string_temp) # extend 
the xml-string with required basic settings xml_string = PDFExport.append_element_to_pattern(xml_string, ".", "filebrowser_directory", settings.MEDIA_ROOT) # write xml-string to xml-file PDFExport.write_xml_file(xml_string, file_with_serialized_xml) # perform xsl transformation PDFExport.perform_xsl_transformation(file_with_serialized_xml, xsl_file, fop_config_file, file_output_pdf) return file_output_pdf
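
# --- Usage sketch (added for illustration; not part of the original module) ---
# A minimal demonstration of the static XML helpers above, assuming only lxml
# is installed; the element names and the directory value are made up for the
# example.
if __name__ == "__main__":
    doc_a = "<root><item>1</item></root>"
    doc_b = "<root><item>2</item></root>"
    merged = PDFExport.merge_xml(doc_a, doc_b)  # root now holds both items
    extended = PDFExport.append_element_to_pattern(
        merged, ".", "filebrowser_directory", "/tmp/media")
    print(PDFExport.find_element_in_xml(extended, "item", "2"))  # 1 (found)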
u2flib_server/model.py
xnguyena2/python-u2flib-server
306
142586
# Copyright (c) 2013 Yubico AB # All rights reserved. # # Redistribution and use in source and binary forms, with or # without modification, are permitted provided that the following # conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from u2flib_server.utils import websafe_encode, websafe_decode, sha_256 from cryptography import x509 from cryptography.exceptions import InvalidSignature from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import ec from cryptography.hazmat.primitives.serialization import load_der_public_key from binascii import a2b_hex from enum import Enum, IntEnum, unique import struct import json import six import os __all__ = [ 'Transport', 'Type', 'RegistrationData', 'SignatureData', 'RegisteredKey', 'DeviceRegistration', 'ClientData', 'RegisterRequest', 'RegisterResponse', 'SignResponse', 'U2fRegisterRequest', 'U2fSignRequest' ] U2F_V2 = 'U2F_V2' TRANSPORTS_EXT_OID = x509.ObjectIdentifier('1.3.6.1.4.1.45724.2.1.1') PUB_KEY_DER_PREFIX = a2b_hex( '3059301306072a8648ce3d020106082a8648ce3d030107034200') CERTS_TO_FIX = [ a2b_hex('349bca1031f8c82c4ceca38b9cebf1a69df9fb3b94eed99eb3fb9aa3822d26e8'), a2b_hex('<KEY>'), a2b_hex('1d8764f0f7cd1352df6150045c8f638e517270e8b5dda1c63ade9c2280240cae'), a2b_hex('<KEY>'), a2b_hex('<KEY>'), a2b_hex('ca993121846c464d666096d35f13bf44c1b05af205f9b4a1e00cf6cc10c5e511') ] def _parse_tlv_size(tlv): l = tlv[1] n_bytes = 1 if l > 0x80: n_bytes = l - 0x80 l = 0 for i in range(2, 2 + n_bytes): l = l * 256 + tlv[i] return 2 + n_bytes + l def _pop_bytes(data, l): x = bytes(data[:l]) del data[:l] return x def _fix_cert(der): # Some early certs have UNUSED BITS incorrectly set. if sha_256(der) in CERTS_TO_FIX: der = der[:-257] + b'\0' + der[-256:] return der def _validate_client_data(client_data, challenge, typ, valid_facets): if client_data.typ != typ: raise ValueError("Wrong type! Was: %r, expecting: %r" % ( client_data.typ, typ)) if challenge != client_data.challenge: raise ValueError("Wrong challenge! Was: %r, expecting: %r" % ( client_data.challenge, challenge)) if valid_facets is not None and client_data.origin not in valid_facets: raise ValueError("Invalid facet! 
Was: %r, expecting one of: %r" % ( client_data.origin, valid_facets)) @unique class Transport(IntEnum): BT = 0x01 # Bluetooth Classic BLE = 0x02 # Bluetooth Low Energy USB = 0x04 NFC = 0x08 @property def key(self): return self.name.lower() @staticmethod def transports_from_cert(cert): if isinstance(cert, bytes): cert = x509.load_der_x509_certificate(cert, default_backend()) try: ext = cert.extensions.get_extension_for_oid(TRANSPORTS_EXT_OID) der_bitstring = ext.value.value int_bytes = bytearray(der_bitstring[3:]) # Mask away unused bits (should already be 0, but make sure) unused_bits = six.indexbytes(der_bitstring, 2) int_bytes[-1] &= (0xff << unused_bits) # Reverse the bitstring and convert to integer transports = 0 for byte in int_bytes: for _ in range(8): transports = (transports << 1) | (byte & 1) byte >>= 1 return [t for t in Transport if t.value & transports] except x509.ExtensionNotFound: return None @unique class Type(Enum): REGISTER = 'navigator.id.finishEnrollment' SIGN = 'navigator.id.getAssertion' class RegistrationData(object): def __init__(self, data): buf = bytearray(data) if buf.pop(0) != 0x05: raise ValueError('Reserved byte value must be 0x05') self.pub_key = _pop_bytes(buf, 65) self.key_handle = _pop_bytes(buf, buf.pop(0)) cert_len = _parse_tlv_size(buf) self.certificate = _fix_cert(_pop_bytes(buf, cert_len)) self.signature = bytes(buf) @property def keyHandle(self): return websafe_encode(self.key_handle) @property def publicKey(self): return websafe_encode(self.pub_key) def verify(self, app_param, chal_param): cert = x509.load_der_x509_certificate(self.certificate, default_backend()) pubkey = cert.public_key() verifier = pubkey.verifier(self.signature, ec.ECDSA(hashes.SHA256())) verifier.update(b'\0' + app_param + chal_param + self.key_handle + self.pub_key) try: verifier.verify() except InvalidSignature: raise ValueError('Attestation signature is invalid') @property def bytes(self): return ( six.int2byte(0x05) + self.pub_key + six.int2byte(len(self.key_handle)) + self.key_handle + self.certificate + self.signature ) class SignatureData(object): def __init__(self, data): buf = bytearray(data) self.user_presence = buf.pop(0) self.counter = struct.unpack('>I', _pop_bytes(buf, 4))[0] self.signature = bytes(buf) def verify(self, app_param, chal_param, der_pubkey): pubkey = load_der_public_key(PUB_KEY_DER_PREFIX + der_pubkey, default_backend()) verifier = pubkey.verifier(self.signature, ec.ECDSA(hashes.SHA256())) verifier.update(app_param + six.int2byte(self.user_presence) + struct.pack('>I', self.counter) + chal_param) try: verifier.verify() except InvalidSignature: raise ValueError('U2F signature is invalid') @property def bytes(self): return ( six.int2byte(self.user_presence) + struct.pack('>I', self.counter) + self.signature ) class JSONDict(dict): _required_fields = [] def __init__(self, *args, **kwargs): if len(args) == 1 and not kwargs: arg = args[0] args = tuple() if isinstance(arg, six.text_type): kwargs = json.loads(arg) elif isinstance(arg, six.binary_type): kwargs = json.loads(arg.decode('utf-8')) else: kwargs = dict(arg) super(JSONDict, self).__init__(*args, **kwargs) missing = set(self._required_fields).difference(self.keys()) if missing: raise ValueError('Missing required fields: %s' % ', '.join(missing)) def __getattr__(self, key): try: return self[key] except KeyError: raise AttributeError("'%s' object has no attribute '%s'" % (type(self).__name__, key)) @property def json(self): return json.dumps(self) @classmethod def wrap(cls, data): return data if 
isinstance(data, cls) else cls(data) class WithAppId(object): @property def applicationParameter(self): return sha_256(self['appId'].encode('idna')) class WithChallenge(object): @property def challenge(self): return websafe_decode(self['challenge']) class WithKeyHandle(object): @property def keyHandle(self): return websafe_decode(self['keyHandle']) class RegisteredKey(JSONDict, WithAppId, WithKeyHandle): _required_fields = ['version', 'keyHandle'] @property def key_data(self): data = { 'version': self['version'], 'keyHandle': self['keyHandle'] } if 'appId' in self: data['appId'] = self['appId'] if self.get('transports') is not None: data['transports'] = self['transports'] return data @property def transports(self): if 'transports' in self: return [getattr(Transport, x.upper()) for x in self['transports']] return None class DeviceRegistration(RegisteredKey): _required_fields = ['version', 'keyHandle', 'publicKey'] @property def publicKey(self): return websafe_decode(self['publicKey']) class ClientData(JSONDict, WithChallenge): _required_fields = ['typ', 'challenge', 'origin'] def __init__(self, *args, **kwargs): if len(args) == 1: data = args[0] if isinstance(data, six.binary_type): data = data.decode('utf-8') try: args = [websafe_decode(data)] except ValueError: pass # Not encoded, leave as is super(ClientData, self).__init__(*args, **kwargs) @property def typ(self): return Type(self['typ']) class WithClientData(object): @property def clientData(self): return ClientData.wrap(self['clientData']) @property def challengeParameter(self): return sha_256(websafe_decode(self['clientData'])) class RegisterRequest(JSONDict, WithAppId, WithChallenge): _required_fields = ['version', 'challenge'] class RegisterResponse(JSONDict, WithClientData): _required_fields = ['version', 'registrationData', 'clientData'] @property def registrationData(self): return RegistrationData(websafe_decode(self['registrationData'])) def verify(self, app_param): self.registrationData.verify(app_param, self.challengeParameter) class SignResponse(JSONDict, WithClientData, WithKeyHandle): _required_fields = ['keyHandle', 'signatureData', 'clientData'] @property def signatureData(self): return SignatureData(websafe_decode(self['signatureData'])) def verify(self, app_param, der_pubkey): self.signatureData.verify(app_param, self.challengeParameter, der_pubkey) class WithRegisteredKeys(object): @property def registeredKeys(self): return [RegisteredKey.wrap(x) for x in self['registeredKeys']] class U2fRegisterRequest(JSONDict, WithAppId, WithRegisteredKeys): _required_fields = ['appId', 'registerRequests', 'registeredKeys'] @property def registerRequests(self): return [RegisterRequest.wrap(x) for x in self['registerRequests']] def get_request(self, version): for req in self.registerRequests: if req.version == version: return req raise ValueError('No RegisterRequest found for version: %s' % version) @property def data_for_client(self): return { 'appId': self['appId'], 'registerRequests': self['registerRequests'], 'registeredKeys': [r.key_data for r in self.registeredKeys] } @classmethod def create(cls, app_id, registered_keys, challenge=None): if challenge is None: challenge = os.urandom(32) return cls( appId=app_id, registerRequests=[RegisterRequest( version=U2F_V2, challenge=websafe_encode(challenge) )], registeredKeys=registered_keys ) def complete(self, response, valid_facets=None): resp = RegisterResponse.wrap(response) req = self.get_request(U2F_V2) _validate_client_data(resp.clientData, req.challenge, Type.REGISTER, 
valid_facets) resp.verify(self.applicationParameter) registration_data = resp.registrationData transports = Transport.transports_from_cert( registration_data.certificate) transports = [t.key for t in transports] if transports else transports return DeviceRegistration( version=req.version, keyHandle=registration_data.keyHandle, appId=self.appId, publicKey=registration_data.publicKey, transports=transports, ), registration_data.certificate class U2fSignRequest(JSONDict, WithAppId, WithChallenge, WithRegisteredKeys): _required_fields = ['appId', 'challenge', 'registeredKeys'] def __init__(self, *args, **kwargs): super(U2fSignRequest, self).__init__(*args, **kwargs) if len(self.registeredKeys) == 0: raise ValueError('Must have at least one RegisteredKey') @property def data_for_client(self): return { 'appId': self['appId'], 'challenge': self['challenge'], 'registeredKeys': [r.key_data for r in self.registeredKeys] } @property def devices(self): return [DeviceRegistration.wrap(x) for x in self['registeredKeys']] @classmethod def create(cls, app_id, devices, challenge=None): if challenge is None: challenge = os.urandom(32) return cls( appId=app_id, registeredKeys=devices, challenge=websafe_encode(challenge) ) def complete(self, response, valid_facets=None): resp = SignResponse.wrap(response) _validate_client_data(resp.clientData, self.challenge, Type.SIGN, valid_facets) device = next(d for d in self.devices if d.keyHandle == resp.keyHandle) app_param = device.applicationParameter \ if 'appId' in device else self.applicationParameter resp.verify(app_param, device.publicKey) sign_data = resp.signatureData return device, sign_data.counter, sign_data.user_presence
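
# --- Usage sketch (added for illustration; not part of the original module) ---
# Building a register request with the classes above; the app id is a made-up
# example. Completing the flow would additionally require a real U2F token's
# response, which is not shown here.
if __name__ == "__main__":
    req = U2fRegisterRequest.create('https://example.com', registered_keys=[])
    print(req.data_for_client['appId'])            # https://example.com
    print(req.registerRequests[0].version)         # U2F_V2
    print(len(req.registerRequests[0].challenge))  # 32 random bytes, decoded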
test/bibliopixel/util/offset_range_test.py
rec/leds
253
142598
import unittest from bibliopixel.util import offset_range class OffsetRangeTest(unittest.TestCase): def test_empty(self): dmx = offset_range.DMXChannel.make() self.assertEqual(dmx.index(0), None) self.assertEqual(dmx.index(1), 0) self.assertEqual(dmx.index(2), 1) self.assertEqual(dmx.index(511), 510) self.assertEqual(dmx.index(512), 511) self.assertEqual(dmx.index(513), None) l256 = list(range(256)) r = list(dmx.read_from(l256)) self.assertEqual(r, l256 + ([0] * 256)) target = [23] * 128 dmx.copy_to(l256, target) self.assertEqual(target, list(range(128))) def test_empty_copy(self): dmx = offset_range.DMXChannel.make() l256 = list(range(256)) r = list(dmx.read_from(l256)) self.assertEqual(r, l256 + ([0] * 256)) target = [] dmx.copy_to(l256, target) self.assertEqual(target, []) def test_positive_offset(self): midi = offset_range.MidiChannel(offset=4) self.assertEqual(midi.index(0), None) self.assertEqual(midi.index(1), None) self.assertEqual(midi.index(4), None) self.assertEqual(midi.index(5), 0) self.assertEqual(midi.index(6), 1) self.assertEqual(midi.index(15), 10) self.assertEqual(midi.index(16), 11) self.assertEqual(midi.index(16), 11) self.assertEqual(midi.index(17), None) expected = [-1, -1, -1, -1] + list(range(12)) actual = list(midi.read_from(range(16), pad=-1)) self.assertEqual(expected, actual) target = [100] * 100 midi.copy_to(list(range(16)), target) expected = list(range(4, 16)) + [100] * 88 self.assertEqual(target, expected) def test_negative_offset(self): midi = offset_range.MidiChannel(-4) self.assertEqual(midi.index(0), None) self.assertEqual(midi.index(1), 4) self.assertEqual(midi.index(2), 5) self.assertEqual(midi.index(12), 15) self.assertEqual(midi.index(13), None) actual = list(midi.read_from(range(16), pad=-1)) expected = list(range(4, 16)) + [-1, -1, -1, -1] self.assertEqual(expected, actual) target = [100] * 8 midi.copy_to(list(range(16)), target) expected = [4, 5, 6, 7, 8, 9, 10, 11] self.assertEqual(target, expected) def test_begin_end_offset(self): midi = offset_range.MidiChannel(offset=-5, begin=6, end=8) self.assertEqual(midi.index(0), None) self.assertEqual(midi.index(4), None) self.assertEqual(midi.index(5), None) self.assertEqual(midi.index(6), 10) self.assertEqual(midi.index(7), 11) self.assertEqual(midi.index(8), 12) self.assertEqual(midi.index(9), None) self.assertEqual(midi.index(10), None) actual = list(midi.read_from(range(16))) expected = [0, 0, 0, 0, 0, 10, 11, 12, 0, 0, 0, 0, 0, 0, 0, 0] self.assertEqual(expected, actual) target = [100] * 24 midi.copy_to(list(range(7)), target) expected = 5 * [100] + [5, 6] + 17 * [100] self.assertEqual(target, expected) target = [100] * 24 midi.copy_to(list(range(8)), target) expected = 5 * [100] + [5, 6, 7] + 16 * [100] self.assertEqual(target, expected) target = [100] * 24 midi.copy_to(list(range(9)), target) expected = 5 * [100] + [5, 6, 7] + 16 * [100] self.assertEqual(target, expected) def test_errors(self): with self.assertRaises(ValueError): offset_range.MidiChannel(begin=0) offset_range.MidiChannel(begin=1) offset_range.MidiChannel(begin=16) with self.assertRaises(ValueError): offset_range.MidiChannel(begin=17) with self.assertRaises(ValueError): offset_range.MidiChannel(end=0) offset_range.MidiChannel(end=1) offset_range.MidiChannel(end=16) with self.assertRaises(ValueError): offset_range.MidiChannel(end=17) with self.assertRaises(ValueError): offset_range.MidiChannel(begin=2, end=1)
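
# --- Usage sketch (added for illustration; not part of the original tests) ---
# A quick look at the behavior the tests above exercise: offset=4 shifts
# incoming MIDI channels so that channel 5 maps to index 0.
if __name__ == '__main__':
    midi = offset_range.MidiChannel(offset=4)
    print(midi.index(5))                                # 0
    print(list(midi.read_from(range(16), pad=-1))[:5])  # [-1, -1, -1, -1, 0]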
build/CI/kylin-system-testing/kylin_utils/kylin.py
ApacheSourceCode/kylin
3,402
142614
#!/usr/bin/python # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import logging import time import random import requests from .basic import BasicHttpClient class KylinHttpClient(BasicHttpClient): # pylint: disable=too-many-public-methods _base_url = 'http://{host}:{port}/kylin/api' def __init__(self, host, port, version): super().__init__(host, port) self._headers = { 'Content-Type': 'application/json;charset=utf-8' } self._base_url = self._base_url.format(host=self._host, port=self._port) self.generic_project = "generic_test_project" self.pushdown_project = "pushdown_test_project" self.version = version def login(self, username, password): self._inner_session.request('POST', self._base_url + '/user/authentication', auth=(username, password)) return self._request('GET', '/user/authentication', inner_session=True) def check_login_state(self): return self._request('GET', '/user/authentication', inner_session=True) def get_session(self): return self._inner_session def logout(self): self._inner_session = requests.Session() def list_projects(self, limit=100, offset=0): params = {'limit': limit, 'offset': offset} resp = self._request('GET', '/projects', params=params) return resp def create_project(self, project_name, description=None, override_kylin_properties=None): data = {'name': project_name, 'description': description, 'override_kylin_properties': override_kylin_properties, } payload = { 'projectDescData': json.dumps(data), } resp = self._request('POST', '/projects', json=payload) return resp def update_project(self, project_name, description=None, override_kylin_properties=None): """ :param project_name: project name :param description: description of project :param override_kylin_properties: the kylin properties that needs to be override :return: """ data = {'name': project_name, 'description': description, 'override_kylin_properties': override_kylin_properties, } payload = { 'formerProjectName': project_name, 'projectDescData': json.dumps(data), } resp = self._request('PUT', '/projects', json=payload) return resp def delete_project(self, project_name, force=False): """ delete project API, before delete the project, make sure the project does not contain models and cubes. 
        If you want to force-delete the project, pass force=True.
        :param project_name: project name
        :param force: if force, delete cubes and models before deleting the project
        :return:
        """
        if force:
            cubes = self.list_cubes(project_name)
            logging.debug("Cubes to be deleted: %s", cubes)
            while cubes:
                for cube in cubes:
                    self.delete_cube(cube['name'])
                cubes = self.list_cubes(project_name)
            models = self.list_model_desc(project_name)
            logging.debug("Models to be deleted: %s", models)
            while models:
                for model in models:
                    self.delete_model(model['name'])
                models = self.list_model_desc(project_name)
        url = '/projects/{project}'.format(project=project_name)
        resp = self._request('DELETE', url)
        return resp

    def load_table(self, project_name, tables, calculate=False):
        """
        load or reload table api
        :param calculate: boolean, defaults to False
        :param project_name: project name
        :param tables: table list, for instance, ['default.kylin_fact', 'default.kylin_sales']
        :return:
        """
        # workaround of #15337
        # time.sleep(random.randint(5, 10))
        url = '/tables/{tables}/{project}/'.format(tables=tables, project=project_name)
        payload = {'calculate': calculate}
        resp = self._request('POST', url, json=payload)
        return resp

    def unload_table(self, project_name, tables):
        url = '/tables/{tables}/{project}'.format(tables=tables, project=project_name)
        resp = self._request('DELETE', url)
        return resp

    def list_hive_tables(self, project_name, extension=False, user_session=False):
        """
        :param project_name: project name
        :param extension: specify whether the table's extension information is returned
        :param user_session: boolean, true for using login session to execute
        :return:
        """
        url = '/tables'
        params = {'project': project_name, 'ext': extension}
        resp = self._request('GET', url, params=params, inner_session=user_session)
        return resp

    def get_table_info(self, project_name, table_name):
        """
        :param project_name: project name
        :param table_name: table name
        :return: hive table information
        """
        url = '/tables/{project}/{table}'.format(project=project_name, table=table_name)
        resp = self._request('GET', url)
        return resp

    def get_tables_info(self, project_name, ext='true'):
        url = '/tables'
        params = {'project': project_name, 'ext': ext}
        resp = self._request('GET', url, params=params)
        return resp

    def get_table_streaming_config(self, project_name, table_name, limit=100, offset=0):
        params = {'table': table_name,
                  'project': project_name,
                  'limit': limit,
                  'offset': offset}
        resp = self._request('GET', '/streaming/getConfig', params=params)
        return resp

    def load_kafka_table(self, project_name, kafka_config, streaming_config, table_data, message=None):
        url = '/streaming'
        payload = {'project': project_name,
                   'kafkaConfig': json.dumps(kafka_config),
                   'streamingConfig': json.dumps(streaming_config),
                   'tableData': json.dumps(table_data),
                   'message': message}
        resp = self._request('POST', url, json=payload)
        return resp

    def update_kafka_table(self, project_name, kafka_config, streaming_config, table_data, cluster_index=0):
        url = '/streaming'
        payload = {'project': project_name,
                   'kafkaConfig': kafka_config,
                   'streamingConfig': streaming_config,
                   'tableData': table_data,
                   'clusterIndex': cluster_index}
        resp = self._request('PUT', url, json=payload)
        return resp

    def list_model_desc(self, project_name=None, model_name=None, limit=100, offset=0):
        """
        :param offset: offset of returned result
        :param limit: quantity of returned result per page
        :param project_name: project name
        :param model_name: model name
        :return: model desc list
        """
        params = {'limit': limit,
                  'offset': offset,
                  'modelName': model_name,
                  'projectName': project_name
                  }
        resp = self._request('GET', '/models',
params=params) return resp def create_model(self, project_name, model_name, model_desc_data, user_session=False): url = '/models' payload = { 'project': project_name, 'model': model_name, 'modelDescData': json.dumps(model_desc_data) } logging.debug("Current payload for creating model is %s", payload) resp = self._request('POST', url, json=payload, inner_session=user_session) return resp def update_model(self, project_name, model_name, model_desc_data, user_session=False): url = '/models' payload = { 'project': project_name, 'model': model_name, 'modelDescData': json.dumps(model_desc_data) } resp = self._request('PUT', url, json=payload, inner_session=user_session) return resp def clone_model(self, project_name, model_name, new_model_name): url = '/models/{model}/clone'.format(model=model_name) payload = {'modelName': new_model_name, 'project': project_name} resp = self._request('PUT', url, json=payload) return resp def delete_model(self, model_name): url = '/models/{model}'.format(model=model_name) # return value is None here return self._request('DELETE', url) def get_cube_desc(self, cube_name): url = '/cube_desc/{cube}'.format(cube=cube_name) resp = self._request('GET', url) return resp def list_cubes(self, project=None, offset=0, limit=10000, cube_name=None, model_name=None, user_session=False): params = {'projectName': project, 'offset': offset, 'limit': limit, 'cubeName': cube_name, 'modelName': model_name} resp = self._request('GET', '/cubes/', params=params, inner_session=user_session) return resp def get_cube_instance(self, cube_name): url = '/cubes/{cube}'.format(cube=cube_name) resp = self._request('GET', url) return resp def create_cube(self, project_name, cube_name, cube_desc_data, user_session=False): # workaround of #15337 time.sleep(random.randint(5, 10)) url = '/cubes' payload = { 'project': project_name, 'cubeName': cube_name, 'cubeDescData': json.dumps(cube_desc_data) } resp = self._request('POST', url, json=payload, inner_session=user_session) return resp def update_cube(self, project_name, cube_name, cube_desc_data, user_session=False): # workaround of #15337 time.sleep(random.randint(5, 10)) url = '/cubes' payload = { 'project': project_name, 'cubeName': cube_name, 'cubeDescData': json.dumps(cube_desc_data) } resp = self._request('PUT', url, json=payload, inner_session=user_session) return resp def update_cube_engine(self, cube_name, engine_type): url = '/cubes/{cube}/engine/{engine}'.format(cube=cube_name, engine=engine_type) resp = self._request('PUT', url) return resp def build_segment(self, cube_name, start_time, end_time, force=False): """ :param cube_name: the name of the cube to be built :param force: force submit mode :param start_time: long, start time, corresponding to the timestamp in GMT format, for instance, 1388534400000 corresponding to 2014-01-01 00:00:00 :param end_time: long, end time, corresponding to the timestamp in GMT format :return: """ url = '/cubes/{cube}/build'.format(cube=cube_name) payload = { 'buildType': 'BUILD', 'startTime': start_time, 'endTime': end_time, 'force': force } resp = self._request('PUT', url, json=payload) return resp def full_build_cube(self, cube_name, force=False): """ :param cube_name: the name of the cube to be built :param force: force submit mode :return: """ return self.build_segment(cube_name, force=force, start_time=0, end_time=31556995200000) def merge_segment(self, cube_name, start_time=0, end_time=31556995200000, force=True): """ :param cube_name: the name of the cube to be built :param force: force submit mode 
        :param start_time: long, start time, corresponding to the timestamp in GMT format,
                           for instance, 1388534400000 corresponding to 2014-01-01 00:00:00
        :param end_time: long, end time, corresponding to the timestamp in GMT format
        :return:
        """
        url = '/cubes/{cube}/build'.format(cube=cube_name)
        payload = {
            'buildType': 'REFRESH',
            'startTime': start_time,
            'endTime': end_time,
            'force': force
        }
        resp = self._request('PUT', url, json=payload)
        return resp

    def delete_segments(self, cube_name, segment_name):
        url = '/cubes/{cube}/segs/{segment}'.format(cube=cube_name, segment=segment_name)
        resp = self._request('DELETE', url)
        return resp

    def build_streaming_cube(self, project_name, cube_name, source_offset_start=0,
                             source_offset_end='9223372036854775807'):
        """
        :param project_name: project name
        :param cube_name: cube name
        :param source_offset_start: long, the start offset where build begins. Here 0 means it is from the last
                                    position
        :param source_offset_end: long, the end offset where build ends. 9223372036854775807 (Long.MAX_VALUE) means
                                  to the end position on the Kafka topic.
        :return:
        """
        url = '/cubes/{cube}/segments/build_streaming'.format(cube=cube_name)
        payload = {
            'buildType': 'BUILD',
            'project': project_name,
            'sourceOffsetStart': source_offset_start,
            'sourceOffsetEnd': source_offset_end,
        }
        resp = self._request('PUT', url, json=payload)
        return resp

    def build_cube_customized(self, cube_name, source_offset_start, source_offset_end=None,
                              mp_values=None, force=False):
        """
        :param cube_name: cube name
        :param source_offset_start: long, the start offset where build begins
        :param source_offset_end: long, the end offset where build ends
        :param mp_values: string, multiple partition values of corresponding model
        :param force: boolean, force submit mode
        :return:
        """
        url = '/cubes/{cube}/segments/build_customized'.format(cube=cube_name)
        payload = {
            'buildType': 'BUILD',
            'sourceOffsetStart': source_offset_start,
            'sourceOffsetEnd': source_offset_end,
            'mpValues': mp_values,
            'force': force
        }
        resp = self._request('PUT', url, json=payload)
        return resp

    def clone_cube(self, project_name, cube_name, new_cube_name):
        """
        :param project_name: project name
        :param cube_name: name of the cube being cloned
        :param new_cube_name: name for the cloned cube
        :return:
        """
        url = '/cubes/{cube}/clone'.format(cube=cube_name)
        payload = {
            'cubeName': new_cube_name,
            'project': project_name
        }
        resp = self._request('PUT', url, json=payload)
        return resp

    def enable_cube(self, cube_name):
        url = '/cubes/{cube}/enable'.format(cube=cube_name)
        resp = self._request('PUT', url)
        return resp

    def disable_cube(self, cube_name):
        url = '/cubes/{cube}/disable'.format(cube=cube_name)
        resp = self._request('PUT', url)
        return resp

    def purge_cube(self, cube_name):
        url = '/cubes/{cube}/purge'.format(cube=cube_name)
        resp = self._request('PUT', url)
        return resp

    def delete_cube(self, cube_name):
        url = '/cubes/{cube}'.format(cube=cube_name)
        return self._request('DELETE', url)

    def list_holes(self, cube_name):
        """
        A healthy cube in production should not have holes in the meaning of inconsecutive segments.
        :param cube_name: cube name
        :return:
        """
        url = '/cubes/{cube}/holes'.format(cube=cube_name)
        resp = self._request('GET', url)
        return resp

    def fill_holes(self, cube_name):
        """
        For non-streaming data based Cube, Kyligence Enterprise will submit normal build cube job(s) with
        corresponding time partition value range(s); For streaming data based Cube, please make sure that
        corresponding data is not expired or deleted in source before filling holes, otherwise the build job
        will fail.
        :param cube_name: string, cube name
        :return:
        """
        url = '/cubes/{cube}/holes'.format(cube=cube_name)
        resp = self._request('PUT', url)
        return resp

    def export_cuboids(self, cube_name):
        url = '/cubes/{cube}/cuboids/export'.format(cube=cube_name)
        resp = self._request('PUT', url)
        return resp

    def refresh_lookup(self, cube_name, lookup_table):
        """
        Only lookup tables of SCD Type 1 are supported to refresh.
        :param cube_name: cube name
        :param lookup_table: the name of lookup table to be refreshed with the format DATABASE.TABLE
        :return:
        """
        url = '/cubes/{cube}/refresh_lookup'.format(cube=cube_name)
        payload = {
            'cubeName': cube_name,
            'lookupTableName': lookup_table
        }
        resp = self._request('PUT', url, json=payload)
        return resp

    def get_job_info(self, job_id):
        url = '/jobs/{job_id}'.format(job_id=job_id)
        resp = self._request('GET', url)
        return resp

    def get_job_status(self, job_id):
        return self.get_job_info(job_id)['job_status']

    def get_step_output(self, job_id, step_id):
        url = '/jobs/{jobId}/steps/{stepId}/output'.format(jobId=job_id, stepId=step_id)
        resp = self._request('GET', url)
        return resp

    def pause_job(self, job_id):
        url = '/jobs/{jobId}/pause'.format(jobId=job_id)
        resp = self._request('PUT', url)
        return resp

    def resume_job(self, job_id):
        url = '/jobs/{jobId}/resume'.format(jobId=job_id)
        resp = self._request('PUT', url)
        return resp

    def discard_job(self, job_id):
        url = '/jobs/{jobId}/cancel'.format(jobId=job_id)
        resp = self._request('PUT', url)
        return resp

    def delete_job(self, job_id):
        url = '/jobs/{jobId}/drop'.format(jobId=job_id)
        resp = self._request('DELETE', url)
        return resp

    def resubmit_job(self, job_id):
        url = '/jobs/{jobId}/resubmit'.format(jobId=job_id)
        resp = self._request('PUT', url)
        return resp

    def list_jobs(self, project_name, status=None, offset=0, limit=10000, time_filter=1, job_search_mode='ALL'):
        """
        list jobs in specific project
        :param job_search_mode: CUBING_ONLY, CHECKPOINT_ONLY, ALL
        :param project_name: project name
        :param status: int, 0 -> NEW, 1 -> PENDING, 2 -> RUNNING, 4 -> FINISHED, 8 -> ERROR, 16 -> DISCARDED,
                       32 -> STOPPED
        :param offset: offset of returned result
        :param limit: quantity of returned result per page
        :param time_filter: int, 0 -> last one day, 1 -> last one week, 2 -> last one month, 3 -> last one year,
                            4 -> all
        :return:
        """
        url = '/jobs'
        params = {
            'projectName': project_name,
            'status': status,
            'offset': offset,
            'limit': limit,
            'timeFilter': time_filter,
            'jobSearchMode': job_search_mode
        }
        resp = self._request('GET', url, params=params)
        return resp

    def await_all_jobs(self, project_name, waiting_time=30):
        """
        Await all jobs to be finished; default timeout is 30 minutes.
        :param project_name: project name
        :param waiting_time: timeout, in minutes
        :return: boolean, returns False on timeout or on the first errored job
        """
        running_flag = ['PENDING', 'RUNNING']
        try_time = 0
        max_try_time = waiting_time * 2
        while try_time < max_try_time:
            jobs = self.list_jobs(project_name)
            all_finished = True
            for job in jobs:
                if job['job_status'] in running_flag:
                    all_finished = False
                    break
                if job['job_status'] == 'ERROR':
                    return False
            if all_finished:
                return True
            time.sleep(30)
            try_time += 1
        return False

    def await_job(self, job_id, waiting_time=20, interval=1, excepted_status=None):
        """
        Await specific job to reach a given status; default timeout is 20 minutes.
        :param job_id: job id
        :param waiting_time: timeout, in minutes.
        :param interval: check interval, default value is 1 second
        :param excepted_status: expected job status list, default contains 'ERROR', 'FINISHED' and 'DISCARDED'
        :return: boolean, True if the job reached one of the expected statuses
        """
        finish_flags = ['ERROR', 'FINISHED', 'DISCARDED']
        if excepted_status is None:
            excepted_status = finish_flags
        timeout = waiting_time * 60
        start = time.time()
        while time.time() - start < timeout:
            job_status = self.get_job_status(job_id)
            if job_status in excepted_status:
                return True
            if job_status in finish_flags:
                return False
            time.sleep(interval)
        return False

    def await_job_finished(self, job_id, waiting_time=20, interval=1):
        """
        Await specific job to be finished; default timeout is 20 minutes.
        :param job_id: job id
        :param waiting_time: timeout, in minutes.
        :param interval: check interval, default value is 1 second
        :return: boolean, True if the job finished within the timeout
        """
        return self.await_job(job_id, waiting_time, interval, excepted_status=['FINISHED'])

    def await_job_error(self, job_id, waiting_time=20, interval=1):
        """
        Await specific job to reach the ERROR status; default timeout is 20 minutes.
        :param job_id: job id
        :param waiting_time: timeout, in minutes.
        :param interval: check interval, default value is 1 second
        :return: boolean, True if the job errored within the timeout
        """
        return self.await_job(job_id, waiting_time, interval, excepted_status=['ERROR'])

    def await_job_discarded(self, job_id, waiting_time=20, interval=1):
        """
        Await specific job to be discarded; default timeout is 20 minutes.
        :param job_id: job id
        :param waiting_time: timeout, in minutes.
        :param interval: check interval, default value is 1 second
        :return: boolean, True if the job was discarded within the timeout
        """
        return self.await_job(job_id, waiting_time, interval, excepted_status=['DISCARDED'])

    def await_job_step(self, job_id, step, excepted_status=None, waiting_time=20, interval=1):
        """
        Await specific job step to reach a given status; default timeout is 20 minutes.
        :param job_id: job id
        :param step: job step
        :param waiting_time: timeout, in minutes.
        :param interval: check interval, default value is 1 second
        :param excepted_status: expected job status list, default contains 'ERROR', 'FINISHED' and 'DISCARDED'
        :return: boolean, True if the step reached one of the expected statuses
        """
        finish_flags = ['ERROR', 'FINISHED', 'DISCARDED']
        if excepted_status is None:
            excepted_status = finish_flags
        timeout = waiting_time * 60
        start = time.time()
        while time.time() - start < timeout:
            job_info = self.get_job_info(job_id)
            job_status = job_info['steps'][step]['step_status']
            if job_status in excepted_status:
                return True
            if job_status in finish_flags:
                return False
            time.sleep(interval)
        return False

    def execute_query(self, project_name, sql, cube_name=None, offset=None, limit=None, backdoortoggles=None,
                      user_session=False, timeout=60):
        url = '/query'
        payload = {
            'project': project_name,
            'sql': sql,
            'offset': offset,
            'limit': limit
        }
        if cube_name:
            backdoortoggles = {"backdoorToggles": {"DEBUG_TOGGLE_HIT_CUBE": cube_name}}
        if backdoortoggles:
            payload.update(backdoortoggles)
        resp = self._request('POST', url, json=payload, inner_session=user_session, timeout=timeout)
        return resp

    def save_query(self, sql_name, project_name, sql, description=None):
        url = '/saved_queries'
        payload = {
            'name': sql_name,
            'project': project_name,
            'sql': sql,
            'description': description
        }
        self._request('POST', url, json=payload)

    def get_queries(self, project_name, user_session=False):
        url = '/saved_queries'
        params = {
            'project': project_name
        }
        response = self._request('GET', url, params=params, inner_session=user_session)
        return response

    def remove_query(self, sql_id):
        url = '/saved_queries/{id}'.format(id=sql_id)
        self._request('DELETE', url)

    def list_queryable_tables(self, project_name):
        url = '/tables_and_columns'
        params = {'project': project_name}
        resp = self._request('GET', url, params=params)
        return resp

    def get_all_system_prop(self, server=None):
        url = '/admin/config'
        if server is not None:
            url = '/admin/config?server={serverName}'.format(serverName=server)
        prop_resp = self._request('GET', url).get('config')
        property_values = {}
        if prop_resp is None:
            return property_values
        prop_lines = prop_resp.splitlines(False)
        for prop_line in prop_lines:
            # Split on the first '=' only, so property values that contain
            # '=' are preserved intact.
            splits = prop_line.split('=', 1)
            property_values[splits[0]] = splits[1]
        return property_values

    def create_user(self, user_name, password, authorities, disabled=False, user_session=False):
        """
        create a user
        :param user_name: string, target user name
        :param password: string, target password
        :param authorities: array, user's authorities
        :param disabled: boolean, true for a disabled user, false for an enabled user
        :param user_session: boolean, true for using login session to execute
        :return:
        """
        url = '/user/{username}'.format(username=user_name)
        payload = {
            'username': user_name,
            'password': password,
            'authorities': authorities,
            'disabled': disabled,
        }
        resp = self._request('POST', url, json=payload, inner_session=user_session)
        return resp

    def delete_user(self, user_name, user_session=False):
        """
        delete user
        :param user_name: string
        :param user_session: boolean, true for using login session to execute
        :return:
        """
        url = '/user/{username}'.format(username=user_name)
        resp = self._request('DELETE', url, inner_session=user_session)
        return resp

    def update_user(self, user_name, authorities, password=None, disabled=False, user_session=False,
                    payload_user_name=None):
        """
        update user's info
        :param user_name: string, target user name
        :param password: string, target password
        :param authorities: array, user's authorities
        :param disabled: boolean, true for a disabled user,
                         false for an enabled user
        :param user_session: boolean, true for using login session to execute
        :param payload_user_name: string, optional username to place in the request payload
                                  (defaults to user_name)
        :return:
        """
        url = '/user/{username}'.format(username=user_name)
        username_in_payload = user_name if payload_user_name is None else payload_user_name
        payload = {
            'username': username_in_payload,
            'password': password,
            'authorities': authorities,
            'disabled': disabled,
        }
        resp = self._request('PUT', url, json=payload, inner_session=user_session)
        return resp

    def update_user_password(self, user_name, new_password, password=<PASSWORD>, user_session=False):
        """
        update user's password
        :param user_name: string, target for username
        :param new_password: string, user's new password
        :param password: string, <PASSWORD>
        :param user_session: boolean, true for using login session to execute
        :return:
        """
        url = '/user/password'
        payload = {
            'username': user_name,
            'password': password,
            'newPassword': <PASSWORD>
        }
        resp = self._request('PUT', url, json=payload, inner_session=user_session)
        return resp

    def list_users(self, project_name=None, group_name=None, is_fuzz_match=False, name=None, offset=0, limit=10000,
                   user_session=False):
        """
        list users
        :param group_name: string, group name
        :param project_name: string, project's name
        :param offset: offset of returned result
        :param limit: quantity of returned result per page
        :param is_fuzz_match: bool, true for param name fuzzy match
        :param name: string, user's name
        :param user_session: boolean, true for using login session to execute
        :return:
        """
        url = '/user/users'
        params = {
            'offset': offset,
            'limit': limit,
            'groupName': group_name,
            'project': project_name,
            'isFuzzMatch': is_fuzz_match,
            'name': name
        }
        resp = self._request('GET', url, params=params, inner_session=user_session)
        return resp

    def list_user_authorities(self, project_name, user_session=False):
        """
        list groups in a project
        :param project_name: string, target project name
        :param user_session: boolean, true for using login session to execute
        :return:
        """
        url = '/user_group/groups'
        params = {
            'project': project_name
        }
        resp = self._request('GET', url, params=params, inner_session=user_session)
        return resp

    def create_group(self, group_name, user_session=False):
        """
        create a group with group_name
        :param group_name: string, target group name
        :param user_session: boolean, true for using login session to execute
        :return:
        """
        url = '/user_group/{group_name}'.format(group_name=group_name)
        resp = self._request('POST', url, inner_session=user_session)
        return resp

    def delete_group(self, group_name, user_session=False):
        """
        delete group by group_name
        :param group_name: string, target group name
        :param user_session: boolean, true for using login session to execute
        :return:
        """
        url = '/user_group/{group_name}'.format(group_name=group_name)
        resp = self._request('DELETE', url, inner_session=user_session)
        return resp

    def add_or_del_users(self, group_name, users):
        url = '/user_group/users/{group}'.format(group=group_name)
        payload = {'users': users}
        resp = self._request('POST', url, json=payload)
        return resp

    def _request(self, method, url, **kwargs):  # pylint: disable=arguments-differ
        return super()._request(method, self._base_url + url, **kwargs)


def connect(**conf):
    _host = conf.get('host')
    _port = conf.get('port')
    _version = conf.get('version')
    return KylinHttpClient(_host, _port, _version)
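
# --- Usage sketch (added for illustration; not part of the original module) ---
# Connecting and running a query, assuming a reachable Kylin instance; the
# host, credentials and project name below are placeholders.
if __name__ == "__main__":
    client = connect(host='kylin.example.com', port=7070, version='3.x')
    client.login('ADMIN', 'KYLIN')
    if client.await_all_jobs('generic_test_project', waiting_time=5):
        result = client.execute_query('generic_test_project', 'select 1')
        print(result)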
freight/notifiers/base.py
armandomeeuwenoord/freight
562
142619
<reponame>armandomeeuwenoord/freight<filename>freight/notifiers/base.py<gh_stars>100-1000
from freight.models import Deploy, TaskStatus
from freight import http

__all__ = ["Notifier", "NotifierEvent"]


class NotifierEvent(object):
    TASK_STARTED = 0
    TASK_FINISHED = 1
    TASK_QUEUED = 2


class Notifier(object):
    DEFAULT_EVENTS = [
        NotifierEvent.TASK_QUEUED,
        NotifierEvent.TASK_STARTED,
        NotifierEvent.TASK_FINISHED,
    ]

    def get_default_options(self):
        return {
            # TODO(dcramer): we want to support events, but we need validators
            # before that can happen to avoid magical constants
            # 'events': {},
        }

    def get_options(self):
        return {}

    def send(self, task, config, event):
        # TODO(mattrobenolt): Split this out into send_deploy, send_x
        # since we want different notifications for different tasks,
        # and remove this shim. For now, there are only deploys.
        deploy = Deploy.query.filter(Deploy.task_id == task.id).first()
        return self.send_deploy(deploy, task, config, event)

    def send_deploy(self, deploy, task, config, event):
        raise NotImplementedError

    def should_send(self, task, config, event):
        deploy = Deploy.query.filter(Deploy.task_id == task.id).first()
        return self.should_send_deploy(deploy, task, config, event)

    def should_send_deploy(self, deploy, task, config, event):
        return event in config.get("events", self.DEFAULT_EVENTS)


def generate_event_title(app, deploy, task, user, event):
    number = deploy.number
    app_name = app.name
    params = dict(task.params or {})
    env = deploy.environment
    ref = task.ref
    sha = task.sha[:7] if task.sha else task.ref
    status_label = task.status_label
    duration = task.duration
    # Usernames can either be 'user' or '<EMAIL>'
    user = user.name.split("@", 1)[0]
    link = http.absolute_uri(
        f"/deploys/{app.name}/{deploy.environment}/{deploy.number}"
    )
    # TODO(dcramer): show the ref when it differs from the sha
    if event == NotifierEvent.TASK_QUEUED:
        return f"[{app_name}/{env}] {user} queued deploy <{link}|#{number}> ({sha})"
    if event == NotifierEvent.TASK_STARTED:
        return f"[{app_name}/{env}] {user} started deploy <{link}|#{number}> ({sha})"
    if task.status == TaskStatus.failed:
        return f"[{app_name}/{env}] Failed to finish {user}'s deploy <{link}|#{number}> ({sha}) after {duration}s"
    if task.status == TaskStatus.cancelled:
        return f"[{app_name}/{env}] {user}'s deploy <{link}|#{number}> ({sha}) was cancelled after {duration}s"
    if task.status == TaskStatus.finished:
        return f"[{app_name}/{env}] Successfully finished {user}'s deploy <{link}|#{number}> ({sha}) after {duration}s"
    raise NotImplementedError(task.status)
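
# --- Usage sketch (added for illustration; not part of the original module) ---
# A minimal hypothetical subclass showing the contract: only send_deploy has
# to be implemented; the should_send_deploy event gating comes for free.
class PrintNotifier(Notifier):
    def send_deploy(self, deploy, task, config, event):
        # A real notifier would post to Slack, email, etc.; here we only
        # show the shape of the hook.
        print("deploy event", event, "for task", task.id)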
wagtail_localize/test/migrations/0015_testsnippet_small_charfield.py
hpoul/wagtail-localize
123
142631
# Generated by Django 3.2.4 on 2021-06-29 15:24 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('wagtail_localize_test', '0014_auto_20210217_0924'), ] operations = [ migrations.AddField( model_name='testsnippet', name='small_charfield', field=models.CharField(blank=True, max_length=10), ), ]
src/compas_rhino/artists/pointartist.py
funkchaser/compas
235
142632
<gh_stars>100-1000 from __future__ import print_function from __future__ import absolute_import from __future__ import division import compas_rhino from compas.artists import PrimitiveArtist from .artist import RhinoArtist class PointArtist(RhinoArtist, PrimitiveArtist): """Artist for drawing points. Parameters ---------- point : :class:`compas.geometry.Point` A COMPAS point. layer : str, optional The layer that should contain the drawing. """ def __init__(self, point, layer=None, **kwargs): super(PointArtist, self).__init__(primitive=point, layer=layer, **kwargs) def draw(self): """Draw the point. Returns ------- list The GUIDs of the created Rhino objects. """ points = [{'pos': list(self.primitive), 'color': self.color, 'name': self.primitive.name}] guids = compas_rhino.draw_points(points, layer=self.layer, clear=False, redraw=False) return guids
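
# --- Usage sketch (added for illustration; not part of the original module) ---
# Drawing a single point, assuming this runs inside Rhino with COMPAS
# installed; the layer name is a made-up example.
if __name__ == "__main__":
    from compas.geometry import Point

    artist = PointArtist(Point(0.0, 0.0, 0.0), layer='Example::Points')
    guids = artist.draw()
    print(guids)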
leetcode.com/python/1358_Number_of_Substrings_Containing_All_Three_Characters.py
vansh-tiwari/coding-interview-gym
713
142643
<gh_stars>100-1000
from collections import defaultdict


class Solution(object):
    def numberOfSubstrings(self, s):
        """
        :type s: str
        :rtype: int
        """
        left, right = 0, 0
        subStringCount = 0
        counter = {c: 0 for c in 'abc'}
        while right < len(s):
            counter[s[right]] += 1
            # shrink the window until it is no longer valid; afterwards,
            # `left` is the number of valid substrings ending at `right`
            while all(counter.values()):
                counter[s[left]] -= 1
                left += 1
            subStringCount += left
            right += 1
        return subStringCount
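The sliding-window invariant is that, after shrinking, `left` equals the number of valid substrings ending at `right`. The checks below use the examples from the LeetCode 1358 problem statement.

solution = Solution()
assert solution.numberOfSubstrings("abcabc") == 10
assert solution.numberOfSubstrings("aaacb") == 3
assert solution.numberOfSubstrings("abc") == 1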
bcs-ui/backend/uniapps/network/views/charts/versions.py
laodiu/bk-bcs
599
142656
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS
Community Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://opensource.org/licenses/MIT

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
the specific language governing permissions and limitations under the License.
"""
from rest_framework import viewsets
from rest_framework.renderers import BrowsableAPIRenderer
from rest_framework.response import Response

from backend.helm.helm.models import ChartVersion
from backend.uniapps.network import constants, serializers
from backend.uniapps.network.constants import K8S_LB_CHART_NAME, K8S_LB_NAMESPACE
from backend.uniapps.network.views.charts.releases import HelmReleaseMixin
from backend.utils.renderers import BKAPIRenderer


class K8SIngressControllerViewSet(viewsets.ViewSet, HelmReleaseMixin):
    renderer_classes = (BKAPIRenderer, BrowsableAPIRenderer)
    chart_name = K8S_LB_CHART_NAME
    namespace = K8S_LB_NAMESPACE
    public_repo_name = "public-repo"
    release_version_prefix = constants.RELEASE_VERSION_PREFIX

    def get_chart_versions(self, request, project_id):
        # Filter the lb chart name under the public repository
        chart_versions = (
            ChartVersion.objects.filter(
                name=self.chart_name,
                chart__repository__project_id=project_id,
                chart__repository__name=self.public_repo_name,
            )
            .order_by("-created")
            .values("version", "id")
        )
        # Get the version recorded on the release
        params = request.query_params
        cluster_id = params.get("cluster_id")
        # Check whether a release already exists
        namespace = params.get("namespace") or self.namespace
        release = self.get_helm_release(cluster_id, name=self.chart_name, namespace=namespace)
        if not release:
            return Response(chart_versions)
        # id: -1 marks this entry as assembled data, used only by the frontend for display matching
        chart_versions = list(chart_versions)
        chart_versions.insert(
            0, {"version": f"{self.release_version_prefix} {release.get_current_version()}", "id": -1}
        )
        return Response(chart_versions)

    def get_version_detail(self, request, project_id):
        """Get the chart info for the specified version, including the release version"""
        slz = serializers.ChartVersionSLZ(data=request.data)
        slz.is_valid(raise_exception=True)
        data = slz.validated_data
        version = data["version"]
        # If it is a release, query the corresponding values info from the release
        if not version.startswith(self.release_version_prefix):
            chart_version = ChartVersion.objects.get(
                name=self.chart_name,
                version=version,
                chart__repository__project_id=project_id,
                chart__repository__name=self.public_repo_name,
            )
            version_detail = {"name": self.chart_name, "version": version, "files": chart_version.files}
            return Response(version_detail)
        # Get the values corresponding to the release
        version_detail = {"name": self.chart_name, "version": version}
        namespace = data.get("namespace") or self.namespace
        cluster_id = data.get("cluster_id")
        release = self.get_helm_release(cluster_id, self.chart_name, namespace=namespace)
        if not release:
            return Response(version_detail)
        version_detail["files"] = release.release.chartVersionSnapshot.files
        return Response(version_detail)
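For illustration, the list returned by get_chart_versions when a release exists looks roughly like this; the "(current)" prefix stands in for the real RELEASE_VERSION_PREFIX, whose value is not shown here, and the ids and versions are made up.

[
    {"version": "(current) 1.2.3", "id": -1},  # assembled release entry
    {"version": "1.2.3", "id": 10},
    {"version": "1.2.2", "id": 9},
]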
gen_version.py
virtualmix/noto-emoji
1225
142669
<filename>gen_version.py
#!/usr/bin/env python3
#
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Generate version string for NotoColorEmoji.

This parses the color emoji template file and updates the lines containing
version string info, writing a new file.

The nameID 5 field in the emoji font should reflect the commit/date of the
repo it was built from. This will build a string of the following format:

  Version 1.39;GOOG;noto-emoji:20170220:a8a215d2e889

This is intended to indicate that it was built by Google from noto-emoji at
commit a8a215d2e889 and date 20170220 (since dates are a bit easier to locate
in time than commit hashes).

For building with external data we don't include the commit id as we might be
using different resources. Instead the version string is:

  Version 1.39;GOOG;noto-emoji:20170518;BETA <msg>

Here the date is the current date, and the message after 'BETA ' is provided
using the '-b' flag. There's no commit hash. This also bypasses some checks
about the state of the repo.

The release number should have 2 or 3 minor digits. Right now we've been
using 2, but at the next major release we probably want to use 3. This
supports both. It will bump the version number if none is provided,
maintaining the minor digit length."""

import argparse
import datetime
import re

from nototools import tool_utils

# These are not very lenient, we expect to be applied to the noto color
# emoji template ttx file which matches these. Why then require the
# input argument, you ask? Um... testing?
_nameid_re = re.compile(r'\s*<namerecord nameID="5"')
_version_re = re.compile(r'\s*Version\s(\d+.\d{2,3})')
_headrev_re = re.compile(r'\s*<fontRevision value="(\d+.\d{2,3})"/>')


def _get_existing_version(lines):
    """Scan lines for all existing version numbers, and ensure they match.
    Return the matched version number string."""
    version = None

    def check_version(new_version):
        if version is not None and new_version != version:
            raise Exception(
                'version %s and namerecord version %s do not match' % (
                    version, new_version))
        return new_version

    saw_nameid = False
    for line in lines:
        if saw_nameid:
            saw_nameid = False
            m = _version_re.match(line)
            if not m:
                raise Exception('could not match line "%s" in namerecord' % line)
            version = check_version(m.group(1))
        elif _nameid_re.match(line):
            saw_nameid = True
        else:
            m = _headrev_re.match(line)
            if m:
                version = check_version(m.group(1))
    return version


def _version_to_mm(version):
    majs, mins = version.split('.')
    minor_len = len(mins)
    return int(majs), int(mins), minor_len


def _mm_to_version(major, minor, minor_len):
    fmt = '%%d.%%0%dd' % minor_len
    return fmt % (major, minor)


def _version_compare(lhs, rhs):
    lmaj, lmin, llen = _version_to_mm(lhs)
    rmaj, rmin, rlen = _version_to_mm(rhs)
    # if major versions differ, we don't care about the minor length, else
    # they should be the same
    if lmaj != rmaj:
        return lmaj - rmaj
    if llen != rlen:
        raise Exception('minor version lengths differ: "%s" and "%s"' % (lhs, rhs))
    return lmin - rmin


def _version_bump(version):
    major, minor, minor_len = _version_to_mm(version)
    minor = (minor + 1) % (10 ** minor_len)
    if minor == 0:
        raise Exception('cannot bump version "%s", requires new major' % version)
    return _mm_to_version(major, minor, minor_len)


def _get_repo_version_str(beta):
    """See above for description of this string."""
    if beta is not None:
        date_str = datetime.date.today().strftime('%Y%m%d')
        return 'GOOG;noto-emoji:%s;BETA %s' % (date_str, beta)

    p = tool_utils.resolve_path('[emoji]')
    commit, date, _ = tool_utils.git_head_commit(p)
    if not tool_utils.git_check_remote_commit(p, commit):
        raise Exception('emoji not on upstream master branch')

    date_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})')
    m = date_re.match(date)
    if not m:
        raise Exception('could not match "%s" with "%s"' % (date, date_re.pattern))
    ymd = ''.join(m.groups())

    return 'GOOG;noto-emoji:%s:%s' % (ymd, commit[:12])


def _replace_existing_version(lines, version, version_str):
    """Update lines with new version strings in appropriate places."""
    saw_nameid = False
    for i in range(len(lines)):
        line = lines[i]
        if saw_nameid:
            saw_nameid = False
            # preserve indentation
            lead_ws = len(line) - len(line.lstrip())
            lines[i] = line[:lead_ws] + version_str + '\n'
        elif _nameid_re.match(line):
            saw_nameid = True
        elif _headrev_re.match(line):
            lead_ws = len(line) - len(line.lstrip())
            lines[i] = line[:lead_ws] + '<fontRevision value="%s"/>\n' % version


def update_version(srcfile, dstfile, version, beta):
    """Update version in srcfile and write to dstfile.
    If version is None, bumps the current version, else version must be
    greater than the current version."""
    with open(srcfile, 'r') as f:
        lines = f.readlines()
    current_version = _get_existing_version(lines)
    if not version:
        version = _version_bump(current_version)
    elif version and _version_compare(version, current_version) <= 0:
        raise Exception('new version %s is <= current version %s' % (
            version, current_version))
    version_str = 'Version %s;%s' % (version, _get_repo_version_str(beta))
    _replace_existing_version(lines, version, version_str)
    with open(dstfile, 'w') as f:
        for line in lines:
            f.write(line)


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-v', '--version', help='version number, default bumps the current '
        'version', metavar='ver')
    parser.add_argument(
        '-s', '--src', help='ttx file with name and head tables',
        metavar='file', required=True)
    parser.add_argument(
        '-d', '--dst', help='name of edited ttx file to write',
        metavar='file', required=True)
    parser.add_argument(
        '-b', '--beta', help='beta tag if font is built using external resources')
    args = parser.parse_args()

    update_version(args.src, args.dst, args.version, args.beta)


if __name__ == '__main__':
    main()
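Worked examples of the version arithmetic above; each value follows directly from the helper definitions.

assert _version_to_mm('1.39') == (1, 39, 2)    # (major, minor, minor_len)
assert _mm_to_version(2, 1, 3) == '2.001'      # zero-padded to 3 minor digits
assert _version_bump('1.39') == '1.40'         # preserves minor digit length
assert _version_compare('1.040', '1.039') > 0  # minor lengths must match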
scrapers/scrape_nw_common.py
brunis83/covid_19
485
142672
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import re
from bs4 import BeautifulSoup
import scrape_common as sc


def get_nw_page():
    url = 'https://www.nw.ch/gesundheitsamtdienste/6044'
    content = sc.download(url, silent=True)
    content = content.replace("&nbsp;", " ")
    # drop apostrophe thousands separators, e.g. 1'234 -> 1234
    content = re.sub(r'(\d+)\'(\d+)', r'\1\2', content)
    soup = BeautifulSoup(content, 'html.parser')
    return url, soup
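A usage sketch; downstream scrapers presumably pick figures out of the returned soup, and the table selector here is illustrative rather than the page's real structure.

url, soup = get_nw_page()
for cell in soup.find_all('td'):
    print(url, cell.get_text(strip=True))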
nipyapi/registry/models/bundle_version_metadata.py
Jimvin/nipyapi
199
142680
# coding: utf-8 """ Apache NiFi Registry REST API The REST API provides an interface to a registry with operations for saving, versioning, reading NiFi flows and components. OpenAPI spec version: 1.15.0 Contact: <EMAIL> Generated by: https://github.com/swagger-api/swagger-codegen.git """ from pprint import pformat from six import iteritems import re class BundleVersionMetadata(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'link': 'JaxbLink', 'id': 'str', 'bundle_id': 'str', 'bucket_id': 'str', 'group_id': 'str', 'artifact_id': 'str', 'version': 'str', 'timestamp': 'int', 'author': 'str', 'description': 'str', 'sha256': 'str', 'sha256_supplied': 'bool', 'content_size': 'int', 'system_api_version': 'str', 'build_info': 'BuildInfo' } attribute_map = { 'link': 'link', 'id': 'id', 'bundle_id': 'bundleId', 'bucket_id': 'bucketId', 'group_id': 'groupId', 'artifact_id': 'artifactId', 'version': 'version', 'timestamp': 'timestamp', 'author': 'author', 'description': 'description', 'sha256': 'sha256', 'sha256_supplied': 'sha256Supplied', 'content_size': 'contentSize', 'system_api_version': 'systemApiVersion', 'build_info': 'buildInfo' } def __init__(self, link=None, id=None, bundle_id=None, bucket_id=None, group_id=None, artifact_id=None, version=None, timestamp=None, author=None, description=None, sha256=None, sha256_supplied=None, content_size=None, system_api_version=None, build_info=None): """ BundleVersionMetadata - a model defined in Swagger """ self._link = None self._id = None self._bundle_id = None self._bucket_id = None self._group_id = None self._artifact_id = None self._version = None self._timestamp = None self._author = None self._description = None self._sha256 = None self._sha256_supplied = None self._content_size = None self._system_api_version = None self._build_info = None if link is not None: self.link = link if id is not None: self.id = id if bundle_id is not None: self.bundle_id = bundle_id self.bucket_id = bucket_id if group_id is not None: self.group_id = group_id if artifact_id is not None: self.artifact_id = artifact_id if version is not None: self.version = version if timestamp is not None: self.timestamp = timestamp if author is not None: self.author = author if description is not None: self.description = description if sha256 is not None: self.sha256 = sha256 self.sha256_supplied = sha256_supplied self.content_size = content_size if system_api_version is not None: self.system_api_version = system_api_version self.build_info = build_info @property def link(self): """ Gets the link of this BundleVersionMetadata. An WebLink to this entity. :return: The link of this BundleVersionMetadata. :rtype: JaxbLink """ return self._link @link.setter def link(self, link): """ Sets the link of this BundleVersionMetadata. An WebLink to this entity. :param link: The link of this BundleVersionMetadata. :type: JaxbLink """ self._link = link @property def id(self): """ Gets the id of this BundleVersionMetadata. The id of this version of the extension bundle :return: The id of this BundleVersionMetadata. :rtype: str """ return self._id @id.setter def id(self, id): """ Sets the id of this BundleVersionMetadata. The id of this version of the extension bundle :param id: The id of this BundleVersionMetadata. 
:type: str """ self._id = id @property def bundle_id(self): """ Gets the bundle_id of this BundleVersionMetadata. The id of the extension bundle this version is for :return: The bundle_id of this BundleVersionMetadata. :rtype: str """ return self._bundle_id @bundle_id.setter def bundle_id(self, bundle_id): """ Sets the bundle_id of this BundleVersionMetadata. The id of the extension bundle this version is for :param bundle_id: The bundle_id of this BundleVersionMetadata. :type: str """ self._bundle_id = bundle_id @property def bucket_id(self): """ Gets the bucket_id of this BundleVersionMetadata. The id of the bucket the extension bundle belongs to :return: The bucket_id of this BundleVersionMetadata. :rtype: str """ return self._bucket_id @bucket_id.setter def bucket_id(self, bucket_id): """ Sets the bucket_id of this BundleVersionMetadata. The id of the bucket the extension bundle belongs to :param bucket_id: The bucket_id of this BundleVersionMetadata. :type: str """ if bucket_id is None: raise ValueError("Invalid value for `bucket_id`, must not be `None`") self._bucket_id = bucket_id @property def group_id(self): """ Gets the group_id of this BundleVersionMetadata. :return: The group_id of this BundleVersionMetadata. :rtype: str """ return self._group_id @group_id.setter def group_id(self, group_id): """ Sets the group_id of this BundleVersionMetadata. :param group_id: The group_id of this BundleVersionMetadata. :type: str """ self._group_id = group_id @property def artifact_id(self): """ Gets the artifact_id of this BundleVersionMetadata. :return: The artifact_id of this BundleVersionMetadata. :rtype: str """ return self._artifact_id @artifact_id.setter def artifact_id(self, artifact_id): """ Sets the artifact_id of this BundleVersionMetadata. :param artifact_id: The artifact_id of this BundleVersionMetadata. :type: str """ self._artifact_id = artifact_id @property def version(self): """ Gets the version of this BundleVersionMetadata. The version of the extension bundle :return: The version of this BundleVersionMetadata. :rtype: str """ return self._version @version.setter def version(self, version): """ Sets the version of this BundleVersionMetadata. The version of the extension bundle :param version: The version of this BundleVersionMetadata. :type: str """ self._version = version @property def timestamp(self): """ Gets the timestamp of this BundleVersionMetadata. The timestamp of the create date of this version :return: The timestamp of this BundleVersionMetadata. :rtype: int """ return self._timestamp @timestamp.setter def timestamp(self, timestamp): """ Sets the timestamp of this BundleVersionMetadata. The timestamp of the create date of this version :param timestamp: The timestamp of this BundleVersionMetadata. :type: int """ if timestamp is not None and timestamp < 1: raise ValueError("Invalid value for `timestamp`, must be a value greater than or equal to `1`") self._timestamp = timestamp @property def author(self): """ Gets the author of this BundleVersionMetadata. The identity that created this version :return: The author of this BundleVersionMetadata. :rtype: str """ return self._author @author.setter def author(self, author): """ Sets the author of this BundleVersionMetadata. The identity that created this version :param author: The author of this BundleVersionMetadata. :type: str """ self._author = author @property def description(self): """ Gets the description of this BundleVersionMetadata. 
The description for this version :return: The description of this BundleVersionMetadata. :rtype: str """ return self._description @description.setter def description(self, description): """ Sets the description of this BundleVersionMetadata. The description for this version :param description: The description of this BundleVersionMetadata. :type: str """ self._description = description @property def sha256(self): """ Gets the sha256 of this BundleVersionMetadata. The hex representation of the SHA-256 digest of the binary content for this version :return: The sha256 of this BundleVersionMetadata. :rtype: str """ return self._sha256 @sha256.setter def sha256(self, sha256): """ Sets the sha256 of this BundleVersionMetadata. The hex representation of the SHA-256 digest of the binary content for this version :param sha256: The sha256 of this BundleVersionMetadata. :type: str """ self._sha256 = sha256 @property def sha256_supplied(self): """ Gets the sha256_supplied of this BundleVersionMetadata. Whether or not the client supplied a SHA-256 when uploading the bundle :return: The sha256_supplied of this BundleVersionMetadata. :rtype: bool """ return self._sha256_supplied @sha256_supplied.setter def sha256_supplied(self, sha256_supplied): """ Sets the sha256_supplied of this BundleVersionMetadata. Whether or not the client supplied a SHA-256 when uploading the bundle :param sha256_supplied: The sha256_supplied of this BundleVersionMetadata. :type: bool """ if sha256_supplied is None: raise ValueError("Invalid value for `sha256_supplied`, must not be `None`") self._sha256_supplied = sha256_supplied @property def content_size(self): """ Gets the content_size of this BundleVersionMetadata. The size of the binary content for this version in bytes :return: The content_size of this BundleVersionMetadata. :rtype: int """ return self._content_size @content_size.setter def content_size(self, content_size): """ Sets the content_size of this BundleVersionMetadata. The size of the binary content for this version in bytes :param content_size: The content_size of this BundleVersionMetadata. :type: int """ if content_size is None: raise ValueError("Invalid value for `content_size`, must not be `None`") if content_size is not None and content_size < 0: raise ValueError("Invalid value for `content_size`, must be a value greater than or equal to `0`") self._content_size = content_size @property def system_api_version(self): """ Gets the system_api_version of this BundleVersionMetadata. The version of the system API that this bundle version was built against :return: The system_api_version of this BundleVersionMetadata. :rtype: str """ return self._system_api_version @system_api_version.setter def system_api_version(self, system_api_version): """ Sets the system_api_version of this BundleVersionMetadata. The version of the system API that this bundle version was built against :param system_api_version: The system_api_version of this BundleVersionMetadata. :type: str """ self._system_api_version = system_api_version @property def build_info(self): """ Gets the build_info of this BundleVersionMetadata. The build information about this version :return: The build_info of this BundleVersionMetadata. :rtype: BuildInfo """ return self._build_info @build_info.setter def build_info(self, build_info): """ Sets the build_info of this BundleVersionMetadata. The build information about this version :param build_info: The build_info of this BundleVersionMetadata. 
:type: BuildInfo """ if build_info is None: raise ValueError("Invalid value for `build_info`, must not be `None`") self._build_info = build_info def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, BundleVersionMetadata): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
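A minimal construction sketch: per the setters above, bucket_id, sha256_supplied, content_size, and build_info must not be None. The BuildInfo import path is assumed from the package layout, and BuildInfo() is assumed to construct with no required arguments.

from nipyapi.registry.models.build_info import BuildInfo  # assumed path

meta = BundleVersionMetadata(
    bucket_id='my-bucket-id',
    sha256_supplied=False,
    content_size=1024,
    build_info=BuildInfo(),
    version='1.0.0',
)
print(meta.to_dict())  # plain-dict view keyed by attribute names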
tests/changes/backends/jenkins/test_builder.py
vault-the/changes
443
142686
from __future__ import absolute_import import mock import os.path import responses import pytest import re import time from flask import current_app from uuid import UUID from changes.config import db, redis from changes.constants import Status, Result from changes.lib.artifact_store_lib import ArtifactState from changes.lib.artifact_store_mock import ArtifactStoreMock from changes.models.artifact import Artifact from changes.models.failurereason import FailureReason from changes.models.filecoverage import FileCoverage from changes.models.job import Job from changes.models.log import LogSource from changes.models.patch import Patch from changes.models.test import TestCase from changes.models.testartifact import TestArtifact from changes.backends.jenkins.builder import JenkinsBuilder, MASTER_BLACKLIST_KEY, JENKINS_LOG_NAME from changes.testutils import ( BackendTestCase, eager_tasks, SAMPLE_DIFF, SAMPLE_XUNIT, SAMPLE_COVERAGE, SAMPLE_XUNIT_TESTARTIFACTS ) class BaseTestCase(BackendTestCase): builder_cls = JenkinsBuilder builder_options = { 'master_urls': ['http://jenkins.example.com'], 'diff_urls': ['http://jenkins-diff.example.com'], 'job_name': 'server', } def setUp(self): self.project = self.create_project() ArtifactStoreMock.reset() super(BaseTestCase, self).setUp() def get_builder(self, **options): base_options = self.builder_options.copy() base_options.update(options) return self.builder_cls(app=current_app, **base_options) def load_fixture(self, filename): filepath = os.path.join( os.path.dirname(__file__), filename, ) with open(filepath, 'rb') as fp: return fp.read() class CreateBuildTest(BaseTestCase): def test_sets_cluster(self): job_id = '81d1596fd4d642f4a6bdf86c45e014e8' build = self.create_build(self.project) job = self.create_job( build=build, id=UUID(job_id)) builder = self.get_builder(cluster='foobar') with mock.patch.object(builder, 'create_jenkins_build') as create_jenkins_build: def fake_update(step, **kwargs): step.data.update({'master': 'fake', 'item_id': '99', 'build_no': None}) return {'queued': True} create_jenkins_build.side_effect = fake_update builder.create_job(job) step = job.phases[0].steps[0] assert step.cluster == 'foobar' @responses.activate def test_queued_creation(self): job_id = '81d1596fd4d642f4a6bdf86c45e014e8' responses.add( responses.POST, 'http://jenkins.example.com/job/server/build', body='', status=201) responses.add( responses.GET, re.compile('http://jenkins\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'), body=self.load_fixture('fixtures/GET/queue_item_by_job_id.xml')) responses.add( responses.GET, re.compile('http://jenkins\\.example\\.com/job/server/api/xml/\\?depth=1&xpath=/queue/item\\[action/parameter/name=%22CHANGES_BID%22%20and%20action/parameter/value=%22.*?%22\\]/id'), status=404) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID(job_id)) builder = self.get_builder() builder.create_job(job) step = job.phases[0].steps[0] assert step.data == { 'build_no': None, 'item_id': '13', 'job_name': 'server', 'queued': True, 'uri': None, 'master': 'http://jenkins.example.com', } @responses.activate def test_active_creation(self): job_id = 'f9481a17aac446718d7893b6e1c6288b' responses.add( responses.POST, 'http://jenkins.example.com/job/server/build', body='', status=201) responses.add( responses.GET, 
re.compile('http://jenkins\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'), status=404) responses.add( responses.GET, re.compile('http://jenkins\\.example\\.com/job/server/api/xml/\\?xpath=%2FfreeStyleProject%2Fbuild%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fnumber&depth=1&wrapper=x'), body=self.load_fixture('fixtures/GET/build_item_by_job_id.xml')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID(hex=job_id), ) builder = self.get_builder() builder.create_job(job) step = job.phases[0].steps[0] assert step.data == { 'build_no': '1', 'item_id': None, 'job_name': 'server', 'queued': False, 'uri': None, 'master': 'http://jenkins.example.com', } @responses.activate @mock.patch.object(JenkinsBuilder, '_find_job') def test_patch(self, find_job): responses.add( responses.POST, 'http://jenkins-diff.example.com/job/server/build', body='', status=201) find_job.return_value = { 'build_no': '1', 'item_id': None, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins-diff.example.com', } patch = Patch( repository=self.project.repository, parent_revision_sha='7ebd1f2d750064652ef5bbff72452cc19e1731e0', diff=SAMPLE_DIFF, ) db.session.add(patch) source = self.create_source(self.project, patch=patch) build = self.create_build(self.project, source=source) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8') ) builder = self.get_builder() builder.create_job(job) @responses.activate def test_multi_master(self): job_id = 'f9481a17aac446718d7893b6e1c6288b' responses.add( responses.GET, 'http://jenkins-2.example.com/queue/api/json/', body=self.load_fixture('fixtures/GET/queue_list_other_jobs.json'), status=200) responses.add( responses.GET, 'http://jenkins.example.com/queue/api/json/', body=self.load_fixture('fixtures/GET/queue_list.json'), status=200) responses.add( responses.POST, 'http://jenkins-2.example.com/job/server/build', body='', status=201) responses.add( responses.GET, re.compile('http://jenkins-2\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'), status=404) responses.add( responses.GET, re.compile('http://jenkins-2\\.example\\.com/job/server/api/xml/\\?xpath=%2FfreeStyleProject%2Fbuild%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fnumber&depth=1&wrapper=x'), body=self.load_fixture('fixtures/GET/build_item_by_job_id.xml')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID(hex=job_id), ) builder = self.get_builder() builder.master_urls = [ 'http://jenkins.example.com', 'http://jenkins-2.example.com', ] builder.create_job(job) step = job.phases[0].steps[0] assert step.data['master'] == 'http://jenkins-2.example.com' @responses.activate def test_multi_master_one_bad(self): job_id = 'f9481a17aac446718d7893b6e1c6288b' responses.add( responses.GET, 'http://jenkins-2.example.com/queue/api/json/', body=self.load_fixture('fixtures/GET/queue_list_other_jobs.json'), status=200) # This one has a failure status. 
responses.add( responses.GET, 'http://jenkins.example.com/queue/api/json/', body='', status=503) responses.add( responses.POST, 'http://jenkins-2.example.com/job/server/build', body='', status=201) responses.add( responses.GET, re.compile('http://jenkins-2\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'), status=404) responses.add( responses.GET, re.compile('http://jenkins-2\\.example\\.com/job/server/api/xml/\\?xpath=%2FfreeStyleProject%2Fbuild%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fnumber&depth=1&wrapper=x'), body=self.load_fixture('fixtures/GET/build_item_by_job_id.xml')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID(hex=job_id), ) builder = self.get_builder() builder.master_urls = [ 'http://jenkins.example.com', 'http://jenkins-2.example.com', ] builder.create_job(job) step = job.phases[0].steps[0] assert step.data['master'] == 'http://jenkins-2.example.com' def test_pick_master_with_blacklist(self): redis.sadd(MASTER_BLACKLIST_KEY, 'http://jenkins.example.com') builder = self.get_builder() builder.master_urls = [ 'http://jenkins.example.com', 'http://jenkins-2.example.com', ] assert 'http://jenkins-2.example.com' == builder._pick_master('job1') @responses.activate def test_jobstep_replacement(self): job_id = 'f9481a17aac446718d7893b6e1c6288b' responses.add( responses.POST, 'http://jenkins.example.com/job/server/build', body='', status=201) responses.add( responses.GET, re.compile('http://jenkins\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'), status=404) responses.add( responses.GET, re.compile('http://jenkins\\.example\\.com/job/server/api/xml/\\?xpath=%2FfreeStyleProject%2Fbuild%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fnumber&depth=1&wrapper=x'), body=self.load_fixture('fixtures/GET/build_item_by_job_id.xml')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID(hex=job_id), ) builder = self.get_builder() builder.create_job(job) failstep = job.phases[0].steps[0] failstep.result = Result.infra_failed failstep.status = Status.finished db.session.add(failstep) db.session.commit() replacement_step = builder.create_job(job, replaces=failstep) # new jobstep should still be part of same job/phase assert replacement_step.job == job assert replacement_step.phase == failstep.phase # make sure .steps actually includes the new jobstep assert len(failstep.phase.steps) == 2 # make sure replacement id is correctly set assert failstep.replacement_id == replacement_step.id assert replacement_step.data == { 'build_no': '1', 'item_id': None, 'job_name': 'server', 'queued': False, 'uri': None, 'master': 'http://jenkins.example.com', } class CancelStepTest(BaseTestCase): @responses.activate def test_queued(self): responses.add( responses.POST, 'http://jenkins.example.com/queue/cancelItem?id=13', match_querystring=True, status=302) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data={ 'item_id': 13, 'job_name': 'server', 'master': 'http://jenkins.example.com', }, status=Status.queued) builder = self.get_builder() builder.cancel_step(step) 
assert step.result == Result.aborted assert step.status == Status.finished @responses.activate def test_active(self): responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/stop/', body='', status=302) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data={ 'build_no': 2, 'job_name': 'server', 'master': 'http://jenkins.example.com', }, status=Status.in_progress) builder = self.get_builder() builder.cancel_step(step) assert step.status == Status.finished assert step.result == Result.aborted @responses.activate @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) def test_timeouts_sync_log(self): responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/api/json/', body=self.load_fixture('fixtures/GET/job_details_building.json')) responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0', match_querystring=True, adding_headers={'X-Text-Size': '7'}, body='Foo bar') responses.add( responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml', body=self.load_fixture('fixtures/GET/node_config.xml')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), data={ 'build_no': 2, 'item_id': 13, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins.example.com', }, ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data=job.data) builder = self.get_builder() # The job is not yet complete after this sync step so no logs yet. builder.sync_step(step) source = LogSource.query.filter_by(job=job).first() assert source is None step.data['timed_out'] = True builder.cancel_step(step) source = LogSource.query.filter_by(job=job).first() assert source.step == step assert source.name == JENKINS_LOG_NAME assert source.project == self.project assert source.date_created == step.date_started assert step.data.get('log_offset') == 7 bucket_name = step.id.hex + '-jenkins' artifact_name = step.data['log_artifact_name'] artifact = ArtifactStoreMock('').get_artifact(bucket_name, artifact_name) assert artifact.name == artifact_name assert artifact.path == JENKINS_LOG_NAME assert artifact.size == 7 assert artifact.state == ArtifactState.UPLOADED assert ArtifactStoreMock('').get_artifact_content(bucket_name, artifact_name).getvalue() == 'Foo bar' class SyncStepTest(BaseTestCase): @responses.activate def test_waiting_in_queue(self): responses.add( responses.GET, 'http://jenkins.example.com/queue/item/13/api/json/', body=self.load_fixture('fixtures/GET/queue_details_pending.json')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data={ 'build_no': None, 'item_id': 13, 'job_name': 'server', 'queued': True, 'master': 'http://jenkins.example.com', }) builder = self.get_builder() builder.sync_step(step) assert step.status == Status.queued @responses.activate def test_cancelled_in_queue(self): responses.add( responses.GET, 'http://jenkins.example.com/queue/item/13/api/json/', body=self.load_fixture('fixtures/GET/queue_details_cancelled.json')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), ) phase = self.create_jobphase(job) 
step = self.create_jobstep(phase, data={ 'build_no': None, 'item_id': 13, 'job_name': 'server', 'queued': True, 'master': 'http://jenkins.example.com', }) builder = self.get_builder() builder.sync_step(step) assert step.status == Status.finished assert step.result == Result.aborted @responses.activate @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) def test_queued_to_active(self): responses.add( responses.GET, 'http://jenkins.example.com/queue/item/13/api/json/', body=self.load_fixture('fixtures/GET/queue_details_building.json')) responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/api/json/', body=self.load_fixture('fixtures/GET/job_details_building.json')) responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0', match_querystring=True, adding_headers={'X-Text-Size': '0'}, body='') responses.add( responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml', body=self.load_fixture('fixtures/GET/node_config.xml')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data={ 'build_no': None, 'item_id': 13, 'job_name': 'server', 'queued': True, 'master': 'http://jenkins.example.com', }) builder = self.get_builder() builder.sync_step(step) assert step.data['build_no'] == 2 @responses.activate @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) def test_success_result(self): responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/api/json/', body=self.load_fixture('fixtures/GET/job_details_success.json')) responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0', match_querystring=True, adding_headers={'X-Text-Size': '0'}, body='') responses.add( responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml', body=self.load_fixture('fixtures/GET/node_config.xml')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data={ 'build_no': 2, 'item_id': 13, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins.example.com', }) builder = self.get_builder() builder.sync_step(step) assert step.data['build_no'] == 2 assert step.status == Status.finished assert step.result == Result.passed assert step.date_finished is not None @responses.activate @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) def test_failed_result(self): responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/api/json/', body=self.load_fixture('fixtures/GET/job_details_failed.json')) responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0', match_querystring=True, adding_headers={'X-Text-Size': '0'}, body='') responses.add( responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml', body=self.load_fixture('fixtures/GET/node_config.xml')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), data={ 'build_no': 2, 'item_id': 13, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins.example.com', }, ) phase = 
self.create_jobphase(job) step = self.create_jobstep(phase, data=job.data) builder = self.get_builder() builder.sync_step(step) assert step.data['build_no'] == 2 assert step.status == Status.finished assert step.result == Result.failed assert step.date_finished is not None def test_present_manifest(self): build = self.create_build(self.project) job = self.create_job(build=build) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data={ 'build_no': 2, 'item_id': 13, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins.example.com', }) artifacts = [self.create_artifact(step, 'manifest.json')] builder = self.get_builder() builder.verify_final_artifacts(step, artifacts) assert not FailureReason.query.filter( FailureReason.step_id == step.id ).first() def test_missing_manifest_result(self): build = self.create_build(self.project) job = self.create_job(build=build) phase = self.create_jobphase(job) step = self.create_jobstep(phase, status=Status.finished) builder = self.get_builder() builder.verify_final_artifacts(step, []) assert FailureReason.query.filter( FailureReason.step_id == step.id, FailureReason.reason == 'missing_manifest_json' ).first() assert step.result == Result.infra_failed @responses.activate @mock.patch('changes.backends.jenkins.builder.time') @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) def test_result_slow_log(self, mock_time): mock_time.time.return_value = time.time() def log_text_callback(request): # Zoom 10 minutes into the future; this should cause the console # downloading code to bail mock_time.time.return_value += 10 * 60 data = "log\n" * 10000 return (200, {'X-Text-Size': str(len(data))}, data) responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/api/json/', body=self.load_fixture('fixtures/GET/job_details_failed.json')) responses.add_callback( responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0', match_querystring=True, callback=log_text_callback) responses.add( responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml', body=self.load_fixture('fixtures/GET/node_config.xml')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), data={ 'build_no': 2, 'item_id': 13, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins.example.com', }, ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data=job.data) builder = self.get_builder() builder.sync_step(step) assert len(step.logsources) == 1 bucket_name = step.id.hex + '-jenkins' artifact_name = step.data['log_artifact_name'] assert "LOG TRUNCATED" in ArtifactStoreMock('').\ get_artifact_content(bucket_name, artifact_name).getvalue() class SyncGenericResultsTest(BaseTestCase): @responses.activate @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock) def test_does_sync_log(self): responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/api/json/', body=self.load_fixture('fixtures/GET/job_details_failed.json')) responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0', match_querystring=True, adding_headers={'X-Text-Size': '7'}, body='Foo bar') responses.add( responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml', 
body=self.load_fixture('fixtures/GET/node_config.xml')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), data={ 'build_no': 2, 'item_id': 13, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins.example.com', }, ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data=job.data) builder = self.get_builder() builder.sync_step(step) source = LogSource.query.filter_by(job=job).first() assert source.step == step assert source.name == JENKINS_LOG_NAME assert source.project == self.project assert source.date_created == step.date_started assert step.data.get('log_offset') == 7 bucket_name = step.id.hex + '-jenkins' artifact_name = step.data['log_artifact_name'] artifact = ArtifactStoreMock('').get_artifact(bucket_name, artifact_name) assert artifact.name == artifact_name assert artifact.path == JENKINS_LOG_NAME assert artifact.size == 7 assert artifact.state == ArtifactState.UPLOADED assert ArtifactStoreMock('').get_artifact_content(bucket_name, artifact_name).getvalue() == 'Foo bar' @responses.activate @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock) def test_does_save_artifacts(self): responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/api/json/', body=self.load_fixture('fixtures/GET/job_details_with_artifacts.json')) responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0', match_querystring=True, adding_headers={'X-Text-Size': '0'}, body='') responses.add( responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml', body=self.load_fixture('fixtures/GET/node_config.xml')) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data={ 'build_no': 2, 'item_id': 13, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins.example.com', }) builder = self.get_builder() builder.sync_step(step) expected_artifacts_data = dict() expected_artifacts_data['foobar.log'] = { "displayPath": "foobar.log", "fileName": "foobar.log", "relativePath": "artifacts/foobar.log", } expected_artifacts_data['foo/tests.xml'] = { "displayPath": "tests.xml", "fileName": "tests.xml", "relativePath": "artifacts/foo/tests.xml", } expected_artifacts_data['tests.xml'] = { "displayPath": "tests.xml", "fileName": "tests.xml", "relativePath": "artifacts/tests.xml", } for name, data in expected_artifacts_data.iteritems(): artifact = Artifact.query.filter( Artifact.name == name, Artifact.step == step, ).first() assert artifact.data == data class ArtifactsManagerMatchTest(BaseTestCase): def test_standard(self): builder = self.get_builder() mgr = builder.get_artifact_manager(mock.Mock()) assert not mgr.can_process('build_report.log') def test_fetch_jenkins(self): builder = self.get_builder(debug_config={'fetch_jenkins_logs': True}) mgr = builder.get_artifact_manager(mock.Mock()) assert mgr.can_process('build_report.log') class SyncArtifactTest(BaseTestCase): @responses.activate @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock) def test_sync_artifact_xunit(self): responses.add( responses.GET, 
'http://jenkins.example.com/job/server/2/artifact/artifacts/xunit.xml', body=SAMPLE_XUNIT, stream=True) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), data={ 'build_no': 2, 'item_id': 13, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins.example.com', }, ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data=job.data) artifact = self.create_artifact(step, name='xunit.xml', data={ "displayPath": "xunit.xml", "fileName": "xunit.xml", "relativePath": "artifacts/xunit.xml" }) builder = self.get_builder() builder.sync_artifact(artifact) test_list = list(TestCase.query.filter( TestCase.job_id == job.id )) assert len(test_list) == 3 @responses.activate @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock) def test_sync_artifact_coverage(self): responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/artifact/artifacts/coverage.xml', body=SAMPLE_COVERAGE, stream=True) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), data={ 'build_no': 2, 'item_id': 13, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins.example.com', }, ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data=job.data) artifact = self.create_artifact(step, name='coverage.xml', data={ "displayPath": "coverage.xml", "fileName": "coverage.xml", "relativePath": "artifacts/coverage.xml" }) builder = self.get_builder() builder.sync_artifact(artifact) cover_list = list(FileCoverage.query.filter( FileCoverage.job_id == job.id )) assert len(cover_list) == 2 @responses.activate @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock) def test_sync_artifact_file(self): responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/artifact/artifacts/foo.bar', body=SAMPLE_COVERAGE, stream=True) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), data={ 'build_no': 2, 'item_id': 13, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins.example.com', }, ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data=job.data) artifact = self.create_artifact(step, name='foo.bar', data={ "displayPath": "foo.bar", "fileName": "foo.bar", "relativePath": "artifacts/foo.bar" }) builder = self.get_builder() builder.sync_artifact(artifact) class SyncTestArtifactsTest(BaseTestCase): @responses.activate @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) @mock.patch('changes.models.testresult.ArtifactStoreClient', ArtifactStoreMock) @mock.patch('changes.storage.artifactstore.ArtifactStoreClient', ArtifactStoreMock) def test_sync_testartifacts(self): responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/artifact/artifacts/xunit.xml', body=SAMPLE_XUNIT_TESTARTIFACTS, stream=True) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID('81d1596fd4d642f4a6bdf86c45e014e8'), data={ 'build_no': 2, 'item_id': 13, 'job_name': 'server', 'queued': False, 'master': 'http://jenkins.example.com', }, ) phase = self.create_jobphase(job) step = self.create_jobstep(phase, data=job.data) artifact = self.create_artifact(step, name='xunit.xml', data={ 
"displayPath": "xunit.xml", "fileName": "xunit.xml", "relativePath": "artifacts/xunit.xml" }) builder = self.get_builder() builder.sync_artifact(artifact) test_artifacts = list(TestArtifact.query) test = TestCase.query.first() assert len(test_artifacts) == 1 test_artifact = test_artifacts[0] assert test_artifact.file.get_file().read() == "sample_content" assert test_artifact.name == "sample_name.txt" assert str(test_artifact.type) == "Text" assert test_artifact.test == test class JenkinsIntegrationTest(BaseTestCase): """ This test should ensure a full cycle of tasks completes successfully within the jenkins builder space. """ # it's possible for this test to infinitely hang due to continuous polling, # so let's ensure we set a timeout @pytest.mark.timeout(5) @mock.patch('changes.config.redis.lock', mock.MagicMock()) @mock.patch('changes.backends.jenkins.builder.ArtifactStoreClient', ArtifactStoreMock) @mock.patch('changes.jobs.sync_job_step.ArtifactStoreClient', ArtifactStoreMock) @eager_tasks @responses.activate def test_full(self): from changes.jobs.create_job import create_job job_id = '81d1596fd4d642f4a6bdf86c45e014e8' # TODO: move this out of this file and integrate w/ buildstep responses.add( responses.POST, 'http://jenkins.example.com/job/server/build', body='', status=201) responses.add( responses.GET, re.compile('http://jenkins\\.example\\.com/queue/api/xml/\\?xpath=%2Fqueue%2Fitem%5Baction%2Fparameter%2Fname%3D%22CHANGES_BID%22\\+and\\+action%2Fparameter%2Fvalue%3D%22.*?%22%5D%2Fid&wrapper=x'), body=self.load_fixture('fixtures/GET/queue_item_by_job_id.xml')) responses.add( responses.GET, 'http://jenkins.example.com/queue/item/13/api/json/', body=self.load_fixture('fixtures/GET/queue_details_building.json')) responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/api/json/', body=self.load_fixture('fixtures/GET/job_details_success.json')) responses.add( responses.GET, 'http://jenkins.example.com/job/server/2/logText/progressiveText/?start=0', match_querystring=True, adding_headers={'X-Text-Size': '7'}, body='Foo bar') responses.add( responses.GET, 'http://jenkins.example.com/computer/server-ubuntu-10.04%20(ami-746cf244)%20(i-836023b7)/config.xml', body=self.load_fixture('fixtures/GET/node_config.xml')) artifacts_store_requests_re = re.compile(r'http://localhost:1234/buckets/.+/artifacts') # Simulate test type which doesn't interact with artifacts store. 
responses.add( responses.GET, artifacts_store_requests_re, body='', status=404) build = self.create_build(self.project) job = self.create_job( build=build, id=UUID(job_id)) plan = self.create_plan(self.project) self.create_step( plan, order=0, implementation='changes.backends.jenkins.buildstep.JenkinsBuildStep', data={ 'job_name': 'server', 'jenkins_url': 'http://jenkins.example.com', }, ) self.create_job_plan(job, plan) job_id = job.id.hex build_id = build.id.hex create_job.delay( job_id=job_id, task_id=job_id, parent_task_id=build_id, ) job = Job.query.get(job_id) assert job.status == Status.finished assert job.result == Result.passed assert job.date_created assert job.date_started assert job.date_finished phase_list = job.phases assert len(phase_list) == 1 assert phase_list[0].status == Status.finished assert phase_list[0].result == Result.passed assert phase_list[0].date_created assert phase_list[0].date_started assert phase_list[0].date_finished step_list = phase_list[0].steps assert len(step_list) == 1 assert step_list[0].status == Status.finished assert step_list[0].result == Result.passed assert step_list[0].date_created assert step_list[0].date_started assert step_list[0].date_finished assert step_list[0].data == { 'item_id': '13', 'queued': False, 'log_offset': 7, 'log_artifact_name': JENKINS_LOG_NAME, 'jenkins_bucket_name': step_list[0].id.hex + '-jenkins', 'job_name': 'server', 'build_no': 2, 'uri': 'https://jenkins.build.itc.dropbox.com/job/server/2/', 'master': 'http://jenkins.example.com', } node = step_list[0].node assert node.label == 'server-ubuntu-10.04 (ami-746cf244) (i-836023b7)' assert [n.label for n in node.clusters] == ['server-runner'] source = LogSource.query.filter_by(job=job).first() assert source.name == JENKINS_LOG_NAME assert source.step == step_list[0] assert source.project == self.project assert source.date_created == job.date_started bucket_name = step_list[0].id.hex + '-jenkins' artifact_name = step_list[0].data['log_artifact_name'] artifact = ArtifactStoreMock('').get_artifact(bucket_name, artifact_name) assert artifact.name == artifact_name assert artifact.path == JENKINS_LOG_NAME assert artifact.size == 7 assert artifact.state == ArtifactState.UPLOADED assert ArtifactStoreMock('').get_artifact_content(bucket_name, artifact_name).getvalue() == 'Foo bar'
opticspy/ray_tracing/glass_function/transferMatrix.py
Graylien/opticspy
306
142687
# This file is part of PyTMM.
#
# PyTMM is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyTMM is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyTMM. If not, see <http://www.gnu.org/licenses/>.
#
#
# Copyright 2014-2015 <NAME> <<EMAIL>>

import numpy
import enum


class Polarization(enum.Enum):
    s = 0
    p = 1


class TransferMatrix:
    """
    Dielectric layer TMM

    How the functions eat structure matrices:

    | T |   |        |   |        |   |     |   | 1 |
    |   | = | Bottom |   | Matrix |   | Top | = |   |
    | 0 |   |        |   |        |   |     |   | R |
    """

    @staticmethod
    def structure(*args):
        """
        args - separate structure matrices
        Left to Right = Bottom to Top
        :param args:
        """
        mat = numpy.identity(2, dtype=numpy.complex128)
        for m in args:
            mat = numpy.dot(m.matrix, mat)
        return TransferMatrix(mat)

    @staticmethod
    def layer(n, d, wavelength, theta=0, pol=Polarization.s):
        """
        Creates an Air-DielectricLayer-Air Transfer Matrix
        :param n:
        :param d:
        :param wavelength:
        """
        bottomBoundary = TransferMatrix.boundingLayer(1, n, theta, pol)
        topBoundary = TransferMatrix.boundingLayer(n, 1, theta, pol)
        propagation = TransferMatrix.propagationLayer(n, d, wavelength, theta, pol)
        return TransferMatrix.structure(bottomBoundary, propagation, topBoundary)

    @staticmethod
    def boundingLayer(n1, n2, theta=0, pol=Polarization.s):
        """
        Creates a DielectricLayer-DielectricLayer Boundary Transfer Matrix
        :param n1:
        :param n2:
        """
        # if numpy.abs((n1/n2)*numpy.sin(theta)) >= 1.0:
        #     theta2 = numpy.pi/2 * numpy.sign(numpy.sin(theta))
        # else:
        theta2 = numpy.arcsin((n1/n2)*numpy.sin(theta), dtype=numpy.complex128)

        # TE
        if pol is Polarization.s:
            _n1 = n1*numpy.cos(theta)
            _n2 = n2*numpy.cos(theta2)
            a21 = 1
        # TM
        elif pol is Polarization.p:
            _n1 = n1/numpy.cos(theta)
            _n2 = n2/numpy.cos(theta2)
            a21 = numpy.cos(theta2)/numpy.cos(theta)

        boundary = 1/(2 * a21 * _n2) * numpy.array([[(_n1 + _n2), (_n2 - _n1)],
                                                    [(_n2 - _n1), (_n1 + _n2)]],
                                                   dtype=numpy.complex128)
        return TransferMatrix(boundary)

    @staticmethod
    def propagationLayer(n, d, wavelength, theta=0, pol=Polarization.s):
        """
        Creates a Propagation Transfer Matrix, width d, refractive index n
        :param n:
        :param d:
        :param wavelength:
        """
        theta2 = numpy.arcsin((1/n)*numpy.sin(theta), dtype=numpy.complex128)
        propagation = numpy.array(
            [[numpy.exp((-1j * n * d * 2 * numpy.pi / wavelength) * numpy.cos(theta2)), 0],
             [0, numpy.exp((1j * n * d * 2 * numpy.pi / wavelength) * numpy.cos(theta2))]],
            dtype=numpy.complex128)
        return TransferMatrix(propagation)

    def __init__(self, matrix):
        self.matrix = matrix

    def invert(self):
        """
        Inverts matrix
        """
        self.matrix = numpy.linalg.inv(self.matrix)

    def appendLeft(self, matrix):
        """
        :param matrix:
        """
        self.matrix = numpy.dot(matrix.matrix, self.matrix)

    def appendRight(self, matrix):
        """
        :param matrix:
        """
        self.matrix = numpy.dot(self.matrix, matrix.matrix)


def solvePropagation(transferMatrix, incidentField=1.0):
    """Calculate reflectance and transmittance

    :param transferMatrix:
    :param incidentField:
    """
    # res[1] = transmittance, res[0] = reflectance
    lhs = numpy.array([[transferMatrix.matrix[0, 1], -1],
                       [transferMatrix.matrix[1, 1], 0]])
    rhs = numpy.array([-transferMatrix.matrix[0, 0],
                       -transferMatrix.matrix[1, 0]])
    rhs = numpy.multiply(rhs, incidentField)
    res = numpy.linalg.solve(lhs, rhs)
    reflectance = res[0]
    transmittance = res[1]
    return reflectance, transmittance


def findReciprocalTransferMatrix(transmittance, reflectance,
                                 bottomMat=TransferMatrix(numpy.identity(2)),
                                 topMat=TransferMatrix(numpy.identity(2))):  # , incidentField=1.0
    """
    :param transmittance:
    :param reflectance:
    :param bottomMat:
    :param topMat:
    :return:
    """
    assert transmittance != 0
    matrix = numpy.array([[1 / numpy.conj(transmittance), reflectance / transmittance],
                          [numpy.conj(reflectance / transmittance), 1 / transmittance]])
    matrix = numpy.dot(numpy.linalg.inv(bottomMat.matrix), matrix)
    matrix = numpy.dot(matrix, numpy.linalg.inv(topMat.matrix))
    return TransferMatrix(matrix)


def findReciprocalTransferMatrixLegacy(transmittance, reflectance,
                                       bottomMat=TransferMatrix(numpy.identity(2)),
                                       topMat=TransferMatrix(numpy.identity(2))):  # , incidentField=1.0
    """
    :param transmittance:
    :param reflectance:
    :param bottomMat:
    :param topMat:
    :return:
    """
    a = numpy.identity(2)
    b = numpy.array([[numpy.real(reflectance), numpy.imag(reflectance)],
                     [numpy.imag(reflectance), -numpy.real(reflectance)]])
    lhs = numpy.vstack((numpy.hstack((a, b)), numpy.hstack((b, a))))
    rhs = numpy.array([numpy.real(transmittance), numpy.imag(transmittance), 0, 0])
    res = numpy.linalg.solve(lhs, rhs)
    matrix = numpy.array([[res[0] + 1j * res[1], res[2] - 1j * res[3]],
                          [res[2] + 1j * res[3], res[0] - 1j * res[1]]])
    matrix = numpy.dot(numpy.linalg.inv(bottomMat.matrix), matrix)
    matrix = numpy.dot(matrix, numpy.linalg.inv(topMat.matrix))
    return TransferMatrix(matrix)


def findGeneralizedTransferMatrix(transmitance1, reflectance1, transmitance2, reflectance2,
                                  bottomMat1=TransferMatrix(numpy.identity(2)),
                                  topMat1=TransferMatrix(numpy.identity(2)),
                                  bottomMat2=TransferMatrix(numpy.identity(2)),
                                  topMat2=TransferMatrix(numpy.identity(2))):
    """
    :param transmitance1:
    :param reflectance1:
    :param transmitance2:
    :param reflectance2:
    :param bottomMat1:
    :param topMat1:
    :param bottomMat2:
    :param topMat2:
    :return:
    """
    a12 = numpy.dot(numpy.linalg.inv(bottomMat1.matrix), numpy.array([[transmitance1], [0]]))
    a34 = numpy.dot(numpy.linalg.inv(bottomMat2.matrix), numpy.array([[transmitance2], [0]]))
    b12 = numpy.dot(topMat1.matrix, numpy.array([[1], [reflectance1]]))
    b34 = numpy.dot(topMat2.matrix, numpy.array([[1], [reflectance2]]))

    rhs = numpy.array([a12[0, 0], a34[0, 0], a12[1, 0], a34[1, 0]])
    bmat = numpy.array([[b12[0, 0], b12[1, 0]], [b34[0, 0], b34[1, 0]]])
    lhs = numpy.vstack((numpy.hstack((bmat, numpy.zeros((2, 2)))),
                        numpy.hstack((numpy.zeros((2, 2)), bmat))))
    res = numpy.linalg.solve(lhs, rhs)
    mat = numpy.array([[res[0], res[2]], [res[1], res[3]]])
    return TransferMatrix(mat)
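# A minimal usage sketch for the module above (not part of the original file):
# reflectance and transmittance of a single dielectric slab in air at normal
# incidence. The values n=1.5, d=100 (nm) and wavelength=500 (nm) are
# illustrative assumptions.
slab = TransferMatrix.layer(1.5, 100, 500)
r, t = solvePropagation(slab)
print(abs(r)**2, abs(t)**2)  # power reflectance and transmittance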
src/api_generator.py
stevenlovegrove/sony_camera_api
191
142690
<filename>src/api_generator.py
from __future__ import print_function

from api_list import exist_param, no_param


def gen():
    result = ''
    exist_def = """def %s(self, param=None):
        return self._cmd(method="%s", param=param)"""
    no_def = """def %s(self):
        return self._cmd(method="%s")"""

    for x in exist_param:
        result += exist_def % (x, x) + '\n\n'
    for x in no_param:
        result += no_def % (x, x) + '\n\n'
    return result


print(gen())
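# Illustration (not part of the original file): for a hypothetical method name
# "getEvent" listed in exist_param, gen() emits a stub shaped like the
# following, intended to be pasted into a camera class that defines
# self._cmd():
#
# def getEvent(self, param=None):
#         return self._cmd(method="getEvent", param=param)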
Harvard-CS50x/pset6/sentiments/analyzer.py
Sam-Gao-Xin/Courses-
622
142695
import nltk


class Analyzer():
    """Implements sentiment analysis."""

    def __init__(self, positives, negatives):
        """Initialize Analyzer with paths to the positive and negative word lists."""
        self.negatives = []
        self.positives = []
        # Lines starting with a space or ';' are header/comment lines in the
        # word-list files and are skipped.
        with open(negatives) as negative:
            for line in negative:
                if not line.startswith((" ", ";")):
                    self.negatives.extend(line.split())
        with open(positives) as positive:
            for line in positive:
                if not line.startswith((" ", ";")):
                    self.positives.extend(line.split())

    def analyze(self, text):
        """Analyze text for sentiment, returning its score."""
        tokenizer = nltk.tokenize.TweetTokenizer()
        tokens = tokenizer.tokenize(text)
        score = 0
        for token in tokens:
            if token in self.negatives:
                score -= 1
            elif token in self.positives:
                score += 1
        return score
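# A minimal usage sketch (not part of the original pset file); assumes the
# CS50 word-list files are present in the working directory and nltk is
# installed.
analyzer = Analyzer("positive-words.txt", "negative-words.txt")
print(analyzer.analyze("great day"))  # 1 if "great" is listed as positive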
examples/data_pipeline/dataset.py
parmeet/text
3,172
142711
import torch
from torchtext.datasets import DATASETS


class BatchTextClassificationData(torch.utils.data.IterableDataset):

    def __init__(self, dataset_name, batch_size=16):
        super(BatchTextClassificationData, self).__init__()
        self._iterator = DATASETS[dataset_name](split='train')
        self.batch_size = batch_size

    def __iter__(self):
        _data = []
        for i, item in enumerate(self._iterator):
            _data.append(item)
            if len(_data) >= self.batch_size:
                yield _data
                _data = []
        if len(_data) > 0:
            yield _data
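# A minimal usage sketch (not in the original file). "AG_NEWS" is an assumed
# example key of torchtext.datasets.DATASETS; the dataset is downloaded on
# first use.
batches = BatchTextClassificationData("AG_NEWS", batch_size=16)
for batch in batches:
    print(len(batch))  # up to 16 (label, text) pairs per batch
    break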
tests/issues/test_issue.py
mubashshirjamal/code
1,582
142768
<reponame>mubashshirjamal/code
# encoding: UTF-8
from tests.base import TestCase

from vilya.models.issue import Issue
from vilya.models.issue_comment import IssueComment


class TestIssue(TestCase):

    def test_add_issue(self):
        i = Issue.add('test', 'test description', 'test', 'assignee')
        assert isinstance(i, Issue)
        assert i.title == 'test'
        assert i.description == 'test description'
        assert i.creator_id == 'test'
        assert i.assignee_id == 'assignee'

    def test_update_issue(self):
        i = Issue.add('test', 'test description', 'test', 'assignee')
        i.update("test1", "test1 description")
        i = Issue.get(i.id)
        assert i.title == 'test1'
        assert i.description == 'test1 description'
        assert i.creator_id == 'test'
        assert i.assignee_id == 'assignee'
        assert i.closer_id is None

    def test_close_issue(self):
        i = Issue.add('test', 'test description', 'test', 'assignee')
        i.close("test")
        i = Issue.get(i.id)
        assert i.title == 'test'
        assert i.description == 'test description'
        assert i.creator_id == 'test'
        assert i.closer_id == "test"
        assert i.assignee_id == 'assignee'

    def test_get_issue(self):
        issue1 = Issue.add('test1', 'test1 description', 'test', 'assignee')
        issue2 = Issue.add('test2', 'test2 description', 'test', 'assignee')
        issue2.close("test")

        i1 = Issue.get(issue1.id)
        assert isinstance(i1, Issue)
        assert i1.title == 'test1'
        assert i1.description == 'test1 description'
        assert i1.creator_id == 'test'
        assert i1.assignee_id == 'assignee'
        assert i1.closer_id is None

        i2 = Issue.get(issue2.id)
        assert isinstance(i2, Issue)
        assert i2.title == 'test2'
        assert i2.description == 'test2 description'
        assert i2.creator_id == 'test'
        assert i2.assignee_id == 'assignee'
        assert i2.closer_id == 'test'

        i1 = Issue.get(issue1.id)
        assert isinstance(i1, Issue)
        assert i1.title == 'test1'
        assert i1.description == 'test1 description'
        assert i1.creator_id == 'test'
        assert i1.assignee_id == 'assignee'
        assert i1.closer_id is None

        iss = Issue.gets_by_creator_id("test")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 4

        iss = Issue.gets_by_creator_id("test", "open")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 4

        iss = Issue.gets_by_creator_id("test", "closed")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 2

        iss = Issue.gets_by_assignee_id("assignee")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 6

        iss = Issue.gets_by_assignee_id("assignee", "open")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 4

        iss = Issue.gets_by_assignee_id("assignee", "closed")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 2

        iss = Issue.gets_by_closer_id("test")
        assert all([isinstance(i, Issue) for i in iss])
        assert len(iss) == 2

    def test_add_comment(self):
        i = Issue.add('test', 'test description', 'test', 'assignee')
        c = IssueComment.add(i.id, 'content', 'test')
        assert isinstance(c, IssueComment)
        assert c.issue_id == i.id
        assert c.content == 'content'
        assert c.author_id == 'test'

    def test_get_comment(self):
        i = Issue.add('test', 'test description', 'test', 'assignee')
        c = IssueComment.add(i.id, 'content', 'test')
        c = IssueComment.get(c.id)
        assert isinstance(c, IssueComment)
        assert c.issue_id == i.id
        assert c.content == 'content'
        assert c.author_id == 'test'

        c = IssueComment.add(i.id, 'content', 'test')
        cs = IssueComment.gets_by_issue_id(i.id)
        assert all([isinstance(t, IssueComment) for t in cs])
        assert len(cs) == 2

    def test_update_comment(self):
        i = Issue.add('test', 'test description', 'test', 'assignee')
        c = IssueComment.add(i.id, 'content', 'test')
        c.update('content1')
        c = IssueComment.get(c.id)
        assert c.issue_id == i.id
        assert c.content == 'content1'
        assert c.author_id == 'test'
ps4000aExamples/ps4824BlockExample.py
shikajiro/picosdk-python-wrappers
114
142787
<reponame>shikajiro/picosdk-python-wrappers
#
# Copyright (C) 2018 Pico Technology Ltd. See LICENSE file for terms.
#
# PS4824 BLOCK MODE EXAMPLE
# This example opens a 4000a driver device, sets up two channels and a trigger then collects a block of data.
# This data is then plotted as mV against time in ns.

import ctypes
import numpy as np
from picosdk.ps4000a import ps4000a as ps
import matplotlib.pyplot as plt
from picosdk.functions import adc2mV, assert_pico_ok

# Create chandle and status ready for use
chandle = ctypes.c_int16()
status = {}

# Open 4000 series PicoScope
# Returns handle to chandle for use in future API functions
status["openunit"] = ps.ps4000aOpenUnit(ctypes.byref(chandle), None)

try:
    assert_pico_ok(status["openunit"])
except:
    powerStatus = status["openunit"]

    if powerStatus == 286:
        status["changePowerSource"] = ps.ps4000aChangePowerSource(chandle, powerStatus)
    else:
        raise

    assert_pico_ok(status["changePowerSource"])

# Set up channel A
# handle = chandle
# channel = PS4000a_CHANNEL_A = 0
# enabled = 1
# coupling type = PS4000a_DC = 1
# range = PS4000a_2V = 7
# analogOffset = 0 V
chARange = 7
status["setChA"] = ps.ps4000aSetChannel(chandle, 0, 1, 1, chARange, 0)
assert_pico_ok(status["setChA"])

# Set up channel B
# handle = chandle
# channel = PS4000a_CHANNEL_B = 1
# enabled = 1
# coupling type = PS4000a_DC = 1
# range = PS4000a_2V = 7
# analogOffset = 0 V
chBRange = 7
status["setChB"] = ps.ps4000aSetChannel(chandle, 1, 1, 1, chBRange, 0)
assert_pico_ok(status["setChB"])

# Set up channel C (disabled)
# handle = chandle
# channel = PS4000a_CHANNEL_C = 2
# enabled = 0
# coupling type = PS4000a_DC = 1
# range = PS4000a_2V = 7
# analogOffset = 0 V
chCRange = 7
status["setChC"] = ps.ps4000aSetChannel(chandle, 2, 0, 1, chCRange, 0)
assert_pico_ok(status["setChC"])

# Set up channel D (disabled)
# handle = chandle
# channel = PS4000a_CHANNEL_D = 3
# enabled = 0
# coupling type = PS4000a_DC = 1
# range = PS4000a_2V = 7
# analogOffset = 0 V
chDRange = 7
status["setChD"] = ps.ps4000aSetChannel(chandle, 3, 0, 1, chDRange, 0)
assert_pico_ok(status["setChD"])

# Set up channel E (disabled)
# handle = chandle
# channel = PS4000a_CHANNEL_E = 4
# enabled = 0
# coupling type = PS4000a_DC = 1
# range = PS4000a_2V = 7
# analogOffset = 0 V
chERange = 7
status["setChE"] = ps.ps4000aSetChannel(chandle, 4, 0, 1, chERange, 0)
assert_pico_ok(status["setChE"])

# Set up channel F (disabled)
# handle = chandle
# channel = PS4000a_CHANNEL_F = 5
# enabled = 0
# coupling type = PS4000a_DC = 1
# range = PS4000a_2V = 7
# analogOffset = 0 V
chFRange = 7
status["setChF"] = ps.ps4000aSetChannel(chandle, 5, 0, 1, chFRange, 0)
assert_pico_ok(status["setChF"])

# Set up channel G (disabled)
# handle = chandle
# channel = PS4000a_CHANNEL_G = 6
# enabled = 0
# coupling type = PS4000a_DC = 1
# range = PS4000a_2V = 7
# analogOffset = 0 V
chGRange = 7
status["setChG"] = ps.ps4000aSetChannel(chandle, 6, 0, 1, chGRange, 0)
assert_pico_ok(status["setChG"])

# Set up channel H (disabled)
# handle = chandle
# channel = PS4000a_CHANNEL_H = 7
# enabled = 0
# coupling type = PS4000a_DC = 1
# range = PS4000a_2V = 7
# analogOffset = 0 V
chHRange = 7
status["setChH"] = ps.ps4000aSetChannel(chandle, 7, 0, 1, chHRange, 0)
assert_pico_ok(status["setChH"])

# Set up single trigger
# handle = chandle
# enabled = 1
# source = PS4000a_CHANNEL_A = 0
# threshold = 1024 ADC counts
# direction = PS4000a_RISING = 2
# delay = 0 s
# auto Trigger = 100 ms
status["trigger"] = ps.ps4000aSetSimpleTrigger(chandle, 1, 0, 1024, 2, 0, 100)
assert_pico_ok(status["trigger"])

# Set number of pre and post trigger samples to be collected
preTriggerSamples = 2500
postTriggerSamples = 2500
maxSamples = preTriggerSamples + postTriggerSamples

# Get timebase information
# handle = chandle
# timebase = 8
# noSamples = maxSamples
# pointer to timeIntervalNanoseconds = ctypes.byref(timeIntervalns)
# pointer to maxSamples = ctypes.byref(returnedMaxSamples)
# segment index = 0
timebase = 8
timeIntervalns = ctypes.c_float()
returnedMaxSamples = ctypes.c_int32()
oversample = ctypes.c_int16(1)
status["getTimebase2"] = ps.ps4000aGetTimebase2(chandle, timebase, maxSamples,
                                                ctypes.byref(timeIntervalns),
                                                ctypes.byref(returnedMaxSamples), 0)
assert_pico_ok(status["getTimebase2"])

# Run block capture
# handle = chandle
# number of pre-trigger samples = preTriggerSamples
# number of post-trigger samples = postTriggerSamples
# timebase = 8 (see Programmer's guide for more information on timebases)
# time indisposed ms = None (not needed in the example)
# segment index = 0
# lpReady = None (using ps4000aIsReady rather than ps4000aBlockReady)
# pParameter = None
status["runBlock"] = ps.ps4000aRunBlock(chandle, preTriggerSamples, postTriggerSamples,
                                        timebase, None, 0, None, None)
assert_pico_ok(status["runBlock"])

# Check for data collection to finish using ps4000aIsReady
ready = ctypes.c_int16(0)
check = ctypes.c_int16(0)
while ready.value == check.value:
    status["isReady"] = ps.ps4000aIsReady(chandle, ctypes.byref(ready))

# Create buffers ready for assigning pointers for data collection
bufferAMax = (ctypes.c_int16 * maxSamples)()
bufferAMin = (ctypes.c_int16 * maxSamples)()  # used for downsampling, which isn't in the scope of this example
bufferBMax = (ctypes.c_int16 * maxSamples)()
bufferBMin = (ctypes.c_int16 * maxSamples)()  # used for downsampling, which isn't in the scope of this example

# Set data buffer location for data collection from channel A
# handle = chandle
# source = PS4000a_CHANNEL_A = 0
# pointer to buffer max = ctypes.byref(bufferAMax)
# pointer to buffer min = ctypes.byref(bufferAMin)
# buffer length = maxSamples
# segmentIndex = 0
# mode = PS4000A_RATIO_MODE_NONE = 0
status["setDataBuffersA"] = ps.ps4000aSetDataBuffers(chandle, 0,
                                                     ctypes.byref(bufferAMax),
                                                     ctypes.byref(bufferAMin),
                                                     maxSamples, 0, 0)
assert_pico_ok(status["setDataBuffersA"])

# Set data buffer location for data collection from channel B
# handle = chandle
# source = PS4000a_CHANNEL_B = 1
# pointer to buffer max = ctypes.byref(bufferBMax)
# pointer to buffer min = ctypes.byref(bufferBMin)
# buffer length = maxSamples
# segmentIndex = 0
# mode = PS4000A_RATIO_MODE_NONE = 0
status["setDataBuffersB"] = ps.ps4000aSetDataBuffers(chandle, 1,
                                                     ctypes.byref(bufferBMax),
                                                     ctypes.byref(bufferBMin),
                                                     maxSamples, 0, 0)
assert_pico_ok(status["setDataBuffersB"])

# Create overflow location
overflow = ctypes.c_int16()
# Create converted type maxSamples
cmaxSamples = ctypes.c_int32(maxSamples)

# Retrieve data from scope to buffers assigned above
# handle = chandle
# start index = 0
# pointer to number of samples = ctypes.byref(cmaxSamples)
# downsample ratio = 0
# downsample ratio mode = PS4000a_RATIO_MODE_NONE
# pointer to overflow = ctypes.byref(overflow)
status["getValues"] = ps.ps4000aGetValues(chandle, 0, ctypes.byref(cmaxSamples),
                                          0, 0, 0, ctypes.byref(overflow))
assert_pico_ok(status["getValues"])

# Find maximum ADC count value
# handle = chandle
# pointer to value = ctypes.byref(maxADC)
maxADC = ctypes.c_int16(32767)

# Convert ADC counts data to mV
adc2mVChAMax = adc2mV(bufferAMax, chARange, maxADC)
adc2mVChBMax = adc2mV(bufferBMax, chBRange, maxADC)

# Create time data
time = np.linspace(0, (cmaxSamples.value) * timeIntervalns.value, cmaxSamples.value)

# Plot data from channel A and B
plt.plot(time, adc2mVChAMax[:])
plt.plot(time, adc2mVChBMax[:])
plt.xlabel('Time (ns)')
plt.ylabel('Voltage (mV)')
plt.show()

# Stop the scope
# handle = chandle
status["stop"] = ps.ps4000aStop(chandle)
assert_pico_ok(status["stop"])

# Close unit / disconnect the scope
# handle = chandle
status["close"] = ps.ps4000aCloseUnit(chandle)
assert_pico_ok(status["close"])

# Display status returns
print(status)
src/intensio_obfuscator/core/obfuscation/intensio_replace.py
bbhunter/Intensio-Obfuscator
553
142827
<reponame>bbhunter/Intensio-Obfuscator # -*- coding: utf-8 -*- # https://github.com/Hnfull/Intensio-Obfuscator #---------------------------------------------------------- [Lib] -----------------------------------------------------------# import re import fileinput import os import sys from progress.bar import Bar try: from intensio_obfuscator.core.obfuscation.intensio_mixer import Mixer from intensio_obfuscator.core.utils.intensio_exclude import EXCLUDE_WORDS_BY_DEFAULT from intensio_obfuscator.core.utils.intensio_utils import Utils, Colors, BreakLoop, Reg except ModuleNotFoundError: from core.obfuscation.intensio_mixer import Mixer from core.utils.intensio_exclude import EXCLUDE_WORDS_BY_DEFAULT from core.utils.intensio_utils import Utils, Colors, BreakLoop, Reg #------------------------------------------------- [Function(s)/Class(es)] --------------------------------------------------# class Replace: def __init__(self): self.mixer = Mixer() self.utils = Utils() def EachLine(self, line, dictionary, fileNameImport, listModuleImport): getIndexLineList = [] returnLine = [] charValue = [] checkCharAfterWord = 1 wordSetter = 0 checkGetKey = "" checkGetWord = "" getLine = "" breakLine = "" if listModuleImport == True: detectSpecialChars = Reg.detectSpecialChars else: detectSpecialChars = Reg.detectSpecialCharsWihtoutQuotes # -- Get list of all letters in line -- # for indexLine, letterLine in enumerate(line): getIndexLineList.append(letterLine) # -- Loop in each letter of line -- # for indexLine, letterLine in enumerate(line): # -- Add in final line list all chars mixed -- # if charValue != []: for obfIndex, obfValue in enumerate(charValue): if obfIndex == 0: # First letter in string mixed are already added in the final line continue returnLine.append(obfValue) charValue = [] # -- If the variable is only a letter, check if the next character is specific so as not to replace it -- # if re.match(detectSpecialChars, letterLine): returnLine.append(letterLine) # -- Count indexes of word to move after it --# countDeleteIndex = 0 for i in getWord: countDeleteIndex += 1 wordSetter = countDeleteIndex - 2 # -2 Is to letter already append and the letter in progress else: # -- The index numbers of variable is decremented to add the mixed letters that be replaced -- # if wordSetter > 0: wordSetter -= 1 continue else: try: # -- Loop in the dictionary with already mixed values-- # for key, value in dictionary: for indexKey, letterKey in enumerate(key): for letterValue in value: # -- Check if letter of word is equal to letter of key -- # if letterKey == letterLine: # -- Begin process to check -- # if indexKey == 0: # if equal to the first letter of word on <key> variable indexExplore = indexLine + len(key) # Place index position after the word # -- If indexError return to next loop -- # try: getIndexLineList[indexExplore] except IndexError: continue # -- Check the char after and before the word -- # if re.match(detectSpecialChars, getIndexLineList[indexExplore]): # Index check if word found is not into the other word indexExploreBefore = indexLine - 1 # Index check if char after the end of string is found with 'detectSpecialChars' regex indexExploreAfter = indexExplore try: if not re.match(r"\w|\\|\%", getIndexLineList[indexExploreBefore]): # -- Check if it's 'from' and 'import' file in line to avoid replace \ # name of file if variable is identical name to file -- # getLine = "".join(getIndexLineList) if fileNameImport == False: if "import" in getLine: if "from" in getLine: # -- Cut the line from the current 
index and check if it is \ # not there is the keyword "import" in the line -- # breakLine = getIndexLineList[:indexLine] breakLine = "".join(breakLine) if not "import" in breakLine: # -- It's a file because only 'from'key word -- # checkCharAfterWord = 1 else: checkCharAfterWord = 0 else: checkCharAfterWord = 1 # -- Check if after char of the word found by 'detectSpecialChars' is \ # not ' or " -- # elif re.match(r"\"|\'", getIndexLineList[indexExploreAfter]): if re.match(r"\[|\(|\{", getIndexLineList[indexExploreAfter - 1]): checkCharAfterWord = 0 else: checkCharAfterWord = 1 else: checkCharAfterWord = 0 # -- Only for [-rfn, --replacefilsname] feature -- # else: # -- check if file name is imported - # breakLine = getIndexLineList[:indexLine] breakLine = "".join(breakLine) # -- If file name is imported after 'import', the file name is not \ # replaced -- # if "import" in breakLine: checkCharAfterWord = 1 else: checkCharAfterWord = 0 else: checkCharAfterWord = 1 except IndexError: checkCharAfterWord = 0 pass else: checkCharAfterWord = 1 if checkCharAfterWord == 0: # -- Initialize vars -- # getCharAllInKey = [] getWord = [] indexExploreStart = indexLine # -- Delete -1, first letter is already increment -- # indexExploreEnd = indexLine + len(key) - 1 # -- List contain all letters of key -- # for getLetterKey in key: getCharAllInKey.append(getLetterKey) # -- Check if all letters of key is equal to all letters of word -- # for indexCheckLetter, checkLetter in enumerate(getIndexLineList): if indexCheckLetter >= indexExploreStart and \ indexCheckLetter <= indexExploreEnd: getWord.append(checkLetter) # -- Check if number of chars in key equal number of chars in word -- # if list(set(getCharAllInKey) - set(getWord)) == []: checkGetWord = "".join(getWord) checkGetKey = "".join(getCharAllInKey) # -- Check if key == word -- # if checkGetWord == checkGetKey: # -- Check if word is not in strings quotes and if a variable # is in format() in end of multiple line -- # if self.utils.DetectIntoSimpleQuotes(getLine, indexLine) == False \ or self.utils.DetectMultipleLinesQuotes(getLine) == True: for obfChar in value: charValue.append(obfChar) letterLine = letterValue raise BreakLoop else: break else: break else: break else: break else: break else: break raise BreakLoop except BreakLoop: returnLine.append(letterLine) # -- Rewrite the line -- # returnLine = "".join(returnLine) return returnLine[:] def StringToString(self, outputArg, mixerLengthArg, excludeWordsByUser, verboseArg): variablesDict = {} classesDict = {} functionsDict = {} allDict = {} classFuncDict = {} checkWordsMixed = [] wordsExcludedUser = [] wordsExcludedUserFound = [] wordsExcludedDefault = [] wordsExcludedDefaultFound = [] checkAllWords = [] checkWordsError = [] checkKeyWordsMixed = [] checkCountWordsMixed = 0 checkCountWordsValue = 0 countRecursFiles = 0 multipleLinesQuotes = 0 recursFiles = self.utils.CheckFileDir( output=outputArg, detectFiles="py", blockDir="__pycache__", blockFile=False, dirOnly=False ) for file in recursFiles: countRecursFiles += 1 print("\n[+] Running replacement of variables/classes/functions in " + str(countRecursFiles) + \ " file(s), he can be long... 
you have time to make a coffee :)\n") # -- Replace variables/classes/functions to random strings with length defined -- # with Bar("Setting up ", fill="=", max=100, suffix="%(percent)d%%") as bar: for file in recursFiles: with open(file, "r") as readFile: readF = readFile.readlines() for eachLine in readF: # -- Variables -- # search = re.search(Reg.detectSimpleVars, eachLine) if search: if "," in search.group(1): modifySearch = search.group(1).replace(",", " ") modifySearch = modifySearch.split() for i in modifySearch: if i not in variablesDict: mixer = self.mixer.GetStringMixer(mixerLengthArgDefined=mixerLengthArg) i = i.strip() variablesDict[i] = mixer else: if search.group(1) not in variablesDict: mixer = self.mixer.GetStringMixer(mixerLengthArgDefined=mixerLengthArg) modifySearch = search.group(1).strip() variablesDict[modifySearch] = mixer # -- Error variables -- # search = re.search(Reg.detectErrorVars, eachLine) if search: mixer = self.mixer.GetStringMixer(mixerLengthArgDefined=mixerLengthArg) if search.group(2) not in variablesDict: variablesDict[search.group(2)] = mixer # -- Loop variables -- # search = re.search(Reg.detectLoopVars, eachLine) if search: if "," in search.group(1): modifySearch = search.group(1).replace(",", " ") modifySearch = modifySearch.split() for i in modifySearch: if i not in variablesDict: mixer = self.mixer.GetStringMixer(mixerLengthArgDefined=mixerLengthArg) variablesDict[i] = mixer else: if search.group(1) not in variablesDict: mixer = self.mixer.GetStringMixer(mixerLengthArgDefined=mixerLengthArg) variablesDict[search.group(1)] = mixer # -- Function(s) -- # search = re.search(Reg.detectFunctions, eachLine) if search: mixer = self.mixer.GetStringMixer(mixerLengthArgDefined=mixerLengthArg) if search.group(1) not in functionsDict: if not "__init__" in search.group(1): functionsDict[search.group(1)] = mixer # -- Class(es) -- # search = re.search(Reg.detectClasses, eachLine) if search: mixer = self.mixer.GetStringMixer(mixerLengthArgDefined=mixerLengthArg) if search.group(1) not in classesDict: classesDict[search.group(1)] = mixer bar.next(40) if EXCLUDE_WORDS_BY_DEFAULT != []: for word in EXCLUDE_WORDS_BY_DEFAULT: if "#" in word or word == "\n": continue else: word = word.rstrip() wordsExcludedDefault.append(word) else: print(Colors.ERROR + "[-] No worde found in exclude words list" + Colors.DISABLE + "\n") bar.next(10) if excludeWordsByUser != False: with open(excludeWordsByUser, "r") as readFile: for word in readFile: if "#" in word or word == "\n": continue else: word = word.rstrip() wordsExcludedUser.append(word) bar.next(10) for word in wordsExcludedUser: if word in variablesDict.keys(): wordsExcludedUserFound.append(word) if word in classesDict.keys(): wordsExcludedUserFound.append(word) if word in functionsDict.keys(): wordsExcludedUserFound.append(word) bar.next(20) for word in wordsExcludedDefault: if word in variablesDict.keys(): wordsExcludedDefaultFound.append(word) if word in classesDict.keys(): wordsExcludedDefaultFound.append(word) if word in functionsDict.keys(): wordsExcludedDefaultFound.append(word) for word in wordsExcludedUserFound: if word in variablesDict.keys(): del variablesDict[word] if word in classesDict.keys(): del classesDict[word] if word in functionsDict.keys(): del functionsDict[word] for word in wordsExcludedDefaultFound: if word in variablesDict.keys(): del variablesDict[word] if word in classesDict.keys(): del classesDict[word] if word in functionsDict.keys(): del functionsDict[word] bar.next(20) bar.finish() # -- Display 
variables/classes/functions found -- # if verboseArg: print("\n[+] Variable(s) found :\n") if variablesDict == {}: print("-> No result") else: for key, value in variablesDict.items(): print("-> {} : {}".format(key, value)) print("\n[+] Class(es) found :\n") if classesDict == {}: print("-> No result") else: for key, value in classesDict.items(): print("-> {} : {}".format(key, value)) print("\n[+] Function(s) found :\n") if functionsDict == {}: print("-> No result") else: for key, value in functionsDict.items(): print("-> {} : {}".format(key, value)) print("\n[+] String excluded found in '{}' that have been matched from '{}' :\n".format( self.pythonExcludeUserString, outputArg ) ) if wordsExcludedUserFound == []: print("-> No result") else: for word in wordsExcludedUserFound: print("-> {} : excluded by user".format(word)) print("\n[+] String excluded found in '{}' that have been matched from '{}' :\n".format( self.pythonExcludeDefaultString, outputArg ) ) if wordsExcludedDefaultFound == []: print("-> No result") else: for word in wordsExcludedDefaultFound: print("-> {} : excluded by default".format(word)) print("") # -- Merge all dicts -- # allDict = self.utils.DictMerge(dict1=allDict, dict2=variablesDict) allDict = self.utils.DictMerge(dict1=allDict, dict2=functionsDict) allDict = self.utils.DictMerge(dict1=allDict, dict2=classesDict) classFuncDict = self.utils.DictMerge(dict1=classFuncDict, dict2=classesDict) classFuncDict = self.utils.DictMerge(dict1=classFuncDict, dict2=functionsDict) # -- Change variables/classes/functions to mixed values -- # with Bar("Obfuscation ", fill="=", max=countRecursFiles, suffix="%(percent)d%%") as bar: for file in recursFiles: # -- Replace variable(s) only -- # with fileinput.input(file, inplace=True) as inputFile: for eachLine in inputFile: if not eachLine: continue else: if multipleLinesQuotes == 1: if re.match(Reg.checkIfEndVarStdoutMultipleQuotes, eachLine): if self.utils.DetectMultipleLinesQuotes(eachLine) == True: eachLine = Replace.EachLine( self, line=eachLine, dictionary=allDict.items(), fileNameImport=False, listModuleImport=False ) sys.stdout.write(eachLine) multipleLinesQuotes = 0 else: sys.stdout.write(eachLine) else: sys.stdout.write(eachLine) elif re.match(Reg.checkIfVarMultipleQuotes, eachLine) \ or re.match(Reg.checkIfStdoutMultipleQuotes, eachLine): if self.utils.DetectMultipleLinesQuotes(eachLine) == False: pass else: multipleLinesQuotes = 1 eachLine = Replace.EachLine( self, line=eachLine, dictionary=allDict.items(), fileNameImport=False, listModuleImport=False ) sys.stdout.write(eachLine) continue else: eachLine = Replace.EachLine( self, line=eachLine, dictionary=allDict.items(), fileNameImport=False, listModuleImport=False ) sys.stdout.write(eachLine) bar.next(1) bar.finish() with Bar("Check ", fill="=", max=100, suffix="%(percent)d%%") as bar: for file in recursFiles: # -- Check if variables/classes/functions have been mixed -- # with open(file, "r") as readFile: readF = readFile.readlines() for eachLine in readF: for key, value in allDict.items(): if value in eachLine: if re.match(r"\w+" + re.escape(value) + r"\w+", eachLine): pass else: checkWordsMixed.append(value) checkKeyWordsMixed.append(key) bar.next(70) # -- Delete duplicated words -- # checkListWordsMixed = list(dict.fromkeys(checkWordsMixed)) checkKeyWordsMixed = list(dict.fromkeys(checkKeyWordsMixed)) bar.next(15) for i in checkListWordsMixed: checkCountWordsMixed += 1 for i in allDict.values(): checkCountWordsValue += 1 bar.next(15) bar.finish() if checkCountWordsMixed == 
checkCountWordsValue: print("\n-> {} variable(s)/class(es)/function(s) replaced in {} file(s)\n".format( checkCountWordsValue, countRecursFiles ) ) return 1 else: if verboseArg: for key in allDict.keys(): checkAllWords.append(key) checkWordsError = list(set(checkAllWords) - set(checkKeyWordsMixed)) print("\n[!] Word(s) that not been replaced, check if an error will appear when will launch your " + \ "obfuscated code... :\n") if checkWordsError != []: for wordNoReplaced in checkWordsError: print("-> Word : {}".format(wordNoReplaced)) return 0 def StringsToHex(self, outputArg, mixerLengthArg, verboseArg): checkHexError = {} splitLetter = [] getLetterLineList = [] countRecursFiles = 0 checkPrint = 0 numberLine = 0 checkError = False hexLine = "" recursFiles = self.utils.CheckFileDir( output=outputArg, detectFiles="py", blockDir="__pycache__", blockFile=False, dirOnly=False ) for number in recursFiles: countRecursFiles += 1 print("\n[+] Running replace all strings to their hexadecimal value in {} file(s)...\n".format(countRecursFiles)) # -- Replace all strings to their hexadecimal value -- # with Bar("Obfuscation ", fill="=", max=countRecursFiles, suffix="%(percent)d%%") as bar: for file in recursFiles: # -- Add a new first random line and move the old first line to the second line to avoid replacing it -- # checkPrint = 0 # initialize check print() func at the begining of each file with open(file, "r") as inputFile: stringRandomMixer = self.mixer.GetStringMixer(mixerLengthArgDefined=mixerLengthArg) firstLine = "{}\n".format(stringRandomMixer) line = inputFile.readlines() line.insert(0, firstLine) with open(file, "w") as inputFile: inputFile.writelines(line) # -- Replace all lines-- # with fileinput.input(file, inplace=True) as inputFile: for eachLine in inputFile: if checkPrint == 0: varMixer = self.mixer.GetStringMixer(mixerLengthArgDefined=mixerLengthArg) sys.stdout.write(varMixer + "=\"\"\"") checkPrint = 1 else: getLetterLineList = [] for letterLine in eachLine: if len(letterLine.encode("utf-8").hex()) > 2: splitLetter = [] for i in letterLine.encode("utf-8").hex(): splitLetter.append(i) if len(splitLetter) == 2: letterLine = "".join(splitLetter) letterToHex = "\\x" + str(letterLine) getLetterLineList.append(letterToHex) splitLetter = [] else: continue else: letterToHex = "\\x" + str(letterLine.encode("utf-8").hex()) getLetterLineList.append(letterToHex) # Get list of all letters in line hexLine = "".join(getLetterLineList) sys.stdout.write(hexLine) # -- Add exec funtions to interpret hex code in strings -- # with open(file, "a") as inputFile: inputFile.write("\"\"\"") inputFile.write("\nexec({})".format(varMixer)) bar.next(1) bar.finish() with Bar("Check ", fill="=", max=countRecursFiles, suffix="%(percent)d%%") as bar: for file in recursFiles: numberLine = 0 # -- Check if all lines are replaced of hexadecimal value -- # with open(file, "r") as inputFile: for eachLine in inputFile: numberLine += 1 if not eachLine: continue else: if not "\\x" in eachLine: if re.match(Reg.detectExecFunction, eachLine): continue else: checkHexError[numberLine] = file checkError = True else: continue bar.next(1) bar.finish() if checkError == False: return 1 else: if verboseArg: print("\n[!] Line(s) that have not been replaced by their hexadecimal values... :\n") for key, value in checkHexError.items(): print("\n-> File : {}".format(value)) print("-> Line : {}".format(key)) else: print("\n" + Colors.ERROR + "[!] 
Launch Intensio-Obfuscator with verbose mode because line(s) have not been " + \ "replaced by their hexadecimal values" + Colors.DISABLE + "\n") return 0 def FilesName(self, outputArg, mixerLengthArg, excludeFilesByUser, verboseArg): checkFilesFoundCompare = {} filesNameDict = {} filesNameDictNoExt = {} filesNameFound = [] filesNameFoundNoExt = [] filesNameMixed = [] fileNameExcluded = [] fileNameExcludedFound = [] fileNameExcludedByUser = [] importNoCompliantFound = [] badNameDir = [] numberLine = 0 countRecursFiles = 0 checkCountRecursFiles = 0 currentPosition = os.getcwd() recursFiles = self.utils.CheckFileDir( output=outputArg, detectFiles="py", blockDir="__pycache__", blockFile="__init__", dirOnly=False ) recursFilesWithInit = self.utils.CheckFileDir( output=outputArg, detectFiles="py", blockDir="__pycache__", blockFile=False, dirOnly=False ) recursDirs = self.utils.CheckFileDir( output=outputArg, detectFiles="", blockDir="__pycache__", blockFile=False, dirOnly=True ) for file in recursFiles: countRecursFiles += 1 print("\n[+] Running replace files name in {} file(s)...\n".format(countRecursFiles)) with Bar("Setting up ", fill="=", max=100, suffix="%(percent)d%%") as bar: for file in recursFiles: parseFilePath = file.split(self.utils.Platform(getOS=False, getPathType=True)) mixer = self.mixer.GetStringMixer(mixerLengthArgDefined=mixerLengthArg) filesNameDict[parseFilePath[-1]] = mixer + ".py" filesNameDictNoExt[parseFilePath[-1].replace(".py", "")] = mixer deleteExt = parseFilePath[-1].replace(".py","") filesNameFound.append(parseFilePath[-1]) filesNameFoundNoExt.append(deleteExt) filesNameMixed.append(mixer + ".py") bar.next(30) # -- Check if directory have the same name for directory in recursDirs: for fileName in filesNameFoundNoExt: if re.match(r".*" + re.escape(self.utils.Platform(getOS=False, getPathType=True)) + r"{1}" + \ re.escape(fileName) + re.escape(self.utils.Platform(getOS=False, getPathType=True)) + \ r"{1}.*", directory): fileNameExcluded.append(fileName) badNameDir.append(fileName) bar.next(20) if excludeFilesByUser != False: with open(excludeFilesByUser) as readFile: for fileName in readFile: if "#" in fileName or fileName == "\n" or re.match(r"yourFileName[0-9]{1}", fileName): continue else: fileName = fileName.rstrip() fileNameExcluded.append(fileName) fileNameExcludedByUser.append(fileName) bar.next(10) # -- Delete file name excluded in dictionnary -- # for word in fileNameExcluded: for fileNameNoExt in filesNameFoundNoExt: if fileNameNoExt == word: fileNameExcludedFound.append(word) filesNameFoundNoExt.remove(word) for fileName in filesNameFound: if fileName == word: filesNameFound.remove(word) if word in filesNameDictNoExt.keys(): del filesNameDictNoExt[word] word = word + ".py" if word in filesNameDict.keys(): del filesNameDict[word] bar.next(10) # -- Check if file name in code is after 'import' native python function --# for file in recursFiles: with open(file, "r") as readFile: readF = readFile.readlines() for eachLine in readF: if re.match(Reg.detectPythonImport, eachLine): searchFileName = re.search(r"(import\s+)(.*)", eachLine) if searchFileName.group(2): searchFileName = searchFileName.group(2).replace(",", "") searchFileName = searchFileName.split() for i in searchFileName: i = i.strip() for fileNameNoExt in filesNameFoundNoExt: if fileNameNoExt == i: importNoCompliantFound.append(i) filesNameFoundNoExt.remove(i) for fileName in filesNameFound: fileName = fileName.replace(".py", "") if fileName == i: i = i + ".py" filesNameFound.remove(i) if i in 
filesNameDictNoExt.keys(): del filesNameDictNoExt[i] if i in filesNameDict.keys(): del filesNameDict[i] bar.next(30) bar.finish() # -- Diplay all file name(s) found with their mixed values if verbose arg is actived -- # if verboseArg: print("\n[+] File name(s) found with their mixed values :\n") if filesNameDict == {}: print("-> No result") else: for key, value in filesNameDict.items(): print("-> {} : {}".format(key, value)) print("\n[+] File name(s) excluded found in '{}' that have been matched from '{}' :\n".format( self.pythonExcludeUserFileName, outputArg ) ) if fileNameExcludedByUser == []: print("-> No result") else: for i in fileNameExcludedByUser: print("-> {} : excluded by user".format(i)) print("\n[+] File name(s) no compliant for 'replace file name' feature :\n") if importNoCompliantFound == []: print("-> No result") else: for i in importNoCompliantFound: print("-> {} : no compliant ( file name excluded automatically )".format(i)) print("\n[+] Directory that have same name of python file(s) :\n") if badNameDir == []: print("-> No result") else: for i in badNameDir: print("-> {} : no compliant ( file name excluded automatically )".format(i)) i = i.rstrip() print("") # -- Replace all file names to random strings with length and obfuscation level defined -- # with Bar("Obfuscation ", fill="=", max=100, suffix="%(percent)d%%") as bar: for fileInCode in recursFilesWithInit: # -- Rename all files in python code -- # with fileinput.input(fileInCode, inplace=True) as inputFile: for eachLine in inputFile: if re.match(Reg.detectPythonImport, eachLine): eachLine = Replace.EachLine( self, line=eachLine, dictionary=filesNameDictNoExt.items(), fileNameImport=True, listModuleImport=False ) sys.stdout.write(eachLine) continue else: sys.stdout.write(eachLine) bar.next(50) for file in recursFiles: parseFilePath = file.split(self.utils.Platform(getOS=False, getPathType=True)) # -- Rename all files in their directories -- # for key, value in filesNameDict.items(): if key == parseFilePath[-1]: parseFilePath.remove(parseFilePath[-1]) parseFilePathToMove = self.utils.Platform(getOS=False, getPathType=True).join(parseFilePath) os.chdir(parseFilePathToMove) # Move in directory to rename python file os.rename(key, value) else: continue os.chdir(currentPosition) bar.next(50) bar.finish() checkRecursFiles = self.utils.CheckFileDir( output=outputArg, detectFiles="py", blockDir="__pycache__", blockFile="__init__", dirOnly=False ) for file in checkRecursFiles: checkCountRecursFiles += 1 # -- Check if all files name are been replaced to random strings -- # with Bar("Check ", fill="=", max=checkCountRecursFiles, suffix="%(percent)d%%") as bar: for file in checkRecursFiles: numberLine = 0 # -- Check for file name in directory -- # for key, value in filesNameDict.items(): if key in file: checkFilesFoundCompare[key] = value bar.next(1) bar.finish() if checkFilesFoundCompare != {} : if verboseArg: if checkFilesFoundCompare != {}: print("\n[!] File name that have not been replaced by their random string value... :\n") for key, value in checkFilesFoundCompare.items(): print("\n-> File : {}".format(key)) print("-> Value mixed : {}".format(value)) else: print("\n" + Colors.ERROR + "[-] Launch intensio-obfuscatior with verbose mode [-v, --verbose] because " + \ "file name(s) have not been replaced by their random string value" + Colors.DISABLE) return 0 else: return 1
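# A minimal usage sketch for the Replace class defined above (not part of the
# original file). The argument values are illustrative assumptions: "build/"
# stands for an output directory that already contains the sources to
# obfuscate.
replacer = Replace()
ok = replacer.StringsToHex(outputArg="build/", mixerLengthArg=32, verboseArg=True)
print("hex obfuscation", "succeeded" if ok else "failed")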
paddleseg/models/portraitnet.py
JamesLim-sy/PaddleSeg
4,708
142830
# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle.nn as nn

from paddleseg import utils
from paddleseg.cvlibs import manager


@manager.MODELS.add_component
class PortraitNet(nn.Layer):
    """
    The PortraitNet implementation based on PaddlePaddle.

    The original article refers to
    <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
    "PortraitNet: Real-time Portrait Segmentation Network for Mobile Device"
    (https://www.yongliangyang.net/docs/mobilePotrait_c&g19.pdf).

    Args:
        num_classes (int, optional): The unique number of target classes. Default: 2.
        backbone (Paddle.nn.Layer): Backbone network, currently support MobileNetV2.
        add_edge (bool, optional): Whether output to edge. Default: False
        pretrained (str, optional): The path or url of pretrained model. Default: None
    """

    def __init__(self,
                 num_classes,
                 backbone,
                 min_channel=16,
                 channel_ratio=1.0,
                 add_edge=False,
                 pretrained=None):
        super(PortraitNet, self).__init__()
        self.backbone = backbone
        self.head = PortraitNetHead(num_classes, min_channel, channel_ratio,
                                    add_edge)
        self.pretrained = pretrained
        self.init_weight()

    def forward(self, x):
        img = x[:, :3, :, :]
        img_ori = x[:, 3:, :, :]

        feat_list = self.backbone(img)
        logits_list = self.head(feat_list)

        feat_list = self.backbone(img_ori)
        logits_ori_list = self.head(feat_list)

        return [
            logits_list[0], logits_ori_list[0], logits_list[1],
            logits_ori_list[1]
        ]

    def init_weight(self):
        if self.pretrained is not None:
            utils.load_entire_model(self, self.pretrained)


class PortraitNetHead(nn.Layer):
    def __init__(self,
                 num_classes,
                 min_channel=16,
                 channel_ratio=1.0,
                 add_edge=False):
        super().__init__()
        self.min_channel = min_channel
        self.channel_ratio = channel_ratio
        self.add_edge = add_edge

        self.deconv1 = nn.Conv2DTranspose(
            self.depth(96), self.depth(96), groups=1, kernel_size=4,
            stride=2, padding=1, bias_attr=False)
        self.deconv2 = nn.Conv2DTranspose(
            self.depth(32), self.depth(32), groups=1, kernel_size=4,
            stride=2, padding=1, bias_attr=False)
        self.deconv3 = nn.Conv2DTranspose(
            self.depth(24), self.depth(24), groups=1, kernel_size=4,
            stride=2, padding=1, bias_attr=False)
        self.deconv4 = nn.Conv2DTranspose(
            self.depth(16), self.depth(16), groups=1, kernel_size=4,
            stride=2, padding=1, bias_attr=False)
        self.deconv5 = nn.Conv2DTranspose(
            self.depth(8), self.depth(8), groups=1, kernel_size=4,
            stride=2, padding=1, bias_attr=False)

        self.transit1 = ResidualBlock(self.depth(320), self.depth(96))
        self.transit2 = ResidualBlock(self.depth(96), self.depth(32))
        self.transit3 = ResidualBlock(self.depth(32), self.depth(24))
        self.transit4 = ResidualBlock(self.depth(24), self.depth(16))
        self.transit5 = ResidualBlock(self.depth(16), self.depth(8))

        self.pred = nn.Conv2D(
            self.depth(8), num_classes, 3, 1, 1, bias_attr=False)
        if self.add_edge:
            self.edge = nn.Conv2D(
                self.depth(8), num_classes, 3, 1, 1, bias_attr=False)

    def depth(self, channels):
        min_channel = min(channels, self.min_channel)
        return max(min_channel, int(channels * self.channel_ratio))

    def forward(self, feat_list):
        feature_1_4, feature_1_8, feature_1_16, feature_1_32 = feat_list

        up_1_16 = self.deconv1(self.transit1(feature_1_32))
        up_1_8 = self.deconv2(self.transit2(feature_1_16 + up_1_16))
        up_1_4 = self.deconv3(self.transit3(feature_1_8 + up_1_8))
        up_1_2 = self.deconv4(self.transit4(feature_1_4 + up_1_4))
        up_1_1 = self.deconv5(self.transit5(up_1_2))

        pred = self.pred(up_1_1)
        if self.add_edge:
            edge = self.edge(up_1_1)
            return pred, edge
        else:
            return pred


class ConvDw(nn.Layer):
    def __init__(self, inp, oup, kernel, stride):
        super(ConvDw, self).__init__()
        self.conv = nn.Sequential(
            nn.Conv2D(
                inp, inp, kernel, stride, (kernel - 1) // 2,
                groups=inp, bias_attr=False),
            nn.BatchNorm2D(num_features=inp, epsilon=1e-05, momentum=0.1),
            nn.ReLU(),
            nn.Conv2D(inp, oup, 1, 1, 0, bias_attr=False),
            nn.BatchNorm2D(num_features=oup, epsilon=1e-05, momentum=0.1),
            nn.ReLU(),
        )

    def forward(self, x):
        return self.conv(x)


class ResidualBlock(nn.Layer):
    def __init__(self, inp, oup, stride=1):
        super(ResidualBlock, self).__init__()

        self.block = nn.Sequential(
            ConvDw(inp, oup, 3, stride=stride),
            nn.Conv2D(
                in_channels=oup, out_channels=oup, kernel_size=3,
                stride=1, padding=1, groups=oup, bias_attr=False),
            nn.BatchNorm2D(num_features=oup, epsilon=1e-05, momentum=0.1),
            nn.ReLU(),
            nn.Conv2D(
                in_channels=oup, out_channels=oup, kernel_size=1,
                stride=1, padding=0, bias_attr=False),
            nn.BatchNorm2D(num_features=oup, epsilon=1e-05, momentum=0.1),
        )
        if inp == oup:
            self.residual = None
        else:
            self.residual = nn.Sequential(
                nn.Conv2D(
                    in_channels=inp, out_channels=oup, kernel_size=1,
                    stride=1, padding=0, bias_attr=False),
                nn.BatchNorm2D(num_features=oup, epsilon=1e-05, momentum=0.1),
            )
        self.relu = nn.ReLU()

    def forward(self, x):
        residual = x
        out = self.block(x)
        if self.residual is not None:
            residual = self.residual(x)
        out += residual
        out = self.relu(out)
        return out
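# A minimal usage sketch for ResidualBlock above (not in the original file);
# the tensor shape is an arbitrary assumption.
import paddle

block = ResidualBlock(16, 32)
x = paddle.rand([1, 16, 64, 64])
print(block(x).shape)  # [1, 32, 64, 64]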
tests/test_discovery.py
tipofthesowrd/SoCo
1,149
142832
import socket
import select
import ipaddress
import ifaddr
from collections import OrderedDict
from unittest.mock import patch, MagicMock as Mock, PropertyMock, call

from soco import discover
from soco import config
from soco.discovery import (
    any_soco,
    by_name,
    _find_ipv4_addresses,
    _find_ipv4_networks,
    _check_ip_and_port,
    _is_sonos,
    _sonos_scan_worker_thread,
    scan_network,
)

IP_ADDR = "192.168.1.101"
TIMEOUT = 5


class TestDiscover:
    def test_discover(self, monkeypatch):
        # Create a fake socket, whose data is always a certain string
        monkeypatch.setattr("socket.socket", Mock())
        sock = socket.socket.return_value
        sock.recvfrom.return_value = (
            b"SERVER: Linux UPnP/1.0 Sonos/26.1-76230 (ZPS3)",
            [IP_ADDR],
        )  # (data, address)
        # Return a couple of IP addresses from _find_ipv4_addresses()
        monkeypatch.setattr(
            "soco.discovery._find_ipv4_addresses",
            Mock(return_value={"192.168.0.15", "192.168.1.16"}),
        )
        # Prevent creation of soco instances
        monkeypatch.setattr("soco.config.SOCO_CLASS", Mock())
        # Fake return value for select
        monkeypatch.setattr("select.select", Mock(return_value=([sock], 1, 1)))
        # Set timeout
        TIMEOUT = 2
        discover(timeout=TIMEOUT)
        # 6 packets in total should be sent (3 to 192.168.0.15 and
        # 3 to 192.168.1.16)
        assert sock.sendto.call_count == 6
        # select called with the relevant timeout
        select.select.assert_called_with([sock, sock], [], [], min(TIMEOUT, 0.1))
        # SoCo should be created with the IP address received
        config.SOCO_CLASS.assert_called_with(IP_ADDR)

        # Now test include_visible parameter. include_invisible=True should
        # result in calling SoCo.all_zones etc
        # Reset gethostbyname, to always return the same value
        monkeypatch.setattr("socket.gethostbyname", Mock(return_value="192.168.1.15"))
        config.SOCO_CLASS.return_value = Mock(all_zones="ALL", visible_zones="VISIBLE")
        assert discover(include_invisible=True) == "ALL"
        assert discover(include_invisible=False) == "VISIBLE"

        # If select does not return within timeout SoCo should not be called
        # at all
        # Simulate no data being returned within timeout
        select.select.return_value = (0, 1, 1)
        discover(timeout=1)
        # Check no SoCo instance created
        config.SOCO_CLASS.assert_not_called()


def test_by_name():
    """Test the by_name method"""
    devices = set()
    for name in ("fake", "non", "Kitchen"):
        mymock = Mock(player_name=name)
        devices.add(mymock)
    # The mock we want to find is the last one
    mock_to_be_found = mymock

    # Patch out discover and test
    with patch("soco.discovery.discover") as discover_:
        discover_.return_value = devices

        # Test not found
        device = by_name("Living Room")
        assert device is None
        discover_.assert_called_once_with(allow_network_scan=False)

        # Test found
        device = by_name("Kitchen")
        assert device is mock_to_be_found
        discover_.assert_has_calls(
            [call(allow_network_scan=False), call(allow_network_scan=False)]
        )


# Tests for scan_network()


def test__find_ipv4_networks(monkeypatch):
    _set_up_adapters(monkeypatch)
    # Check that we get the expected networks; test different min_netmask values
    assert ipaddress.ip_network("192.168.0.55/24", False) in _find_ipv4_networks(24)
    assert ipaddress.ip_network("192.168.1.1/24", False) in _find_ipv4_networks(24)
    assert ipaddress.ip_network("192.168.1.1/16", False) not in _find_ipv4_networks(24)
    assert ipaddress.ip_network("192.168.1.1/16", False) in _find_ipv4_networks(16)
    assert ipaddress.ip_network("192.168.1.1/16", False) in _find_ipv4_networks(0)
    assert ipaddress.ip_network("192.168.3.11/8", False) not in _find_ipv4_networks(8)
    assert ipaddress.ip_network("127.0.0.1/24", False) not in _find_ipv4_networks(24)
    assert ipaddress.ip_network("169.254.1.10/16", False) not in _find_ipv4_networks(16)


def test__find_ipv4_addresses(monkeypatch):
    _set_up_adapters(monkeypatch)
    assert _find_ipv4_addresses() == {"192.168.0.1", "192.168.1.1", "192.168.3.11"}


def test__check_ip_and_port(monkeypatch):
    _setup_sockets(monkeypatch)
    assert _check_ip_and_port("192.168.0.1", 1400, 0.1) is True
    assert _check_ip_and_port("192.168.0.1", 1401, 0.1) is False
    assert _check_ip_and_port("192.168.0.3", 1400, 0.1) is False


def test__is_sonos(monkeypatch):
    with patch("soco.config.SOCO_CLASS", new=_mock_soco_new):
        assert _is_sonos("192.168.0.1") is True
        assert _is_sonos("192.168.0.2") is True
        assert _is_sonos("192.168.0.3") is False


def test__sonos_scan_worker_thread(monkeypatch):
    _setup_sockets(monkeypatch)
    with patch("soco.config.SOCO_CLASS", new=_mock_soco_new):
        ip_set = {"192.168.0.1", "192.168.0.2", "192.168.0.3"}
        sonos_ip_addresses = []
        _sonos_scan_worker_thread(ip_set, 0.1, sonos_ip_addresses, False)
        assert len(sonos_ip_addresses) == 1
        assert (
            "192.168.0.1" in sonos_ip_addresses
            or "192.168.0.2" in sonos_ip_addresses
        )
        assert "192.168.0.3" not in sonos_ip_addresses

        ip_set = {"192.168.0.1", "192.168.0.2", "192.168.0.3"}
        sonos_ip_addresses = []
        _sonos_scan_worker_thread(ip_set, 0.1, sonos_ip_addresses, True)
        assert len(sonos_ip_addresses) == 2
        assert {"192.168.0.1", "192.168.0.2"} == set(sonos_ip_addresses)
        assert "192.168.0.3" not in sonos_ip_addresses


def test_scan_network(monkeypatch):
    _setup_sockets(monkeypatch)
    _set_up_adapters(monkeypatch)
    with patch("soco.config.SOCO_CLASS", new=_mock_soco_new):
        assert "192.168.0.1" in scan_network(include_invisible=False)
        assert "192.168.0.2" not in scan_network(include_invisible=False)
        assert "192.168.0.1" in scan_network(
            include_invisible=False, multi_household=True
        )
        assert "192.168.0.2" not in scan_network(
            include_invisible=False, multi_household=True
        )
        assert "192.168.0.1" in scan_network(
            include_invisible=True, multi_household=True
        )
        assert "192.168.0.2" in scan_network(include_invisible=True)
        assert "192.168.0.2" in scan_network(
            include_invisible=True, multi_household=True
        )
        # This one can take a few seconds to run; large address
        # space, and large number of threads
        assert "192.168.0.1" in scan_network(
            include_invisible=False,
            multi_household=True,
            max_threads=15000,
            min_netmask=16,
        )
        # Test specified networks
        assert "192.168.0.1" in scan_network(
            include_invisible=False, networks_to_scan=["192.168.0.1/24"]
        )
        assert "192.168.0.2" in scan_network(
            include_invisible=True, networks_to_scan=["192.168.0.1/24"]
        )
        assert "192.168.0.2" not in scan_network(
            include_invisible=False, networks_to_scan=["192.168.0.1/24"]
        )
        assert "192.168.0.1" in scan_network(networks_to_scan=[])
        assert scan_network(networks_to_scan=["not_a_network", ""]) is None


# Helper functions for scan_network() tests


def _set_up_adapters(monkeypatch):
    """Helper function that creates a number of mock network adapters to be
    returned by ifaddr.get_adapters()."""
    private_24 = ifaddr.IP("192.168.0.1", 24, "private-24")
    private_16 = ifaddr.IP("192.168.1.1", 16, "private-16")
    public = ifaddr.IP("192.168.3.11", 8, "public")
    loopback = ifaddr.IP("127.0.0.1", 24, "loopback")
    link_local = ifaddr.IP("169.254.1.10", 16, "link_local")
    ips = [private_24, private_16, public, loopback, link_local]
    # Set up mock adapters
    adapters = OrderedDict()
    for index in range(len(ips)):
        ip = ips[index]
        adapters[ip.nice_name] = ifaddr._shared.Adapter(
            ip.nice_name, ip.nice_name, [ip], index=index + 1
        )
    # Patch the response from ifaddr.get_adapters()
    monkeypatch.setattr("ifaddr.get_adapters", Mock(return_value=adapters.values()))


def _mock_soco_new(ip_address):
    """Helper function that replaces the SoCo constructor. Returns Mock
    objects for Sonos devices at two specific IP addresses."""
    if ip_address in ["192.168.0.1", "192.168.0.2"]:
        return Mock(
            visible_zones=["192.168.0.1"], all_zones=["192.168.0.1", "192.168.0.2"]
        )
    else:
        raise ValueError


def _setup_sockets(monkeypatch):
    """Helper function to create fake socket connection responses
    corresponding to Sonos speakers on specific IP address / port
    combinations only."""

    def mock_socket_connect_ex_return(_, address_port):
        if address_port in [("192.168.0.1", 1400), ("192.168.0.2", 1400)]:
            return 0
        else:
            return 1

    monkeypatch.setattr("socket.socket.connect_ex", mock_socket_connect_ex_return)
compressor/parser/lxml.py
cron-ooo/django-compressor
1,480
142833
<reponame>cron-ooo/django-compressor
from django.core.exceptions import ImproperlyConfigured
from django.utils.encoding import smart_str
from django.utils.functional import cached_property

from compressor.exceptions import ParserError
from compressor.parser import ParserBase


class LxmlParser(ParserBase):
    """
    LxmlParser will use `lxml.html` parser to parse rendered contents of
    {% compress %} tag.
    """
    def __init__(self, content):
        try:
            from lxml.html import fromstring
            from lxml.etree import tostring
        except ImportError as err:
            raise ImproperlyConfigured("Error while importing lxml: %s" % err)
        except Exception as err:
            raise ParserError("Error while initializing parser: %s" % err)
        self.fromstring = fromstring
        self.tostring = tostring
        super().__init__(content)

    @cached_property
    def tree(self):
        """
        Document tree.
        """
        content = '<root>%s</root>' % self.content
        tree = self.fromstring(content)
        self.tostring(tree, encoding=str)
        return tree

    def css_elems(self):
        return self.tree.xpath('//link[re:test(@rel, "^stylesheet$", "i")]|style',
                               namespaces={"re": "http://exslt.org/regular-expressions"})

    def js_elems(self):
        return self.tree.findall('script')

    def elem_attribs(self, elem):
        return elem.attrib

    def elem_content(self, elem):
        return smart_str(elem.text)

    def elem_name(self, elem):
        return elem.tag

    def elem_str(self, elem):
        return smart_str(self.tostring(elem, method='html', encoding=str))
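# A minimal usage sketch (not part of the original module); assumes lxml is
# installed and that ParserBase.__init__ stores `content` on the instance.
parser = LxmlParser('<link rel="stylesheet" href="a.css"><script src="b.js"></script>')
print([dict(parser.elem_attribs(e)) for e in parser.css_elems()])
print([parser.elem_name(e) for e in parser.js_elems()])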
test cases/common/71 ctarget dependency/gen2.py
iinuwa/meson
4,047
142836
#!/usr/bin/env python3

import sys, os
from glob import glob

files = glob(os.path.join(sys.argv[1], '*.tmp'))
assert len(files) == 1

with open(files[0]) as ifile, open(sys.argv[2], 'w') as ofile:
    ofile.write(ifile.read())
hatch/files/setup.py
williamirick/hatch
2,549
142870
<filename>hatch/files/setup.py
from hatch.structures import File
from hatch.utils import normalize_package_name

TEMPLATE = """\
#################### Maintained by Hatch ####################
# This file is auto-generated by hatch. If you'd like to customize this file
# please add your changes near the bottom marked for 'USER OVERRIDES'.
# EVERYTHING ELSE WILL BE OVERWRITTEN by hatch.
#############################################################
from io import open

from setuptools import find_packages, setup

with open('{package_name_normalized}/__init__.py', 'r') as f:
    for line in f:
        if line.startswith('__version__'):
            version = line.strip().split('=')[1].strip(' \\'"')
            break
    else:
        version = '0.0.1'

with open('{readme_file}', 'r', encoding='utf-8') as f:
    readme = f.read()

REQUIRES = {requires}

kwargs = {{
    'name': '{package_name}',
    'version': version,
    'description': '',
    'long_description': readme,
    'author': '{name}',
    'author_email': '{email}',
    'maintainer': '{name}',
    'maintainer_email': '{email}',
    'url': '{package_url}',
    'license': '{license}',
    'classifiers': [
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',{license_classifiers}
        'Natural Language :: English',
        'Operating System :: OS Independent',{pyversions}
        'Programming Language :: Python :: Implementation :: CPython',{pypy}
    ],
    'install_requires': REQUIRES,
    'tests_require': ['coverage', 'pytest'],
    'packages': find_packages(exclude=('tests', 'tests.*')),{entry_point}
}}

#################### BEGIN USER OVERRIDES ####################
# Add your customizations in this section.{user_overrides}
###################### END USER OVERRIDES ####################

setup(**kwargs)
"""


class SetupFile(File):
    def __init__(self, name, email, package_name, pyversions, licenses,
                 readme, package_url, cli, requires=None, user_overrides=None):
        normalized_package_name = normalize_package_name(package_name)

        pypy = '\n'
        versions = ''
        for pyversion in pyversions:
            if not pyversion.startswith('pypy'):
                versions += "\n        'Programming Language :: Python :: {}',".format(
                    pyversion
                )
            else:
                pypy = "\n        'Programming Language :: Python :: Implementation :: PyPy',\n"

        license_classifiers = ''
        for li in licenses:
            license_classifiers += "\n        '{}',".format(li.pypi_classifier)

        entry_point = ''
        if cli:
            entry_point += (
                '\n'
                "    'entry_points': {{\n"
                "        'console_scripts': [\n"
                "            '{pn} = {pnn}.cli:{pnn}',\n"
                '        ],\n'
                '    }},'.format(pn=package_name, pnn=normalized_package_name)
            )

        # For testing we use https://github.com/r1chardj0n3s/parse and its
        # `parse` function breaks on empty inputs.
        entry_point += '\n'
        user_overrides = '\n' + (user_overrides or '')

        super(SetupFile, self).__init__(
            'setup.py',
            TEMPLATE.format(
                name=name,
                email=email,
                package_name=package_name,
                package_name_normalized=normalized_package_name,
                readme_file=readme.file_name,
                package_url=package_url,
                license='/'.join(li.short_name for li in licenses),
                license_classifiers=license_classifiers,
                pyversions=versions,
                pypy=pypy,
                entry_point=entry_point,
                user_overrides=user_overrides,
                requires=requires or [],
            )
        )
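# A minimal usage sketch (not from the original repo). The readme and license
# objects below are hypothetical stand-ins; real hatch code passes its own
# objects exposing `file_name`, `pypi_classifier` and `short_name`.
from types import SimpleNamespace

readme = SimpleNamespace(file_name='README.rst')
mit = SimpleNamespace(pypi_classifier='License :: OSI Approved :: MIT License',
                      short_name='MIT')
setup_file = SetupFile('Jane Doe', 'jane@example.com', 'my-pkg', ['3.6'], [mit],
                       readme, 'https://example.com/my-pkg', cli=False)
# SetupFile inherits from hatch.structures.File, which holds the rendered
# setup.py contents.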
imapfw/toolkit.py
paralax/imapfw
492
142874
<gh_stars>100-1000
# The MIT License (MIT).
# Copyright (c) 2015, <NAME> & contributors.

import os
from threading import Thread


def runHook(hookFunc, *args):

    class Hook(object):
        def __init__(self):
            self._stop = True

        def ended(self):
            self._stop = False

        def stop(self):
            return self._stop

    hookName = hookFunc.__name__

    # Don't run hooks for action unitTests.
    if hookName == 'preHook':
        if args[0] == 'unitTests':
            return False

    hook = Hook()
    args = (hook,) + args
    thread = Thread(name=hookName, target=hookFunc, args=args, daemon=True)
    thread.start()
    thread.join(10)  # TODO: get timeout from rascal.
    return hook.stop()


def xTrans(thing, transforms):
    """Applies a set of transformations to a thing.

    :args:
    - thing: string; if None, then no processing will take place.
    - transforms: iterable that returns a transformation function on each
      turn.

    Returns the transformed thing."""

    if thing is None:
        return None
    for f in transforms:
        thing = f(thing)
    return thing


def expandPath(path):
    xtrans = [os.path.expanduser, os.path.expandvars, os.path.abspath]
    return xTrans(path, xtrans)


def dictValueFromPath(dictionary, path):
    def getItem(tmpDict, lst_path):
        if len(lst_path) > 0:
            if isinstance(tmpDict, dict):
                newDict = tmpDict.get(lst_path.pop(0))
                return getItem(newDict, lst_path)
            else:
                raise KeyError('invalid path')
        return tmpDict

    lst_path = path.split('.')
    return getItem(dictionary, lst_path)
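# A minimal usage sketch (not part of the original module):
print(expandPath('~/mail'))                       # absolute, expanded path
print(dictValueFromPath({'a': {'b': 1}}, 'a.b'))  # -> 1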