code : string
signature : string
docstring : string
loss_without_docstring : float64
loss_with_docstring : float64
factor : float64
''' Takes a block_representation and returns the nonce ''' return get_block_overview(block_representation=block_representation, coin_symbol=coin_symbol, txn_limit=1, api_key=api_key)['bits']
def get_nonce(block_representation, coin_symbol='btc', api_key=None)
Takes a block_representation and returns the nonce
6.032659
5.152444
1.170834
''' Takes a block_representation and returns the previous block hash ''' return get_block_overview(block_representation=block_representation, coin_symbol=coin_symbol, txn_limit=1, api_key=api_key)['prev_block']
def get_prev_block_hash(block_representation, coin_symbol='btc', api_key=None)
Takes a block_representation and returns the previous block hash
4.887414
4.054458
1.205442
''' Takes a block_height and returns the block_hash ''' return get_block_overview(block_representation=block_height, coin_symbol=coin_symbol, txn_limit=1, api_key=api_key)['hash']
def get_block_hash(block_height, coin_symbol='btc', api_key=None)
Takes a block_height and returns the block_hash
6.433332
5.447356
1.181001
''' Takes a block_hash and returns the block_height ''' return get_block_overview(block_representation=block_hash, coin_symbol=coin_symbol, txn_limit=1, api_key=api_key)['height']
def get_block_height(block_hash, coin_symbol='btc', api_key=None)
Takes a block_hash and returns the block_height
6.402682
5.397964
1.186129
assert is_valid_coin_symbol(coin_symbol) block_overview = get_block_overview( block_representation=block_representation, coin_symbol=coin_symbol, txn_limit=txn_limit, txn_offset=txn_offset, api_key=api_key, ) if 'error' in block_overview: return block_overview txids_to_lookup = block_overview['txids'] txs_details = get_transactions_details( tx_hash_list=txids_to_lookup, coin_symbol=coin_symbol, limit=in_out_limit, api_key=api_key, ) if 'error' in txs_details: return txs_details # build comparator dict to use for fast sorting of batched results later txids_comparator_dict = {} for cnt, tx_id in enumerate(txids_to_lookup): txids_comparator_dict[tx_id] = cnt # sort results using comparator dict block_overview['txids'] = sorted( txs_details, key=lambda k: txids_comparator_dict.get(k.get('hash'), 9999), # anything that fails goes last ) return block_overview
def get_block_details(block_representation, coin_symbol='btc', txn_limit=None, txn_offset=None, in_out_limit=None, api_key=None)
Takes a block_representation, coin_symbol and txn_limit and 1) Gets the block overview 2) Makes a separate API call to get specific data on txn_limit transactions Note: block_representation may be the block number or block hash WARNING: using a high txn_limit will make this *extremely* slow.
2.47925
2.510521
0.987544
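A minimal usage sketch for the block helpers documented above. It assumes these functions are importable from the top level of the blockcypher package; the API token and block height are placeholders, and the returned field names follow the block overview used in the code above.

    from blockcypher import get_block_hash, get_block_details

    API_KEY = 'YOUR_BLOCKCYPHER_TOKEN'  # placeholder token

    block_hash = get_block_hash(block_height=500000, coin_symbol='btc', api_key=API_KEY)
    details = get_block_details(
        block_representation=block_hash,
        coin_symbol='btc',
        txn_limit=5,   # keep this small: the docstring warns a high txn_limit is very slow
        api_key=API_KEY,
    )
    print(details['height'], len(details['txids']))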
overview = get_blockchain_overview(coin_symbol=coin_symbol, api_key=api_key) return { 'high_fee_per_kb': overview['high_fee_per_kb'], 'medium_fee_per_kb': overview['medium_fee_per_kb'], 'low_fee_per_kb': overview['low_fee_per_kb'], }
def get_blockchain_fee_estimates(coin_symbol='btc', api_key=None)
Returns high, medium, and low fee estimates for a given blockchain.
1.995458
1.80701
1.104287
assert is_valid_coin_symbol(coin_symbol) assert api_key, 'api_key required' url = make_url(coin_symbol, 'payments') logger.info(url) params = {'token': api_key} data = { 'destination': destination_address, } if callback_url: data['callback_url'] = callback_url r = requests.post(url, json=data, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r)
def get_forwarding_address_details(destination_address, api_key, callback_url=None, coin_symbol='btc')
Given a destination address, return the details of the input address that will automatically forward to the destination address. Note: a blockcypher api_key is required for this method
2.952558
3.064298
0.963535
assert api_key, 'api_key required' resp_dict = get_forwarding_address_details( destination_address=destination_address, api_key=api_key, callback_url=callback_url, coin_symbol=coin_symbol ) return resp_dict['input_address']
def get_forwarding_address(destination_address, api_key, callback_url=None, coin_symbol='btc')
Given a destination address, return an input address that will automatically forward to the destination address. See get_forwarding_address_details if you also need the forwarding address ID. Note: a blockcypher api_key is required for this method
3.152981
2.762541
1.141334
''' List the forwarding addresses for a certain api key (and on a specific blockchain) ''' assert is_valid_coin_symbol(coin_symbol) assert api_key url = make_url(coin_symbol, 'payments') params = {'token': api_key} if offset: params['start'] = offset r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r)
def list_forwarding_addresses(api_key, offset=None, coin_symbol='btc')
List the forwarding addresses for a certain api key (and on a specific blockchain)
4.842745
3.334597
1.452273
''' Delete a forwarding address on a specific blockchain, using its payment id ''' assert payment_id, 'payment_id required' assert is_valid_coin_symbol(coin_symbol) assert api_key, 'api_key required' params = {'token': api_key} url = make_url(**dict(payments=payment_id)) r = requests.delete(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r, allow_204=True)
def delete_forwarding_address(payment_id, coin_symbol='btc', api_key=None)
Delete a forwarding address on a specific blockchain, using its payment id
4.637092
3.70587
1.251283
''' Subscribe to transaction webhooks on a given address. Webhooks for transaction broadcast and each confirmation (up to 6). Returns the blockcypher ID of the subscription ''' assert is_valid_coin_symbol(coin_symbol) assert is_valid_address_for_coinsymbol(subscription_address, coin_symbol) assert api_key, 'api_key required' url = make_url(coin_symbol, 'hooks') params = {'token': api_key} data = { 'event': event, 'url': callback_url, 'address': subscription_address, } if event == 'tx-confirmation' and confirmations: data['confirmations'] = confirmations elif event == 'tx-confidence' and confidence: data['confidence'] = confidence r = requests.post(url, json=data, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) response_dict = get_valid_json(r) return response_dict['id']
def subscribe_to_address_webhook(callback_url, subscription_address, event='tx-confirmation', confirmations=0, confidence=0.00, coin_symbol='btc', api_key=None)
Subscribe to transaction webhooks on a given address. Webhooks for transaction broadcast and each confirmation (up to 6). Returns the blockcypher ID of the subscription
3.433387
2.435581
1.409678
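A hedged sketch of subscribing an address to confirmation webhooks with the function above. The import path, callback URL, and token are assumptions/placeholders; the address is the well-known genesis address.

    from blockcypher import subscribe_to_address_webhook

    webhook_id = subscribe_to_address_webhook(
        callback_url='https://example.com/blockcypher/hook/',   # placeholder endpoint
        subscription_address='1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa',
        event='tx-confirmation',
        confirmations=6,
        coin_symbol='btc',
        api_key='YOUR_BLOCKCYPHER_TOKEN',   # placeholder token
    )
    print('subscription id:', webhook_id)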
''' Subscribe to transaction webhooks on a given address. Webhooks for transaction broadcast and each confirmation (up to 6). Returns the blockcypher ID of the subscription ''' assert is_valid_coin_symbol(coin_symbol) assert is_valid_wallet_name(wallet_name), wallet_name assert api_key, 'api_key required' url = make_url(coin_symbol, 'hooks') params = {'token': api_key} data = { 'event': event, 'url': callback_url, 'wallet_name': wallet_name, } r = requests.post(url, json=data, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) response_dict = get_valid_json(r) return response_dict['id']
def subscribe_to_wallet_webhook(callback_url, wallet_name, event='tx-confirmation', coin_symbol='btc', api_key=None)
Subscribe to transaction webhooks on a given wallet. Webhooks for transaction broadcast and each confirmation (up to 6). Returns the blockcypher ID of the subscription
3.935147
2.455993
1.602263
''' Send yourself test coins on the bitcoin or blockcypher testnet You can see your balance info at: - https://live.blockcypher.com/bcy/ for BCY - https://live.blockcypher.com/btc-testnet/ for BTC Testnet ''' assert coin_symbol in ('bcy', 'btc-testnet') assert is_valid_address_for_coinsymbol(b58_address=address_to_fund, coin_symbol=coin_symbol) assert satoshis > 0 assert api_key, 'api_key required' url = make_url(coin_symbol, 'faucet') data = { 'address': address_to_fund, 'amount': satoshis, } params = {'token': api_key} r = requests.post(url, json=data, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r)
def send_faucet_coins(address_to_fund, satoshis, api_key, coin_symbol='bcy')
Send yourself test coins on the bitcoin or blockcypher testnet You can see your balance info at: - https://live.blockcypher.com/bcy/ for BCY - https://live.blockcypher.com/btc-testnet/ for BTC Testnet
3.588143
2.305287
1.556484
''' Takes a signed transaction hex binary (and coin_symbol) and broadcasts it to the bitcoin network. ''' assert is_valid_coin_symbol(coin_symbol) assert api_key, 'api_key required' url = _get_pushtx_url(coin_symbol=coin_symbol) logger.info(url) data = {'tx': tx_hex} params = {'token': api_key} r = requests.post(url, json=data, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r)
def pushtx(tx_hex, coin_symbol='btc', api_key=None)
Takes a signed transaction hex binary (and coin_symbol) and broadcasts it to the bitcoin network.
3.804459
2.790054
1.363579
''' Takes a signed transaction hex binary (and coin_symbol) and decodes it to JSON. Does NOT broadcast the transaction to the bitcoin network. Especially useful for testing/debugging and sanity checking ''' assert is_valid_coin_symbol(coin_symbol) assert api_key, 'api_key required' url = make_url(coin_symbol, **dict(txs='decode')) params = {'token': api_key} data = { 'tx': tx_hex, 'token': api_key, } r = requests.post(url, json=data, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r)
def decodetx(tx_hex, coin_symbol='btc', api_key=None)
Takes a signed transaction hex binary (and coin_symbol) and decodes it to JSON. Does NOT broadcast the transaction to the bitcoin network. Especially useful for testing/debugging and sanity checking
5.063103
2.826936
1.791021
''' Get all the wallets belonging to an API key ''' assert is_valid_coin_symbol(coin_symbol), coin_symbol assert api_key params = {'token': api_key} kwargs = dict(wallets='hd' if is_hd_wallet else '') url = make_url(coin_symbol, **kwargs) r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r)
def list_wallet_names(api_key, is_hd_wallet=False, coin_symbol='btc')
Get all the wallets belonging to an API key
4.459505
4.283279
1.041143
''' Create a new wallet with one address You can add addresses with the add_address_to_wallet method below You can delete the wallet with the delete_wallet method below ''' assert is_valid_address_for_coinsymbol(address, coin_symbol) assert api_key assert is_valid_wallet_name(wallet_name), wallet_name data = { 'name': wallet_name, 'addresses': [address, ], } params = {'token': api_key} url = make_url(coin_symbol, 'wallets') r = requests.post(url, json=data, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r)
def create_wallet_from_address(wallet_name, address, api_key, coin_symbol='btc')
Create a new wallet with one address You can add addresses with the add_address_to_wallet method below You can delete the wallet with the delete_wallet method below
3.865485
2.845682
1.358369
''' Create a new wallet from an extended pubkey (xpub... for BTC) You can delete the wallet with the delete_wallet method below ''' inferred_coin_symbol = coin_symbol_from_mkey(mkey=xpubkey) if inferred_coin_symbol: assert inferred_coin_symbol == coin_symbol assert api_key, 'api_key required' assert len(wallet_name) <= 25, wallet_name data = { 'name': wallet_name, 'extended_public_key': xpubkey, } params = {'token': api_key} if subchain_indices: data['subchain_indexes'] = subchain_indices url = make_url(coin_symbol, **dict(wallets='hd')) r = requests.post(url, json=data, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r)
def create_hd_wallet(wallet_name, xpubkey, api_key, subchain_indices=[], coin_symbol='btc')
Create a new wallet from an extended pubkey (xpub... for BTC) You can delete the wallet with the delete_wallet method below
4.689094
3.452906
1.358014
''' Returns a list of wallet addresses as well as some meta-data ''' assert is_valid_coin_symbol(coin_symbol) assert api_key assert len(wallet_name) <= 25, wallet_name assert zero_balance in (None, True, False) assert used in (None, True, False) assert isinstance(omit_addresses, bool), omit_addresses params = {'token': api_key} kwargs = {'hd/' if is_hd_wallet else '': wallet_name} # hack! url = make_url(coin_symbol, 'wallets', **kwargs) if zero_balance is True: params['zerobalance'] = 'true' elif zero_balance is False: params['zerobalance'] = 'false' if used is True: params['used'] = 'true' elif used is False: params['used'] = 'false' if omit_addresses: params['omitWalletAddresses'] = 'true' r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r)
def get_wallet_addresses(wallet_name, api_key, is_hd_wallet=False, zero_balance=None, used=None, omit_addresses=False, coin_symbol='btc')
Returns a list of wallet addresses as well as some meta-data
2.716808
2.553667
1.063885
''' This is particularly useful over get_wallet_transactions and get_wallet_addresses in cases where you have lots of addresses/transactions. Much less data to return. ''' assert is_valid_coin_symbol(coin_symbol) assert api_key assert len(wallet_name) <= 25, wallet_name assert isinstance(omit_addresses, bool), omit_addresses params = {'token': api_key} if omit_addresses: params['omitWalletAddresses'] = 'true' url = make_url(coin_symbol, 'addrs/{}/balance'.format(wallet_name)) r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r)
def get_wallet_balance(wallet_name, api_key, omit_addresses=False, coin_symbol='btc')
This is particularly useful over get_wallet_transactions and get_wallet_addresses in cases where you have lots of addresses/transactions. Much less data to return.
4.428653
2.660682
1.66448
''' Takes an unsigned transaction and what was used to build it (in create_unsigned_tx) and verifies that tosign_tx matches what is being signed and what was requestsed to be signed. Returns if valid: (True, '') Returns if invalid: (False, 'err_msg') Specifically, this checks that the outputs match what we're expecting (bad inputs would fail signature anyway). Note: it was a mistake to include `inputs` in verify_unsigned_tx as it by definition is not used. It would be removed but that would break compatibility. ''' if not (change_address or sweep_funds): err_msg = 'Cannot Verify Without Developer Supplying Change Address (or Sweeping)' return False, err_msg if 'tosign_tx' not in unsigned_tx: err_msg = 'tosign_tx not in API response:\n%s' % unsigned_tx return False, err_msg output_addr_list = [x['address'] for x in outputs if x.get('address') != None] if change_address: output_addr_list.append(change_address) assert len(unsigned_tx['tosign_tx']) == len(unsigned_tx['tosign']), unsigned_tx for cnt, tosign_tx_toverify in enumerate(unsigned_tx['tosign_tx']): # Confirm tosign is the dsha256 of tosign_tx if double_sha256(tosign_tx_toverify) != unsigned_tx['tosign'][cnt]: err_msg = 'double_sha256(%s) =! %s' % (tosign_tx_toverify, unsigned_tx['tosign'][cnt]) print(unsigned_tx) return False, err_msg try: txn_outputs_response_dict = get_txn_outputs_dict(raw_tx_hex=tosign_tx_toverify, output_addr_list=output_addr_list, coin_symbol=coin_symbol) except Exception as inst: # Could be wrong output addresses, keep print statement for debug print(unsigned_tx) print(coin_symbol) return False, str(inst) if sweep_funds: # output adresses are already confirmed in `get_txn_outputs`, # which was called by `get_txn_outputs_dict` # no point in confirming values for a sweep continue else: # get rid of change address as tx fee (which affects value) # is determined by blockcypher and can't be known up front try: txn_outputs_response_dict.pop(change_address) except KeyError: # This is possible in the case of change address not needed pass user_outputs = compress_txn_outputs(outputs) if txn_outputs_response_dict != user_outputs: # TODO: more helpful error message err_msg = 'API Response Ouputs != Supplied Outputs\n\n%s\n\n%s' % ( txn_outputs_response_dict, user_outputs) return False, err_msg return True, ''
def verify_unsigned_tx(unsigned_tx, outputs, inputs=None, sweep_funds=False, change_address=None, coin_symbol='btc')
Takes an unsigned transaction and what was used to build it (in create_unsigned_tx) and verifies that tosign_tx matches what is being signed and what was requested to be signed. Returns if valid: (True, '') Returns if invalid: (False, 'err_msg') Specifically, this checks that the outputs match what we're expecting (bad inputs would fail signature anyway). Note: it was a mistake to include `inputs` in verify_unsigned_tx as it by definition is not used. It would be removed but that would break compatibility.
4.993584
3.183407
1.568629
assert len(privkey_list) == len(pubkey_list) == len(txs_to_sign) # in the event of multiple inputs using the same pub/privkey, # that privkey should be included multiple times signatures = [] for cnt, tx_to_sign in enumerate(txs_to_sign): sig = der_encode_sig(*ecdsa_raw_sign(tx_to_sign.rstrip(' \t\r\n\0'), privkey_list[cnt])) err_msg = 'Bad Signature: sig %s for tx %s with pubkey %s' % ( sig, tx_to_sign, pubkey_list[cnt], ) assert ecdsa_raw_verify(tx_to_sign, der_decode_sig(sig), pubkey_list[cnt]), err_msg signatures.append(sig) return signatures
def make_tx_signatures(txs_to_sign, privkey_list, pubkey_list)
Loops through txs_to_sign and makes signatures using privkey_list and pubkey_list Not sure what privkeys and pubkeys to supply? Use get_input_addresses() to return a list of addresses. Matching those addresses to keys is up to you and how you store your private keys. A future version of this library may handle this for you, but it is not trivial. Note that if spending multisig funds the process is significantly more complicated. Each tx_to_sign must be signed by *each* private key. In a 2-of-3 transaction, two of [privkey1, privkey2, privkey3] must sign each tx_to_sign http://dev.blockcypher.com/#multisig-transactions
3.182087
3.246699
0.980099
''' Broadcasts the transaction from create_unsigned_tx ''' assert 'errors' not in unsigned_tx, unsigned_tx assert api_key, 'api_key required' url = make_url(coin_symbol, **dict(txs='send')) data = unsigned_tx.copy() data['signatures'] = signatures data['pubkeys'] = pubkeys params = {'token': api_key} r = requests.post(url, params=params, json=data, verify=True, timeout=TIMEOUT_IN_SECONDS) response_dict = get_valid_json(r) if response_dict.get('tx') and response_dict.get('received'): response_dict['tx']['received'] = parser.parse(response_dict['tx']['received']) return response_dict
def broadcast_signed_transaction(unsigned_tx, signatures, pubkeys, coin_symbol='btc', api_key=None)
Broadcasts the transaction from create_unsigned_tx
3.895345
3.413553
1.141141
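The verify/sign/broadcast records above fit together into one flow. A sketch under stated assumptions: unsigned_tx is the dict returned by create_unsigned_tx (referenced in the docstrings but not shown in this section), the import path is assumed to be the package top level, and all keys and addresses are placeholders supplied by the caller.

    from blockcypher import verify_unsigned_tx, make_tx_signatures, broadcast_signed_transaction

    def sign_and_broadcast(unsigned_tx, outputs, change_address, privkeys, pubkeys,
                           api_key, coin_symbol='btc'):
        # unsigned_tx: dict assumed to come from create_unsigned_tx (not shown here)
        ok, err = verify_unsigned_tx(unsigned_tx=unsigned_tx, outputs=outputs,
                                     change_address=change_address, coin_symbol=coin_symbol)
        if not ok:
            raise ValueError(err)
        signatures = make_tx_signatures(txs_to_sign=unsigned_tx['tosign'],
                                        privkey_list=privkeys, pubkey_list=pubkeys)
        return broadcast_signed_transaction(unsigned_tx=unsigned_tx, signatures=signatures,
                                            pubkeys=pubkeys, coin_symbol=coin_symbol,
                                            api_key=api_key)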
''' Get metadata using blockcypher's API. This is data on blockcypher's servers and not embedded into the bitcoin (or other) blockchain. ''' assert is_valid_coin_symbol(coin_symbol), coin_symbol assert api_key or not private, 'Cannot see private metadata without an API key' kwarg = get_valid_metadata_identifier( coin_symbol=coin_symbol, address=address, tx_hash=tx_hash, block_hash=block_hash, ) url = make_url(coin_symbol, meta=True, **kwarg) params = {'token': api_key} if api_key else {'private': 'true'} r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) response_dict = get_valid_json(r) return response_dict
def get_metadata(address=None, tx_hash=None, block_hash=None, api_key=None, private=True, coin_symbol='btc')
Get metadata using blockcypher's API. This is data on blockcypher's servers and not embedded into the bitcoin (or other) blockchain.
4.725395
3.136583
1.506543
''' Embed metadata using blockcypher's API. This is not embedded into the bitcoin (or other) blockchain, and is only stored on blockcypher's servers. ''' assert is_valid_coin_symbol(coin_symbol), coin_symbol assert api_key assert metadata_dict and isinstance(metadata_dict, dict), metadata_dict kwarg = get_valid_metadata_identifier( coin_symbol=coin_symbol, address=address, tx_hash=tx_hash, block_hash=block_hash, ) url = make_url(coin_symbol, meta=True, **kwarg) params = {'token': api_key} if private: params['private'] = 'true' r = requests.put(url, json=metadata_dict, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r, allow_204=True)
def put_metadata(metadata_dict, address=None, tx_hash=None, block_hash=None, api_key=None, private=True, coin_symbol='btc')
Embed metadata using blockcypher's API. This is not embedded into the bitcoin (or other) blockchain, and is only stored on blockcypher's servers.
4.079915
2.702139
1.509883
''' Only available for metadata that was embedded privately. ''' assert is_valid_coin_symbol(coin_symbol), coin_symbol assert api_key, 'api_key required' kwarg = get_valid_metadata_identifier( coin_symbol=coin_symbol, address=address, tx_hash=tx_hash, block_hash=block_hash, ) url = make_url(coin_symbol, meta=True, **kwarg) params = {'token': api_key} r = requests.delete(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS) return get_valid_json(r, allow_204=True)
def delete_metadata(address=None, tx_hash=None, block_hash=None, api_key=None, coin_symbol='btc')
Only available for metadata that was embedded privately.
4.214725
3.263544
1.291456
''' convert to satoshis, no rounding ''' assert input_type in UNIT_CHOICES, input_type # convert to satoshis if input_type in ('btc', 'mbtc', 'bit'): satoshis = float(input_quantity) * float(UNIT_MAPPINGS[input_type]['satoshis_per']) elif input_type == 'satoshi': satoshis = input_quantity else: raise Exception('Invalid Unit Choice: %s' % input_type) return int(satoshis)
def to_satoshis(input_quantity, input_type)
convert to satoshis, no rounding
3.231972
3.113619
1.038011
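Quick conversion examples for to_satoshis, assuming it is importable from the package top level and the standard Bitcoin unit ratios (1 BTC = 100,000,000 satoshis, 1 mBTC = 100,000, 1 bit = 100).

    from blockcypher import to_satoshis

    print(to_satoshis(1.5, 'btc'))      # 150000000
    print(to_satoshis(2, 'mbtc'))       # 200000
    print(to_satoshis(21, 'satoshi'))   # 21 (already satoshis, passed through)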
''' Safe trimming means the following: 1.0010000 -> 1.001 1.0 -> 1.0 (no change) 1.0000001 -> 1.0000001 (no change) ''' qty_formatted = qty_as_string if '.' in qty_as_string: # only affect numbers with decimals while True: if qty_formatted[-1] == '0' and qty_formatted[-2] != '.': qty_formatted = qty_formatted[:-1] else: break return qty_formatted
def safe_trim(qty_as_string)
Safe trimming means the following: 1.0010000 -> 1.001 1.0 -> 1.0 (no change) 1.0000001 -> 1.0000001 (no change)
3.222228
1.99059
1.61873
''' Take an input like 11002343 satoshis and convert it to another unit (e.g. BTC) and format it with appropriate units if coin_symbol is supplied and print_cs == True then the units will be added (e.g. BTC or satoshis) Smart trimming gets rid of trailing 0s in the decimal place, except for satoshis (irrelevant) and bits (always two decimals points). It also preserves one decimal place in the case of 1.0 to show significant figures. It is stil technically correct and reversible. Smart rounding performs a rounding operation (so it is techincally not the correct number and is not reversible). The number of decimals to round by is a function of the output_type Requires python >= 2.7 ''' assert input_type in UNIT_CHOICES, input_type assert output_type in UNIT_CHOICES, output_type if print_cs: assert is_valid_coin_symbol(coin_symbol=coin_symbol), coin_symbol assert isinstance(round_digits, int) satoshis_float = to_satoshis(input_quantity=input_quantity, input_type=input_type) if round_digits: satoshis_float = round(satoshis_float, -1*round_digits) output_quantity = from_satoshis( input_satoshis=satoshis_float, output_type=output_type, ) if output_type == 'bit' and round_digits >= 2: pass # hack to add thousands separator with no decimals output_quantity_formatted = format_output(num=output_quantity, output_type='satoshi') else: # add thousands separator and appropriate # of decimals output_quantity_formatted = format_output(num=output_quantity, output_type=output_type) if safe_trimming and output_type not in ('satoshi', 'bit'): output_quantity_formatted = safe_trim(qty_as_string=output_quantity_formatted) if print_cs: curr_symbol = get_curr_symbol( coin_symbol=coin_symbol, output_type=output_type, ) output_quantity_formatted += ' %s' % curr_symbol return output_quantity_formatted
def format_crypto_units(input_quantity, input_type, output_type, coin_symbol=None, print_cs=False, safe_trimming=False, round_digits=0)
Take an input like 11002343 satoshis and convert it to another unit (e.g. BTC) and format it with appropriate units. If coin_symbol is supplied and print_cs == True then the units will be added (e.g. BTC or satoshis). Smart trimming gets rid of trailing 0s in the decimal place, except for satoshis (irrelevant) and bits (always two decimal places). It also preserves one decimal place in the case of 1.0 to show significant figures. It is still technically correct and reversible. Smart rounding performs a rounding operation (so it is technically not the correct number and is not reversible). The number of decimals to round by is a function of the output_type. Requires python >= 2.7
4.624944
1.945679
2.377034
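A hedged usage sketch for format_crypto_units, using the quantity from its own docstring. The import path is an assumption and the exact output string depends on the library's format_output helper, so the expected value is indicative only.

    from blockcypher import format_crypto_units

    # Convert 11002343 satoshis to BTC and append the currency symbol.
    print(format_crypto_units(
        input_quantity=11002343,
        input_type='satoshi',
        output_type='btc',
        coin_symbol='btc',
        print_cs=True,
    ))  # expected output along the lines of '0.11002343 BTC'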
''' Used to verify a transaction hex does what's expected of it. Must supply a list of output addresses so that the library can try to convert from script to address using both pubkey and script. Returns a list of the following form: [{'value': 12345, 'address': '1abc...'}, ...] Uses @vbuterin's decoding methods. ''' # Defensive checks: err_msg = 'Library not able to parse %s transactions' % coin_symbol assert lib_can_deserialize_cs(coin_symbol), err_msg assert isinstance(output_addr_list, (list, tuple)) for output_addr in output_addr_list: assert is_valid_address(output_addr), output_addr output_addr_set = set(output_addr_list) # speed optimization outputs = [] deserialized_tx = deserialize(str(raw_tx_hex)) for out in deserialized_tx.get('outs', []): output = {'value': out['value']} # determine if the address is a pubkey address, script address, or op_return pubkey_addr = script_to_address(out['script'], vbyte=COIN_SYMBOL_MAPPINGS[coin_symbol]['vbyte_pubkey']) script_addr = script_to_address(out['script'], vbyte=COIN_SYMBOL_MAPPINGS[coin_symbol]['vbyte_script']) nulldata = out['script'] if out['script'][0:2] == '6a' else None if pubkey_addr in output_addr_set: address = pubkey_addr output['address'] = address elif script_addr in output_addr_set: address = script_addr output['address'] = address elif nulldata: output['script'] = nulldata output['script_type'] = 'null-data' else: raise Exception('Script %s Does Not Contain a Valid Output Address: %s' % ( out['script'], output_addr_set, )) outputs.append(output) return outputs
def get_txn_outputs(raw_tx_hex, output_addr_list, coin_symbol)
Used to verify a transaction hex does what's expected of it. Must supply a list of output addresses so that the library can try to convert from script to address using both pubkey and script. Returns a list of the following form: [{'value': 12345, 'address': '1abc...'}, ...] Uses @vbuterin's decoding methods.
4.016279
2.569279
1.563193
''' Take a list of txn ouputs (from get_txn_outputs output of pybitcointools) and compress it to the sum of satoshis sent to each address in a dictionary. Returns a dict of the following form: {'1abc...': 12345, '1def': 54321, ...} ''' result_dict = {} outputs = (output for output in txn_outputs if output.get('address')) for txn_output in outputs: if txn_output['address'] in result_dict: result_dict[txn_output['address']] += txn_output['value'] else: result_dict[txn_output['address']] = txn_output['value'] return result_dict
def compress_txn_outputs(txn_outputs)
Take a list of txn outputs (from get_txn_outputs output of pybitcointools) and compress it to the sum of satoshis sent to each address in a dictionary. Returns a dict of the following form: {'1abc...': 12345, '1def': 54321, ...}
3.474239
1.594452
2.178955
''' Blockcypher limits wallet names to 25 chars. Hash the master pubkey (with subchain indexes) and take the first 25 chars. Hackey determinstic method for naming. ''' # http://stackoverflow.com/a/19877309/1754586 mpub = mpub.encode('utf-8') if subchain_indices: mpub += ','.join([str(x) for x in subchain_indices]).encode('utf-8') return 'X%s' % sha256(mpub).hexdigest()[:24]
def get_blockcypher_walletname_from_mpub(mpub, subchain_indices=[])
Blockcypher limits wallet names to 25 chars. Hash the master pubkey (with subchain indexes) and take the first 25 chars. Hacky deterministic method for naming.
6.061759
2.882118
2.103231
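A self-contained restatement of the naming scheme in the record above, useful for sanity-checking a name without the library (hashlib only; the example xpub is a truncated placeholder).

    from hashlib import sha256

    def walletname_from_mpub(mpub, subchain_indices=()):
        # 'X' prefix plus the first 24 hex chars of sha256(mpub [+ joined indices]) = 25 chars total
        data = mpub.encode('utf-8')
        if subchain_indices:
            data += ','.join(str(x) for x in subchain_indices).encode('utf-8')
        return 'X' + sha256(data).hexdigest()[:24]

    name = walletname_from_mpub('xpub661MyMwAqRbcF...', subchain_indices=[0, 1])  # placeholder xpub
    assert len(name) == 25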
''' Flattens a response from querying a list of address (or wallet) transactions If nesting==True then it will return an ordered dictionary where the keys are tranasaction hashes, otherwise it will be a list of dicts. (nesting==False is good for django templates) ''' nested_cleaned_txs = OrderedDict() for tx in tx_list: tx_hash = tx.get('tx_hash') satoshis = tx.get('value', 0) # rare edge case where API returns 0 if tx.get('tx_input_n') >= 0: satoshis *= -1 if tx_hash in nested_cleaned_txs: nested_cleaned_txs[tx_hash]['txns_satoshis_list'].append(satoshis) nested_cleaned_txs[tx_hash]['satoshis_net'] = sum(nested_cleaned_txs[tx_hash]['txns_satoshis_list']) if tx.get('double_spend') and not nested_cleaned_txs[tx_hash]['double_spend']: nested_cleaned_txs[tx_hash]['double_spend'] = True else: nested_cleaned_txs[tx_hash] = { 'txns_satoshis_list': [satoshis, ], 'satoshis_net': satoshis, 'received_at': tx.get('received'), 'confirmed_at': tx.get('confirmed'), 'confirmations': tx.get('confirmations', 0), 'block_height': tx.get('block_height'), 'double_spend': tx.get('double_spend', False), } if nesting: return nested_cleaned_txs else: unnested_cleaned_txs = [] for tx_hash in nested_cleaned_txs: tx_cleaned = nested_cleaned_txs[tx_hash] tx_cleaned['tx_hash'] = tx_hash unnested_cleaned_txs.append(tx_cleaned) return unnested_cleaned_txs
def flatten_txns_by_hash(tx_list, nesting=True)
Flattens a response from querying a list of address (or wallet) transactions. If nesting==True then it will return an ordered dictionary where the keys are transaction hashes, otherwise it will be a list of dicts. (nesting==False is good for django templates)
2.577787
1.890454
1.363581
if byteorder == 'little': indexes = range(length) else: indexes = reversed(range(length)) return bytearray((n >> i * 8) & 0xff for i in indexes)
def _long_to_bytes(n, length, byteorder)
Convert a long to a bytestring. For use in Python versions prior to 3.2. Source: http://bugs.python.org/issue16580#msg177208
2.564467
2.804566
0.91439
''' Is an address both valid *and* start with the correct character for its coin symbol (chain/network) ''' assert is_valid_coin_symbol(coin_symbol) if b58_address[0] in COIN_SYMBOL_MAPPINGS[coin_symbol]['address_first_char_list']: if is_valid_address(b58_address): return True return False
def is_valid_address_for_coinsymbol(b58_address, coin_symbol)
Checks that an address is both valid *and* starts with the correct character for its coin symbol (chain/network)
5.611725
2.591873
2.165124
start = datetime.now() - timedelta(days=1) return start.replace(hour=0, minute=0, second=0, microsecond=0)
def default_validity_start()
Sets validity_start field to 1 day before the current date (avoids "certificate not valid yet" edge case). In some cases, because of timezone differences, when certificates were just created they were considered valid in a timezone (eg: Europe) but not yet valid in another timezone (eg: US). This function intentionally returns naive datetime (not timezone aware), so that certificates are valid from 00:00 AM in all timezones.
2.478956
2.756308
0.899375
now = timezone.now() return self.cert_set.filter(revoked=True, validity_start__lte=now, validity_end__gte=now)
def get_revoked_certs(self)
Returns revoked certificates of this CA (does not include expired certificates)
3.864435
3.419842
1.130004
revoked_certs = self.get_revoked_certs() crl = crypto.CRL() now_str = timezone.now().strftime(generalized_time) for cert in revoked_certs: revoked = crypto.Revoked() revoked.set_serial(bytes_compat(cert.serial_number)) revoked.set_reason(b'unspecified') revoked.set_rev_date(bytes_compat(now_str)) crl.add_revoked(revoked) return crl.export(self.x509, self.pkey, days=1, digest=b'sha256')
def crl(self)
Returns up to date CRL of this CA
3.48236
3.356411
1.037525
now = timezone.now() self.revoked = True self.revoked_at = now self.save()
def revoke(self)
* flag certificate as revoked * fill in revoked_at DateTimeField
4.183501
3.263391
1.281949
authenticated = request.user.is_authenticated authenticated = authenticated() if callable(authenticated) else authenticated if app_settings.CRL_PROTECTED and not authenticated: return HttpResponse(_('Forbidden'), status=403, content_type='text/plain') ca = crl.ca_model.objects.get(pk=pk) return HttpResponse(ca.crl, status=200, content_type='application/x-pem-file')
def crl(request, pk)
returns CRL of a CA
3.609699
3.369629
1.071245
small_font = [] medium_font = [] large_font = [] xlarge_font = [] fonts = set(font_map.keys()) - set(RANDOM_FILTERED_FONTS) for font in fonts: length = max(map(len, font_map[font][0].values())) if length <= FONT_SMALL_THRESHOLD: small_font.append(font) elif length > FONT_SMALL_THRESHOLD and length <= FONT_MEDIUM_THRESHOLD: medium_font.append(font) elif length > FONT_MEDIUM_THRESHOLD and length <= FONT_LARGE_THRESHOLD: large_font.append(font) else: xlarge_font.append(font) return { "small_list": small_font, "medium_list": medium_font, "large_list": large_font, "xlarge_list": xlarge_font}
def font_size_splitter(font_map)
Split fonts into 4 categories (small, medium, large, xlarge) by the maximum letter length in each font. :param font_map: input fontmap :type font_map : dict :return: split fonts as dict
1.998098
1.924521
1.038232
fonts = set(FONT_MAP.keys()) if test: fonts = fonts - set(TEST_FILTERED_FONTS) for item in sorted(list(fonts)): print(str(item) + " : ") text_temp = text try: tprint(text_temp, str(item)) except Exception: print(FONT_ENVIRONMENT_WARNING)
def font_list(text="test", test=False)
Print all fonts. :param text : input text :type text : str :param test: test flag :type test: bool :return: None
6.143719
7.233702
0.849319
for i in sorted(list(art_dic.keys())): try: if test: raise Exception print(i) aprint(i) line() except Exception: print(ART_ENVIRONMENT_WARNING) line() if test: break
def art_list(test=False)
Print all 1-Line arts. :param test : exception test flag :type test : bool :return: None
7.458127
7.955682
0.937459
tprint("art") tprint("v" + VERSION) print(DESCRIPTION + "\n") print("Webpage : http://art.shaghighi.ir\n") print("Help : \n") print(" - list --> (list of arts)\n") print(" - fonts --> (list of fonts)\n") print(" - test --> (run tests)\n") print(" - text 'yourtext' 'font(optional)' --> (text art) Example : 'python -m art text exampletext block'\n") print(" - shape 'shapename' --> (shape art) Example : 'python -m art shape butterfly'\n") print(" - save 'yourtext' 'font(optional)' --> Example : 'python -m art save exampletext block'\n") print(" - all 'yourtext' --> Example : 'python -m art all exampletext'")
def help_func()
Print help page. :return: None
5.236255
5.422133
0.965719
print(art(artname=artname, number=number, text=text))
def aprint(artname, number=1, text="")
Print 1-line art. :param artname: artname :type artname : str :return: None
3.765962
5.615166
0.670677
if isinstance(artname, str) is False: raise artError(ART_TYPE_ERROR) artname = artname.lower() arts = sorted(art_dic.keys()) if artname == "random" or artname == "rand" or artname == "rnd": filtered_arts = list(set(arts) - set(RANDOM_FILTERED_ARTS)) artname = random.choice(filtered_arts) elif artname not in art_dic.keys(): distance_list = list(map(lambda x: distance_calc(artname, x), arts)) min_distance = min(distance_list) selected_art = arts[distance_list.index(min_distance)] threshold = max(len(artname), len(selected_art)) / 2 if min_distance < threshold: artname = selected_art else: raise artError(ART_NAME_ERROR) art_value = art_dic[artname] if isinstance(number, int) is False: raise artError(NUMBER_TYPE_ERROR) if isinstance(art_value, str): return (art_value + " ") * number if isinstance(text, str) is False: raise artError(TEXT_TYPE_ERROR) return (art_value[0] + text + art_value[1] + " ") * number
def art(artname, number=1, text="")
Return 1-line art. :param artname: artname :type artname : str :return: ascii art as str
2.505733
2.562607
0.977806
r result = text2art(text, font=font, chr_ignore=chr_ignore) print(result)
def tprint(text, font=DEFAULT_FONT, chr_ignore=True)
r""" Print art text (support \n). :param text: input text :type text:str :param font: input font :type font:str :param chr_ignore: ignore not supported character :type chr_ignore:bool :return: None
7.040537
6.901722
1.020113
r try: if isinstance(text, str) is False: raise Exception(TEXT_TYPE_ERROR) files_list = os.listdir(os.getcwd()) extension = ".txt" splitted_filename = filename.split(".") name = splitted_filename[0] if len(splitted_filename) > 1: extension = "." + splitted_filename[1] index = 2 test_name = name while(True): if test_name + extension in files_list: test_name = name + str(index) index = index + 1 else: break if font.lower() in TEST_FILTERED_FONTS: file = codecs.open(test_name + extension, "w", encoding='utf-8') else: file = open(test_name + extension, "w") result = text2art(text, font=font, chr_ignore=chr_ignore) file.write(result) file.close() if print_status: print("Saved! \nFilename: " + test_name + extension) return {"Status": True, "Message": "OK"} except Exception as e: return {"Status": False, "Message": str(e)}
def tsave( text, font=DEFAULT_FONT, filename="art", chr_ignore=True, print_status=True)
r""" Save ascii art (support \n). :param text: input text :param font: input font :type font:str :type text:str :param filename: output file name :type filename:str :param chr_ignore: ignore not supported character :type chr_ignore:bool :param print_status : save message print flag :type print_status:bool :return: None
2.538614
2.568591
0.988329
if len(s1) > len(s2): s1, s2 = s2, s1 distances = range(len(s1) + 1) for i2, c2 in enumerate(s2): distances_ = [i2 + 1] for i1, c1 in enumerate(s1): if c1 == c2: distances_.append(distances[i1]) else: distances_.append( 1 + min((distances[i1], distances[i1 + 1], distances_[-1]))) distances = distances_ return distances[-1]
def distance_calc(s1, s2)
Calculate Levenshtein distance between two words. :param s1: first word :type s1 : str :param s2: second word :type s2 : str :return: distance between two words References : 1- https://stackoverflow.com/questions/2460177/edit-distance-in-python 2- https://en.wikipedia.org/wiki/Levenshtein_distance
1.30107
1.306021
0.996209
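A properly indented, self-contained version of the distance routine above, with a classic sanity check. Names are local to this sketch.

    def levenshtein(s1, s2):
        # Keep s1 the shorter string so the rolling row stays small.
        if len(s1) > len(s2):
            s1, s2 = s2, s1
        distances = range(len(s1) + 1)
        for i2, c2 in enumerate(s2):
            row = [i2 + 1]
            for i1, c1 in enumerate(s1):
                if c1 == c2:
                    row.append(distances[i1])
                else:
                    row.append(1 + min(distances[i1], distances[i1 + 1], row[-1]))
            distances = row
        return distances[-1]

    assert levenshtein('kitten', 'sitting') == 3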
text_length = len(text) if text_length <= TEXT_XLARGE_THRESHOLD: font = random.choice(XLARGE_WIZARD_FONT) elif text_length > TEXT_XLARGE_THRESHOLD and text_length <= TEXT_LARGE_THRESHOLD: font = random.choice(LARGE_WIZARD_FONT) elif text_length > TEXT_LARGE_THRESHOLD and text_length <= TEXT_MEDIUM_THRESHOLD: font = random.choice(MEDIUM_WIZARD_FONT) else: font = random.choice(SMALL_WIZARD_FONT) return font
def wizard_font(text)
Check input text length for wizard mode. :param text: input text :type text:str :return: font as str
1.888946
1.889684
0.999609
if font == "rnd-small" or font == "random-small" or font == "rand-small": font = random.choice(RND_SIZE_DICT["small_list"]) return font if font == "rnd-medium" or font == "random-medium" or font == "rand-medium": font = random.choice(RND_SIZE_DICT["medium_list"]) return font if font == "rnd-large" or font == "random-large" or font == "rand-large": font = random.choice(RND_SIZE_DICT["large_list"]) return font if font == "rnd-xlarge" or font == "random-xlarge" or font == "rand-xlarge": font = random.choice(RND_SIZE_DICT["xlarge_list"]) return font if font == "random" or font == "rand" or font == "rnd": filtered_fonts = list(set(fonts) - set(RANDOM_FILTERED_FONTS)) font = random.choice(filtered_fonts) return font if font == "wizard" or font == "wiz" or font == "magic": font = wizard_font(text) return font if font == "rnd-na" or font == "random-na" or font == "rand-na": font = random.choice(TEST_FILTERED_FONTS) return font if font not in FONT_MAP.keys(): distance_list = list(map(lambda x: distance_calc(font, x), fonts)) font = fonts[distance_list.index(min(distance_list))] return font
def indirect_font(font, fonts, text)
Check input font for indirect modes. :param font: input font :type font : str :param fonts: fonts list :type fonts : list :param text: input text :type text:str :return: font as str
2.063862
2.076214
0.994051
split_list = [] result_list = [] splitter = "\n" for i in word: if (ord(i) == 9) or (ord(i) == 32 and font == "block"): continue if (i not in letters.keys()): if (chr_ignore): continue else: raise artError(str(i) + " is invalid.") if len(letters[i]) == 0: continue split_list.append(letters[i].split("\n")) if font in ["mirror", "mirror_flip"]: split_list.reverse() if len(split_list) == 0: return "" for i in range(len(split_list[0])): temp = "" for j in range(len(split_list)): if j > 0 and ( i == 1 or i == len( split_list[0]) - 2) and font == "block": temp = temp + " " temp = temp + split_list[j][i] result_list.append(temp) if "win32" != sys.platform: splitter = "\r\n" result = (splitter).join(result_list) if result[-1] != "\n": result += splitter return result
def __word2art(word, font, chr_ignore, letters)
Return art word. :param word: input word :type word: str :param font: input font :type font: str :param chr_ignore: ignore not supported character :type chr_ignore: bool :param letters: font letters table :type letters: dict :return: ascii art as str
3.10078
3.013554
1.028945
r letters = standard_dic text_temp = text if isinstance(text, str) is False: raise artError(TEXT_TYPE_ERROR) if isinstance(font, str) is False: raise artError(FONT_TYPE_ERROR) font = font.lower() fonts = sorted(FONT_MAP.keys()) font = indirect_font(font, fonts, text) letters = FONT_MAP[font][0] if FONT_MAP[font][1]: text_temp = text.lower() if font in UPPERCASE_FONTS: text_temp = text.upper() word_list = text_temp.split("\n") result = "" for word in word_list: if len(word) != 0: result = result + __word2art(word=word, font=font, chr_ignore=chr_ignore, letters=letters) return result
def text2art(text, font=DEFAULT_FONT, chr_ignore=True)
r""" Return art text (support \n). :param text: input text :type text:str :param font: input font :type font:str :param chr_ignore: ignore not supported character :type chr_ignore:bool :return: ascii art text as str
3.768334
3.694114
1.020091
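Usage sketch for the text helpers above, assuming the art package is installed and exports these names at its top level; the 'butterfly' name comes from the help text earlier in this section.

    from art import text2art, tprint, aprint

    banner = text2art('demo', font='block')   # returns the ASCII art as a string
    print(banner)
    tprint('demo', font='rnd-small')          # indirect font mode: random small font
    aprint('butterfly')                       # 1-line art by name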
if isinstance(font, str) is False: raise artError(FONT_TYPE_ERROR) if isinstance(chr_ignore, bool) is False: raise artError(CHR_IGNORE_TYPE_ERROR) if isinstance(filename, str) is False: raise artError(FILE_TYPE_ERROR) if isinstance(print_status, bool) is False: raise artError(PRINT_STATUS_TYPE_ERROR) tprint.__defaults__ = (font, chr_ignore) tsave.__defaults__ = (font, filename, chr_ignore, print_status) text2art.__defaults__ = (font, chr_ignore)
def set_default(font=DEFAULT_FONT, chr_ignore=True, filename="art", print_status=True)
Change text2art, tprint and tsave default values. :param font: input font :type font:str :param chr_ignore: ignore not supported character :type chr_ignore:bool :param filename: output file name (only tsave) :type filename:str :param print_status : save message print flag (only tsave) :type print_status:bool :return: None
2.225178
1.919119
1.159479
if not name and not callable(fn): name = fn fn = None def inner(fn): if isinstance(fn, Predicate): return fn p = Predicate(fn, name, **options) update_wrapper(p, fn) return p if fn: return inner(fn) else: return inner
def predicate(fn=None, name=None, **options)
Decorator that constructs a ``Predicate`` instance from any function:: >>> @predicate ... def is_book_author(user, book): ... return user == book.author ... >>> @predicate(bind=True) ... def is_book_author(self, user, book): ... if self.context.args: ... return user == book.author
2.571451
3.878065
0.663076
def _getter(request, *view_args, **view_kwargs): if attr_name not in view_kwargs: raise ImproperlyConfigured( 'Argument {0} is not available. Given arguments: [{1}]' .format(attr_name, ', '.join(view_kwargs.keys()))) try: return get_object_or_404(model, **{field_name: view_kwargs[attr_name]}) except FieldError: raise ImproperlyConfigured( 'Model {0} has no field named {1}' .format(model, field_name)) return _getter
def objectgetter(model, attr_name='pk', field_name='pk')
Helper that returns a function suitable for use as the ``fn`` argument to the ``permission_required`` decorator. Internally uses ``get_object_or_404``, so keep in mind that this may raise ``Http404``. ``model`` can be a model class, manager or queryset. ``attr_name`` is the name of the view attribute. ``field_name`` is the model's field name by which the lookup is made, eg. "id", "slug", etc.
2.469774
2.622897
0.941621
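A hedged sketch combining objectgetter with the permission_required decorator documented below it. The Post model, app label, and 'post_id' URL kwarg are hypothetical; the import path mirrors the one used in the decorator's own docstring.

    from rules.contrib.views import permission_required, objectgetter
    from posts.models import Post  # placeholder app/model

    @permission_required('posts.change_post',
                         fn=objectgetter(Post, 'post_id'),  # looks up Post by pk=post_id
                         raise_exception=True)
    def post_update(request, post_id):
        ...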
def decorator(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): # Normalize to a list of permissions if isinstance(perm, six.string_types): perms = (perm,) else: perms = perm # Get the object to check permissions against if callable(fn): obj = fn(request, *args, **kwargs) else: # pragma: no cover obj = fn # Get the user user = request.user # Check for permissions and return a response if not user.has_perms(perms, obj): # User does not have a required permission if raise_exception: raise PermissionDenied() else: return _redirect_to_login(request, view_func.__name__, login_url, redirect_field_name) else: # User has all required permissions -- allow the view to execute return view_func(request, *args, **kwargs) return _wrapped_view return decorator
def permission_required(perm, fn=None, login_url=None, raise_exception=False, redirect_field_name=REDIRECT_FIELD_NAME)
View decorator that checks for the given permissions before allowing the view to execute. Use it like this:: from django.shortcuts import get_object_or_404 from rules.contrib.views import permission_required from posts.models import Post def get_post_by_pk(request, post_id): return get_object_or_404(Post, pk=post_id) @permission_required('posts.change_post', fn=get_post_by_pk) def post_update(request, post_id): # ... ``perm`` is either a permission name as a string, or a list of permission names. ``fn`` is an optional callback that receives the same arguments as those passed to the decorated view and must return the object to check permissions against. If omitted, the decorator behaves just like Django's ``permission_required`` decorator, i.e. checks for model-level permissions. ``raise_exception`` is a boolean specifying whether to raise a ``django.core.exceptions.PermissionDenied`` exception if the check fails. You will most likely want to set this argument to ``True`` if you have specified a custom 403 response handler in your urlconf. If ``False``, the user will be redirected to the URL specified by ``login_url``. ``login_url`` is an optional custom URL to redirect the user to if permissions check fails. If omitted or empty, ``settings.LOGIN_URL`` is used.
2.300847
2.230581
1.031501
if not isinstance(self, BaseCreateView): # We do NOT want to call get_object in a BaseCreateView, see issue #85 if hasattr(self, 'get_object') and callable(self.get_object): # Requires SingleObjectMixin or equivalent ``get_object`` method return self.get_object() return None
def get_permission_object(self)
Override this method to provide the object to check for permission against. By default uses ``self.get_object()`` as provided by ``SingleObjectMixin``. Returns None if there's no ``get_object`` method.
7.283558
5.823723
1.25067
self._base_dir = base_dir self.icon_cache.set_base_dir(base_dir)
def set_base_dir(self, base_dir)
Set the base directory to be used for all relative filenames.
4.284178
3.965572
1.080343
align_flags = None for qt_align in alignment.split('|'): _, qt_align = qt_align.split('::') align = getattr(QtCore.Qt, qt_align) if align_flags is None: align_flags = align else: align_flags |= align return align_flags
def _parse_alignment(alignment)
Convert a C++ alignment to the corresponding flags.
4.087697
3.071755
1.330737
row = elem.attrib.get('row') column = elem.attrib.get('column') alignment = elem.attrib.get('alignment') # See if it is a box layout. if row is None or column is None: if alignment is None: return () return (0, _parse_alignment(alignment)) # It must be a grid or a form layout. row = int(row) column = int(column) rowspan = int(elem.attrib.get('rowspan', 1)) colspan = int(elem.attrib.get('colspan', 1)) if alignment is None: return (row, column, rowspan, colspan) return (row, column, rowspan, colspan, _parse_alignment(alignment))
def _layout_position(elem)
Return either (), (0, alignment), (row, column, rowspan, colspan) or (row, column, rowspan, colspan, alignment) depending on the type of layout and its configuration. The result will be suitable to use as arguments to the layout.
2.478734
2.076705
1.19359
try: suffix = self.name_suffixes[name] except KeyError: self.name_suffixes[name] = 0 return name suffix += 1 self.name_suffixes[name] = suffix return "%s%i" % (name, suffix)
def uniqueName(self, name)
UIParser.uniqueName(string) -> string Create a unique name from a string. >>> p = UIParser(QtCore, QtGui, QtWidgets) >>> p.uniqueName("foo") 'foo' >>> p.uniqueName("foo") 'foo1'
2.510848
3.302101
0.760379
for a in args: if a is not None and not isinstance(a, str): return True return False
def any_i18n(*args)
Return True if any argument appears to be an i18n string.
5.19179
4.085352
1.27083
item = self.factory.createQObject(item_type, "item", (), False) props = self.wprops # Note that not all types of widget items support the full set of # properties. text = props.getProperty(elem, 'text') status_tip = props.getProperty(elem, 'statusTip') tool_tip = props.getProperty(elem, 'toolTip') whats_this = props.getProperty(elem, 'whatsThis') if self.any_i18n(text, status_tip, tool_tip, whats_this): self.factory.invoke("item", getter, getter_args) if text: item.setText(text) if status_tip: item.setStatusTip(status_tip) if tool_tip: item.setToolTip(tool_tip) if whats_this: item.setWhatsThis(whats_this) text_alignment = props.getProperty(elem, 'textAlignment') if text_alignment: item.setTextAlignment(text_alignment) font = props.getProperty(elem, 'font') if font: item.setFont(font) icon = props.getProperty(elem, 'icon') if icon: item.setIcon(icon) background = props.getProperty(elem, 'background') if background: item.setBackground(background) foreground = props.getProperty(elem, 'foreground') if foreground: item.setForeground(foreground) flags = props.getProperty(elem, 'flags') if flags: item.setFlags(flags) check_state = props.getProperty(elem, 'checkState') if check_state: item.setCheckState(check_state) return item
def createWidgetItem(self, item_type, elem, getter, *getter_args)
Create a specific type of widget item.
2.09281
2.050578
1.020595
try: iterator = getattr(elem, 'iter') except AttributeError: iterator = getattr(elem, 'getiterator') for include in iterator("include"): loc = include.attrib.get("location") # Apply the convention for naming the Python files generated by # pyrcc5. if loc and loc.endswith('.qrc'): mname = os.path.basename(loc[:-4] + self._resource_suffix) if mname not in self.resources: self.resources.append(mname)
def readResources(self, elem)
Read a "resources" tag and add the module to import to the parser's list of them.
5.912518
5.424584
1.089949
import os # Compile a single .ui file. def compile_ui(ui_dir, ui_file): # Ignore if it doesn't seem to be a .ui file. if ui_file.endswith('.ui'): py_dir = ui_dir py_file = ui_file[:-3] + '.py' # Allow the caller to change the name of the .py file or generate # it in a different directory. if map is not None: py_dir, py_file = map(py_dir, py_file) # Make sure the destination directory exists. try: os.makedirs(py_dir) except: pass ui_path = os.path.join(ui_dir, ui_file) py_path = os.path.join(py_dir, py_file) ui_file = open(ui_path, 'r') py_file = open(py_path, 'w') try: compileUi(ui_file, py_file, **compileUi_args) finally: ui_file.close() py_file.close() if recurse: for root, _, files in os.walk(dir): for ui in files: compile_ui(root, ui) else: for ui in os.listdir(dir): if os.path.isfile(os.path.join(dir, ui)): compile_ui(dir, ui)
def compileUiDir(dir, recurse=False, map=None, **compileUi_args)
compileUiDir(dir, recurse=False, map=None, **compileUi_args) Creates Python modules from Qt Designer .ui files in a directory or directory tree. dir is the name of the directory to scan for files whose name ends with '.ui'. By default the generated Python module is created in the same directory ending with '.py'. recurse is set if any sub-directories should be scanned. The default is False. map is an optional callable that is passed the name of the directory containing the '.ui' file and the name of the Python module that will be created. The callable should return a tuple of the name of the directory in which the Python module will be created and the (possibly modified) name of the module. The default is None. compileUi_args are any additional keyword arguments that are passed to the compileUi() function that is called to create each Python module.
2.02144
1.945166
1.039212
from PyQt5.QtCore import PYQT_VERSION_STR try: uifname = uifile.name except AttributeError: uifname = uifile indenter.indentwidth = indent pyfile.write(_header % (uifname, PYQT_VERSION_STR)) winfo = compiler.UICompiler().compileUi(uifile, pyfile, from_imports, resource_suffix, import_from) if execute: indenter.write_code(_display_code % winfo)
def compileUi(uifile, pyfile, execute=False, indent=4, from_imports=False, resource_suffix='_rc', import_from='.')
compileUi(uifile, pyfile, execute=False, indent=4, from_imports=False, resource_suffix='_rc', import_from='.') Creates a Python module from a Qt Designer .ui file. uifile is a file name or file-like object containing the .ui file. pyfile is the file-like object to which the Python code will be written to. execute is optionally set to generate extra Python code that allows the code to be run as a standalone application. The default is False. indent is the optional indentation width using spaces. If it is 0 then a tab is used. The default is 4. from_imports is optionally set to generate relative import statements. At the moment this only applies to the import of resource modules. resource_suffix is the suffix appended to the basename of any resource file specified in the .ui file to create the name of the Python module generated from the resource file by pyrcc4. The default is '_rc', i.e. if the .ui file specified a resource file called foo.qrc then the corresponding Python module is foo_rc. import_from is optionally set to the package used for relative import statements. The default is ``'.'``.
4.572394
4.503559
1.015285
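A short sketch of calling compileUi through PyQt5.uic, assuming PyQt5 is installed; the .ui and .py file names are placeholders.

    from PyQt5 import uic

    # Compile a Designer file into a Python module, with a main() stub appended (execute=True).
    with open('mainwindow.ui') as ui_file, open('ui_mainwindow.py', 'w') as py_file:
        uic.compileUi(ui_file, py_file, execute=True, indent=4)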
import sys from PyQt5 import QtWidgets if sys.hexversion >= 0x03000000: from .port_v3.string_io import StringIO else: from .port_v2.string_io import StringIO code_string = StringIO() winfo = compiler.UICompiler().compileUi(uifile, code_string, from_imports, resource_suffix, import_from) ui_globals = {} exec(code_string.getvalue(), ui_globals) return (ui_globals[winfo["uiclass"]], getattr(QtWidgets, winfo["baseclass"]))
def loadUiType(uifile, from_imports=False, resource_suffix='_rc', import_from='.')
loadUiType(uifile, from_imports=False, resource_suffix='_rc', import_from='.') -> (form class, base class) Load a Qt Designer .ui file and return the generated form class and the Qt base class. uifile is a file name or file-like object containing the .ui file. from_imports is optionally set to generate relative import statements. At the moment this only applies to the import of resource modules. resource_suffix is the suffix appended to the basename of any resource file specified in the .ui file to create the name of the Python module generated from the resource file by pyrcc4. The default is '_rc', i.e. if the .ui file specified a resource file called foo.qrc then the corresponding Python module is foo_rc. import_from is optionally set to the package used for relative import statements. The default is ``'.'``.
4.507427
4.025684
1.119667
from .Loader.loader import DynamicUILoader return DynamicUILoader(package).loadUi(uifile, baseinstance, resource_suffix)
def loadUi(uifile, baseinstance=None, package='', resource_suffix='_rc')
loadUi(uifile, baseinstance=None, package='') -> widget Load a Qt Designer .ui file and return an instance of the user interface. uifile is a file name or file-like object containing the .ui file. baseinstance is an optional instance of the Qt base class. If specified then the user interface is created in it. Otherwise a new instance of the base class is automatically created. package is the optional package which is used as the base for any relative imports of custom widgets. resource_suffix is the suffix appended to the basename of any resource file specified in the .ui file to create the name of the Python module generated from the resource file by pyrcc4. The default is '_rc', i.e. if the .ui file specified a resource file called foo.qrc then the corresponding Python module is foo_rc.
6.221137
7.475561
0.832197
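A hedged usage sketch of loadUi as a standalone previewer; the file name is assumed.

# Hedged usage sketch: load a .ui file directly into a live widget.
import sys
from PyQt5 import QtWidgets, uic

app = QtWidgets.QApplication(sys.argv)
widget = uic.loadUi('dialog.ui')
widget.show()
sys.exit(app.exec_())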
# Handle a themed icon.
theme = iconset.attrib.get('theme')
if theme is not None:
    return self._object_factory.createQObject("QIcon.fromTheme", 'icon',
            (self._object_factory.asString(theme), ), is_attribute=False)

# Handle an empty iconset property.
if iconset.text is None:
    return None

iset = _IconSet(iconset, self._base_dir)

try:
    idx = self._cache.index(iset)
except ValueError:
    idx = -1

if idx >= 0:
    # Return the icon from the cache.
    iset = self._cache[idx]
else:
    # Follow uic's naming convention.
    name = 'icon'
    idx = len(self._cache)

    if idx > 0:
        name += str(idx)

    icon = self._object_factory.createQObject("QIcon", name, (),
            is_attribute=False)
    iset.set_icon(icon, self._qtgui_module)
    self._cache.append(iset)

return iset.icon
def get_icon(self, iconset)
Return an icon described by the given iconset tag.
4.289914
4.121219
1.040933
fname = fname.replace("\\", "\\\\")

if base_dir != '' and fname[0] != ':' and not os.path.isabs(fname):
    fname = os.path.join(base_dir, fname)

return fname
def _file_name(fname, base_dir)
Convert a relative filename if we have a base directory.
3.054353
2.630696
1.161044
if self._use_fallback:
    icon.addFile(self._fallback)
else:
    for role, pixmap in self._roles.items():
        if role.endswith("off"):
            mode = role[:-3]
            state = qtgui_module.QIcon.Off
        elif role.endswith("on"):
            mode = role[:-2]
            state = qtgui_module.QIcon.On
        else:
            continue

        mode = getattr(qtgui_module.QIcon, mode.title())

        if pixmap:
            icon.addPixmap(qtgui_module.QPixmap(pixmap), mode, state)
        else:
            icon.addPixmap(qtgui_module.QPixmap(), mode, state)

self.icon = icon
def set_icon(self, icon, qtgui_module)
Save the icon and set its attributes.
2.715182
2.660449
1.020573
# NOTE: the qt.conf template string assigned here is elided in the source;
# only its use via template.format(path=binpath) below is preserved.
template = ...

import PyQt5

exedir = os.path.dirname(sys.executable)
qtpath = os.path.join(exedir, "qt.conf")
pyqt5path = os.path.abspath(PyQt5.__file__)
binpath = os.path.dirname(pyqt5path).replace("\\", "/")

try:
    with open(qtpath, "w") as f:
        f.write(template.format(path=binpath))
except:
    # the original deliberately ignores any failure to write qt.conf
    pass
def createqtconf()
Create a qt.conf file next to the current executable
3.345931
3.145069
1.063866
package_data = dict()
package_data['PyQt5'] = list()

for subdir in ("doc/", "examples/", "include/", "mkspecs/", "plugins/",
               "qml/", "qsci/", "sip/", "translations/", "uic/"):
    abspath = os.path.abspath("PyQt5/" + subdir)
    for root, dirs, files in os.walk(abspath):
        for f in files:
            fpath = os.path.join(root, f)
            relpath = os.path.relpath(fpath, abspath)
            relpath = relpath.replace("\\", "/")
            package_data['PyQt5'].append(subdir + relpath)

package_data['PyQt5'].extend(["*.exe", "*.dll", "*.pyd", "*.conf", "*.api",
                              "*.qm", "*.bat"])
return package_data
def get_package_data()
Include all files from all sub-directories
3.392227
3.364073
1.008369
from PyQt5 import QtWidgets

app = QtWidgets.QApplication([self._ui_file])

widget = loadUi(self._ui_file)
widget.show()

return app.exec_()
def _preview(self)
Preview the .ui file. Return the exit status to be passed back to the parent process.
5.472799
3.758075
1.456277
needs_close = False

if sys.hexversion >= 0x03000000:
    if self._opts.output == '-':
        from io import TextIOWrapper

        pyfile = TextIOWrapper(sys.stdout.buffer, encoding='utf8')
    else:
        pyfile = open(self._opts.output, 'wt', encoding='utf8')
        needs_close = True
else:
    if self._opts.output == '-':
        pyfile = sys.stdout
    else:
        pyfile = open(self._opts.output, 'wt')
        needs_close = True

import_from = self._opts.import_from

if import_from:
    from_imports = True
elif self._opts.from_imports:
    from_imports = True
    import_from = '.'
else:
    from_imports = False

compileUi(self._ui_file, pyfile, self._opts.execute, self._opts.indent,
          from_imports, self._opts.resource_suffix, import_from)

if needs_close:
    pyfile.close()
def _generate(self)
Generate the Python code.
2.626161
2.521477
1.041517
sys.stderr.write("Error: %s: \"%s\"\n" % (e.strerror, e.filename))
def on_IOError(self, e)
Handle an IOError exception.
4.414742
4.291137
1.028805
if logging.getLogger(self.LOGGER_NAME).level == logging.DEBUG:
    import traceback
    traceback.print_exception(*sys.exc_info())
else:
    from PyQt5 import QtCore

    # NOTE: the error-message format string (which interpolates the PyQt
    # version) is elided in the source; the call is preserved as-is.
    sys.stderr.write( % QtCore.PYQT_VERSION_STR)
def on_Exception(self, e)
Handle a generic exception.
5.807506
5.537076
1.04884
plugin = open(filename, 'rU')

try:
    exec(plugin.read(), plugin_globals, plugin_locals)
except ImportError:
    return False
except Exception as e:
    raise WidgetPluginError("%s: %s" % (e.__class__, str(e)))
finally:
    plugin.close()

return True
def load_plugin(filename, plugin_globals, plugin_locals)
Load the plugin from the given file. Return True if the plugin was loaded, or False if it wanted to be ignored. Raise an exception if there was an error.
3.258442
3.118954
1.044723
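A hedged usage sketch of load_plugin: the plugin file is executed in caller-supplied namespaces, and a False return means the plugin asked to be ignored. The plugin path is illustrative.

# Hedged usage sketch: run a plugin file in fresh namespaces (path assumed).
plugin_globals = {}
plugin_locals = {}
if load_plugin('widget-plugins/my_plugin.py', plugin_globals, plugin_locals):
    print('plugin executed, names defined:', sorted(plugin_locals))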
'''Parse an RFC822, RFC1123, RFC2822, or asctime-style date'''
data = dateString.split()
if data[0][-1] in (',', '.') or data[0].lower() in _daynames:
    del data[0]
if len(data) == 4:
    s = data[3]
    s = s.split('+', 1)
    if len(s) == 2:
        data[3:] = s
    else:
        data.append('')
    dateString = " ".join(data)
if len(data) < 5:
    dateString += ' 00:00:00 GMT'
return email.utils.parsedate_tz(dateString)
def _parse_date_rfc822(dateString)
Parse an RFC822, RFC1123, RFC2822, or asctime-style date
2.865268
2.615721
1.095403
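For reference, the standard-library call that does the actual parsing returns a 10-tuple (year, month, day, hour, minute, second, dummy weekday, dummy yearday, isdst, tz offset). The dates below are illustrative.

# Hedged sketch: email.utils.parsedate_tz on an RFC822 date, and on a
# date-only string padded the way _parse_date_rfc822 pads it.
import email.utils

print(email.utils.parsedate_tz('Tue, 10 Jun 2003 04:00:00 GMT'))
# -> (2003, 6, 10, 4, 0, 0, 0, 1, -1, 0)
print(email.utils.parsedate_tz('10 Jun 2003' + ' 00:00:00 GMT'))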
# build am and pm lists to contain
# original case, lowercase, first-char and dotted
# versions of the meridian text
ptc.am = ['', '']
ptc.pm = ['', '']

for idx, xm in enumerate(ptc.locale.meridian[:2]):
    # 0: am
    # 1: pm
    target = ['am', 'pm'][idx]
    setattr(ptc, target, [xm])
    target = getattr(ptc, target)

    if xm:
        lxm = xm.lower()
        target.extend((xm[0], '{0}.{1}.'.format(*xm),
                       lxm, lxm[0], '{0}.{1}.'.format(*lxm)))
def _initSymbols(ptc)
Initialize symbols and single character constants.
6.356412
6.428304
0.988816
word_list, a, b = re.split(r"[,\s-]+", unitText), 0, 0
for word in word_list:
    x = self.ptc.small.get(word)
    if x is not None:
        a += x
    elif word == "hundred":
        a *= 100
    else:
        x = self.ptc.magnitude.get(word)
        if x is not None:
            b += a * x
            a = 0
        elif word in self.ptc.ignore:
            pass
        else:
            raise Exception("Unknown number: " + word)
return a + b
def _convertUnitAsWords(self, unitText)
Converts text units into their number value. @type unitText: string @param unitText: number text to convert @rtype: integer @return: numerical value of unitText
3.518877
4.164695
0.84493
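A standalone sketch of the same accumulate-and-carry idea, with small assumed lookup tables (the real self.ptc tables are locale-driven and much richer):

# Hedged sketch of the words-to-number algorithm with assumed lookup tables.
import re

SMALL = {'one': 1, 'two': 2, 'three': 3, 'five': 5, 'twenty': 20}
MAGNITUDE = {'thousand': 1000, 'million': 1000000}

def words_to_number(text):
    a = b = 0
    for word in re.split(r"[,\s-]+", text):
        if word in SMALL:
            a += SMALL[word]          # accumulate small values
        elif word == 'hundred':
            a *= 100                  # scale the running value
        elif word in MAGNITUDE:
            b += a * MAGNITUDE[word]  # carry into the total and reset
            a = 0
    return a + b

print(words_to_number('three hundred twenty five'))  # 325
print(words_to_number('two thousand five'))          # 2005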
ctx = self.currentContext
debug and log.debug('_buildTime: [%s][%s][%s]',
                    quantity, modifier, units)

if source is None:
    source = time.localtime()

if quantity is None:
    quantity = ''
else:
    quantity = quantity.strip()

qty = self._quantityToReal(quantity)

if modifier in self.ptc.Modifiers:
    qty = qty * self.ptc.Modifiers[modifier]

    if units is None or units == '':
        units = 'dy'

# plurals are handled by regex's (could be a bug tho)
(yr, mth, dy, hr, mn, sec, _, _, _) = source

start = datetime.datetime(yr, mth, dy, hr, mn, sec)
target = start
# realunit = next((key for key, values in self.ptc.units.items()
#                  if any(imap(units.__contains__, values))), None)
realunit = units
for key, values in self.ptc.units.items():
    if units in values:
        realunit = key
        break

debug and log.debug('units %s --> realunit %s (qty=%s)',
                    units, realunit, qty)

try:
    if realunit in ('years', 'months'):
        target = self.inc(start, **{realunit[:-1]: qty})
    elif realunit in ('days', 'hours', 'minutes', 'seconds', 'weeks'):
        delta = datetime.timedelta(**{realunit: qty})
        target = start + delta
except OverflowError:
    # OverflowError is raise when target.year larger than 9999
    pass
else:
    ctx.updateAccuracy(realunit)

return target.timetuple()
def _buildTime(self, source, quantity, modifier, units)
Take C{quantity}, C{modifier} and C{unit} strings and convert them into values. After converting, calculate the time and return the adjusted sourceTime. @type source: time @param source: time to use as the base (or source) @type quantity: string @param quantity: quantity string @type modifier: string @param modifier: how quantity and units modify the source time @type units: string @param units: unit of the quantity (i.e. hours, days, months, etc) @rtype: struct_time @return: C{struct_time} of the calculated time
4.30586
4.270281
1.008332
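The core of the day/hour/minute/second/week branch is a plain datetime.timedelta applied to the source time; a minimal sketch with illustrative values:

# Hedged sketch: applying a quantity of units to a source time, as _buildTime
# does for timedelta-compatible units.
import datetime

start = datetime.datetime(2006, 8, 25, 17, 0, 0)
print(start + datetime.timedelta(hours=5))   # 2006-08-25 22:00:00
print(start + datetime.timedelta(weeks=2))   # 2006-09-08 17:00:00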
if sourceTime is None:
    yr, mth, dy, hr, mn, sec, wd, yd, isdst = time.localtime()
else:
    yr, mth, dy, hr, mn, sec, wd, yd, isdst = sourceTime

# values pulled from regex's will be stored here and later
# assigned to mth, dy, yr based on information from the locale
# -1 is used as the marker value because we want zero values
# to be passed thru so they can be flagged as errors later
v1 = -1
v2 = -1
v3 = -1
accuracy = []

s = dateString
m = self.ptc.CRE_DATE2.search(s)
if m is not None:
    index = m.start()
    v1 = int(s[:index])
    s = s[index + 1:]

m = self.ptc.CRE_DATE2.search(s)
if m is not None:
    index = m.start()
    v2 = int(s[:index])
    v3 = int(s[index + 1:])
else:
    v2 = int(s.strip())

v = [v1, v2, v3]
d = {'m': mth, 'd': dy, 'y': yr}

# yyyy/mm/dd format
dp_order = self.ptc.dp_order if v1 <= 31 else ['y', 'm', 'd']

for i in range(0, 3):
    n = v[i]
    c = dp_order[i]
    if n >= 0:
        d[c] = n
        accuracy.append({'m': pdtContext.ACU_MONTH,
                         'd': pdtContext.ACU_DAY,
                         'y': pdtContext.ACU_YEAR}[c])

# if the year is not specified and the date has already
# passed, increment the year
if v3 == -1 and ((mth > d['m']) or (mth == d['m'] and dy > d['d'])):
    yr = d['y'] + self.ptc.YearParseStyle
else:
    yr = d['y']

mth = d['m']
dy = d['d']

# birthday epoch constraint
if yr < self.ptc.BirthdayEpoch:
    yr += 2000
elif yr < 100:
    yr += 1900

daysInCurrentMonth = self.ptc.daysInMonth(mth, yr)
debug and log.debug('parseDate: %s %s %s %s',
                    yr, mth, dy, daysInCurrentMonth)

with self.context() as ctx:
    if mth > 0 and mth <= 12 and dy > 0 and \
            dy <= daysInCurrentMonth:
        sourceTime = (yr, mth, dy, hr, mn, sec, wd, yd, isdst)
        ctx.updateAccuracy(*accuracy)
    else:
        # return current time if date string is invalid
        sourceTime = time.localtime()

return sourceTime
def parseDate(self, dateString, sourceTime=None)
Parse short-form date strings:: '05/28/2006' or '04.21' @type dateString: string @param dateString: text to convert to a C{datetime} @type sourceTime: struct_time @param sourceTime: C{struct_time} value to use as the base @rtype: struct_time @return: calculated C{struct_time} value of dateString
3.307811
3.384393
0.977372
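If these methods belong to the public Calendar class of the parsedatetime library (which this code appears to come from), short-form dates can be exercised through the usual entry point; the import name and entry point are assumptions.

# Hedged usage sketch, assuming the surrounding class is parsedatetime.Calendar.
import parsedatetime

cal = parsedatetime.Calendar()
time_struct, parse_status = cal.parse('05/28/2006')  # short-form dates route through parseDate
print(time_struct[:3])  # (2006, 5, 28)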
if sourceTime is None:
    yr, mth, dy, hr, mn, sec, wd, yd, isdst = time.localtime()
else:
    yr, mth, dy, hr, mn, sec, wd, yd, isdst = sourceTime

currentMth = mth
currentDy = dy
accuracy = []

debug and log.debug('parseDateText currentMth %s currentDy %s',
                    mth, dy)

s = dateString.lower()
m = self.ptc.CRE_DATE3.search(s)
mth = m.group('mthname')
mth = self.ptc.MonthOffsets[mth]
accuracy.append('month')

if m.group('day') is not None:
    dy = int(m.group('day'))
    accuracy.append('day')
else:
    dy = 1

if m.group('year') is not None:
    yr = int(m.group('year'))
    accuracy.append('year')

    # birthday epoch constraint
    if yr < self.ptc.BirthdayEpoch:
        yr += 2000
    elif yr < 100:
        yr += 1900

elif (mth < currentMth) or (mth == currentMth and dy < currentDy):
    # if that day and month have already passed in this year,
    # then increment the year by 1
    yr += self.ptc.YearParseStyle

with self.context() as ctx:
    if dy > 0 and dy <= self.ptc.daysInMonth(mth, yr):
        sourceTime = (yr, mth, dy, hr, mn, sec, wd, yd, isdst)
        ctx.updateAccuracy(*accuracy)
    else:
        # Return current time if date string is invalid
        sourceTime = time.localtime()

debug and log.debug('parseDateText returned '
                    'mth %d dy %d yr %d sourceTime %s',
                    mth, dy, yr, sourceTime)

return sourceTime
def parseDateText(self, dateString, sourceTime=None)
Parse long-form date strings:: 'May 31st, 2006' 'Jan 1st' 'July 2006' @type dateString: string @param dateString: text to convert to a datetime @type sourceTime: struct_time @param sourceTime: C{struct_time} value to use as the base @rtype: struct_time @return: calculated C{struct_time} value of dateString
3.225503
3.247357
0.99327
diffBase = wkdy - wd
origOffset = offset

if offset == 2:
    # no modifier is present.
    # i.e. string to be parsed is just DOW
    if wkdy * style > wd * style or \
            currentDayStyle and wkdy == wd:
        # wkdy located in current week
        offset = 0
    elif style in (-1, 1):
        # wkdy located in last (-1) or next (1) week
        offset = style
    else:
        # invalid style, or should raise error?
        offset = 0

# offset = -1 means last week
# offset = 0 means current week
# offset = 1 means next week
diff = diffBase + 7 * offset
if style == 1 and diff < -7:
    diff += 7
elif style == -1 and diff > 7:
    diff -= 7

debug and log.debug("wd %s, wkdy %s, offset %d, "
                    "style %d, currentDayStyle %d",
                    wd, wkdy, origOffset, style, currentDayStyle)

return diff
def _CalculateDOWDelta(self, wd, wkdy, offset, style, currentDayStyle)
Based on the C{style} and C{currentDayStyle} determine what day-of-week value is to be returned. @type wd: integer @param wd: day-of-week value for the current day @type wkdy: integer @param wkdy: day-of-week value for the parsed day @type offset: integer @param offset: offset direction for any modifiers (-1, 0, 1) @type style: integer @param style: normally the value set in C{Constants.DOWParseStyle} @type currentDayStyle: integer @param currentDayStyle: normally the value set in C{Constants.CurrentDOWParseStyle} @rtype: integer @return: calculated day-of-week
4.718913
4.829866
0.977028
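A standalone sketch of the "no modifier" case of that arithmetic, assuming Monday=0 weekday numbering; it omits the final clamping step and is only meant to show the offset logic:

# Hedged sketch of the day-of-week delta arithmetic (no-modifier case).
def dow_delta(wd, wkdy, style=1, current_day_style=False):
    # a same-or-later weekday stays in the current week, otherwise jump a week
    if wkdy * style > wd * style or (current_day_style and wkdy == wd):
        offset = 0
    else:
        offset = style
    return (wkdy - wd) + 7 * offset

print(dow_delta(wd=2, wkdy=4))  # Wednesday -> Friday this week: 2
print(dow_delta(wd=4, wkdy=2))  # Friday -> Wednesday next week: 5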
if not quantity:
    return 1.0

try:
    return float(quantity.replace(',', '.'))
except ValueError:
    pass

try:
    return float(self.ptc.numbers[quantity])
except KeyError:
    pass

return 0.0
def _quantityToReal(self, quantity)
Convert a quantity, either spelled-out or numeric, to a float @type quantity: string @param quantity: quantity to parse to float @rtype: float @return: the quantity as a float, defaulting to 0.0
3.934641
4.536309
0.867366
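The numeric branch simply normalizes a decimal comma before calling float; a tiny sketch with illustrative inputs:

# Hedged sketch of the numeric branch of _quantityToReal.
print(float('3,5'.replace(',', '.')))  # 3.5
print(float('2'))                      # 2.0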
ctx = self.currentContext
s = datetimeString.strip()

# Given string date is a RFC822 date
if sourceTime is None:
    sourceTime = _parse_date_rfc822(s)
    debug and log.debug(
        'attempt to parse as rfc822 - %s', str(sourceTime))

    if sourceTime is not None:
        (yr, mth, dy, hr, mn, sec, wd, yd, isdst, _) = sourceTime
        ctx.updateAccuracy(ctx.ACU_YEAR, ctx.ACU_MONTH, ctx.ACU_DAY)

        if hr != 0 and mn != 0 and sec != 0:
            ctx.updateAccuracy(ctx.ACU_HOUR, ctx.ACU_MIN, ctx.ACU_SEC)

        sourceTime = (yr, mth, dy, hr, mn, sec, wd, yd, isdst)

# Given string date is a W3CDTF date
if sourceTime is None:
    sourceTime = _parse_date_w3dtf(s)

    if sourceTime is not None:
        ctx.updateAccuracy(ctx.ACU_YEAR, ctx.ACU_MONTH, ctx.ACU_DAY,
                           ctx.ACU_HOUR, ctx.ACU_MIN, ctx.ACU_SEC)

if sourceTime is None:
    sourceTime = time.localtime()

return sourceTime
def _evalDT(self, datetimeString, sourceTime)
Calculate the datetime from known formats like RFC822 or W3CDTF Examples handled:: RFC822, W3CDTF formatted dates HH:MM[:SS][ am/pm] MM/DD/YYYY DD MMMM YYYY @type datetimeString: string @param datetimeString: text to try and parse as more "traditional" date/time text @type sourceTime: struct_time @param sourceTime: C{struct_time} value to use as the base @rtype: struct_time @return: calculated C{struct_time} value or current C{struct_time} if not parsed
2.829082
2.728643
1.036809
s = datetimeString.strip()
sourceTime = self._evalDT(datetimeString, sourceTime)

# Given string is a time string with units like "5 hrs 30 min"
modifier = ''  # TODO

m = self.ptc.CRE_UNITS.search(s)
if m is not None:
    units = m.group('units')
    quantity = s[:m.start('units')]

    sourceTime = self._buildTime(sourceTime, quantity, modifier, units)

return sourceTime
def _evalUnits(self, datetimeString, sourceTime)
Evaluate text passed by L{_partialParseUnits()}
6.793437
6.512847
1.043082
s = datetimeString.strip()
sourceTime = self._evalDT(datetimeString, sourceTime)

# Given string is a time string with single char units like "5 h 30 m"
modifier = ''  # TODO

m = self.ptc.CRE_QUNITS.search(s)
if m is not None:
    units = m.group('qunits')
    quantity = s[:m.start('qunits')]

    sourceTime = self._buildTime(sourceTime, quantity, modifier, units)

return sourceTime
def _evalQUnits(self, datetimeString, sourceTime)
Evaluate text passed by L{_partialParseQUnits()}
7.973969
7.471058
1.067315
s = datetimeString.strip()
sourceTime = self._evalDT(datetimeString, sourceTime)

# Given string is in the format "May 23rd, 2005"
debug and log.debug('checking for MMM DD YYYY')

return self.parseDateText(s, sourceTime)
def _evalDateStr(self, datetimeString, sourceTime)
Evaluate text passed by L{_partialParseDateStr()}
10.733618
10.146678
1.057846
s = datetimeString.strip()
sourceTime = self._evalDT(datetimeString, sourceTime)

# Given string is in the format 07/21/2006
return self.parseDate(s, sourceTime)
def _evalDateStd(self, datetimeString, sourceTime)
Evaluate text passed by L{_partialParseDateStd()}
7.935272
7.285056
1.089253
s = datetimeString.strip()
sourceTime = self._evalDT(datetimeString, sourceTime)

# Given string is a natural language date string like today, tomorrow..
(yr, mth, dy, hr, mn, sec, wd, yd, isdst) = sourceTime

try:
    offset = self.ptc.dayOffsets[s]
except KeyError:
    offset = 0

if self.ptc.StartTimeFromSourceTime:
    startHour = hr
    startMinute = mn
    startSecond = sec
else:
    startHour = 9
    startMinute = 0
    startSecond = 0

self.currentContext.updateAccuracy(pdtContext.ACU_DAY)

start = datetime.datetime(yr, mth, dy, startHour,
                          startMinute, startSecond)
target = start + datetime.timedelta(days=offset)

return target.timetuple()
def _evalDayStr(self, datetimeString, sourceTime)
Evaluate text passed by L{_partialParseDayStr()}
5.052091
5.051237
1.000169
s = datetimeString.strip()
sourceTime = self._evalDT(datetimeString, sourceTime)

# Given string is a weekday
yr, mth, dy, hr, mn, sec, wd, yd, isdst = sourceTime

start = datetime.datetime(yr, mth, dy, hr, mn, sec)
wkdy = self.ptc.WeekdayOffsets[s]

# NOTE: the original tested wkdy > wd but called _CalculateDOWDelta with
# identical arguments in both branches, so the comparison had no effect.
qty = self._CalculateDOWDelta(wd, wkdy, 2,
                              self.ptc.DOWParseStyle,
                              self.ptc.CurrentDOWParseStyle)

self.currentContext.updateAccuracy(pdtContext.ACU_DAY)

target = start + datetime.timedelta(days=qty)

return target.timetuple()
def _evalWeekday(self, datetimeString, sourceTime)
Evaluate text passed by L{_partialParseWeekday()}
4.973117
4.964939
1.001647
s = datetimeString.strip()
sourceTime = self._evalDT(datetimeString, sourceTime)

if s in self.ptc.re_values['now']:
    self.currentContext.updateAccuracy(pdtContext.ACU_NOW)
else:
    # Given string is a natural language time string like
    # lunch, midnight, etc
    sTime = self.ptc.getSource(s, sourceTime)
    if sTime:
        sourceTime = sTime
        self.currentContext.updateAccuracy(pdtContext.ACU_HALFDAY)

return sourceTime
def _evalTimeStr(self, datetimeString, sourceTime)
Evaluate text passed by L{_partialParseTimeStr()}
8.889673
8.830701
1.006678
s = datetimeString.strip()
sourceTime = self._evalDT(datetimeString, sourceTime)

# Given string is in the format HH:MM(:SS)(am/pm)
yr, mth, dy, hr, mn, sec, wd, yd, isdst = sourceTime

m = self.ptc.CRE_TIMEHMS2.search(s)
if m is not None:
    dt = s[:m.start('meridian')].strip()
    if len(dt) <= 2:
        hr = int(dt)
        mn = 0
        sec = 0
    else:
        hr, mn, sec = _extract_time(m)

    if hr == 24:
        hr = 0

    meridian = m.group('meridian').lower()

    # if 'am' found and hour is 12 - force hour to 0 (midnight)
    if (meridian in self.ptc.am) and hr == 12:
        hr = 0

    # if 'pm' found and hour < 12, add 12 to shift to evening
    if (meridian in self.ptc.pm) and hr < 12:
        hr += 12

    # time validation
    if hr < 24 and mn < 60 and sec < 60:
        sourceTime = (yr, mth, dy, hr, mn, sec, wd, yd, isdst)

    _pop_time_accuracy(m, self.currentContext)

return sourceTime
def _evalMeridian(self, datetimeString, sourceTime)
Evaluate text passed by L{_partialParseMeridian()}
3.857597
3.822075
1.009294
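A standalone sketch of the am/pm hour adjustment used above; the helper name is illustrative.

# Hedged sketch of the meridian-to-24-hour arithmetic.
def to_24h(hour, meridian):
    if meridian == 'am' and hour == 12:
        return 0           # 12am is midnight
    if meridian == 'pm' and hour < 12:
        return hour + 12   # shift afternoon/evening hours
    return hour

print(to_24h(12, 'am'))  # 0
print(to_24h(7, 'pm'))   # 19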
s = datetimeString.strip()
sourceTime = self._evalDT(datetimeString, sourceTime)

# Given string is in the format HH:MM(:SS)
yr, mth, dy, hr, mn, sec, wd, yd, isdst = sourceTime

m = self.ptc.CRE_TIMEHMS.search(s)
if m is not None:
    hr, mn, sec = _extract_time(m)

    if hr == 24:
        hr = 0

    # time validation
    if hr < 24 and mn < 60 and sec < 60:
        sourceTime = (yr, mth, dy, hr, mn, sec, wd, yd, isdst)

    _pop_time_accuracy(m, self.currentContext)

return sourceTime
def _evalTimeStd(self, datetimeString, sourceTime)
Evaluate text passed by L{_partialParseTimeStd()}
5.038966
4.943992
1.01921
parseStr = None
chunk1 = chunk2 = ''

# Modifier like next/prev/from/after/prior..
m = self.ptc.CRE_MODIFIER.search(s)
if m is not None:
    if m.group() != s:
        # capture remaining string
        parseStr = m.group()
        chunk1 = s[:m.start()].strip()
        chunk2 = s[m.end():].strip()
    else:
        parseStr = s

if parseStr:
    debug and log.debug('found (modifier) [%s][%s][%s]',
                        parseStr, chunk1, chunk2)
    s, sourceTime = self._evalModifier(parseStr, chunk1,
                                       chunk2, sourceTime)

return s, sourceTime, bool(parseStr)
def _partialParseModifier(self, s, sourceTime)
Test if the given C{s} matches CRE_MODIFIER; used by L{parse()} @type s: string @param s: date/time text to evaluate @type sourceTime: struct_time @param sourceTime: C{struct_time} value to use as the base @rtype: tuple @return: tuple of the remaining date/time text, the updated sourceTime and a boolean describing whether a match was found
5.090938
5.303475
0.959925
parseStr = None
chunk1 = chunk2 = ''

# Quantity + Units
m = self.ptc.CRE_UNITS.search(s)
if m is not None:
    debug and log.debug('CRE_UNITS matched')
    if self._UnitsTrapped(s, m, 'units'):
        debug and log.debug('day suffix trapped by unit match')
    else:
        if (m.group('qty') != s):
            # capture remaining string
            parseStr = m.group('qty')
            chunk1 = s[:m.start('qty')].strip()
            chunk2 = s[m.end('qty'):].strip()

            if chunk1[-1:] == '-':
                parseStr = '-%s' % parseStr
                chunk1 = chunk1[:-1]

            s = '%s %s' % (chunk1, chunk2)
        else:
            parseStr = s
            s = ''

if parseStr:
    debug and log.debug('found (units) [%s][%s][%s]',
                        parseStr, chunk1, chunk2)
    sourceTime = self._evalUnits(parseStr, sourceTime)

return s, sourceTime, bool(parseStr)
def _partialParseUnits(self, s, sourceTime)
Test if the given C{s} matches CRE_UNITS; used by L{parse()} @type s: string @param s: date/time text to evaluate @type sourceTime: struct_time @param sourceTime: C{struct_time} value to use as the base @rtype: tuple @return: tuple of the remaining date/time text, the updated sourceTime and a boolean describing whether a match was found
4.601281
4.605581
0.999066
parseStr = None
chunk1 = chunk2 = ''

# Quantity + Units
m = self.ptc.CRE_QUNITS.search(s)
if m is not None:
    debug and log.debug('CRE_QUNITS matched')
    if self._UnitsTrapped(s, m, 'qunits'):
        debug and log.debug('day suffix trapped by qunit match')
    else:
        if (m.group('qty') != s):
            # capture remaining string
            parseStr = m.group('qty')
            chunk1 = s[:m.start('qty')].strip()
            chunk2 = s[m.end('qty'):].strip()

            if chunk1[-1:] == '-':
                parseStr = '-%s' % parseStr
                chunk1 = chunk1[:-1]

            s = '%s %s' % (chunk1, chunk2)
        else:
            parseStr = s
            s = ''

if parseStr:
    debug and log.debug('found (qunits) [%s][%s][%s]',
                        parseStr, chunk1, chunk2)
    sourceTime = self._evalQUnits(parseStr, sourceTime)

return s, sourceTime, bool(parseStr)
def _partialParseQUnits(self, s, sourceTime)
Test if the given C{s} matches CRE_QUNITS; used by L{parse()} @type s: string @param s: date/time text to evaluate @type sourceTime: struct_time @param sourceTime: C{struct_time} value to use as the base @rtype: tuple @return: tuple of the remaining date/time text, the updated sourceTime and a boolean describing whether a match was found
4.6389
4.607861
1.006736